From 12f4ea7b3bbe2681216a0925e7dba313c4383fdd Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 7 Jun 2024 14:23:50 +0200 Subject: [PATCH 01/58] Save allocating listener in hot loop in TransportShardBulkAction (#109445) No need to create a new listener for each item and that is almost never used anyway. Just create it once and keep it as a field like the other inputs `executeBulkItemRequest` is sort of an instance method here anyway and just static for testing reasons it seems. --- .../elasticsearch/action/bulk/TransportShardBulkAction.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a4c6b78f6a95..aca7c8752ef4 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -223,6 +223,8 @@ public class TransportShardBulkAction extends TransportWriteAction onMappingUpdateDone = ActionListener.wrap(v -> executor.execute(this), this::onRejection); + @Override protected void doRun() throws Exception { while (context.hasMoreOperationsToExecute()) { @@ -232,8 +234,7 @@ public class TransportShardBulkAction extends TransportWriteAction executor.execute(this), this::onRejection), + onMappingUpdateDone, documentParsingProvider ) == false) { // We are waiting for a mapping update on another thread, that will invoke this action again once its done From 1b9ce8ab0b91bdd3611a61410600870fe07a9fd8 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 7 Jun 2024 16:31:18 +0300 Subject: [PATCH 02/58] Enabling profiling for RankBuilders and adding tests for RRF (#109470) --- docs/changelog/109470.yaml | 5 + .../action/search/SearchRequest.java | 3 - .../action/search/SearchRequestTests.java | 12 - .../test/rrf/600_rrf_retriever_profile.yml | 218 
++++++++++++++++++ 4 files changed, 223 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/109470.yaml create mode 100644 x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml diff --git a/docs/changelog/109470.yaml b/docs/changelog/109470.yaml new file mode 100644 index 000000000000..837c1664b775 --- /dev/null +++ b/docs/changelog/109470.yaml @@ -0,0 +1,5 @@ +pr: 109470 +summary: Enabling profiling for `RankBuilders` and adding tests for RRF +area: Ranking +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 6dddac290262..e1fe6eac7e9c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -434,9 +434,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla if (source.pointInTimeBuilder() != null) { validationException = addValidationError("[rank] cannot be used with [point in time]", validationException); } - if (source.profile()) { - validationException = addValidationError("[rank] requires [profile] is [false]", validationException); - } } if (source.rescores() != null) { for (@SuppressWarnings("rawtypes") diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index cd176af3dafa..475f44238f36 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -517,18 +517,6 @@ public class SearchRequestTests extends AbstractSearchTestCase { assertEquals(1, validationErrors.validationErrors().size()); assertEquals("[rank] cannot be used with [point in time]", validationErrors.validationErrors().get(0)); } 
- { - SearchRequest searchRequest = new SearchRequest().source( - new SearchSourceBuilder().rankBuilder(new TestRankBuilder(100)) - .query(QueryBuilders.termQuery("field", "term")) - .knnSearch(List.of(new KnnSearchBuilder("vector", new float[] { 0f }, 10, 100, null))) - .profile(true) - ); - ActionRequestValidationException validationErrors = searchRequest.validate(); - assertNotNull(validationErrors); - assertEquals(1, validationErrors.validationErrors().size()); - assertEquals("[rank] requires [profile] is [false]", validationErrors.validationErrors().get(0)); - } { SearchRequest searchRequest = new SearchRequest("test").source( new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(BytesArray.EMPTY)) diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml new file mode 100644 index 000000000000..7308ce8947db --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml @@ -0,0 +1,218 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: 'profile for rrf was enabled in 8.15' + test_runner_features: close_to + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + text: + type: text + integer: + type: integer + vector: + type: dense_vector + dims: 1 + index: true + similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 + + - do: + index: + index: test + id: "1" + body: + text: "term" + integer: 1 + vector: [5] + + - do: + index: + index: test + id: "2" + body: + text: "term term" + integer: 2 + vector: [4] + + - do: + index: + index: test + id: "3" + body: + text: "term term term" + integer: 3 + vector: [3] + - do: + index: + index: test + id: "4" + body: + text: "term term term term" + integer: 3 + + - do: + 
index: + index: test + id: "5" + body: + integer: 1 + vector: [0] + + - do: + indices.refresh: {} + +--- +"profile standard and knn query": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: "term" + } + } + } + }, + { + standard: { + query: { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5 + } + } + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + profile: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - not_exists: profile.shards.0.dfs + - match: { profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } + - length: { profile.shards.0.searches.0.query.0.children: 1 } + - match: { profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.0.children.1.type: DocAndScoreQuery } + +--- +"profile standard and knn dfs retrievers": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: "term" + } + } + } + }, + { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5 + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + profile: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - exists: profile.shards.0.dfs + - length: { profile.shards.0.dfs.knn: 1 } + - length: { profile.shards.0.dfs.knn.0.query: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.type: DocAndScoreQuery } + + - match: { profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } + - length: { profile.shards.0.searches.0.query.0.children: 1 } + - match: 
{ profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.0.children.1.type: KnnScoreDocQuery } + +--- +"using query and dfs knn search": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + query: { + term: { + text: { + value: "term" + } + } + } + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5 + } + rank: { + rrf: { + rank_window_size: 5, + rank_constant: 1 + } + } + size: 3 + profile: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - exists: profile.shards.0.dfs + - length: { profile.shards.0.dfs.knn: 1 } + - length: { profile.shards.0.dfs.knn.0.query: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.type: DocAndScoreQuery } + + - match: { profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } + - length: { profile.shards.0.searches.0.query.0.children: 1 } + - match: { profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.0.children.1.type: KnnScoreDocQuery } From c22855d13b694b9df8d14ec64652446405c39e14 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 7 Jun 2024 16:44:25 +0300 Subject: [PATCH 03/58] Restore trace logging for DownsampleActionIT.testRollupIndex (#109473) Related to #105437 --- .../org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java | 1 + 1 file changed, 1 insertion(+) diff --git 
a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 15a370e99458..7a420aa41ce7 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -191,6 +191,7 @@ public class DownsampleActionIT extends ESRestTestCase { createIndexWithSettings(client(), index, alias, settings, mapping); } + @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/105437") public void testRollupIndex() throws Exception { createIndex(index, alias, true); index(client(), index, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); From 72b9ad6808d09c740b78f740c61d862711eaa595 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 7 Jun 2024 15:58:40 +0200 Subject: [PATCH 04/58] AwaitsFix: https://github.com/elastic/elasticsearch/issues/109477 --- muted-tests.yml | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 32128da9c371..e02bb3ea20dc 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -19,7 +19,8 @@ tests: method: "testGuessIsDayFirstFromLocale" - class: "org.elasticsearch.test.rest.ClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/108857" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ + \ dependent mappings / dates}" - class: "org.elasticsearch.upgrades.SearchStatesIT" issue: "https://github.com/elastic/elasticsearch/issues/108991" method: 
"testCanMatch" @@ -28,7 +29,8 @@ tests: method: "testTrainedModelInference" - class: "org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109188" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ + \ dependent mappings / dates}" - class: "org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT" issue: "https://github.com/elastic/elasticsearch/issues/109189" method: "test {p0=esql/70_locale/Date format with Italian locale}" @@ -43,7 +45,8 @@ tests: method: "testTimestampFieldTypeExposedByAllIndicesServices" - class: "org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109318" - method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling (too complex pattern)}" + method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling\ + \ (too complex pattern)}" - class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" issue: "https://github.com/elastic/elasticsearch/issues/101598" method: "testFeatureImportanceValues" @@ -59,6 +62,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppendTests method: testEvaluateBlockWithoutNulls {TestCase=, } issue: https://github.com/elastic/elasticsearch/issues/109409 +- class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlClientYamlIT" + issue: "https://github.com/elastic/elasticsearch/issues/109477" + method: "test {p0=esql/150_lookup/multivalued keys}" # Examples: # From 9212ab37c7ae8aab5830acacd2c24e3086893db6 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 7 Jun 2024 16:00:52 +0200 Subject: [PATCH 05/58] AwaitsFix: https://github.com/elastic/elasticsearch/issues/109478 --- muted-tests.yml | 3 +++ 1 file changed, 3 
insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index e02bb3ea20dc..96c72bd60e3d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -65,6 +65,9 @@ tests: - class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlClientYamlIT" issue: "https://github.com/elastic/elasticsearch/issues/109477" method: "test {p0=esql/150_lookup/multivalued keys}" +- class: "org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT" + issue: "https://github.com/elastic/elasticsearch/issues/109478" + method: "test {yaml=reference/esql/processing-commands/lookup/line_31}" # Examples: # From 8114dc261223c1c96827c28dc1caa87b8a26d579 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 7 Jun 2024 11:12:00 -0400 Subject: [PATCH 06/58] [Inference API] Add support for third party models to Semantic Text (#109442) * Cherry pick Refactor ChunkedInferenceServiceResults to have chunksAsMatchedTextAndByteReference to allow each implementation to contain the code needed to convert the format for semantic text * initial refactoring * fix a test * Allow semantic text fields to have a delta in their vecotrs to account for conversions of doubles and floats * merge fixes * clean up and test fixes * rename chunked results to differentiate from ml.trainedmodels classes * renaming * improvements from review * checkstlye --- .../ChunkedInferenceServiceResults.java | 20 +++ .../ChunkedSparseEmbeddingResults.java | 125 ------------- .../results/ChunkedTextEmbeddingResults.java | 133 -------------- .../results/ErrorChunkedInferenceResults.java | 9 + ...nferenceChunkedSparseEmbeddingResults.java | 156 +++++++++++++++++ ...renceChunkedTextEmbeddingByteResults.java} | 53 ++++-- ...enceChunkedTextEmbeddingFloatResults.java} | 80 ++++++++- ...=> InferenceTextEmbeddingByteResults.java} | 29 ++-- ...> InferenceTextEmbeddingFloatResults.java} | 52 +++--- .../results/LegacyTextEmbeddingResults.java | 6 +- .../core/inference/results/ResultUtils.java | 4 +- 
.../MlInferenceNamedXContentProvider.java | 22 ++- ...InferenceChunkedTextExpansionResults.java} | 8 +- ...> MlChunkedTextEmbeddingFloatResults.java} | 8 +- ...sults.java => MlTextEmbeddingResults.java} | 8 +- .../core/utils/FloatConversionUtils.java | 20 +++ ...ChunkedTextEmbeddingFloatResultsTests.java | 52 ++++++ .../action/InferModelActionResponseTests.java | 8 +- ...erTrainedModelDeploymentResponseTests.java | 10 +- ...enceChunkedTextExpansionResultsTests.java} | 18 +- ...hunkedTextEmbeddingFloatResultsTests.java} | 36 ++-- ....java => MlTextEmbeddingResultsTests.java} | 18 +- .../core/utils/FloatConversionUtilsTests.java | 22 +++ .../TestDenseInferenceServiceExtension.java | 34 ++-- .../TestSparseInferenceServiceExtension.java | 8 +- .../InferenceNamedWriteablesProvider.java | 42 ++--- .../common/EmbeddingRequestChunker.java | 22 +-- .../CohereEmbeddingsResponseEntity.java | 17 +- ...oogleAiStudioEmbeddingsResponseEntity.java | 13 +- .../HuggingFaceEmbeddingsResponseEntity.java | 27 +-- .../OpenAiEmbeddingsResponseEntity.java | 13 +- .../inference/mapper/SemanticTextField.java | 63 +------ .../mapper/SemanticTextFieldMapper.java | 8 +- .../queries/SemanticQueryBuilder.java | 29 ++-- .../inference/services/ServiceUtils.java | 4 +- .../azureaistudio/AzureAiStudioService.java | 10 +- .../azureopenai/AzureOpenAiService.java | 10 +- .../ElasticsearchInternalService.java | 13 +- .../services/elser/ElserInternalService.java | 10 +- .../huggingface/HuggingFaceBaseService.java | 20 ++- .../services/mistral/MistralService.java | 10 +- .../action/InferenceActionResponseTests.java | 6 +- .../ShardBulkInferenceActionFilterTests.java | 7 +- .../common/EmbeddingRequestChunkerTests.java | 40 ++--- ...AiStudioEmbeddingsResponseEntityTests.java | 9 +- .../CohereEmbeddingsResponseEntityTests.java | 38 ++-- ...AiStudioEmbeddingsResponseEntityTests.java | 15 +- ...gingFaceEmbeddingsResponseEntityTests.java | 50 ++++-- .../OpenAiEmbeddingsResponseEntityTests.java | 34 ++-- 
.../mapper/SemanticTextFieldTests.java | 61 ++++--- .../queries/SemanticQueryBuilderTests.java | 9 +- .../rest/RestInferenceActionTests.java | 6 +- ...ChunkedTextEmbeddingFloatResultsTests.java | 54 ------ .../ChunkedTextEmbeddingResultsTests.java | 164 ------------------ ...ceChunkedSparseEmbeddingResultsTests.java} | 36 ++-- ...ChunkedTextEmbeddingByteResultsTests.java} | 52 +++--- ...ferenceTextEmbeddingByteResultsTests.java} | 63 ++++--- ...=> LegacyMlTextEmbeddingResultsTests.java} | 2 +- .../results/TextEmbeddingResultsTests.java | 69 ++++---- .../inference/services/ServiceUtilsTests.java | 12 +- .../AzureAiStudioServiceTests.java | 51 +++--- .../azureopenai/AzureOpenAiServiceTests.java | 49 +++--- .../services/cohere/CohereServiceTests.java | 10 +- .../ElasticsearchInternalServiceTests.java | 51 ++++-- .../elser/ElserInternalServiceTests.java | 27 ++- .../GoogleAiStudioServiceTests.java | 10 +- .../huggingface/HuggingFaceServiceTests.java | 34 ++-- .../services/mistral/MistralServiceTests.java | 10 +- .../services/openai/OpenAiServiceTests.java | 10 +- .../inference/nlp/TextEmbeddingProcessor.java | 14 +- .../inference/nlp/TextExpansionProcessor.java | 8 +- .../TextEmbeddingQueryVectorBuilder.java | 6 +- .../nlp/TextEmbeddingProcessorTests.java | 12 +- .../nlp/TextExpansionProcessorTests.java | 6 +- .../TextEmbeddingQueryVectorBuilderTests.java | 4 +- 75 files changed, 1096 insertions(+), 1183 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/{ChunkedTextEmbeddingByteResults.java => 
InferenceChunkedTextEmbeddingByteResults.java} (65%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/{ChunkedTextEmbeddingFloatResults.java => InferenceChunkedTextEmbeddingFloatResults.java} (52%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/{TextEmbeddingByteResults.java => InferenceTextEmbeddingByteResults.java} (82%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/{TextEmbeddingResults.java => InferenceTextEmbeddingFloatResults.java} (76%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/{ChunkedTextExpansionResults.java => InferenceChunkedTextExpansionResults.java} (91%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/{ChunkedTextEmbeddingResults.java => MlChunkedTextEmbeddingFloatResults.java} (91%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/{TextEmbeddingResults.java => MlTextEmbeddingResults.java} (89%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/{ChunkedTextExpansionResultsTests.java => InferenceChunkedTextExpansionResultsTests.java} (57%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/{ChunkedTextEmbeddingResultsTests.java => MlChunkedTextEmbeddingFloatResultsTests.java} (53%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/{TextEmbeddingResultsTests.java => MlTextEmbeddingResultsTests.java} (68%) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/utils/FloatConversionUtilsTests.java delete 
mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/{ChunkedSparseEmbeddingResultsTests.java => InferenceChunkedSparseEmbeddingResultsTests.java} (68%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/{ChunkedTextEmbeddingByteResultsTests.java => InferenceChunkedTextEmbeddingByteResultsTests.java} (52%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/{TextEmbeddingByteResultsTests.java => InferenceTextEmbeddingByteResultsTests.java} (51%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/{LegacyTextEmbeddingResultsTests.java => LegacyMlTextEmbeddingResultsTests.java} (97%) diff --git a/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java b/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java index 5ba2196e9148..f3461aba13d9 100644 --- a/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java +++ b/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java @@ -8,6 +8,26 @@ package org.elasticsearch.inference; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.XContent; + +import java.util.Iterator; + public interface ChunkedInferenceServiceResults extends InferenceServiceResults { + /** + * Implementations of this function serialize their embeddings to {@link BytesReference} for storage in semantic text fields. + * The iterator iterates over all the chunks stored in the {@link ChunkedInferenceServiceResults}. 
+ * + * @param xcontent provided by the SemanticTextField + * @return an iterator of the serialized {@link Chunk} which includes the matched text (input) and bytes reference (output/embedding). + */ + Iterator chunksAsMatchedTextAndByteReference(XContent xcontent); + + /** + * A chunk of inference results containing matched text and the bytes reference. + * @param matchedText + * @param bytesReference + */ + record Chunk(String matchedText, BytesReference bytesReference) {} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java deleted file mode 100644 index c91d0dc6fd53..000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.core.inference.results; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; - -public class ChunkedSparseEmbeddingResults implements ChunkedInferenceServiceResults { - - public static final String NAME = "chunked_sparse_embedding_results"; - public static final String FIELD_NAME = "sparse_embedding_chunk"; - - public static ChunkedSparseEmbeddingResults ofMlResult(ChunkedTextExpansionResults mlInferenceResults) { - return new ChunkedSparseEmbeddingResults(mlInferenceResults.getChunks()); - } - - /** - * Returns a list of {@link ChunkedSparseEmbeddingResults}. The number of entries in the list will match the input list size. - * Each {@link ChunkedSparseEmbeddingResults} will have a single chunk containing the entire results from the - * {@link SparseEmbeddingResults}. 
- */ - public static List of(List inputs, SparseEmbeddingResults sparseEmbeddingResults) { - validateInputSizeAgainstEmbeddings(inputs, sparseEmbeddingResults.embeddings().size()); - - var results = new ArrayList(inputs.size()); - for (int i = 0; i < inputs.size(); i++) { - results.add(of(inputs.get(i), sparseEmbeddingResults.embeddings().get(i))); - } - - return results; - } - - public static ChunkedSparseEmbeddingResults of(String input, SparseEmbeddingResults.Embedding embedding) { - var weightedTokens = embedding.tokens() - .stream() - .map(weightedToken -> new WeightedToken(weightedToken.token(), weightedToken.weight())) - .toList(); - - return new ChunkedSparseEmbeddingResults(List.of(new ChunkedTextExpansionResults.ChunkedResult(input, weightedTokens))); - } - - private final List chunkedResults; - - public ChunkedSparseEmbeddingResults(List chunks) { - this.chunkedResults = chunks; - } - - public ChunkedSparseEmbeddingResults(StreamInput in) throws IOException { - this.chunkedResults = in.readCollectionAsList(ChunkedTextExpansionResults.ChunkedResult::new); - } - - public List getChunkedResults() { - return chunkedResults; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray(FIELD_NAME); - for (ChunkedTextExpansionResults.ChunkedResult chunk : chunkedResults) { - chunk.toXContent(builder, params); - } - builder.endArray(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeCollection(chunkedResults); - } - - @Override - public List transformToCoordinationFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the coordindated action"); - } - - @Override - public List transformToLegacyFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the legacy format"); - } - - @Override - 
public Map asMap() { - return Map.of( - FIELD_NAME, - chunkedResults.stream().map(ChunkedTextExpansionResults.ChunkedResult::asMap).collect(Collectors.toList()) - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ChunkedSparseEmbeddingResults that = (ChunkedSparseEmbeddingResults) o; - return Objects.equals(chunkedResults, that.chunkedResults); - } - - @Override - public int hashCode() { - return Objects.hash(chunkedResults); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java deleted file mode 100644 index f09eafc1591d..000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.core.inference.results; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.IntStream; - -import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; - -public class ChunkedTextEmbeddingResults implements ChunkedInferenceServiceResults { - - public static final String NAME = "chunked_text_embedding_service_results"; - - public static final String FIELD_NAME = "text_embedding_chunk"; - - public static ChunkedTextEmbeddingResults ofMlResult( - org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults mlInferenceResults - ) { - return new ChunkedTextEmbeddingResults(mlInferenceResults.getChunks()); - } - - /** - * Returns a list of {@link ChunkedTextEmbeddingResults}. The number of entries in the list will match the input list size. - * Each {@link ChunkedTextEmbeddingResults} will have a single chunk containing the entire results from the - * {@link TextEmbeddingResults}. 
- */ - public static List of(List inputs, TextEmbeddingResults textEmbeddings) { - validateInputSizeAgainstEmbeddings(inputs, textEmbeddings.embeddings().size()); - - var results = new ArrayList(inputs.size()); - for (int i = 0; i < inputs.size(); i++) { - results.add(ChunkedTextEmbeddingResults.of(inputs.get(i), textEmbeddings.embeddings().get(i).values())); - } - - return results; - } - - public static ChunkedTextEmbeddingResults of(String input, float[] floatEmbeddings) { - double[] doubleEmbeddings = IntStream.range(0, floatEmbeddings.length).mapToDouble(i -> floatEmbeddings[i]).toArray(); - - return new ChunkedTextEmbeddingResults( - List.of( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk(input, doubleEmbeddings) - ) - ); - } - - private static double floatToDouble(Float aFloat) { - return aFloat != null ? aFloat : 0; - } - - private final List chunks; - - public ChunkedTextEmbeddingResults( - List chunks - ) { - this.chunks = chunks; - } - - public ChunkedTextEmbeddingResults(StreamInput in) throws IOException { - this.chunks = in.readCollectionAsList( - org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk::new - ); - } - - public List getChunks() { - return chunks; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - // TODO add isTruncated flag - builder.startArray(FIELD_NAME); - for (var embedding : chunks) { - embedding.toXContent(builder, params); - } - builder.endArray(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeCollection(chunks); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public List transformToCoordinationFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the coordinated action"); - } - - @Override - public List transformToLegacyFormat() { - throw 
new UnsupportedOperationException("Chunked results are not returned in the legacy format"); - } - - @Override - public Map asMap() { - return Map.of(FIELD_NAME, chunks); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ChunkedTextEmbeddingResults that = (ChunkedTextEmbeddingResults) o; - return Objects.equals(chunks, that.chunks); - } - - @Override - public int hashCode() { - return Objects.hash(chunks); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java index eef864f2e899..376b8763a5eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java @@ -8,17 +8,21 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Stream; public class ErrorChunkedInferenceResults implements ChunkedInferenceServiceResults { @@ -94,4 +98,9 @@ public class ErrorChunkedInferenceResults implements ChunkedInferenceServiceResu public String getWriteableName() { return NAME; } + + @Override + public Iterator 
chunksAsMatchedTextAndByteReference(XContent xcontent) { + return Stream.of(exception).map(e -> new Chunk(e.getMessage(), BytesArray.EMPTY)).iterator(); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java new file mode 100644 index 000000000000..2093b687a2ab --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.search.WeightedToken; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; + +public class InferenceChunkedSparseEmbeddingResults implements ChunkedInferenceServiceResults { + + public static final String 
NAME = "chunked_sparse_embedding_results"; + public static final String FIELD_NAME = "sparse_embedding_chunk"; + + public static InferenceChunkedSparseEmbeddingResults ofMlResult(InferenceChunkedTextExpansionResults mlInferenceResults) { + return new InferenceChunkedSparseEmbeddingResults(mlInferenceResults.getChunks()); + } + + /** + * Returns a list of {@link InferenceChunkedSparseEmbeddingResults}. The number of entries in the list will match the input list size. + * Each {@link InferenceChunkedSparseEmbeddingResults} will have a single chunk containing the entire results from the + * {@link SparseEmbeddingResults}. + */ + public static List listOf(List inputs, SparseEmbeddingResults sparseEmbeddingResults) { + validateInputSizeAgainstEmbeddings(inputs, sparseEmbeddingResults.embeddings().size()); + + var results = new ArrayList(inputs.size()); + for (int i = 0; i < inputs.size(); i++) { + results.add(ofSingle(inputs.get(i), sparseEmbeddingResults.embeddings().get(i))); + } + + return results; + } + + private static InferenceChunkedSparseEmbeddingResults ofSingle(String input, SparseEmbeddingResults.Embedding embedding) { + var weightedTokens = embedding.tokens() + .stream() + .map(weightedToken -> new WeightedToken(weightedToken.token(), weightedToken.weight())) + .toList(); + + return new InferenceChunkedSparseEmbeddingResults( + List.of(new InferenceChunkedTextExpansionResults.ChunkedResult(input, weightedTokens)) + ); + } + + private final List chunkedResults; + + public InferenceChunkedSparseEmbeddingResults(List chunks) { + this.chunkedResults = chunks; + } + + public InferenceChunkedSparseEmbeddingResults(StreamInput in) throws IOException { + this.chunkedResults = in.readCollectionAsList(InferenceChunkedTextExpansionResults.ChunkedResult::new); + } + + public List getChunkedResults() { + return chunkedResults; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + 
builder.startArray(FIELD_NAME); + for (InferenceChunkedTextExpansionResults.ChunkedResult chunk : chunkedResults) { + chunk.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(chunkedResults); + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException("Chunked results are not returned in the coordindated action"); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException("Chunked results are not returned in the legacy format"); + } + + @Override + public Map asMap() { + return Map.of( + FIELD_NAME, + chunkedResults.stream().map(InferenceChunkedTextExpansionResults.ChunkedResult::asMap).collect(Collectors.toList()) + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceChunkedSparseEmbeddingResults that = (InferenceChunkedSparseEmbeddingResults) o; + return Objects.equals(chunkedResults, that.chunkedResults); + } + + @Override + public int hashCode() { + return Objects.hash(chunkedResults); + } + + @Override + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return chunkedResults.stream() + .map(chunk -> new Chunk(chunk.matchedText(), toBytesReference(xcontent, chunk.weightedTokens()))) + .iterator(); + } + + /** + * Serialises the {@link WeightedToken} list, according to the provided {@link XContent}, + * into a {@link BytesReference}. 
+ */ + private static BytesReference toBytesReference(XContent xContent, List tokens) { + try { + XContentBuilder b = XContentBuilder.builder(xContent); + b.startObject(); + for (var weightedToken : tokens) { + weightedToken.toXContent(b, ToXContent.EMPTY_PARAMS); + } + b.endObject(); + return BytesReference.bytes(b); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java similarity index 65% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingByteResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java index 86ea70ddd62d..a2bc072064ea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java @@ -8,51 +8,56 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import java.io.IOException; import java.util.ArrayList; import 
java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; -public record ChunkedTextEmbeddingByteResults(List chunks, boolean isTruncated) implements ChunkedInferenceServiceResults { +public record InferenceChunkedTextEmbeddingByteResults(List chunks, boolean isTruncated) + implements + ChunkedInferenceServiceResults { public static final String NAME = "chunked_text_embedding_service_byte_results"; public static final String FIELD_NAME = "text_embedding_byte_chunk"; /** - * Returns a list of {@link ChunkedTextEmbeddingByteResults}. The number of entries in the list will match the input list size. - * Each {@link ChunkedTextEmbeddingByteResults} will have a single chunk containing the entire results from the - * {@link TextEmbeddingByteResults}. + * Returns a list of {@link InferenceChunkedTextEmbeddingByteResults}. The number of entries in the list will match the input list size. + * Each {@link InferenceChunkedTextEmbeddingByteResults} will have a single chunk containing the entire results from the + * {@link InferenceTextEmbeddingByteResults}. 
*/ - public static List of(List inputs, TextEmbeddingByteResults textEmbeddings) { + public static List listOf(List inputs, InferenceTextEmbeddingByteResults textEmbeddings) { validateInputSizeAgainstEmbeddings(inputs, textEmbeddings.embeddings().size()); var results = new ArrayList(inputs.size()); for (int i = 0; i < inputs.size(); i++) { - results.add(of(inputs.get(i), textEmbeddings.embeddings().get(i).values())); + results.add(ofSingle(inputs.get(i), textEmbeddings.embeddings().get(i).values())); } return results; } - public static ChunkedTextEmbeddingByteResults of(String input, byte[] byteEmbeddings) { - return new ChunkedTextEmbeddingByteResults(List.of(new EmbeddingChunk(input, byteEmbeddings)), false); + private static InferenceChunkedTextEmbeddingByteResults ofSingle(String input, byte[] byteEmbeddings) { + return new InferenceChunkedTextEmbeddingByteResults(List.of(new InferenceByteEmbeddingChunk(input, byteEmbeddings)), false); } - public ChunkedTextEmbeddingByteResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(EmbeddingChunk::new), in.readBoolean()); + public InferenceChunkedTextEmbeddingByteResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceByteEmbeddingChunk::new), in.readBoolean()); } @Override @@ -92,7 +97,7 @@ public record ChunkedTextEmbeddingByteResults(List chunks, boole return NAME; } - public List getChunks() { + public List getChunks() { return chunks; } @@ -100,7 +105,7 @@ public record ChunkedTextEmbeddingByteResults(List chunks, boole public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ChunkedTextEmbeddingByteResults that = (ChunkedTextEmbeddingByteResults) o; + InferenceChunkedTextEmbeddingByteResults that = (InferenceChunkedTextEmbeddingByteResults) o; return isTruncated == that.isTruncated && Objects.equals(chunks, that.chunks); } @@ -109,9 +114,9 @@ public record ChunkedTextEmbeddingByteResults(List 
chunks, boole return Objects.hash(chunks, isTruncated); } - public record EmbeddingChunk(String matchedText, byte[] embedding) implements Writeable, ToXContentObject { + public record InferenceByteEmbeddingChunk(String matchedText, byte[] embedding) implements Writeable, ToXContentObject { - public EmbeddingChunk(StreamInput in) throws IOException { + public InferenceByteEmbeddingChunk(StreamInput in) throws IOException { this(in.readString(), in.readByteArray()); } @@ -145,7 +150,7 @@ public record ChunkedTextEmbeddingByteResults(List chunks, boole public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - EmbeddingChunk that = (EmbeddingChunk) o; + InferenceByteEmbeddingChunk that = (InferenceByteEmbeddingChunk) o; return Objects.equals(matchedText, that.matchedText) && Arrays.equals(embedding, that.embedding); } @@ -156,4 +161,22 @@ public record ChunkedTextEmbeddingByteResults(List chunks, boole return result; } } + + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return chunks.stream().map(chunk -> new Chunk(chunk.matchedText(), toBytesReference(xcontent, chunk.embedding()))).iterator(); + } + + private static BytesReference toBytesReference(XContent xContent, byte[] value) { + try { + XContentBuilder b = XContentBuilder.builder(xContent); + b.startArray(); + for (byte v : value) { + b.value(v); + } + b.endArray(); + return BytesReference.bytes(b); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java similarity index 52% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java rename to 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java index 4fcd5a53fc28..9b625f9b1712 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -15,22 +16,61 @@ import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.utils.FloatConversionUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -public record ChunkedTextEmbeddingFloatResults(List chunks) implements ChunkedInferenceServiceResults { +import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; + +public record InferenceChunkedTextEmbeddingFloatResults(List chunks) + implements + ChunkedInferenceServiceResults { public static final String NAME = "chunked_text_embedding_service_float_results"; public static final String FIELD_NAME = 
"text_embedding_float_chunk"; - public ChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(EmbeddingChunk::new)); + public InferenceChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceFloatEmbeddingChunk::new)); + } + + /** + * Returns a list of {@link InferenceChunkedTextEmbeddingFloatResults}. + * Each {@link InferenceChunkedTextEmbeddingFloatResults} contain a single chunk with the text and the + * {@link InferenceTextEmbeddingFloatResults}. + */ + public static List listOf(List inputs, InferenceTextEmbeddingFloatResults textEmbeddings) { + validateInputSizeAgainstEmbeddings(inputs, textEmbeddings.embeddings().size()); + + var results = new ArrayList(inputs.size()); + + for (int i = 0; i < inputs.size(); i++) { + results.add( + new InferenceChunkedTextEmbeddingFloatResults( + List.of(new InferenceFloatEmbeddingChunk(inputs.get(i), textEmbeddings.embeddings().get(i).values())) + ) + ); + } + + return results; + } + + public static InferenceChunkedTextEmbeddingFloatResults ofMlResults(MlChunkedTextEmbeddingFloatResults mlInferenceResult) { + return new InferenceChunkedTextEmbeddingFloatResults( + mlInferenceResult.getChunks() + .stream() + .map(chunk -> new InferenceFloatEmbeddingChunk(chunk.matchedText(), FloatConversionUtils.floatArrayOf(chunk.embedding()))) + .toList() + ); } @Override @@ -69,7 +109,7 @@ public record ChunkedTextEmbeddingFloatResults(List chunks) impl return NAME; } - public List getChunks() { + public List getChunks() { return chunks; } @@ -77,7 +117,7 @@ public record ChunkedTextEmbeddingFloatResults(List chunks) impl public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ChunkedTextEmbeddingFloatResults that = (ChunkedTextEmbeddingFloatResults) o; + InferenceChunkedTextEmbeddingFloatResults that = (InferenceChunkedTextEmbeddingFloatResults) o; return 
Objects.equals(chunks, that.chunks); } @@ -86,12 +126,16 @@ public record ChunkedTextEmbeddingFloatResults(List chunks) impl return Objects.hash(chunks); } - public record EmbeddingChunk(String matchedText, float[] embedding) implements Writeable, ToXContentObject { + public record InferenceFloatEmbeddingChunk(String matchedText, float[] embedding) implements Writeable, ToXContentObject { - public EmbeddingChunk(StreamInput in) throws IOException { + public InferenceFloatEmbeddingChunk(StreamInput in) throws IOException { this(in.readString(), in.readFloatArray()); } + public static InferenceFloatEmbeddingChunk of(String matchedText, double[] doubleEmbedding) { + return new InferenceFloatEmbeddingChunk(matchedText, FloatConversionUtils.floatArrayOf(doubleEmbedding)); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(matchedText); @@ -122,7 +166,7 @@ public record ChunkedTextEmbeddingFloatResults(List chunks) impl public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - EmbeddingChunk that = (EmbeddingChunk) o; + InferenceFloatEmbeddingChunk that = (InferenceFloatEmbeddingChunk) o; return Objects.equals(matchedText, that.matchedText) && Arrays.equals(embedding, that.embedding); } @@ -134,4 +178,24 @@ public record ChunkedTextEmbeddingFloatResults(List chunks) impl } } + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return chunks.stream().map(chunk -> new Chunk(chunk.matchedText(), toBytesReference(xcontent, chunk.embedding()))).iterator(); + } + + /** + * Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}. 
+ */ + private static BytesReference toBytesReference(XContent xContent, float[] value) { + try { + XContentBuilder b = XContentBuilder.builder(xContent); + b.startArray(); + for (float v : value) { + b.value(v); + } + b.endArray(); + return BytesReference.bytes(b); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java similarity index 82% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java index 04986b2d957d..8d94083bf324 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java @@ -17,6 +17,7 @@ import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -43,12 +44,12 @@ import java.util.Objects; * ] * } */ -public record TextEmbeddingByteResults(List embeddings) implements InferenceServiceResults, TextEmbedding { +public record InferenceTextEmbeddingByteResults(List embeddings) implements InferenceServiceResults, TextEmbedding { public static final String NAME = "text_embedding_service_byte_results"; public static final String TEXT_EMBEDDING_BYTES = "text_embedding_bytes"; - public TextEmbeddingByteResults(StreamInput in) throws 
IOException { - this(in.readCollectionAsList(Embedding::new)); + public InferenceTextEmbeddingByteResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceByteEmbedding::new)); } @Override @@ -59,7 +60,7 @@ public record TextEmbeddingByteResults(List embeddings) implements In @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray(TEXT_EMBEDDING_BYTES); - for (Embedding embedding : embeddings) { + for (InferenceByteEmbedding embedding : embeddings) { embedding.toXContent(builder, params); } builder.endArray(); @@ -79,13 +80,7 @@ public record TextEmbeddingByteResults(List embeddings) implements In @Override public List transformToCoordinationFormat() { return embeddings.stream() - .map( - embedding -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TEXT_EMBEDDING_BYTES, - embedding.toDoubleArray(), - false - ) - ) + .map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING_BYTES, embedding.toDoubleArray(), false)) .toList(); } @@ -110,7 +105,7 @@ public record TextEmbeddingByteResults(List embeddings) implements In public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - TextEmbeddingByteResults that = (TextEmbeddingByteResults) o; + InferenceTextEmbeddingByteResults that = (InferenceTextEmbeddingByteResults) o; return Objects.equals(embeddings, that.embeddings); } @@ -119,10 +114,10 @@ public record TextEmbeddingByteResults(List embeddings) implements In return Objects.hash(embeddings); } - public record Embedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingInt { + public record InferenceByteEmbedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingInt { public static final String EMBEDDING = "embedding"; - public Embedding(StreamInput in) throws IOException { + public InferenceByteEmbedding(StreamInput in) throws IOException { 
this(in.readByteArray()); } @@ -131,12 +126,12 @@ public record TextEmbeddingByteResults(List embeddings) implements In out.writeByteArray(values); } - public static Embedding of(List embeddingValuesList) { + public static InferenceByteEmbedding of(List embeddingValuesList) { byte[] embeddingValues = new byte[embeddingValuesList.size()]; for (int i = 0; i < embeddingValuesList.size(); i++) { embeddingValues[i] = embeddingValuesList.get(i); } - return new Embedding(embeddingValues); + return new InferenceByteEmbedding(embeddingValues); } @Override @@ -183,7 +178,7 @@ public record TextEmbeddingByteResults(List embeddings) implements In public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Embedding embedding = (Embedding) o; + InferenceByteEmbedding embedding = (InferenceByteEmbedding) o; return Arrays.equals(values, embedding.values); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java similarity index 76% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java index 152e10e82d5b..1822e3af28c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java @@ -20,6 +20,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; 
import java.io.IOException; import java.util.ArrayList; @@ -47,29 +48,32 @@ import java.util.stream.Collectors; * ] * } */ -public record TextEmbeddingResults(List embeddings) implements InferenceServiceResults, TextEmbedding { +public record InferenceTextEmbeddingFloatResults(List embeddings) + implements + InferenceServiceResults, + TextEmbedding { public static final String NAME = "text_embedding_service_results"; public static final String TEXT_EMBEDDING = TaskType.TEXT_EMBEDDING.toString(); - public TextEmbeddingResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(Embedding::new)); + public InferenceTextEmbeddingFloatResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceFloatEmbedding::new)); } @SuppressWarnings("deprecation") - TextEmbeddingResults(LegacyTextEmbeddingResults legacyTextEmbeddingResults) { + InferenceTextEmbeddingFloatResults(LegacyTextEmbeddingResults legacyTextEmbeddingResults) { this( legacyTextEmbeddingResults.embeddings() .stream() - .map(embedding -> new Embedding(embedding.values())) + .map(embedding -> new InferenceFloatEmbedding(embedding.values())) .collect(Collectors.toList()) ); } - public static TextEmbeddingResults of(List results) { - List embeddings = new ArrayList<>(results.size()); + public static InferenceTextEmbeddingFloatResults of(List results) { + List embeddings = new ArrayList<>(results.size()); for (InferenceResults result : results) { - if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults embeddingResult) { - embeddings.add(Embedding.of(embeddingResult)); + if (result instanceof MlTextEmbeddingResults embeddingResult) { + embeddings.add(InferenceFloatEmbedding.of(embeddingResult)); } else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) { if (errorResult.getException() instanceof ElasticsearchStatusException statusException) { throw statusException; @@ -86,7 +90,7 @@ 
public record TextEmbeddingResults(List embeddings) implements Infere ); } } - return new TextEmbeddingResults(embeddings); + return new InferenceTextEmbeddingFloatResults(embeddings); } @Override @@ -97,7 +101,7 @@ public record TextEmbeddingResults(List embeddings) implements Infere @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray(TEXT_EMBEDDING); - for (Embedding embedding : embeddings) { + for (InferenceFloatEmbedding embedding : embeddings) { embedding.toXContent(builder, params); } builder.endArray(); @@ -116,15 +120,7 @@ public record TextEmbeddingResults(List embeddings) implements Infere @Override public List transformToCoordinationFormat() { - return embeddings.stream() - .map( - embedding -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TEXT_EMBEDDING, - embedding.asDoubleArray(), - false - ) - ) - .toList(); + return embeddings.stream().map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING, embedding.asDoubleArray(), false)).toList(); } @Override @@ -148,7 +144,7 @@ public record TextEmbeddingResults(List embeddings) implements Infere public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - TextEmbeddingResults that = (TextEmbeddingResults) o; + InferenceTextEmbeddingFloatResults that = (InferenceTextEmbeddingFloatResults) o; return Objects.equals(embeddings, that.embeddings); } @@ -157,24 +153,24 @@ public record TextEmbeddingResults(List embeddings) implements Infere return Objects.hash(embeddings); } - public record Embedding(float[] values) implements Writeable, ToXContentObject, EmbeddingInt { + public record InferenceFloatEmbedding(float[] values) implements Writeable, ToXContentObject, EmbeddingInt { public static final String EMBEDDING = "embedding"; - public Embedding(StreamInput in) throws IOException { + public InferenceFloatEmbedding(StreamInput in) throws 
IOException { this(in.readFloatArray()); } - public static Embedding of(org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults embeddingResult) { + public static InferenceFloatEmbedding of(MlTextEmbeddingResults embeddingResult) { float[] embeddingAsArray = embeddingResult.getInferenceAsFloat(); - return new Embedding(embeddingAsArray); + return new InferenceFloatEmbedding(embeddingAsArray); } - public static Embedding of(List embeddingValuesList) { + public static InferenceFloatEmbedding of(List embeddingValuesList) { float[] embeddingValues = new float[embeddingValuesList.size()]; for (int i = 0; i < embeddingValuesList.size(); i++) { embeddingValues[i] = embeddingValuesList.get(i); } - return new Embedding(embeddingValues); + return new InferenceFloatEmbedding(embeddingValues); } @Override @@ -218,7 +214,7 @@ public record TextEmbeddingResults(List embeddings) implements Infere public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Embedding embedding = (Embedding) o; + InferenceFloatEmbedding embedding = (InferenceFloatEmbedding) o; return Arrays.equals(values, embedding.values); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java index 87a56da93ec1..84a0928cae0d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java @@ -44,7 +44,7 @@ import java.util.Objects; * * Legacy text embedding results represents what was returned prior to the * {@link org.elasticsearch.TransportVersions#V_8_12_0} version. 
- * @deprecated use {@link TextEmbeddingResults} instead + * @deprecated use {@link InferenceTextEmbeddingFloatResults} instead */ @Deprecated public record LegacyTextEmbeddingResults(List embeddings) implements InferenceResults { @@ -114,8 +114,8 @@ public record LegacyTextEmbeddingResults(List embeddings) implements return Objects.hash(embeddings); } - public TextEmbeddingResults transformToTextEmbeddingResults() { - return new TextEmbeddingResults(this); + public InferenceTextEmbeddingFloatResults transformToTextEmbeddingResults() { + return new InferenceTextEmbeddingFloatResults(this); } public record Embedding(float[] values) implements Writeable, ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java index c865d23ef8e2..4fe2c9ae486f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java @@ -12,11 +12,11 @@ import org.elasticsearch.rest.RestStatus; public class ResultUtils { - public static ElasticsearchStatusException createInvalidChunkedResultException(String receivedResultName) { + public static ElasticsearchStatusException createInvalidChunkedResultException(String expectedResultName, String receivedResultName) { return new ElasticsearchStatusException( "Expected a chunked inference [{}] received [{}]", RestStatus.INTERNAL_SERVER_ERROR, - ChunkedTextEmbeddingResults.NAME, + expectedResultName, receivedResultName ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java index a3fb956c3252..354e898a514d 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -20,17 +20,17 @@ import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; import org.elasticsearch.xpack.core.ml.inference.preprocessing.StrictlyParsedPreProcessor; import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncoding; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.FillMaskResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.NerResults; import org.elasticsearch.xpack.core.ml.inference.results.NlpClassificationInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.PyTorchPassThroughResults; import org.elasticsearch.xpack.core.ml.inference.results.QuestionAnsweringInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults; import 
org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; @@ -652,7 +652,9 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider { new NamedWriteableRegistry.Entry(InferenceResults.class, PyTorchPassThroughResults.NAME, PyTorchPassThroughResults::new) ); namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceResults.class, TextExpansionResults.NAME, TextExpansionResults::new)); - namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceResults.class, TextEmbeddingResults.NAME, TextEmbeddingResults::new)); + namedWriteables.add( + new NamedWriteableRegistry.Entry(InferenceResults.class, MlTextEmbeddingResults.NAME, MlTextEmbeddingResults::new) + ); namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceResults.class, @@ -675,10 +677,18 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider { ) ); namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceResults.class, ChunkedTextEmbeddingResults.NAME, ChunkedTextEmbeddingResults::new) + new NamedWriteableRegistry.Entry( + InferenceResults.class, + MlChunkedTextEmbeddingFloatResults.NAME, + MlChunkedTextEmbeddingFloatResults::new + ) ); namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceResults.class, ChunkedTextExpansionResults.NAME, ChunkedTextExpansionResults::new) + new NamedWriteableRegistry.Entry( + InferenceResults.class, + InferenceChunkedTextExpansionResults.NAME, + InferenceChunkedTextExpansionResults::new + ) ); // Inference Configs diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResults.java similarity index 91% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResults.java rename to 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResults.java index f2055e0930fd..3c719262fbfc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResults.java @@ -21,7 +21,7 @@ import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; -public class ChunkedTextExpansionResults extends ChunkedNlpInferenceResults { +public class InferenceChunkedTextExpansionResults extends ChunkedNlpInferenceResults { public static final String NAME = "chunked_text_expansion_result"; public record ChunkedResult(String matchedText, List weightedTokens) implements Writeable, ToXContentObject { @@ -60,13 +60,13 @@ public class ChunkedTextExpansionResults extends ChunkedNlpInferenceResults { private final String resultsField; private final List chunks; - public ChunkedTextExpansionResults(String resultField, List chunks, boolean isTruncated) { + public InferenceChunkedTextExpansionResults(String resultField, List chunks, boolean isTruncated) { super(isTruncated); this.resultsField = resultField; this.chunks = chunks; } - public ChunkedTextExpansionResults(StreamInput in) throws IOException { + public InferenceChunkedTextExpansionResults(StreamInput in) throws IOException { super(in); this.resultsField = in.readString(); this.chunks = in.readCollectionAsList(ChunkedResult::new); @@ -104,7 +104,7 @@ public class ChunkedTextExpansionResults extends ChunkedNlpInferenceResults { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - ChunkedTextExpansionResults that = (ChunkedTextExpansionResults) o; + InferenceChunkedTextExpansionResults that = (InferenceChunkedTextExpansionResults) o; return Objects.equals(resultsField, 
that.resultsField) && Objects.equals(chunks, that.chunks); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResults.java similarity index 91% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResults.java index e47554aebbad..aabd87c1c272 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResults.java @@ -21,7 +21,7 @@ import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; -public class ChunkedTextEmbeddingResults extends ChunkedNlpInferenceResults { +public class MlChunkedTextEmbeddingFloatResults extends ChunkedNlpInferenceResults { public record EmbeddingChunk(String matchedText, double[] embedding) implements Writeable, ToXContentObject { @@ -77,13 +77,13 @@ public class ChunkedTextEmbeddingResults extends ChunkedNlpInferenceResults { private final String resultsField; private final List chunks; - public ChunkedTextEmbeddingResults(String resultsField, List embeddings, boolean isTruncated) { + public MlChunkedTextEmbeddingFloatResults(String resultsField, List embeddings, boolean isTruncated) { super(isTruncated); this.resultsField = resultsField; this.chunks = embeddings; } - public ChunkedTextEmbeddingResults(StreamInput in) throws IOException { + public MlChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { super(in); this.resultsField = in.readString(); this.chunks = in.readCollectionAsList(EmbeddingChunk::new); @@ -134,7 +134,7 @@ public 
class ChunkedTextEmbeddingResults extends ChunkedNlpInferenceResults { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - ChunkedTextEmbeddingResults that = (ChunkedTextEmbeddingResults) o; + MlChunkedTextEmbeddingFloatResults that = (MlChunkedTextEmbeddingFloatResults) o; return Objects.equals(resultsField, that.resultsField) && Objects.equals(chunks, that.chunks); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResults.java similarity index 89% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResults.java index 526c2ec7b7aa..0c0fa6f3f690 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResults.java @@ -16,20 +16,20 @@ import java.util.Arrays; import java.util.Map; import java.util.Objects; -public class TextEmbeddingResults extends NlpInferenceResults { +public class MlTextEmbeddingResults extends NlpInferenceResults { public static final String NAME = "text_embedding_result"; private final String resultsField; private final double[] inference; - public TextEmbeddingResults(String resultsField, double[] inference, boolean isTruncated) { + public MlTextEmbeddingResults(String resultsField, double[] inference, boolean isTruncated) { super(isTruncated); this.inference = inference; this.resultsField = resultsField; } - public TextEmbeddingResults(StreamInput in) throws IOException { + public MlTextEmbeddingResults(StreamInput in) throws IOException { super(in); 
inference = in.readDoubleArray(); resultsField = in.readString(); @@ -89,7 +89,7 @@ public class TextEmbeddingResults extends NlpInferenceResults { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - TextEmbeddingResults that = (TextEmbeddingResults) o; + MlTextEmbeddingResults that = (MlTextEmbeddingResults) o; return Objects.equals(resultsField, that.resultsField) && Arrays.equals(inference, that.inference); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java new file mode 100644 index 000000000000..1b9ca54b394b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.utils; + +public class FloatConversionUtils { + + public static float[] floatArrayOf(double[] doublesArray) { + var floatArray = new float[doublesArray.length]; + for (int i = 0; i < doublesArray.length; i++) { + floatArray[i] = (float) doublesArray[i]; + } + return floatArray; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java new file mode 100644 index 000000000000..83678cd030bc --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults.INFERENCE; +import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults.TEXT; + +public class InferenceChunkedTextEmbeddingFloatResultsTests extends ESTestCase { + /** + * Similar to {@link org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults#asMap()} but it converts the + * embeddings float array into a list of floats to make testing equality easier. 
+ */ + public static Map asMapWithListsInsteadOfArrays(InferenceChunkedTextEmbeddingFloatResults result) { + return Map.of( + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, + result.getChunks() + .stream() + .map(InferenceChunkedTextEmbeddingFloatResultsTests::inferenceFloatEmbeddingChunkAsMapWithListsInsteadOfArrays) + .collect(Collectors.toList()) + ); + } + + /** + * Similar to {@link MlChunkedTextEmbeddingFloatResults.EmbeddingChunk#asMap()} but it converts the double array into a list of doubles + * to make testing equality easier. + */ + public static Map inferenceFloatEmbeddingChunkAsMapWithListsInsteadOfArrays( + InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk chunk + ) { + var chunkAsList = new ArrayList(chunk.embedding().length); + for (double embedding : chunk.embedding()) { + chunkAsList.add((float) embedding); + } + var map = new HashMap(); + map.put(TEXT, chunk.matchedText()); + map.put(INFERENCE, chunkAsList); + return map; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java index 4d8035864729..87049d6bde90 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInference import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.FillMaskResults; import org.elasticsearch.xpack.core.ml.inference.results.FillMaskResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; 
import org.elasticsearch.xpack.core.ml.inference.results.NerResults; import org.elasticsearch.xpack.core.ml.inference.results.NerResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.PyTorchPassThroughResults; @@ -25,8 +27,6 @@ import org.elasticsearch.xpack.core.ml.inference.results.QuestionAnsweringInfere import org.elasticsearch.xpack.core.ml.inference.results.QuestionAnsweringInferenceResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResultsTests; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults; @@ -50,7 +50,7 @@ public class InferModelActionResponseTests extends AbstractWireSerializingTestCa PyTorchPassThroughResults.NAME, QuestionAnsweringInferenceResults.NAME, RegressionInferenceResults.NAME, - TextEmbeddingResults.NAME, + MlTextEmbeddingResults.NAME, TextExpansionResults.NAME, TextSimilarityInferenceResults.NAME, WarningInferenceResults.NAME @@ -87,7 +87,7 @@ public class InferModelActionResponseTests extends AbstractWireSerializingTestCa case PyTorchPassThroughResults.NAME -> PyTorchPassThroughResultsTests.createRandomResults(); case QuestionAnsweringInferenceResults.NAME -> QuestionAnsweringInferenceResultsTests.createRandomResults(); case RegressionInferenceResults.NAME -> RegressionInferenceResultsTests.createRandomResults(); - case TextEmbeddingResults.NAME -> TextEmbeddingResultsTests.createRandomResults(); + case MlTextEmbeddingResults.NAME -> MlTextEmbeddingResultsTests.createRandomResults(); case TextExpansionResults.NAME -> 
TextExpansionResultsTests.createRandomResults(); case TextSimilarityInferenceResults.NAME -> TextSimilarityInferenceResultsTests.createRandomResults(); case WarningInferenceResults.NAME -> WarningInferenceResultsTests.createRandomResults(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java index 4db7d05b6065..eb373080eee4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; import org.junit.Before; import java.util.List; @@ -50,10 +50,10 @@ public class InferTrainedModelDeploymentResponseTests extends AbstractBWCWireSer protected InferTrainedModelDeploymentAction.Response createTestInstance() { return new InferTrainedModelDeploymentAction.Response( List.of( - TextEmbeddingResultsTests.createRandomResults(), - TextEmbeddingResultsTests.createRandomResults(), - TextEmbeddingResultsTests.createRandomResults(), - TextEmbeddingResultsTests.createRandomResults() + MlTextEmbeddingResultsTests.createRandomResults(), + MlTextEmbeddingResultsTests.createRandomResults(), + MlTextEmbeddingResultsTests.createRandomResults(), + MlTextEmbeddingResultsTests.createRandomResults() ) ); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResultsTests.java similarity index 57% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResultsTests.java index 71eff531581e..f5db7a2863e0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResultsTests.java @@ -16,10 +16,10 @@ import java.util.ArrayList; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class ChunkedTextExpansionResultsTests extends AbstractWireSerializingTestCase { +public class InferenceChunkedTextExpansionResultsTests extends AbstractWireSerializingTestCase { - public static ChunkedTextExpansionResults createRandomResults() { - var chunks = new ArrayList(); + public static InferenceChunkedTextExpansionResults createRandomResults() { + var chunks = new ArrayList(); int numChunks = randomIntBetween(1, 5); for (int i = 0; i < numChunks; i++) { @@ -28,24 +28,24 @@ public class ChunkedTextExpansionResultsTests extends AbstractWireSerializingTes for (int j = 0; j < numTokens; j++) { tokenWeights.add(new WeightedToken(Integer.toString(j), (float) randomDoubleBetween(0.0, 5.0, false))); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); } - return new ChunkedTextExpansionResults(DEFAULT_RESULTS_FIELD, 
chunks, randomBoolean()); + return new InferenceChunkedTextExpansionResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); } @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextExpansionResults::new; + protected Writeable.Reader instanceReader() { + return InferenceChunkedTextExpansionResults::new; } @Override - protected ChunkedTextExpansionResults createTestInstance() { + protected InferenceChunkedTextExpansionResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedTextExpansionResults mutateInstance(ChunkedTextExpansionResults instance) throws IOException { + protected InferenceChunkedTextExpansionResults mutateInstance(InferenceChunkedTextExpansionResults instance) throws IOException { return null; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResultsTests.java similarity index 53% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResultsTests.java index 1e8f5b6a26ad..c4d008ac7735 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResultsTests.java @@ -21,10 +21,10 @@ import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInfere import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults.TEXT; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class ChunkedTextEmbeddingResultsTests extends 
AbstractWireSerializingTestCase { +public class MlChunkedTextEmbeddingFloatResultsTests extends AbstractWireSerializingTestCase { - public static ChunkedTextEmbeddingResults createRandomResults() { - var chunks = new ArrayList(); + public static MlChunkedTextEmbeddingFloatResults createRandomResults() { + var chunks = new ArrayList(); int columns = randomIntBetween(5, 10); int numChunks = randomIntBetween(1, 5); @@ -33,17 +33,17 @@ public class ChunkedTextEmbeddingResultsTests extends AbstractWireSerializingTes for (int j = 0; j < columns; j++) { arr[j] = randomDouble(); } - chunks.add(new ChunkedTextEmbeddingResults.EmbeddingChunk(randomAlphaOfLength(6), arr)); + chunks.add(new MlChunkedTextEmbeddingFloatResults.EmbeddingChunk(randomAlphaOfLength(6), arr)); } - return new ChunkedTextEmbeddingResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); + return new MlChunkedTextEmbeddingFloatResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); } /** - * Similar to {@link ChunkedTextEmbeddingResults.EmbeddingChunk#asMap()} but it converts the double array into a list of doubles to - * make testing equality easier. + * Similar to {@link MlChunkedTextEmbeddingFloatResults.EmbeddingChunk#asMap()} but it converts the double array into a list of doubles + * to make testing equality easier. 
*/ - public static Map asMapWithListsInsteadOfArrays(ChunkedTextEmbeddingResults.EmbeddingChunk chunk) { + public static Map asMapWithListsInsteadOfArrays(MlChunkedTextEmbeddingFloatResults.EmbeddingChunk chunk) { var map = new HashMap(); map.put(TEXT, chunk.matchedText()); map.put(INFERENCE, Arrays.stream(chunk.embedding()).boxed().collect(Collectors.toList())); @@ -51,20 +51,28 @@ public class ChunkedTextEmbeddingResultsTests extends AbstractWireSerializingTes } @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingResults::new; + protected Writeable.Reader instanceReader() { + return MlChunkedTextEmbeddingFloatResults::new; } @Override - protected ChunkedTextEmbeddingResults createTestInstance() { + protected MlChunkedTextEmbeddingFloatResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedTextEmbeddingResults mutateInstance(ChunkedTextEmbeddingResults instance) throws IOException { + protected MlChunkedTextEmbeddingFloatResults mutateInstance(MlChunkedTextEmbeddingFloatResults instance) throws IOException { return switch (randomIntBetween(0, 1)) { - case 0 -> new ChunkedTextEmbeddingResults(instance.getResultsField() + "foo", instance.getChunks(), instance.isTruncated); - case 1 -> new ChunkedTextEmbeddingResults(instance.getResultsField(), instance.getChunks(), instance.isTruncated == false); + case 0 -> new MlChunkedTextEmbeddingFloatResults( + instance.getResultsField() + "foo", + instance.getChunks(), + instance.isTruncated + ); + case 1 -> new MlChunkedTextEmbeddingFloatResults( + instance.getResultsField(), + instance.getChunks(), + instance.isTruncated == false + ); default -> throw new IllegalArgumentException("unexpected case"); }; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResultsTests.java 
similarity index 68% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResultsTests.java index fd3ac7f8c0d1..3338609eebdc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResultsTests.java @@ -16,35 +16,35 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceCo import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -public class TextEmbeddingResultsTests extends InferenceResultsTestCase { +public class MlTextEmbeddingResultsTests extends InferenceResultsTestCase { - public static TextEmbeddingResults createRandomResults() { + public static MlTextEmbeddingResults createRandomResults() { int columns = randomIntBetween(1, 10); double[] arr = new double[columns]; for (int i = 0; i < columns; i++) { arr[i] = randomDouble(); } - return new TextEmbeddingResults(DEFAULT_RESULTS_FIELD, arr, randomBoolean()); + return new MlTextEmbeddingResults(DEFAULT_RESULTS_FIELD, arr, randomBoolean()); } @Override - protected Writeable.Reader instanceReader() { - return TextEmbeddingResults::new; + protected Writeable.Reader instanceReader() { + return MlTextEmbeddingResults::new; } @Override - protected TextEmbeddingResults createTestInstance() { + protected MlTextEmbeddingResults createTestInstance() { return createRandomResults(); } @Override - protected TextEmbeddingResults mutateInstance(TextEmbeddingResults instance) { + protected MlTextEmbeddingResults mutateInstance(MlTextEmbeddingResults instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } public void testAsMap() { - TextEmbeddingResults testInstance = 
createTestInstance(); + MlTextEmbeddingResults testInstance = createTestInstance(); Map asMap = testInstance.asMap(); int size = testInstance.isTruncated ? 2 : 1; assertThat(asMap.keySet(), hasSize(size)); @@ -55,7 +55,7 @@ public class TextEmbeddingResultsTests extends InferenceResultsTestCase input, int dimensions) { - List embeddings = new ArrayList<>(); + private InferenceTextEmbeddingFloatResults makeResults(List input, int dimensions) { + List embeddings = new ArrayList<>(); for (int i = 0; i < input.size(); i++) { - double[] doubleEmbeddings = generateEmbedding(input.get(i), dimensions); + float[] doubleEmbeddings = generateEmbedding(input.get(i), dimensions); List floatEmbeddings = new ArrayList<>(dimensions); for (int j = 0; j < dimensions; j++) { - floatEmbeddings.add((float) doubleEmbeddings[j]); + floatEmbeddings.add(doubleEmbeddings[j]); } - embeddings.add(TextEmbeddingResults.Embedding.of(floatEmbeddings)); + embeddings.add(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(floatEmbeddings)); } - return new TextEmbeddingResults(embeddings); + return new InferenceTextEmbeddingFloatResults(embeddings); } private List makeChunkedResults(List input, int dimensions) { - var results = new ArrayList(); + var chunks = new ArrayList(); for (int i = 0; i < input.size(); i++) { - double[] embeddings = generateEmbedding(input.get(i), dimensions); - results.add( - new org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults( - List.of(new ChunkedTextEmbeddingResults.EmbeddingChunk(input.get(i), embeddings)) - ) - ); + float[] embedding = generateEmbedding(input.get(i), dimensions); + chunks.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(embedding)); } - return results; + + return InferenceChunkedTextEmbeddingFloatResults.listOf(input, new InferenceTextEmbeddingFloatResults(chunks)); } protected ServiceSettings getServiceSettingsFromMap(Map serviceSettingsMap) { return TestServiceSettings.fromMap(serviceSettingsMap); 
} - private static double[] generateEmbedding(String input, int dimensions) { - double[] embedding = new double[dimensions]; + private static float[] generateEmbedding(String input, int dimensions) { + float[] embedding = new float[dimensions]; for (int j = 0; j < dimensions; j++) { embedding[j] = input.hashCode() + 1 + j; } - return embedding; } } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index 05e85334cff5..27fa55b7b7dc 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -28,9 +28,9 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.io.IOException; @@ -146,7 +146,9 @@ public class TestSparseInferenceServiceExtension implements InferenceServiceExte tokens.add(new WeightedToken("feature_" + j, generateEmbedding(input.get(i), j))); } results.add( - new ChunkedSparseEmbeddingResults(List.of(new 
ChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens))) + new InferenceChunkedSparseEmbeddingResults( + List.of(new InferenceChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens)) + ) ); } return results; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index bff7ecdcc4a0..b3dbd97d495a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -15,16 +15,15 @@ import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import 
org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.azureaistudio.completion.AzureAiStudioChatCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.azureaistudio.completion.AzureAiStudioChatCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureaistudio.embeddings.AzureAiStudioEmbeddingsServiceSettings; @@ -304,29 +303,22 @@ public class InferenceNamedWriteablesProvider { namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceServiceResults.class, - ChunkedSparseEmbeddingResults.NAME, - ChunkedSparseEmbeddingResults::new + InferenceChunkedSparseEmbeddingResults.NAME, + InferenceChunkedSparseEmbeddingResults::new ) ); namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceServiceResults.class, - ChunkedTextEmbeddingResults.NAME, - ChunkedTextEmbeddingResults::new + InferenceChunkedTextEmbeddingFloatResults.NAME, + InferenceChunkedTextEmbeddingFloatResults::new ) ); namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceServiceResults.class, - ChunkedTextEmbeddingFloatResults.NAME, - ChunkedTextEmbeddingFloatResults::new - ) - ); - namedWriteables.add( - new NamedWriteableRegistry.Entry( - InferenceServiceResults.class, - ChunkedTextEmbeddingByteResults.NAME, - ChunkedTextEmbeddingByteResults::new + InferenceChunkedTextEmbeddingByteResults.NAME, + InferenceChunkedTextEmbeddingByteResults::new ) ); } @@ -336,10 +328,18 @@ public class InferenceNamedWriteablesProvider { new NamedWriteableRegistry.Entry(InferenceServiceResults.class, SparseEmbeddingResults.NAME, SparseEmbeddingResults::new) ); namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceServiceResults.class, 
TextEmbeddingResults.NAME, TextEmbeddingResults::new) + new NamedWriteableRegistry.Entry( + InferenceServiceResults.class, + InferenceTextEmbeddingFloatResults.NAME, + InferenceTextEmbeddingFloatResults::new + ) ); namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceServiceResults.class, TextEmbeddingByteResults.NAME, TextEmbeddingByteResults::new) + new NamedWriteableRegistry.Entry( + InferenceServiceResults.class, + InferenceTextEmbeddingByteResults.NAME, + InferenceTextEmbeddingByteResults::new + ) ); namedWriteables.add( new NamedWriteableRegistry.Entry(InferenceServiceResults.class, ChatCompletionResults.NAME, ChatCompletionResults::new) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java index 77d03ac66095..78a752244846 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java @@ -13,9 +13,9 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import java.util.ArrayList; import java.util.List; @@ -45,7 +45,7 @@ public class EmbeddingRequestChunker { private final int chunkOverlap; private 
List> chunkedInputs; - private List>> results; + private List>> results; private AtomicArray errors; private ActionListener> finalListener; @@ -160,7 +160,7 @@ public class EmbeddingRequestChunker { @Override public void onResponse(InferenceServiceResults inferenceServiceResults) { - if (inferenceServiceResults instanceof TextEmbeddingResults textEmbeddingResults) { // TODO byte embeddings + if (inferenceServiceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { // TODO byte embeddings int numRequests = positions.stream().mapToInt(SubBatchPositionsAndCount::embeddingCount).sum(); if (numRequests != textEmbeddingResults.embeddings().size()) { onFailure( @@ -212,11 +212,11 @@ public class EmbeddingRequestChunker { finalListener.onResponse(response); } - private ChunkedTextEmbeddingFloatResults merge( + private InferenceChunkedTextEmbeddingFloatResults merge( List chunks, - AtomicArray> debatchedResults + AtomicArray> debatchedResults ) { - var all = new ArrayList(); + var all = new ArrayList(); for (int i = 0; i < debatchedResults.length(); i++) { var subBatch = debatchedResults.get(i); all.addAll(subBatch); @@ -224,12 +224,14 @@ public class EmbeddingRequestChunker { assert chunks.size() == all.size(); - var embeddingChunks = new ArrayList(); + var embeddingChunks = new ArrayList(); for (int i = 0; i < chunks.size(); i++) { - embeddingChunks.add(new ChunkedTextEmbeddingFloatResults.EmbeddingChunk(chunks.get(i), all.get(i).values())); + embeddingChunks.add( + new InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk(chunks.get(i), all.get(i).values()) + ); } - return new ChunkedTextEmbeddingFloatResults(embeddingChunks); + return new InferenceChunkedTextEmbeddingFloatResults(embeddingChunks); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index ee4b43ec2e41..3fa9635d38e8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -17,8 +17,8 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.XContentUtils; @@ -187,14 +187,14 @@ public class CohereEmbeddingsResponseEntity { private static InferenceServiceResults parseByteEmbeddingsArray(XContentParser parser) throws IOException { var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); - return new TextEmbeddingByteResults(embeddingList); + return new InferenceTextEmbeddingByteResults(embeddingList); } - private static TextEmbeddingByteResults.Embedding parseByteArrayEntry(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingByteResults.InferenceByteEmbedding parseByteArrayEntry(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); List embeddingValuesList = 
parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry); - return TextEmbeddingByteResults.Embedding.of(embeddingValuesList); + return InferenceTextEmbeddingByteResults.InferenceByteEmbedding.of(embeddingValuesList); } private static Byte parseEmbeddingInt8Entry(XContentParser parser) throws IOException { @@ -215,13 +215,14 @@ public class CohereEmbeddingsResponseEntity { private static InferenceServiceResults parseFloatEmbeddingsArray(XContentParser parser) throws IOException { var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseFloatArrayEntry); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } - private static TextEmbeddingResults.Embedding parseFloatArrayEntry(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseFloatArrayEntry(XContentParser parser) + throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); List embeddingValuesList = parseList(parser, XContentUtils::parseFloat); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private CohereEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java index 69eefcefc614..543b8e39d85f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java @@ 
-12,7 +12,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.XContentUtils; @@ -70,7 +70,7 @@ public class GoogleAiStudioEmbeddingsResponseEntity { * */ - public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException { + public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { @@ -81,16 +81,17 @@ public class GoogleAiStudioEmbeddingsResponseEntity { positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingList = parseList( + List embeddingList = parseList( jsonParser, GoogleAiStudioEmbeddingsResponseEntity::parseEmbeddingObject ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } } - private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) + throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); positionParserAtTokenAfterField(parser, "values", FAILED_TO_FIND_FIELD_TEMPLATE); 
@@ -99,7 +100,7 @@ public class GoogleAiStudioEmbeddingsResponseEntity { // parse and discard the rest of the object consumeUntilObjectEnd(parser); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private GoogleAiStudioEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java index 7e2e3abd6275..cdfe36447b88 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java @@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.XContentUtils; @@ -35,7 +35,7 @@ public class HuggingFaceEmbeddingsResponseEntity { * Parse the response from hugging face. The known formats are an array of arrays and object with an {@code embeddings} field containing * an array of arrays. 
*/ - public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException { + public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { @@ -93,10 +93,13 @@ public class HuggingFaceEmbeddingsResponseEntity { * sentence-transformers/all-MiniLM-L6-v2 * sentence-transformers/all-MiniLM-L12-v2 */ - private static TextEmbeddingResults parseArrayFormat(XContentParser parser) throws IOException { - List embeddingList = parseList(parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry); + private static InferenceTextEmbeddingFloatResults parseArrayFormat(XContentParser parser) throws IOException { + List embeddingList = parseList( + parser, + HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry + ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } /** @@ -135,19 +138,23 @@ public class HuggingFaceEmbeddingsResponseEntity { * intfloat/multilingual-e5-small * sentence-transformers/all-mpnet-base-v2 */ - private static TextEmbeddingResults parseObjectFormat(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingFloatResults parseObjectFormat(XContentParser parser) throws IOException { positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingList = parseList(parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry); + List embeddingList = parseList( + parser, + HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry + ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } - private static TextEmbeddingResults.Embedding 
parseEmbeddingEntry(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingEntry(XContentParser parser) + throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); List embeddingValuesList = parseList(parser, XContentUtils::parseFloat); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private HuggingFaceEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index eeed83dfe69b..ad6df0624708 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.XContentUtils; @@ -74,7 +74,7 @@ public class OpenAiEmbeddingsResponseEntity { * * */ - public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException { + public static 
InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { @@ -85,16 +85,17 @@ public class OpenAiEmbeddingsResponseEntity { positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingList = parseList( + List embeddingList = parseList( jsonParser, OpenAiEmbeddingsResponseEntity::parseEmbeddingObject ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } } - private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) + throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); @@ -103,7 +104,7 @@ public class OpenAiEmbeddingsResponseEntity { // parse and discard the rest of the object consumeUntilObjectEnd(parser); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private OpenAiEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index f8fab4fadf4e..7b2e23f2e972 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ 
-8,7 +8,6 @@ package org.elasticsearch.xpack.inference.mapper; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -17,26 +16,21 @@ import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.support.MapXContentParser; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.io.IOException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -292,60 +286,13 @@ public record SemanticTextField(String fieldName, List originalValues, I ) { List chunks = new ArrayList<>(); for (var result : results) { - if (result instanceof ChunkedSparseEmbeddingResults textExpansionResults) { - for (var chunk : textExpansionResults.getChunkedResults()) { - 
chunks.add(new Chunk(chunk.matchedText(), toBytesReference(contentType.xContent(), chunk.weightedTokens()))); - } - } else if (result instanceof ChunkedTextEmbeddingResults textEmbeddingResults) { - for (var chunk : textEmbeddingResults.getChunks()) { - chunks.add(new Chunk(chunk.matchedText(), toBytesReference(contentType.xContent(), chunk.embedding()))); - } - } else { - throw new ElasticsearchStatusException( - "Invalid inference results format for field [{}] with inference id [{}], got {}", - RestStatus.BAD_REQUEST, - field, - inferenceId, - result.getWriteableName() - ); + for (Iterator it = result.chunksAsMatchedTextAndByteReference(contentType.xContent()); it + .hasNext();) { + var chunkAsByteReference = it.next(); + chunks.add(new Chunk(chunkAsByteReference.matchedText(), chunkAsByteReference.bytesReference())); } } return chunks; } - /** - * Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}. - */ - private static BytesReference toBytesReference(XContent xContent, double[] value) { - try { - XContentBuilder b = XContentBuilder.builder(xContent); - b.startArray(); - for (double v : value) { - b.value(v); - } - b.endArray(); - return BytesReference.bytes(b); - } catch (IOException exc) { - throw new RuntimeException(exc); - } - } - - /** - * Serialises the {@link WeightedToken} list, according to the provided {@link XContent}, - * into a {@link BytesReference}. 
- */ - private static BytesReference toBytesReference(XContent xContent, List tokens) { - try { - XContentBuilder b = XContentBuilder.builder(xContent); - b.startObject(); - for (var weightedToken : tokens) { - weightedToken.toXContent(b, ToXContent.EMPTY_PARAMS); - } - b.endObject(); - return BytesReference.bytes(b); - } catch (IOException exc) { - throw new RuntimeException(exc); - } - } - } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 355f52e5e3cd..c2a4907125a3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -49,7 +49,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import java.io.IOException; @@ -400,12 +400,12 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie yield boolQuery; } case TEXT_EMBEDDING -> { - if (inferenceResults instanceof TextEmbeddingResults == false) { + if (inferenceResults instanceof MlTextEmbeddingResults == false) { throw new IllegalArgumentException( "Field [" + name() + "] expected query inference results to be of type [" - + TextEmbeddingResults.NAME + + MlTextEmbeddingResults.NAME + "]," + " got [" + inferenceResults.getWriteableName() @@ -413,7 +413,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements 
InferenceFie ); } - TextEmbeddingResults textEmbeddingResults = (TextEmbeddingResults) inferenceResults; + MlTextEmbeddingResults textEmbeddingResults = (MlTextEmbeddingResults) inferenceResults; float[] inference = textEmbeddingResults.getInferenceAsFloat(); if (inference.length != modelSettings.dimensions()) { throw new IllegalArgumentException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 4d90920f45ba..8f1e28d0d8ee 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -33,7 +33,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; @@ -256,19 +256,20 @@ public class SemanticQueryBuilder extends AbstractQueryBuilder inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if 
(inferenceResults instanceof ErrorInferenceResults error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + throw createInvalidChunkedResultException(InferenceTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 26bf6f1648d9..bd52bdb16514 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -24,9 +24,9 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -241,12 +241,12 @@ public class AzureOpenAiService extends SenderService { List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { 
- return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + throw createInvalidChunkedResultException(InferenceTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 67a45ba8b129..0052607ce325 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -28,10 +28,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import 
org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; @@ -267,7 +268,7 @@ public class ElasticsearchInternalService implements InferenceService { InferModelAction.INSTANCE, request, listener.delegateFailureAndWrap( - (l, inferenceResult) -> l.onResponse(TextEmbeddingResults.of(inferenceResult.getInferenceResults())) + (l, inferenceResult) -> l.onResponse(InferenceTextEmbeddingFloatResults.of(inferenceResult.getInferenceResults())) ) ); } @@ -370,12 +371,12 @@ public class ElasticsearchInternalService implements InferenceService { } private static ChunkedInferenceServiceResults translateToChunkedResult(InferenceResults inferenceResult) { - if (inferenceResult instanceof org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults mlChunkedResult) { - return ChunkedTextEmbeddingResults.ofMlResult(mlChunkedResult); + if (inferenceResult instanceof MlChunkedTextEmbeddingFloatResults mlChunkedResult) { + return InferenceChunkedTextEmbeddingFloatResults.ofMlResults(mlChunkedResult); } else if (inferenceResult instanceof ErrorInferenceResults error) { return new ErrorChunkedInferenceResults(error.getException()); } else { - throw createInvalidChunkedResultException(inferenceResult.getWriteableName()); + throw 
createInvalidChunkedResultException(MlChunkedTextEmbeddingFloatResults.NAME, inferenceResult.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index a19e377d59c1..6e7c177861cd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -29,8 +29,8 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; @@ -40,8 +40,8 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -416,15 +416,15 @@ public class ElserInternalService implements InferenceService { var translated = new ArrayList(); for (var inferenceResult : inferenceResults) { - if (inferenceResult instanceof ChunkedTextExpansionResults mlChunkedResult) { - translated.add(ChunkedSparseEmbeddingResults.ofMlResult(mlChunkedResult)); + if (inferenceResult instanceof InferenceChunkedTextExpansionResults mlChunkedResult) { + translated.add(InferenceChunkedSparseEmbeddingResults.ofMlResult(mlChunkedResult)); } else if (inferenceResult instanceof ErrorInferenceResults error) { translated.add(new ErrorChunkedInferenceResults(error.getException())); } else { throw new ElasticsearchStatusException( "Expected a chunked inference [{}] received [{}]", RestStatus.INTERNAL_SERVER_ERROR, - ChunkedTextExpansionResults.NAME, + InferenceChunkedTextExpansionResults.NAME, inferenceResult.getWriteableName() ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index ef034816f762..78307ab280cb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.huggingface; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; @@ 
-18,11 +19,11 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -175,14 +176,19 @@ public abstract class HuggingFaceBaseService extends SenderService { List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof SparseEmbeddingResults sparseEmbeddingResults) { - return ChunkedSparseEmbeddingResults.of(inputs, sparseEmbeddingResults); + return InferenceChunkedSparseEmbeddingResults.listOf(inputs, sparseEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults error) { return List.of(new 
ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + String expectedClasses = Strings.format( + "One of [%s,%s]", + InferenceTextEmbeddingFloatResults.class.getSimpleName(), + SparseEmbeddingResults.class.getSimpleName() + ); + throw createInvalidChunkedResultException(expectedClasses, inferenceResults.getWriteableName()); } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index 7ddb71d001e8..ee0cec1d7584 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -22,9 +22,9 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.mistral.MistralActionCreator; @@ -117,12 +117,12 @@ public class MistralService extends SenderService { List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults 
textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + throw createInvalidChunkedResultException(InferenceChunkedTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java index 428dbca89243..cd14d9e54507 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; -import org.elasticsearch.xpack.inference.results.LegacyTextEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.results.LegacyMlTextEmbeddingResultsTests; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; @@ -44,7 +44,7 @@ public class InferenceActionResponseTests extends AbstractBWCWireSerializationTe protected InferenceAction.Response createTestInstance() { var result = switch 
(randomIntBetween(0, 2)) { case 0 -> TextEmbeddingResultsTests.createRandomResults(); - case 1 -> LegacyTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); + case 1 -> LegacyMlTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); default -> SparseEmbeddingResultsTests.createRandomResults(); }; @@ -73,7 +73,7 @@ public class InferenceActionResponseTests extends AbstractBWCWireSerializationTe } public void testSerializesOpenAiAddedVersion_UsingLegacyTextEmbeddingResult() throws IOException { - var embeddingResults = LegacyTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); + var embeddingResults = LegacyMlTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); var instance = new InferenceAction.Response(embeddingResults); var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), V_8_12_0); assertOnBWCObject(copy, instance, V_8_12_0); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index c87faa2b52cc..d501c9a65d80 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -33,14 +33,15 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import 
org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.inference.model.TestModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.junit.After; import org.junit.Before; import org.mockito.stubbing.Answer; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; @@ -323,7 +324,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { private static BulkItemRequest[] randomBulkItemRequest( Map modelMap, Map fieldInferenceMap - ) { + ) throws IOException { Map docMap = new LinkedHashMap<>(); Map expectedDocMap = new LinkedHashMap<>(); XContentType requestContentType = randomFrom(XContentType.values()); @@ -376,7 +377,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { } ChunkedInferenceServiceResults getResults(String text) { - return resultMap.getOrDefault(text, new ChunkedSparseEmbeddingResults(List.of())); + return resultMap.getOrDefault(text, new InferenceChunkedSparseEmbeddingResults(List.of())); } void putResult(String text, ChunkedInferenceServiceResults result) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java index 164f975cc464..66079a00ee3b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java @@ -10,9 +10,9 @@ package org.elasticsearch.xpack.inference.common; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.test.ESTestCase; -import 
org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import java.util.ArrayList; import java.util.List; @@ -177,34 +177,34 @@ public class EmbeddingRequestChunkerTests extends ESTestCase { // 4 inputs in 2 batches { - var embeddings = new ArrayList(); + var embeddings = new ArrayList(); for (int i = 0; i < batchSize; i++) { - embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() })); + embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() })); } - batches.get(0).listener().onResponse(new TextEmbeddingResults(embeddings)); + batches.get(0).listener().onResponse(new InferenceTextEmbeddingFloatResults(embeddings)); } { - var embeddings = new ArrayList(); + var embeddings = new ArrayList(); for (int i = 0; i < 4; i++) { // 4 requests in the 2nd batch - embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() })); + embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() })); } - batches.get(1).listener().onResponse(new TextEmbeddingResults(embeddings)); + batches.get(1).listener().onResponse(new InferenceTextEmbeddingFloatResults(embeddings)); } assertNotNull(finalListener.results); assertThat(finalListener.results, hasSize(4)); { var chunkedResult = finalListener.results.get(0); - assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = 
(InferenceChunkedTextEmbeddingFloatResults) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(1)); assertEquals("1st small", chunkedFloatResult.chunks().get(0).matchedText()); } { // this is the large input split in multiple chunks var chunkedResult = finalListener.results.get(1); - assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(6)); assertThat(chunkedFloatResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); assertThat(chunkedFloatResult.chunks().get(1).matchedText(), startsWith(" passage_input20 ")); @@ -215,15 +215,15 @@ public class EmbeddingRequestChunkerTests extends ESTestCase { } { var chunkedResult = finalListener.results.get(2); - assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(1)); assertEquals("2nd small", chunkedFloatResult.chunks().get(0).matchedText()); } { var chunkedResult = finalListener.results.get(3); - assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(1)); assertEquals("3rd small", chunkedFloatResult.chunks().get(0).matchedText()); } @@ 
-251,10 +251,10 @@ public class EmbeddingRequestChunkerTests extends ESTestCase { var batches = new EmbeddingRequestChunker(inputs, 10, 100, 0).batchRequestsWithListeners(listener); assertThat(batches, hasSize(1)); - var embeddings = new ArrayList(); - embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() })); - embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() })); - batches.get(0).listener().onResponse(new TextEmbeddingResults(embeddings)); + var embeddings = new ArrayList(); + embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() })); + embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() })); + batches.get(0).listener().onResponse(new InferenceTextEmbeddingFloatResults(embeddings)); assertEquals("Error the number of embedding responses [2] does not equal the number of requests [3]", failureMessage.get()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java index 41768a6814f3..c2f93554c6b2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.xpack.inference.external.response.azureaistudio; import org.apache.http.HttpResponse; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import 
org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -50,11 +50,14 @@ public class AzureAiStudioEmbeddingsResponseEntityTests extends ESTestCase { var entity = new AzureAiStudioEmbeddingsResponseEntity(); - var parsedResults = (TextEmbeddingResults) entity.apply( + var parsedResults = (InferenceTextEmbeddingFloatResults) entity.apply( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(TextEmbeddingResults.Embedding.of(List.of(0.014539449F, -0.015288644F))))); + assertThat( + parsedResults.embeddings(), + is(List.of(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(0.014539449F, -0.015288644F)))) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java index d809635aa4f3..691064b947e2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java @@ -10,8 +10,8 @@ package org.elasticsearch.xpack.inference.external.response.cohere; import org.apache.http.HttpResponse; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import 
org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; import org.hamcrest.MatcherAssert; @@ -55,10 +55,10 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - MatcherAssert.assertThat(parsedResults, instanceOf(TextEmbeddingResults.class)); + MatcherAssert.assertThat(parsedResults, instanceOf(InferenceTextEmbeddingFloatResults.class)); MatcherAssert.assertThat( - ((TextEmbeddingResults) parsedResults).embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }))) + ((InferenceTextEmbeddingFloatResults) parsedResults).embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }))) ); } @@ -89,14 +89,14 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }))) ); } @@ -133,14 +133,14 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = 
(InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }))) ); } @@ -177,14 +177,14 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingByteResults parsedResults = (TextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingByteResults parsedResults = (InferenceTextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) -1, (byte) 0 }))) + is(List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 }))) ); } @@ -215,14 +215,14 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingByteResults parsedResults = (TextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingByteResults parsedResults = (InferenceTextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) -1, (byte) 0 }))) + is(List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 }))) ); } @@ -255,7 +255,7 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase 
{ } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -264,8 +264,8 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }), - new TextEmbeddingResults.Embedding(new float[] { -0.123F, 0.123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.123F, 0.123F }) ) ) ); @@ -302,7 +302,7 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -311,8 +311,8 @@ public class CohereEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }), - new TextEmbeddingResults.Embedding(new float[] { -0.123F, 0.123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.123F, 0.123F }) ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java index 5d5096d0b1b5..170395e8af91 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.xpack.inference.external.response.googleaistudio; import org.apache.http.HttpResponse; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -36,12 +36,15 @@ public class GoogleAiStudioEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(TextEmbeddingResults.Embedding.of(List.of(-0.00606332F, 0.058092743F))))); + assertThat( + parsedResults.embeddings(), + is(List.of(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(-0.00606332F, 0.058092743F)))) + ); } public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { @@ -64,7 +67,7 @@ public class GoogleAiStudioEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( + 
InferenceTextEmbeddingFloatResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -73,8 +76,8 @@ public class GoogleAiStudioEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - TextEmbeddingResults.Embedding.of(List.of(-0.00606332F, 0.058092743F)), - TextEmbeddingResults.Embedding.of(List.of(0.030681048F, 0.01714732F)) + InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(-0.00606332F, 0.058092743F)), + InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(0.030681048F, 0.01714732F)) ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java index 238dab592913..6f06a32f19a6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.inference.external.response.huggingface; import org.apache.http.HttpResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -32,14 +32,14 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { ] """; - 
TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }))) ); } @@ -55,14 +55,14 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }))) ); } @@ -80,7 +80,7 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -89,8 +89,8 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }), - new TextEmbeddingResults.Embedding(new float[] { 0.0123F, -0.0123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new 
float[] { 0.014539449F, -0.015288644F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F }) ) ) ); @@ -112,7 +112,7 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -121,8 +121,8 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }), - new TextEmbeddingResults.Embedding(new float[] { 0.0123F, -0.0123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F }) ) ) ); @@ -255,12 +255,15 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 1.0F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 1.0F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ObjectFormat() throws IOException { @@ -274,12 +277,15 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = 
HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 1.0F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 1.0F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ArrayFormat() throws IOException { @@ -291,12 +297,15 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 4.0294965E10F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 4.0294965E10F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ObjectFormat() throws IOException { @@ -310,12 +319,15 @@ public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 4.0294965E10F })))); + assertThat( + parsedResults.embeddings(), + 
is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 4.0294965E10F }))) + ); } public void testFromResponse_FailsWhenEmbeddingValueIsAnObject_ObjectFormat() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 6c38092f509a..8f5bd95126fb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.inference.external.response.openai; import org.apache.http.HttpResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -44,14 +44,14 @@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }))) ); } @@ -85,7 +85,7 
@@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -94,8 +94,8 @@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }), - new TextEmbeddingResults.Embedding(new float[] { 0.0123F, -0.0123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F }) ) ) ); @@ -259,12 +259,15 @@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 1.0F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 1.0F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsLong() throws IOException { @@ -288,12 +291,15 @@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 4.0294965E10F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 4.0294965E10F }))) + ); } public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { @@ -373,7 +379,7 @@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { } }"""; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), response.getBytes(StandardCharsets.UTF_8)) ); @@ -382,9 +388,9 @@ public class OpenAiEmbeddingsResponseEntityTests extends ESTestCase { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { -0.9F, 0.5F, 0.3F }), - new TextEmbeddingResults.Embedding(new float[] { 0.1F, 0.5F }), - new TextEmbeddingResults.Embedding(new float[] { 0.5F, 0.5F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.9F, 0.5F, 0.3F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.1F, 0.5F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.5F, 0.5F }) ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index efc81606094b..51fa39b595a8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -18,10 +18,11 @@ import org.elasticsearch.test.AbstractXContentTestCase; import 
org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; +import org.elasticsearch.xpack.core.utils.FloatConversionUtils; import org.elasticsearch.xpack.inference.model.TestModel; import java.io.IOException; @@ -64,7 +65,7 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase { List expectedTokens = parseWeightedTokens( @@ -85,7 +86,12 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase rawValues = randomList(1, 5, () -> randomAlphaOfLengthBetween(10, 20)); - return randomSemanticText(NAME, TestModel.createRandomInstance(), rawValues, randomFrom(XContentType.values())); + try { // try catch required for override + return randomSemanticText(NAME, TestModel.createRandomInstance(), rawValues, randomFrom(XContentType.values())); + } catch (IOException e) { + fail("Failed to create random SemanticTextField instance"); + } + return null; } @Override @@ -132,33 +138,37 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase inputs) { - List chunks = new ArrayList<>(); + public static InferenceChunkedTextEmbeddingFloatResults randomInferenceChunkedTextEmbeddingFloatResults( + Model model, + List inputs + ) throws IOException { + List chunks = new ArrayList<>(); for (String input : inputs) { - double[] values = new 
double[model.getServiceSettings().dimensions()]; + float[] values = new float[model.getServiceSettings().dimensions()]; for (int j = 0; j < values.length; j++) { - values[j] = randomDouble(); + values[j] = (float) randomDouble(); } - chunks.add(new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk(input, values)); + chunks.add(new InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk(input, values)); } - return new ChunkedTextEmbeddingResults(chunks); + return new InferenceChunkedTextEmbeddingFloatResults(chunks); } - public static ChunkedSparseEmbeddingResults randomSparseEmbeddings(List inputs) { - List chunks = new ArrayList<>(); + public static InferenceChunkedSparseEmbeddingResults randomSparseEmbeddings(List inputs) { + List chunks = new ArrayList<>(); for (String input : inputs) { var tokens = new ArrayList(); for (var token : input.split("\\s+")) { tokens.add(new WeightedToken(token, randomFloat())); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(input, tokens)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(input, tokens)); } - return new ChunkedSparseEmbeddingResults(chunks); + return new InferenceChunkedSparseEmbeddingResults(chunks); } - public static SemanticTextField randomSemanticText(String fieldName, Model model, List inputs, XContentType contentType) { + public static SemanticTextField randomSemanticText(String fieldName, Model model, List inputs, XContentType contentType) + throws IOException { ChunkedInferenceServiceResults results = switch (model.getTaskType()) { - case TEXT_EMBEDDING -> randomTextEmbeddings(model, inputs); + case TEXT_EMBEDDING -> randomInferenceChunkedTextEmbeddingFloatResults(model, inputs); case SPARSE_EMBEDDING -> randomSparseEmbeddings(inputs); default -> throw new AssertionError("invalid task type: " + model.getTaskType().name()); }; @@ -174,19 +184,18 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase { - 
List chunks = new ArrayList<>(); + List chunks = new ArrayList<>(); for (var chunk : field.inference().chunks()) { var tokens = parseWeightedTokens(chunk.rawEmbeddings(), field.contentType()); - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(chunk.text(), tokens)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(chunk.text(), tokens)); } - return new ChunkedSparseEmbeddingResults(chunks); + return new InferenceChunkedSparseEmbeddingResults(chunks); } case TEXT_EMBEDDING -> { - List chunks = - new ArrayList<>(); + List chunks = new ArrayList<>(); for (var chunk : field.inference().chunks()) { double[] values = parseDenseVector( chunk.rawEmbeddings(), @@ -194,13 +203,13 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase throw new AssertionError("Invalid task_type: " + field.inference().modelSettings().taskType().name()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index bc9408bc59dd..07713952e36c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -44,9 +44,10 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import 
org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import org.elasticsearch.xpack.inference.InferencePlugin; @@ -258,11 +259,9 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase { - - public static ChunkedTextEmbeddingFloatResults createRandomResults() { - int numChunks = randomIntBetween(1, 5); - var chunks = new ArrayList(numChunks); - - for (int i = 0; i < numChunks; i++) { - chunks.add(createRandomChunk()); - } - - return new ChunkedTextEmbeddingFloatResults(chunks); - } - - private static ChunkedTextEmbeddingFloatResults.EmbeddingChunk createRandomChunk() { - int columns = randomIntBetween(1, 10); - float[] floats = new float[columns]; - for (int i = 0; i < columns; i++) { - floats[i] = randomFloat(); - } - - return new ChunkedTextEmbeddingFloatResults.EmbeddingChunk(randomAlphaOfLength(6), floats); - } - - @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingFloatResults::new; - } - - @Override - protected ChunkedTextEmbeddingFloatResults createTestInstance() { - return createRandomResults(); - } - - @Override - protected ChunkedTextEmbeddingFloatResults mutateInstance(ChunkedTextEmbeddingFloatResults instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java deleted file mode 100644 index 1fc0282b5d96..000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.results; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -public class ChunkedTextEmbeddingResultsTests extends AbstractWireSerializingTestCase { - - public static ChunkedTextEmbeddingResults createRandomResults() { - var chunks = new ArrayList(); - int columns = randomIntBetween(5, 10); - int numChunks = randomIntBetween(1, 5); - - for (int i = 0; i < numChunks; i++) { - double[] arr = new double[columns]; - for (int j = 0; j < columns; j++) { - arr[j] = randomDouble(); - } - chunks.add( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk( - randomAlphaOfLength(6), - arr - ) - ); - } - - return new ChunkedTextEmbeddingResults(chunks); - } - - /** - * Similar to {@link ChunkedTextEmbeddingResults#asMap()} but it converts the embeddings double array into a list of doubles to - * make testing equality easier. 
- */ - public static Map asMapWithListsInsteadOfArrays(ChunkedTextEmbeddingResults result) { - return Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - result.getChunks() - .stream() - .map(org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests::asMapWithListsInsteadOfArrays) - .collect(Collectors.toList()) - ); - } - - public void testToXContent_CreatesTheRightJsonForASingleChunk() { - var entity = new ChunkedTextEmbeddingResults( - List.of( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk( - "text", - new double[] { 0.1, 0.2 } - ) - ) - ); - - assertThat( - asMapWithListsInsteadOfArrays(entity), - is( - Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - List.of(Map.of(ChunkedNlpInferenceResults.TEXT, "text", ChunkedNlpInferenceResults.INFERENCE, List.of(0.1, 0.2))) - ) - ) - ); - String xContentResult = Strings.toString(entity, true, true); - assertThat(xContentResult, is(""" - { - "text_embedding_chunk" : [ - { - "text" : "text", - "inference" : [ - 0.1, - 0.2 - ] - } - ] - }""")); - } - - public void testToXContent_CreatesTheRightJsonForASingleChunk_FromTextEmbeddingResults() { - var entity = ChunkedTextEmbeddingResults.of( - List.of("text"), - new TextEmbeddingResults(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1f, 0.2f }))) - ); - - assertThat(entity.size(), is(1)); - - var firstEntry = entity.get(0); - assertThat(firstEntry, instanceOf(ChunkedTextEmbeddingResults.class)); - assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) firstEntry), - is( - Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "text", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.1f, (double) 0.2f) - ) - ) - ) - ) - ); - String xContentResult = Strings.toString(firstEntry, true, true); - assertThat(xContentResult, is(Strings.format(""" - { - "text_embedding_chunk" : [ - { - "text" : "text", - 
"inference" : [ - %s, - %s - ] - } - ] - }""", (double) 0.1f, (double) 0.2f))); - } - - public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { - var exception = expectThrows( - IllegalArgumentException.class, - () -> ChunkedTextEmbeddingResults.of( - List.of("text", "text2"), - new TextEmbeddingResults(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1f, 0.2f }))) - ) - ); - - assertThat(exception.getMessage(), is("The number of inputs [2] does not match the embeddings [1]")); - } - - @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingResults::new; - } - - @Override - protected ChunkedTextEmbeddingResults createTestInstance() { - return createRandomResults(); - } - - @Override - protected ChunkedTextEmbeddingResults mutateInstance(ChunkedTextEmbeddingResults instance) throws IOException { - return randomValueOtherThan(instance, ChunkedTextEmbeddingResultsTests::createRandomResults); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedSparseEmbeddingResultsTests.java similarity index 68% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedSparseEmbeddingResultsTests.java index 073a662c1e8f..9a2afdade296 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedSparseEmbeddingResultsTests.java @@ -10,10 +10,10 @@ package org.elasticsearch.xpack.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.io.IOException; @@ -23,10 +23,10 @@ import java.util.Map; import static org.hamcrest.Matchers.is; -public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingTestCase { +public class InferenceChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingTestCase { - public static ChunkedSparseEmbeddingResults createRandomResults() { - var chunks = new ArrayList(); + public static InferenceChunkedSparseEmbeddingResults createRandomResults() { + var chunks = new ArrayList(); int numChunks = randomIntBetween(1, 5); for (int i = 0; i < numChunks; i++) { @@ -35,22 +35,22 @@ public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingT for (int j = 0; j < numTokens; j++) { tokenWeights.add(new WeightedToken(Integer.toString(j), (float) randomDoubleBetween(0.0, 5.0, false))); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); } - return new ChunkedSparseEmbeddingResults(chunks); + return new InferenceChunkedSparseEmbeddingResults(chunks); } public void testToXContent_CreatesTheRightJsonForASingleChunk() { - var entity = new ChunkedSparseEmbeddingResults( - List.of(new ChunkedTextExpansionResults.ChunkedResult("text", List.of(new 
WeightedToken("token", 0.1f)))) + var entity = new InferenceChunkedSparseEmbeddingResults( + List.of(new InferenceChunkedTextExpansionResults.ChunkedResult("text", List.of(new WeightedToken("token", 0.1f)))) ); assertThat( entity.asMap(), is( Map.of( - ChunkedSparseEmbeddingResults.FIELD_NAME, + InferenceChunkedSparseEmbeddingResults.FIELD_NAME, List.of(Map.of(ChunkedNlpInferenceResults.TEXT, "text", ChunkedNlpInferenceResults.INFERENCE, Map.of("token", 0.1f))) ) ) @@ -71,7 +71,7 @@ public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingT } public void testToXContent_CreatesTheRightJsonForASingleChunk_FromSparseEmbeddingResults() { - var entity = ChunkedSparseEmbeddingResults.of( + var entity = InferenceChunkedSparseEmbeddingResults.listOf( List.of("text"), new SparseEmbeddingResults(List.of(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken("token", 0.1f)), false))) ); @@ -84,7 +84,7 @@ public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingT firstEntry.asMap(), is( Map.of( - ChunkedSparseEmbeddingResults.FIELD_NAME, + InferenceChunkedSparseEmbeddingResults.FIELD_NAME, List.of(Map.of(ChunkedNlpInferenceResults.TEXT, "text", ChunkedNlpInferenceResults.INFERENCE, Map.of("token", 0.1f))) ) ) @@ -107,7 +107,7 @@ public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingT public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { var exception = expectThrows( IllegalArgumentException.class, - () -> ChunkedSparseEmbeddingResults.of( + () -> InferenceChunkedSparseEmbeddingResults.listOf( List.of("text", "text2"), new SparseEmbeddingResults(List.of(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken("token", 0.1f)), false))) ) @@ -117,17 +117,17 @@ public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingT } @Override - protected Writeable.Reader instanceReader() { - return ChunkedSparseEmbeddingResults::new; + protected Writeable.Reader 
instanceReader() { + return InferenceChunkedSparseEmbeddingResults::new; } @Override - protected ChunkedSparseEmbeddingResults createTestInstance() { + protected InferenceChunkedSparseEmbeddingResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedSparseEmbeddingResults mutateInstance(ChunkedSparseEmbeddingResults instance) throws IOException { - return randomValueOtherThan(instance, ChunkedSparseEmbeddingResultsTests::createRandomResults); + protected InferenceChunkedSparseEmbeddingResults mutateInstance(InferenceChunkedSparseEmbeddingResults instance) throws IOException { + return randomValueOtherThan(instance, InferenceChunkedSparseEmbeddingResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedTextEmbeddingByteResultsTests.java similarity index 52% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedTextEmbeddingByteResultsTests.java index 6d6fbe956280..c1215e8a3d71 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedTextEmbeddingByteResultsTests.java @@ -10,8 +10,8 @@ package org.elasticsearch.xpack.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; 
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import java.io.IOException; import java.util.ArrayList; @@ -20,32 +20,33 @@ import java.util.Map; import static org.hamcrest.Matchers.is; -public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { +public class InferenceChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase< + InferenceChunkedTextEmbeddingByteResults> { - public static ChunkedTextEmbeddingByteResults createRandomResults() { + public static InferenceChunkedTextEmbeddingByteResults createRandomResults() { int numChunks = randomIntBetween(1, 5); - var chunks = new ArrayList(numChunks); + var chunks = new ArrayList(numChunks); for (int i = 0; i < numChunks; i++) { chunks.add(createRandomChunk()); } - return new ChunkedTextEmbeddingByteResults(chunks, randomBoolean()); + return new InferenceChunkedTextEmbeddingByteResults(chunks, randomBoolean()); } - private static ChunkedTextEmbeddingByteResults.EmbeddingChunk createRandomChunk() { + private static InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk createRandomChunk() { int columns = randomIntBetween(1, 10); byte[] bytes = new byte[columns]; for (int i = 0; i < columns; i++) { bytes[i] = randomByte(); } - return new ChunkedTextEmbeddingByteResults.EmbeddingChunk(randomAlphaOfLength(6), bytes); + return new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk(randomAlphaOfLength(6), bytes); } public void testToXContent_CreatesTheRightJsonForASingleChunk() { - var entity = new ChunkedTextEmbeddingByteResults( - List.of(new ChunkedTextEmbeddingByteResults.EmbeddingChunk("text", new byte[] { (byte) 1 })), + var entity = new InferenceChunkedTextEmbeddingByteResults( + List.of(new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk("text", new byte[] { (byte) 1 })), 
false ); @@ -53,8 +54,8 @@ public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializin entity.asMap(), is( Map.of( - ChunkedTextEmbeddingByteResults.FIELD_NAME, - List.of(new ChunkedTextEmbeddingByteResults.EmbeddingChunk("text", new byte[] { (byte) 1 })) + InferenceChunkedTextEmbeddingByteResults.FIELD_NAME, + List.of(new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk("text", new byte[] { (byte) 1 })) ) ) ); @@ -73,9 +74,11 @@ public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializin } public void testToXContent_CreatesTheRightJsonForASingleChunk_ForTextEmbeddingByteResults() { - var entity = ChunkedTextEmbeddingByteResults.of( + var entity = InferenceChunkedTextEmbeddingByteResults.listOf( List.of("text"), - new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 1 }))) + new InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 1 })) + ) ); assertThat(entity.size(), is(1)); @@ -86,8 +89,8 @@ public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializin firstEntry.asMap(), is( Map.of( - ChunkedTextEmbeddingByteResults.FIELD_NAME, - List.of(new ChunkedTextEmbeddingByteResults.EmbeddingChunk("text", new byte[] { (byte) 1 })) + InferenceChunkedTextEmbeddingByteResults.FIELD_NAME, + List.of(new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk("text", new byte[] { (byte) 1 })) ) ) ); @@ -108,9 +111,11 @@ public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializin public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { var exception = expectThrows( IllegalArgumentException.class, - () -> ChunkedTextEmbeddingByteResults.of( + () -> InferenceChunkedTextEmbeddingByteResults.listOf( List.of("text", "text2"), - new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 1 }))) + new 
InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 1 })) + ) ) ); @@ -118,17 +123,18 @@ public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializin } @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingByteResults::new; + protected Writeable.Reader instanceReader() { + return InferenceChunkedTextEmbeddingByteResults::new; } @Override - protected ChunkedTextEmbeddingByteResults createTestInstance() { + protected InferenceChunkedTextEmbeddingByteResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedTextEmbeddingByteResults mutateInstance(ChunkedTextEmbeddingByteResults instance) throws IOException { - return randomValueOtherThan(instance, ChunkedTextEmbeddingByteResultsTests::createRandomResults); + protected InferenceChunkedTextEmbeddingByteResults mutateInstance(InferenceChunkedTextEmbeddingByteResults instance) + throws IOException { + return randomValueOtherThan(instance, InferenceChunkedTextEmbeddingByteResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java similarity index 51% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java index a15d6323d131..c6749e9822cf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java @@ -10,7 +10,8 @@ package 
org.elasticsearch.xpack.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -19,19 +20,19 @@ import java.util.Map; import static org.hamcrest.Matchers.is; -public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { - public static TextEmbeddingByteResults createRandomResults() { +public class InferenceTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { + public static InferenceTextEmbeddingByteResults createRandomResults() { int embeddings = randomIntBetween(1, 10); - List embeddingResults = new ArrayList<>(embeddings); + List embeddingResults = new ArrayList<>(embeddings); for (int i = 0; i < embeddings; i++) { embeddingResults.add(createRandomEmbedding()); } - return new TextEmbeddingByteResults(embeddingResults); + return new InferenceTextEmbeddingByteResults(embeddingResults); } - private static TextEmbeddingByteResults.Embedding createRandomEmbedding() { + private static InferenceTextEmbeddingByteResults.InferenceByteEmbedding createRandomEmbedding() { int columns = randomIntBetween(1, 10); byte[] bytes = new byte[columns]; @@ -39,11 +40,13 @@ public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCa bytes[i] = randomByte(); } - return new TextEmbeddingByteResults.Embedding(bytes); + return new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(bytes); } public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException { - var entity = new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23 
}))); + var entity = new InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23 })) + ); String xContentResult = Strings.toString(entity, true, true); assertThat(xContentResult, is(""" @@ -59,10 +62,10 @@ public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCa } public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException { - var entity = new TextEmbeddingByteResults( + var entity = new InferenceTextEmbeddingByteResults( List.of( - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23 }), - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 24 }) + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23 }), + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 24 }) ) ); @@ -85,10 +88,10 @@ public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCa } public void testTransformToCoordinationFormat() { - var results = new TextEmbeddingByteResults( + var results = new InferenceTextEmbeddingByteResults( List.of( - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23, (byte) 24 }), - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 25, (byte) 26 }) + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23, (byte) 24 }), + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 25, (byte) 26 }) ) ).transformToCoordinationFormat(); @@ -96,49 +99,43 @@ public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCa results, is( List.of( - new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - new double[] { 23F, 24F }, - false - ), - new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - new double[] { 25F, 26F }, - false - ) 
+ new MlTextEmbeddingResults(InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, new double[] { 23F, 24F }, false), + new MlTextEmbeddingResults(InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, new double[] { 25F, 26F }, false) ) ) ); } @Override - protected Writeable.Reader instanceReader() { - return TextEmbeddingByteResults::new; + protected Writeable.Reader instanceReader() { + return InferenceTextEmbeddingByteResults::new; } @Override - protected TextEmbeddingByteResults createTestInstance() { + protected InferenceTextEmbeddingByteResults createTestInstance() { return createRandomResults(); } @Override - protected TextEmbeddingByteResults mutateInstance(TextEmbeddingByteResults instance) throws IOException { + protected InferenceTextEmbeddingByteResults mutateInstance(InferenceTextEmbeddingByteResults instance) throws IOException { // if true we reduce the embeddings list by a random amount, if false we add an embedding to the list if (randomBoolean()) { // -1 to remove at least one item from the list int end = randomInt(instance.embeddings().size() - 1); - return new TextEmbeddingByteResults(instance.embeddings().subList(0, end)); + return new InferenceTextEmbeddingByteResults(instance.embeddings().subList(0, end)); } else { - List embeddings = new ArrayList<>(instance.embeddings()); + List embeddings = new ArrayList<>(instance.embeddings()); embeddings.add(createRandomEmbedding()); - return new TextEmbeddingByteResults(embeddings); + return new InferenceTextEmbeddingByteResults(embeddings); } } public static Map buildExpectationByte(List> embeddings) { return Map.of( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - embeddings.stream().map(embedding -> Map.of(TextEmbeddingByteResults.Embedding.EMBEDDING, embedding)).toList() + InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, + embeddings.stream() + .map(embedding -> Map.of(InferenceTextEmbeddingByteResults.InferenceByteEmbedding.EMBEDDING, embedding)) + .toList() ); } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyMlTextEmbeddingResultsTests.java similarity index 97% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyMlTextEmbeddingResultsTests.java index 1fa08231dd6b..f7ed3f34d364 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyMlTextEmbeddingResultsTests.java @@ -23,7 +23,7 @@ import java.util.List; import static org.hamcrest.Matchers.is; @SuppressWarnings("deprecation") -public class LegacyTextEmbeddingResultsTests extends AbstractWireSerializingTestCase { +public class LegacyMlTextEmbeddingResultsTests extends AbstractWireSerializingTestCase { public static LegacyTextEmbeddingResults createRandomResults() { int embeddings = randomIntBetween(1, 10); List embeddingResults = new ArrayList<>(embeddings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index 716568fdb564..2c405aaeaba3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -10,8 +10,9 @@ package org.elasticsearch.xpack.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; 
-import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -20,30 +21,32 @@ import java.util.Map; import static org.hamcrest.Matchers.is; -public class TextEmbeddingResultsTests extends AbstractWireSerializingTestCase { - public static TextEmbeddingResults createRandomResults() { +public class TextEmbeddingResultsTests extends AbstractWireSerializingTestCase { + public static InferenceTextEmbeddingFloatResults createRandomResults() { int embeddings = randomIntBetween(1, 10); - List embeddingResults = new ArrayList<>(embeddings); + List embeddingResults = new ArrayList<>(embeddings); for (int i = 0; i < embeddings; i++) { embeddingResults.add(createRandomEmbedding()); } - return new TextEmbeddingResults(embeddingResults); + return new InferenceTextEmbeddingFloatResults(embeddingResults); } - private static TextEmbeddingResults.Embedding createRandomEmbedding() { + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding createRandomEmbedding() { int columns = randomIntBetween(1, 10); float[] floats = new float[columns]; for (int i = 0; i < columns; i++) { floats[i] = randomFloat(); } - return new TextEmbeddingResults.Embedding(floats); + return new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(floats); } public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException { - var entity = new TextEmbeddingResults(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1F }))); + var entity = new InferenceTextEmbeddingFloatResults( + List.of(new 
InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.1F })) + ); String xContentResult = Strings.toString(entity, true, true); assertThat(xContentResult, is(""" @@ -59,8 +62,11 @@ public class TextEmbeddingResultsTests extends AbstractWireSerializingTestCase instanceReader() { - return TextEmbeddingResults::new; + protected Writeable.Reader instanceReader() { + return InferenceTextEmbeddingFloatResults::new; } @Override - protected TextEmbeddingResults createTestInstance() { + protected InferenceTextEmbeddingFloatResults createTestInstance() { return createRandomResults(); } @Override - protected TextEmbeddingResults mutateInstance(TextEmbeddingResults instance) throws IOException { + protected InferenceTextEmbeddingFloatResults mutateInstance(InferenceTextEmbeddingFloatResults instance) throws IOException { // if true we reduce the embeddings list by a random amount, if false we add an embedding to the list if (randomBoolean()) { // -1 to remove at least one item from the list int end = randomInt(instance.embeddings().size() - 1); - return new TextEmbeddingResults(instance.embeddings().subList(0, end)); + return new InferenceTextEmbeddingFloatResults(instance.embeddings().subList(0, end)); } else { - List embeddings = new ArrayList<>(instance.embeddings()); + List embeddings = new ArrayList<>(instance.embeddings()); embeddings.add(createRandomEmbedding()); - return new TextEmbeddingResults(embeddings); + return new InferenceTextEmbeddingFloatResults(embeddings); } } public static Map buildExpectationFloat(List embeddings) { - return Map.of(TextEmbeddingResults.TEXT_EMBEDDING, embeddings.stream().map(TextEmbeddingResults.Embedding::new).toList()); + return Map.of( + InferenceTextEmbeddingFloatResults.TEXT_EMBEDDING, + embeddings.stream().map(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding::new).toList() + ); } public static Map buildExpectationByte(List embeddings) { return Map.of( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - 
embeddings.stream().map(TextEmbeddingByteResults.Embedding::new).toList() + InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, + embeddings.stream().map(InferenceTextEmbeddingByteResults.InferenceByteEmbedding::new).toList() ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 44e3f34ffcc1..599df8d1cfb3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -19,9 +19,9 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.inference.results.TextEmbeddingByteResultsTests; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.inference.results.InferenceTextEmbeddingByteResultsTests; import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; import java.util.EnumSet; @@ -719,7 +719,7 @@ public class ServiceUtilsTests extends ESTestCase { doAnswer(invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[6]; - listener.onResponse(new TextEmbeddingResults(List.of())); + listener.onResponse(new InferenceTextEmbeddingFloatResults(List.of())); return Void.TYPE; }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); @@ -742,7 +742,7 @@ public class ServiceUtilsTests 
extends ESTestCase { doAnswer(invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[6]; - listener.onResponse(new TextEmbeddingByteResults(List.of())); + listener.onResponse(new InferenceTextEmbeddingByteResults(List.of())); return Void.TYPE; }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); @@ -786,7 +786,7 @@ public class ServiceUtilsTests extends ESTestCase { var model = mock(Model.class); when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); - var textEmbedding = TextEmbeddingByteResultsTests.createRandomResults(); + var textEmbedding = InferenceTextEmbeddingByteResultsTests.createRandomResults(); doAnswer(invocation -> { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index cacbba82446f..18d7b6e072fe 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -62,13 +62,13 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; +import static org.elasticsearch.xpack.inference.external.request.azureaistudio.AzureAiStudioRequestFields.API_KEY_HEADER; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureaistudio.AzureAiStudioConstants.API_KEY_FIELD; import static org.hamcrest.CoreMatchers.is; @@ -856,22 +856,22 @@ public class AzureAiStudioServiceTests extends ESTestCase { String responseJson = """ { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - 0.0123, - -0.0123 - ] - } - ], - "model": "text-embedding-ada-002-v2", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } } """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); @@ -901,20 +901,15 @@ public class 
AzureAiStudioServiceTests extends ESTestCase { ); var result = listener.actionGet(TIMEOUT).get(0); - assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(result, CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), + asMapWithListsInsteadOfArrays((InferenceChunkedTextEmbeddingFloatResults) result), Matchers.is( Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.0123f, (double) -0.0123f) - ) + Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, List.of(0.0123f, -0.0123f)) ) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index bb3407056d57..e59664d0e012 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -55,13 +55,13 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettingsTests.getAzureOpenAiSecretSettingsMap; @@ -1073,22 +1073,22 @@ public class AzureOpenAiServiceTests extends ESTestCase { String responseJson = """ { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - 0.0123, - -0.0123 - ] - } - ], - "model": "text-embedding-ada-002-v2", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } } """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); @@ -1107,20 +1107,15 @@ public class AzureOpenAiServiceTests extends ESTestCase 
{ ); var result = listener.actionGet(TIMEOUT).get(0); - assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(result, CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), + asMapWithListsInsteadOfArrays((InferenceChunkedTextEmbeddingFloatResults) result), Matchers.is( Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.0123f, (double) -0.0123f) - ) + Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, List.of(0.0123f, -0.0123f)) ) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 902d96be2973..20eb183c1790 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -1221,15 +1221,15 @@ 
public class CohereServiceTests extends ESTestCase { var results = listener.actionGet(TIMEOUT); assertThat(results, hasSize(2)); { - assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("foo", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.123f, -0.123f }, floatResult.chunks().get(0).embedding())); } { - assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("bar", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.223f, -0.223f }, floatResult.chunks().get(0).embedding())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index b06f8b0027ca..8f8c73eaab79 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -27,17 +27,19 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ParseField; import 
org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResultsTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; +import org.elasticsearch.xpack.core.utils.FloatConversionUtils; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import org.junit.After; @@ -468,8 +470,8 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { @SuppressWarnings("unchecked") public void testChunkInfer() { var mlTrainedModelResults = new ArrayList(); - mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults()); - mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults()); + mlTrainedModelResults.add(MlChunkedTextEmbeddingFloatResultsTests.createRandomResults()); + 
mlTrainedModelResults.add(MlChunkedTextEmbeddingFloatResultsTests.createRandomResults()); mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true); @@ -493,18 +495,43 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { assertThat(chunkedResponse, hasSize(3)); - assertThat(chunkedResponse.get(0), instanceOf(ChunkedTextEmbeddingResults.class)); - var result1 = (ChunkedTextEmbeddingResults) chunkedResponse.get(0); + assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var result1 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(0); assertEquals( - ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults) mlTrainedModelResults.get(0)).getChunks(), - result1.getChunks() + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().size(), + result1.getChunks().size() ); - assertThat(chunkedResponse.get(1), instanceOf(ChunkedTextEmbeddingResults.class)); - var result2 = (ChunkedTextEmbeddingResults) chunkedResponse.get(1); assertEquals( - ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults) mlTrainedModelResults.get(1)).getChunks(), - result2.getChunks() + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().get(0).matchedText(), + result1.getChunks().get(0).matchedText() ); + assertArrayEquals( + (FloatConversionUtils.floatArrayOf( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().get(0).embedding() + )), + result1.getChunks().get(0).embedding(), + 0.0001f + ); + assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var result2 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(1); + // 
assertEquals(((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunks()); + + assertEquals( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().size(), + result2.getChunks().size() + ); + assertEquals( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().get(0).matchedText(), + result2.getChunks().get(0).matchedText() + ); + assertArrayEquals( + (FloatConversionUtils.floatArrayOf( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().get(0).embedding() + )), + result2.getChunks().get(0).embedding(), + 0.0001f + ); + var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); assertThat(result3.getException(), instanceOf(RuntimeException.class)); assertThat(result3.getException().getMessage(), containsString("boom")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index 2fdb208a56e1..27db8143f0c8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -23,12 +23,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import 
org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import java.util.ArrayList; @@ -335,8 +336,8 @@ public class ElserInternalServiceTests extends ESTestCase { @SuppressWarnings("unchecked") public void testChunkInfer() { var mlTrainedModelResults = new ArrayList(); - mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults()); - mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults()); + mlTrainedModelResults.add(InferenceChunkedTextExpansionResultsTests.createRandomResults()); + mlTrainedModelResults.add(InferenceChunkedTextExpansionResultsTests.createRandomResults()); mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true); @@ -361,18 +362,12 @@ public class ElserInternalServiceTests extends ESTestCase { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { assertThat(chunkedResponse, hasSize(3)); - assertThat(chunkedResponse.get(0), instanceOf(ChunkedSparseEmbeddingResults.class)); - var result1 = (ChunkedSparseEmbeddingResults) chunkedResponse.get(0); - assertEquals( - ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults) mlTrainedModelResults.get(0)).getChunks(), - result1.getChunkedResults() - ); - assertThat(chunkedResponse.get(1), instanceOf(ChunkedSparseEmbeddingResults.class)); - var result2 = (ChunkedSparseEmbeddingResults) chunkedResponse.get(1); - 
assertEquals( - ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(), - result2.getChunkedResults() - ); + assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var result1 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(0); + assertEquals(((InferenceChunkedTextExpansionResults) mlTrainedModelResults.get(0)).getChunks(), result1.getChunkedResults()); + assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var result2 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(1); + assertEquals(((InferenceChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunkedResults()); var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); assertThat(result3.getException(), instanceOf(RuntimeException.class)); assertThat(result3.getException().getMessage(), containsString("boom")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index 110276e63d07..1cdd7997b96c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import 
org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -710,8 +710,8 @@ public class GoogleAiStudioServiceTests extends ESTestCase { // first result { - assertThat(results.get(0), instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertEquals(input.get(0), floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.0123f, -0.0123f }, floatResult.chunks().get(0).embedding())); @@ -719,8 +719,8 @@ public class GoogleAiStudioServiceTests extends ESTestCase { // second result { - assertThat(results.get(1), instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertEquals(input.get(1), floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.0456f, -0.0456f }, floatResult.chunks().get(0).embedding())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index 914775bf9fa6..a36306e40f5c 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -31,8 +31,8 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -56,11 +56,11 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static 
org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettingsTests.getServiceSettingsMap; @@ -591,6 +591,7 @@ public class HuggingFaceServiceTests extends ESTestCase { } } + // TODO public void testChunkedInfer_CallsInfer_TextEmbedding_ConvertsFloatResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -598,12 +599,12 @@ public class HuggingFaceServiceTests extends ESTestCase { String responseJson = """ { - "embeddings": [ - [ - -0.0123, - 0.0123 - ] - ] + "embeddings": [ + [ + -0.0123, + 0.0123 + ] + ] { """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); @@ -621,20 +622,15 @@ public class HuggingFaceServiceTests extends ESTestCase { ); var result = listener.actionGet(TIMEOUT).get(0); - assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(result, CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); MatcherAssert.assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), + asMapWithListsInsteadOfArrays((InferenceChunkedTextEmbeddingFloatResults) result), Matchers.is( Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) -0.0123f, (double) 0.0123f) - ) + Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, List.of(-0.0123f, 0.0123f)) ) ) ) @@ -685,7 +681,7 @@ public class HuggingFaceServiceTests extends ESTestCase { result.asMap(), Matchers.is( Map.of( - ChunkedSparseEmbeddingResults.FIELD_NAME, + InferenceChunkedSparseEmbeddingResults.FIELD_NAME, List.of( Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, Map.of(".", 0.13315596f)) ) diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index 624b24e61134..508d5a97fe56 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -476,14 +476,14 @@ public class MistralServiceTests extends ESTestCase { assertThat(results, hasSize(2)); { - assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertTrue(Arrays.equals(new float[] { 0.123f, -0.123f }, floatResult.chunks().get(0).embedding())); } { - assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), 
CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertTrue(Arrays.equals(new float[] { 0.223f, -0.223f }, floatResult.chunks().get(0).embedding())); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 41995235565d..e0e1ee3e81ae 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -1262,15 +1262,15 @@ public class OpenAiServiceTests extends ESTestCase { var results = listener.actionGet(TIMEOUT); assertThat(results, hasSize(2)); { - assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); 
assertThat(floatResult.chunks(), hasSize(1)); assertEquals("foo", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.123f, -0.123f }, floatResult.chunks().get(0).embedding())); } { - assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("bar", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.223f, -0.223f }, floatResult.chunks().get(0).embedding())); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java index 22d9294783e7..6b6ab43e10c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.ml.inference.nlp; import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; @@ -60,22 +60,24 @@ public class TextEmbeddingProcessor extends 
NlpTask.Processor { boolean chunkResults ) { if (chunkResults) { - var embeddings = new ArrayList(); + var embeddings = new ArrayList(); for (int i = 0; i < pyTorchResult.getInferenceResult()[0].length; i++) { int startOffset = tokenization.getTokenization(i).tokens().get(0).get(0).startOffset(); int lastIndex = tokenization.getTokenization(i).tokens().get(0).size() - 1; int endOffset = tokenization.getTokenization(i).tokens().get(0).get(lastIndex).endOffset(); String matchedText = tokenization.getTokenization(i).input().get(0).substring(startOffset, endOffset); - embeddings.add(new ChunkedTextEmbeddingResults.EmbeddingChunk(matchedText, pyTorchResult.getInferenceResult()[0][i])); + embeddings.add( + new MlChunkedTextEmbeddingFloatResults.EmbeddingChunk(matchedText, pyTorchResult.getInferenceResult()[0][i]) + ); } - return new ChunkedTextEmbeddingResults( + return new MlChunkedTextEmbeddingFloatResults( Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD), embeddings, tokenization.anyTruncated() ); } else { - return new TextEmbeddingResults( + return new MlTextEmbeddingResults( Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD), pyTorchResult.getInferenceResult()[0][0], tokenization.anyTruncated() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java index 1b44614bf4a2..603abe6394b9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.inference.nlp; import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import 
org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; import org.elasticsearch.xpack.core.ml.search.WeightedToken; @@ -72,7 +72,7 @@ public class TextExpansionProcessor extends NlpTask.Processor { boolean chunkResults ) { if (chunkResults) { - var chunkedResults = new ArrayList(); + var chunkedResults = new ArrayList(); for (int i = 0; i < pyTorchResult.getInferenceResult()[0].length; i++) { int startOffset = tokenization.getTokenization(i).tokens().get(0).get(0).startOffset(); @@ -82,10 +82,10 @@ public class TextExpansionProcessor extends NlpTask.Processor { var weightedTokens = sparseVectorToTokenWeights(pyTorchResult.getInferenceResult()[0][i], tokenization, replacementVocab); weightedTokens.sort((t1, t2) -> Float.compare(t2.weight(), t1.weight())); - chunkedResults.add(new ChunkedTextExpansionResults.ChunkedResult(matchedText, weightedTokens)); + chunkedResults.add(new InferenceChunkedTextExpansionResults.ChunkedResult(matchedText, weightedTokens)); } - return new ChunkedTextExpansionResults( + return new InferenceChunkedTextExpansionResults( Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD), chunkedResults, tokenization.anyTruncated() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java index bd0916065ec5..6c7d9ef1b8a1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; 
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; @@ -110,14 +110,14 @@ public class TextEmbeddingQueryVectorBuilder implements QueryVectorBuilder { return; } - if (response.getInferenceResults().get(0) instanceof TextEmbeddingResults textEmbeddingResults) { + if (response.getInferenceResults().get(0) instanceof MlTextEmbeddingResults textEmbeddingResults) { listener.onResponse(textEmbeddingResults.getInferenceAsFloat()); } else if (response.getInferenceResults().get(0) instanceof WarningInferenceResults warning) { listener.onFailure(new IllegalStateException(warning.getWarning())); } else { throw new IllegalStateException( "expected a result of type [" - + TextEmbeddingResults.NAME + + MlTextEmbeddingResults.NAME + "] received [" + response.getInferenceResults().get(0).getWriteableName() + "]. 
Is [" diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java index ba93feee5c42..bba284478411 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.ml.inference.nlp; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizationResult; @@ -35,9 +35,9 @@ public class TextEmbeddingProcessorTests extends ESTestCase { var tokenization = tokenizer.tokenize(input, Tokenization.Truncate.NONE, 0, 0, null); var tokenizationResult = new BertTokenizationResult(TextExpansionProcessorTests.TEST_CASED_VOCAB, tokenization, 0); var inferenceResult = TextEmbeddingProcessor.processResult(tokenizationResult, pytorchResult, "foo", false); - assertThat(inferenceResult, instanceOf(TextEmbeddingResults.class)); + assertThat(inferenceResult, instanceOf(MlTextEmbeddingResults.class)); - var result = (TextEmbeddingResults) inferenceResult; + var result = (MlTextEmbeddingResults) inferenceResult; assertThat(result.getInference().length, greaterThan(0)); } } @@ -57,9 +57,9 @@ public class TextEmbeddingProcessorTests extends ESTestCase { var tokenization = 
tokenizer.tokenize(input, Tokenization.Truncate.NONE, 0, 0, null); var tokenizationResult = new BertTokenizationResult(TextExpansionProcessorTests.TEST_CASED_VOCAB, tokenization, 0); var inferenceResult = TextEmbeddingProcessor.processResult(tokenizationResult, pytorchResult, "foo", true); - assertThat(inferenceResult, instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(inferenceResult, instanceOf(MlChunkedTextEmbeddingFloatResults.class)); - var chunkedResult = (ChunkedTextEmbeddingResults) inferenceResult; + var chunkedResult = (MlChunkedTextEmbeddingFloatResults) inferenceResult; assertThat(chunkedResult.getChunks(), hasSize(2)); assertEquals("Elasticsearch darts champion little red", chunkedResult.getChunks().get(0).matchedText()); assertEquals("is fun car", chunkedResult.getChunks().get(1).matchedText()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java index 9f2d8bcbfe4a..add071b0a0de 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.inference.nlp; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfig; @@ -137,9 +137,9 @@ public class TextExpansionProcessorTests extends ESTestCase { var tokenization = tokenizer.tokenize(input, 
Tokenization.Truncate.NONE, 0, 0, null); var tokenizationResult = new BertTokenizationResult(TEST_CASED_VOCAB, tokenization, 0); var inferenceResult = TextExpansionProcessor.processResult(tokenizationResult, pytorchResult, Map.of(), "foo", true); - assertThat(inferenceResult, instanceOf(ChunkedTextExpansionResults.class)); + assertThat(inferenceResult, instanceOf(InferenceChunkedTextExpansionResults.class)); - var chunkedResult = (ChunkedTextExpansionResults) inferenceResult; + var chunkedResult = (InferenceChunkedTextExpansionResults) inferenceResult; assertThat(chunkedResult.getChunks(), hasSize(2)); assertEquals("Elasticsearch darts champion little red", chunkedResult.getChunks().get(0).matchedText()); assertEquals("is fun car", chunkedResult.getChunks().get(1).matchedText()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java index 8575c7e1f4bf..7721c4c23953 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.ml.MachineLearningTests; import java.io.IOException; @@ -51,7 +51,7 @@ public class TextEmbeddingQueryVectorBuilderTests extends AbstractQueryVectorBui embedding[i] = array[i]; } return new InferModelAction.Response( - List.of(new 
TextEmbeddingResults("foo", embedding, randomBoolean())), + List.of(new MlTextEmbeddingResults("foo", embedding, randomBoolean())), builder.getModelId(), true ); From 864877fc07692d234892f425eb9f10643dce3982 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Fri, 7 Jun 2024 17:12:18 +0200 Subject: [PATCH 07/58] [Connector API] Include sync_cursor in last sync stats (#109466) --- .../80_connector_update_last_sync_stats.yml | 27 +++++++ .../application/connector/Connector.java | 2 +- .../UpdateConnectorLastSyncStatsAction.java | 80 +++++++++++++++---- .../connector/ConnectorIndexServiceTests.java | 34 +++++++- ...StatsActionRequestBWCSerializingTests.java | 6 +- 5 files changed, 127 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml index cafdf0791de2..731e4a6a30f3 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml @@ -75,6 +75,33 @@ setup: - match: { last_indexed_document_count: 42 } +--- +"Update Connector Last Sync Stats - Supports sync_cursor updates": + - do: + connector.last_sync: + connector_id: test-connector + body: + last_deleted_document_count: 123 + + - match: { result: updated } + + - do: + connector.last_sync: + connector_id: test-connector + body: + sync_cursor: { pointer: 42 } + + - match: { result: updated } + + + - do: + connector.get: + connector_id: test-connector + + - match: { sync_cursor: { pointer: 42 } } + - match: { last_deleted_document_count: 123 } + + --- "Update Connector Last Sync Stats - Connector doesn't exist": - do: diff 
--git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 62f42d9a16ea..a9c488b024d4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -224,7 +224,7 @@ public class Connector implements NamedWriteable, ToXContentObject { public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); public static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); public static final ParseField STATUS_FIELD = new ParseField("status"); - static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); + public static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java index 4823803d9403..1628a493cbec 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import 
org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContentObject; @@ -22,6 +23,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorSyncInfo; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorUtils; @@ -45,16 +47,20 @@ public class UpdateConnectorLastSyncStatsAction { private final String connectorId; private final ConnectorSyncInfo syncInfo; + @Nullable + private final Object syncCursor; - public Request(String connectorId, ConnectorSyncInfo syncInfo) { + private Request(String connectorId, ConnectorSyncInfo syncInfo, Object syncCursor) { this.connectorId = connectorId; this.syncInfo = syncInfo; + this.syncCursor = syncCursor; } public Request(StreamInput in) throws IOException { super(in); this.connectorId = in.readString(); this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new); + this.syncCursor = in.readGenericValue(); } public String getConnectorId() { @@ -65,6 +71,10 @@ public class UpdateConnectorLastSyncStatsAction { return syncInfo; } + public Object getSyncCursor() { + return syncCursor; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -79,20 +89,22 @@ public class UpdateConnectorLastSyncStatsAction { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("connector_update_last_sync_stats_request", false, ((args, connectorId) -> { int i = 0; - return new UpdateConnectorLastSyncStatsAction.Request( - connectorId, - new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) - 
.setLastAccessControlSyncScheduledAt((Instant) args[i++]) - .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastDeletedDocumentCount((Long) args[i++]) - .setLastIncrementalSyncScheduledAt((Instant) args[i++]) - .setLastIndexedDocumentCount((Long) args[i++]) - .setLastSyncError((String) args[i++]) - .setLastSyncScheduledAt((Instant) args[i++]) - .setLastSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastSynced((Instant) args[i++]) - .build() - ); + return new Builder().setConnectorId(connectorId) + .setSyncInfo( + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + .setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) args[i++]) + .build() + ) + .setSyncCursor(args[i]) + .build(); })); static { @@ -142,6 +154,7 @@ public class UpdateConnectorLastSyncStatsAction { ConnectorSyncInfo.LAST_SYNCED_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); + PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> p.map(), null, Connector.SYNC_CURSOR_FIELD); } public static UpdateConnectorLastSyncStatsAction.Request fromXContentBytes( @@ -166,6 +179,9 @@ public class UpdateConnectorLastSyncStatsAction { builder.startObject(); { syncInfo.toXContent(builder, params); + if (syncCursor != null) { + builder.field(Connector.SYNC_CURSOR_FIELD.getPreferredName(), syncCursor); + } } builder.endObject(); return builder; @@ -176,6 +192,7 @@ public class UpdateConnectorLastSyncStatsAction { super.writeTo(out); out.writeString(connectorId); out.writeOptionalWriteable(syncInfo); + out.writeGenericValue(syncCursor); } @Override @@ -183,12 +200,41 
@@ public class UpdateConnectorLastSyncStatsAction { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(connectorId, request.connectorId) && Objects.equals(syncInfo, request.syncInfo); + return Objects.equals(connectorId, request.connectorId) + && Objects.equals(syncInfo, request.syncInfo) + && Objects.equals(syncCursor, request.syncCursor); } @Override public int hashCode() { - return Objects.hash(connectorId, syncInfo); + return Objects.hash(connectorId, syncInfo, syncCursor); } + + public static class Builder { + + private String connectorId; + private ConnectorSyncInfo syncInfo; + private Object syncCursor; + + public Builder setConnectorId(String connectorId) { + this.connectorId = connectorId; + return this; + } + + public Builder setSyncInfo(ConnectorSyncInfo syncInfo) { + this.syncInfo = syncInfo; + return this; + } + + public Builder setSyncCursor(Object syncCursor) { + this.syncCursor = syncCursor; + return this; + } + + public Request build() { + return new Request(connectorId, syncInfo, syncCursor); + } + } + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 21a0fede4675..698c061d1bd6 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptEngine; @@ -511,7 +512,9 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); - UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId( + connectorId + ).setSyncInfo(syncStats).build(); DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); @@ -533,7 +536,9 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { .setLastDeletedDocumentCount(randomLong()) .build(); - UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId( + connectorId + ).setSyncInfo(syncStats).build(); DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); @@ -547,7 +552,9 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { ConnectorSyncInfo nextSyncStats = new ConnectorSyncInfo.Builder().setLastIndexedDocumentCount(randomLong()).build(); - lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, nextSyncStats); + lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId(connectorId) + .setSyncInfo(nextSyncStats) + .build(); updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); @@ -563,6 +570,27 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { } + 
public void testUpdateConnectorLastSyncStats_syncCursor() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + Map syncCursor = randomMap(2, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))); + + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId( + connectorId + ).setSyncInfo(new ConnectorSyncInfo.Builder().build()).setSyncCursor(syncCursor).build(); + + DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + // Check sync_cursor got updated + assertThat(syncCursor, equalTo(indexedConnector.getSyncCursor())); + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java index 0728a7b328eb..b324a43b46b8 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.application.connector.action; import 
org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -28,7 +29,10 @@ public class UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests extend @Override protected UpdateConnectorLastSyncStatsAction.Request createTestInstance() { this.connectorId = randomUUID(); - return new UpdateConnectorLastSyncStatsAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorSyncInfo()); + return new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId(connectorId) + .setSyncInfo(ConnectorTestUtils.getRandomConnectorSyncInfo()) + .setSyncCursor(randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4)))) + .build(); } @Override From f2e218ac44dac826024d57df00b787771de63c87 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 7 Jun 2024 11:18:42 -0400 Subject: [PATCH 08/58] [ML] Add dry run and force to json spec for Delete Inference endpoint (#109402) * Add dry run and force to json spec * Rewording Co-authored-by: Tim Grein --------- Co-authored-by: Tim Grein --- .../rest-api-spec/api/inference.delete.json | 52 ++++++++++++------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json index 262899d5559a..745136848786 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json @@ -1,16 +1,18 @@ { - "inference.delete":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html", - 
"description":"Delete an inference endpoint" + "inference.delete": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html", + "description": "Delete an inference endpoint" }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ] }, - "url":{ - "paths":[ + "url": { + "paths": [ { "path": "/_inference/{inference_id}", "methods": [ @@ -24,22 +26,34 @@ } }, { - "path":"/_inference/{task_type}/{inference_id}", - "methods":[ + "path": "/_inference/{task_type}/{inference_id}", + "methods": [ "DELETE" ], - "parts":{ - "task_type":{ - "type":"string", - "description":"The task type" + "parts": { + "task_type": { + "type": "string", + "description": "The task type" }, - "inference_id":{ - "type":"string", - "description":"The inference Id" + "inference_id": { + "type": "string", + "description": "The inference Id" } } } ] + }, + "params": { + "dry_run": { + "type": "boolean", + "description": "If true the endpoint will not be deleted and a list of ingest processors which reference this endpoint will be returned.", + "required": false + }, + "force": { + "type": "boolean", + "description": "If true the endpoint will be forcefully stopped (regardless of whether or not it is referenced by any ingest processors or semantic text fields).", + "required": false + } } } } From e29bddce8a3453d8d85b7b0496ddff8192df521f Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Fri, 7 Jun 2024 11:20:22 -0400 Subject: [PATCH 09/58] [ESQL] More rule migration (#109429) This PR moves a few small optimizer rules out of core. None of these are particularly complicated, and they aren't touching any incorrect classes, so migrating these isn't a super high priority, but still gets some code out of core and into esql. 
--- .../esql/core/optimizer/OptimizerRules.java | 70 +------------------ .../esql/optimizer/LogicalPlanOptimizer.java | 8 +-- .../xpack/esql/optimizer/OptimizerRules.java | 3 +- .../esql/optimizer/rules/ConstantFolding.java | 24 +++++++ .../optimizer/rules/LiteralsOnTheRight.java | 24 +++++++ .../rules/PruneLiteralsInOrderBy.java | 42 +++++++++++ .../esql/optimizer/rules/SetAsOptimized.java | 26 +++++++ .../optimizer/LogicalPlanOptimizerTests.java | 4 +- .../esql/optimizer/OptimizerRulesTests.java | 5 +- 9 files changed, 128 insertions(+), 78 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java index 137f440f03b7..ba19a73f91c0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java @@ -12,11 +12,9 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.expression.function.Function; import 
org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.function.scalar.SurrogateFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; @@ -33,7 +31,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.compariso import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; @@ -60,20 +57,8 @@ import static org.elasticsearch.xpack.esql.core.util.CollectionUtils.combine; public final class OptimizerRules { - public static final class ConstantFolding extends OptimizerExpressionRule { - - public ConstantFolding() { - super(TransformDirection.DOWN); - } - - @Override - public Expression rule(Expression e) { - return e.foldable() ? Literal.of(e) : e; - } - } - /** - * This rule must always be placed after {@link LiteralsOnTheRight}, since it looks at TRUE/FALSE literals' existence + * This rule must always be placed after LiteralsOnTheRight, since it looks at TRUE/FALSE literals' existence * on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. 
*/ public static final class BooleanFunctionEqualsElimination extends OptimizerExpressionRule { @@ -235,18 +220,6 @@ public final class OptimizerRules { } } - public static final class LiteralsOnTheRight extends OptimizerExpressionRule> { - - public LiteralsOnTheRight() { - super(TransformDirection.UP); - } - - @Override - public BinaryOperator rule(BinaryOperator be) { - return be.left() instanceof Literal && (be.right() instanceof Literal) == false ? be.swapLeftAndRight() : be; - } - } - /** * Combine disjunctions on the same field into an In expression. * This rule looks for both simple equalities: @@ -383,32 +356,6 @@ public final class OptimizerRules { } } - public static final class PruneLiteralsInOrderBy extends OptimizerRule { - - @Override - protected LogicalPlan rule(OrderBy ob) { - List prunedOrders = new ArrayList<>(); - - for (Order o : ob.order()) { - if (o.child().foldable()) { - prunedOrders.add(o); - } - } - - // everything was eliminated, the order isn't needed anymore - if (prunedOrders.size() == ob.order().size()) { - return ob.child(); - } - if (prunedOrders.size() > 0) { - List newOrders = new ArrayList<>(ob.order()); - newOrders.removeAll(prunedOrders); - return new OrderBy(ob.source(), ob.child(), newOrders); - } - - return ob; - } - } - // NB: it is important to start replacing casts from the bottom to properly replace aliases public abstract static class PruneCast extends Rule { @@ -571,21 +518,6 @@ public final class OptimizerRules { } } - public static final class SetAsOptimized extends Rule { - - @Override - public LogicalPlan apply(LogicalPlan plan) { - plan.forEachUp(SetAsOptimized::rule); - return plan; - } - - private static void rule(LogicalPlan plan) { - if (plan.optimized() == false) { - plan.setOptimized(); - } - } - } - public abstract static class OptimizerRule extends Rule { private final TransformDirection direction; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 60b758e94851..951c97d7e69f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -36,10 +36,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.StringPattern; import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.ConstantFolding; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.LiteralsOnTheRight; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PruneLiteralsInOrderBy; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.SetAsOptimized; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; @@ -63,6 +59,10 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; +import org.elasticsearch.xpack.esql.optimizer.rules.PruneLiteralsInOrderBy; +import org.elasticsearch.xpack.esql.optimizer.rules.SetAsOptimized; import 
org.elasticsearch.xpack.esql.optimizer.rules.SimplifyComparisonsArithmetics; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java index 2041c08acbca..415de38e7a17 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; @@ -261,7 +262,7 @@ class OptimizerRules { } /** - * This rule must always be placed after {@link org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.LiteralsOnTheRight} + * This rule must always be placed after {@link LiteralsOnTheRight} * since it looks at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. 
*/ public static final class BooleanFunctionEqualsElimination extends diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java new file mode 100644 index 000000000000..f2638333c960 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; + +public final class ConstantFolding extends OptimizerRules.OptimizerExpressionRule { + + public ConstantFolding() { + super(OptimizerRules.TransformDirection.DOWN); + } + + @Override + public Expression rule(Expression e) { + return e.foldable() ? Literal.of(e) : e; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java new file mode 100644 index 000000000000..528fe6576697 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; + +public final class LiteralsOnTheRight extends OptimizerRules.OptimizerExpressionRule> { + + public LiteralsOnTheRight() { + super(OptimizerRules.TransformDirection.UP); + } + + @Override + public BinaryOperator rule(BinaryOperator be) { + return be.left() instanceof Literal && (be.right() instanceof Literal) == false ? be.swapLeftAndRight() : be; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java new file mode 100644 index 000000000000..591cfe043c00 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Order; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; + +import java.util.ArrayList; +import java.util.List; + +public final class PruneLiteralsInOrderBy extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(OrderBy ob) { + List prunedOrders = new ArrayList<>(); + + for (Order o : ob.order()) { + if (o.child().foldable()) { + prunedOrders.add(o); + } + } + + // everything was eliminated, the order isn't needed anymore + if (prunedOrders.size() == ob.order().size()) { + return ob.child(); + } + if (prunedOrders.size() > 0) { + List newOrders = new ArrayList<>(ob.order()); + newOrders.removeAll(prunedOrders); + return new OrderBy(ob.source(), ob.child(), newOrders); + } + + return ob; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java new file mode 100644 index 000000000000..168270b68db2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.rule.Rule; + +public final class SetAsOptimized extends Rule { + + @Override + public LogicalPlan apply(LogicalPlan plan) { + plan.forEachUp(SetAsOptimized::rule); + return plan; + } + + private static void rule(LogicalPlan plan) { + if (plan.optimized() == false) { + plan.setOptimized(); + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index b544bda70a50..e8d1336e4b4f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; @@ -106,6 +105,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; import 
org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; @@ -200,7 +200,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Map mappingExtra; private static Analyzer analyzerExtra; private static EnrichResolution enrichResolution; - private static final OptimizerRules.LiteralsOnTheRight LITERALS_ON_THE_RIGHT = new OptimizerRules.LiteralsOnTheRight(); + private static final LiteralsOnTheRight LITERALS_ON_THE_RIGHT = new LiteralsOnTheRight(); private static class SubstitutionOnlyOptimizer extends LogicalPlanOptimizer { static SubstitutionOnlyOptimizer INSTANCE = new SubstitutionOnlyOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index b5400237bfee..fb4f35b7173c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.ConstantFolding; import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull; import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; @@ -51,6 +50,8 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Les import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; import java.util.List; @@ -834,7 +835,7 @@ public class OptimizerRulesTests extends ESTestCase { public void testLiteralsOnTheRight() { Alias a = new Alias(EMPTY, "a", new Literal(EMPTY, 10, INTEGER)); - Expression result = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.LiteralsOnTheRight().rule(equalsOf(FIVE, a)); + Expression result = new LiteralsOnTheRight().rule(equalsOf(FIVE, a)); assertTrue(result instanceof Equals); Equals eq = (Equals) result; assertEquals(a, eq.left()); From a609258b21137a12563045c5fb3b903ec4e64fc6 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 7 Jun 2024 16:34:54 +0100 Subject: [PATCH 10/58] Add a streaming mode in the synthetic source loader. 
(#109472) Relates #109117 --- .../index/get/ShardGetService.java | 2 +- .../index/mapper/SourceFieldMapper.java | 2 +- .../index/mapper/SourceLoader.java | 61 +++++++++++++++---- .../index/query/SearchExecutionContext.java | 2 +- .../search/lookup/SourceProvider.java | 2 +- .../index/mapper/MapperServiceTestCase.java | 44 +++++++------ 6 files changed, 81 insertions(+), 32 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 0c28601646ac..b50545efef89 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -306,7 +306,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { Map metadataFields = null; DocIdAndVersion docIdAndVersion = get.docIdAndVersion(); SourceLoader loader = forceSyntheticSource - ? new SourceLoader.Synthetic(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()) + ? 
new SourceLoader.Synthetic(mappingLookup.getMapping()::syntheticFieldLoader, mapperMetrics.sourceFieldMetrics()) : mappingLookup.newSourceLoader(mapperMetrics.sourceFieldMetrics()); StoredFieldLoader storedFieldLoader = buildStoredFieldLoader(storedFields, fetchSourceContext, loader); LeafStoredFieldLoader leafStoredFieldLoader = storedFieldLoader.getLoader(docIdAndVersion.reader.getContext(), null); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index d6a15ff9ec47..67e457907f8c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -390,7 +390,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { */ public SourceLoader newSourceLoader(Mapping mapping, SourceFieldMetrics metrics) { if (mode == Mode.SYNTHETIC) { - return new SourceLoader.Synthetic(mapping, metrics); + return new SourceLoader.Synthetic(mapping::syntheticFieldLoader, metrics); } return SourceLoader.FROM_STORED_SOURCE; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java index a1b95e7a2c8b..c9bea33852a2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java @@ -57,6 +57,14 @@ public interface SourceLoader { * @param docId the doc to load */ Source source(LeafStoredFieldLoader storedFields, int docId) throws IOException; + + /** + * Write the {@code _source} for a document in the provided {@link XContentBuilder}. 
+ * @param storedFields a loader for stored fields + * @param docId the doc to load + * @param b the builder to write the xcontent + */ + void write(LeafStoredFieldLoader storedFields, int docId, XContentBuilder b) throws IOException; } /** @@ -70,7 +78,18 @@ public interface SourceLoader { @Override public Leaf leaf(LeafReader reader, int[] docIdsInLeaf) { - return (storedFieldLoader, docId) -> Source.fromBytes(storedFieldLoader.source()); + return new Leaf() { + @Override + public Source source(LeafStoredFieldLoader storedFields, int docId) throws IOException { + return Source.fromBytes(storedFields.source()); + } + + @Override + public void write(LeafStoredFieldLoader storedFields, int docId, XContentBuilder builder) throws IOException { + Source source = source(storedFields, docId); + builder.rawValue(source.internalSourceRef().streamInput(), source.sourceContentType()); + } + }; } @Override @@ -80,15 +99,20 @@ public interface SourceLoader { }; /** - * Load {@code _source} from doc values. + * Reconstructs {@code _source} from doc values anf stored fields. */ class Synthetic implements SourceLoader { private final Supplier syntheticFieldLoaderLeafSupplier; private final Set requiredStoredFields; private final SourceFieldMetrics metrics; - public Synthetic(Mapping mapping, SourceFieldMetrics metrics) { - this.syntheticFieldLoaderLeafSupplier = mapping::syntheticFieldLoader; + /** + * Creates a {@link SourceLoader} to reconstruct {@code _source} from doc values anf stored fields. + * @param fieldLoaderSupplier A supplier to create {@link SyntheticFieldLoader}, one for each leaf. + * @param metrics Metrics for profiling. 
+ */ + public Synthetic(Supplier fieldLoaderSupplier, SourceFieldMetrics metrics) { + this.syntheticFieldLoaderLeafSupplier = fieldLoaderSupplier; this.requiredStoredFields = syntheticFieldLoaderLeafSupplier.get() .storedFieldLoaders() .map(Map.Entry::getKey) @@ -126,6 +150,16 @@ public interface SourceLoader { return source; } + + @Override + public void write(LeafStoredFieldLoader storedFields, int docId, XContentBuilder b) throws IOException { + long startTime = metrics.getRelativeTimeSupplier().getAsLong(); + + leaf.write(storedFields, docId, b); + + TimeValue duration = TimeValue.timeValueMillis(metrics.getRelativeTimeSupplier().getAsLong() - startTime); + metrics.recordSyntheticSourceLoadLatency(duration); + } } private static class SyntheticLeaf implements Leaf { @@ -143,6 +177,14 @@ public interface SourceLoader { @Override public Source source(LeafStoredFieldLoader storedFieldLoader, int docId) throws IOException { + try (XContentBuilder b = new XContentBuilder(JsonXContent.jsonXContent, new ByteArrayOutputStream())) { + write(storedFieldLoader, docId, b); + return Source.fromBytes(BytesReference.bytes(b), b.contentType()); + } + } + + @Override + public void write(LeafStoredFieldLoader storedFieldLoader, int docId, XContentBuilder b) throws IOException { // Maps the names of existing objects to lists of ignored fields they contain. 
Map> objectsWithIgnoredFields = null; @@ -168,13 +210,10 @@ public interface SourceLoader { docValuesLoader.advanceToDoc(docId); } // TODO accept a requested xcontent type - try (XContentBuilder b = new XContentBuilder(JsonXContent.jsonXContent, new ByteArrayOutputStream())) { - if (loader.hasValue()) { - loader.write(b); - } else { - b.startObject().endObject(); - } - return Source.fromBytes(BytesReference.bytes(b), b.contentType()); + if (loader.hasValue()) { + loader.write(b); + } else { + b.startObject().endObject(); } } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 7ca0b0bd401e..9d3aa9905c74 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -436,7 +436,7 @@ public class SearchExecutionContext extends QueryRewriteContext { */ public SourceLoader newSourceLoader(boolean forceSyntheticSource) { if (forceSyntheticSource) { - return new SourceLoader.Synthetic(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()); + return new SourceLoader.Synthetic(mappingLookup.getMapping()::syntheticFieldLoader, mapperMetrics.sourceFieldMetrics()); } return mappingLookup.newSourceLoader(mapperMetrics.sourceFieldMetrics()); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java index 8a180d4f11ec..a8c898409bf9 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java @@ -48,6 +48,6 @@ public interface SourceProvider { * multiple threads. 
*/ static SourceProvider fromSyntheticSource(Mapping mapping, SourceFieldMetrics metrics) { - return new SyntheticSourceProvider(new SourceLoader.Synthetic(mapping, metrics)); + return new SyntheticSourceProvider(new SourceLoader.Synthetic(mapping::syntheticFieldLoader, metrics)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index dfd4a59e2c3a..50436ad64c8a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -46,7 +46,6 @@ import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -68,7 +67,6 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.internal.SubSearchContext; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.lookup.SourceProvider; import org.elasticsearch.search.sort.BucketedSort; import org.elasticsearch.search.sort.BucketedSort.ExtraData; @@ -77,11 +75,13 @@ import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContent; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Collection; import java.util.Collections; @@ -808,23 +808,33 @@ public abstract class MapperServiceTestCase extends FieldTypeTestCase { } private static String syntheticSource(DocumentMapper mapper, IndexReader reader, int docId) throws IOException { - SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), SourceFieldMetrics.NOOP); - Source synthetic = provider.getSource(getOnlyLeafReader(reader).getContext(), docId); - return synthetic.internalSourceRef().utf8ToString(); - } + LeafReader leafReader = getOnlyLeafReader(reader); - protected static LeafStoredFieldLoader syntheticSourceStoredFieldLoader( - DocumentMapper mapper, - LeafReader leafReader, - SourceLoader loader - ) throws IOException { - if (loader.requiredStoredFields().isEmpty()) { - return StoredFieldLoader.empty().getLoader(leafReader.getContext(), null); + final String synthetic1; + final XContent xContent; + { + SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), SourceFieldMetrics.NOOP); + var source = provider.getSource(leafReader.getContext(), docId); + synthetic1 = source.internalSourceRef().utf8ToString(); + xContent = source.sourceContentType().xContent(); } - LeafStoredFieldLoader storedFields = StoredFieldLoader.create(false, loader.requiredStoredFields()) - .getLoader(leafReader.getContext(), null); - storedFields.advanceTo(0); - return storedFields; + + final String synthetic2; + { + int[] docIds = new int[] { docId }; + SourceLoader sourceLoader = new SourceLoader.Synthetic(mapper.mapping()::syntheticFieldLoader, SourceFieldMetrics.NOOP); + var sourceLeafLoader = sourceLoader.leaf(getOnlyLeafReader(reader), docIds); + var storedFieldLoader = 
StoredFieldLoader.create(false, sourceLoader.requiredStoredFields()) + .getLoader(leafReader.getContext(), docIds); + storedFieldLoader.advanceTo(docId); + try (XContentBuilder b = new XContentBuilder(xContent, new ByteArrayOutputStream())) { + sourceLeafLoader.write(storedFieldLoader, docId, b); + synthetic2 = BytesReference.bytes(b).utf8ToString(); + } + } + + assertThat(synthetic2, equalTo(synthetic1)); + return synthetic1; } protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) From df961991d78d5ccb4083417197b5287a5c661d27 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 7 Jun 2024 18:38:07 +0300 Subject: [PATCH 11/58] Fix task cancellation on remote cluster when original request fails (#109440) Fixes a bug where the task on the remote cluster node is not cancelled when the original request (that started the task) fails (returns an exception). --- docs/changelog/109440.yaml | 5 + .../tasks/TaskCancellationService.java | 2 +- .../node/tasks/TaskManagerTestCase.java | 8 +- .../RemoteClusterAwareClientTests.java | 94 +++++++++++++++++++ .../test/tasks/MockTaskManager.java | 6 ++ .../test/transport/MockTransportService.java | 13 ++- 6 files changed, 119 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/109440.yaml diff --git a/docs/changelog/109440.yaml b/docs/changelog/109440.yaml new file mode 100644 index 000000000000..c1e9aef8110f --- /dev/null +++ b/docs/changelog/109440.yaml @@ -0,0 +1,5 @@ +pr: 109440 +summary: Fix task cancellation on remote cluster when original request fails +area: Network +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java index d292a6a15a79..7d8b966451d3 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java +++ 
b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java @@ -439,7 +439,7 @@ public class TaskCancellationService { reason ); final CancelChildRequest request = CancelChildRequest.createCancelChildRequest(parentTask, childRequestId, reason); - transportService.sendRequest(childNode, CANCEL_CHILD_ACTION_NAME, request, TransportRequestOptions.EMPTY, NOOP_HANDLER); + transportService.sendRequest(childConnection, CANCEL_CHILD_ACTION_NAME, request, TransportRequestOptions.EMPTY, NOOP_HANDLER); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 882da84d22fe..f943ff14002c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -61,6 +61,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.elasticsearch.test.transport.MockTransportService.createTaskManager; /** * The test case for unit testing task manager and related transport actions @@ -176,12 +177,7 @@ public abstract class TaskManagerTestCase extends ESTestCase { discoveryNode.set(DiscoveryNodeUtils.create(name, address.publishAddress(), emptyMap(), emptySet())); return discoveryNode.get(); }; - TaskManager taskManager; - if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { - taskManager = new MockTaskManager(settings, threadPool, emptySet()); - } else { - taskManager = new TaskManager(settings, threadPool, emptySet()); - } + TaskManager taskManager = createTaskManager(settings, threadPool, emptySet(), Tracer.NOOP); transportService = new TransportService( 
settings, new Netty4Transport( diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java index c350e2a4cfaa..863bb60f0acc 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java @@ -22,6 +22,9 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancellationService; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ScalingExecutorBuilder; @@ -31,11 +34,19 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.test.tasks.MockTaskManager.SPY_TASK_MANAGER_SETTING; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; public class RemoteClusterAwareClientTests extends ESTestCase { @@ -62,6 +73,89 @@ public class RemoteClusterAwareClientTests extends ESTestCase { ); } + public void testRemoteTaskCancellationOnFailedResponse() throws Exception { + Settings.Builder 
remoteTransportSettingsBuilder = Settings.builder(); + remoteTransportSettingsBuilder.put(SPY_TASK_MANAGER_SETTING.getKey(), true); + try ( + MockTransportService remoteTransport = RemoteClusterConnectionTests.startTransport( + "seed_node", + new CopyOnWriteArrayList<>(), + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + remoteTransportSettingsBuilder.build() + ) + ) { + remoteTransport.getTaskManager().setTaskCancellationService(new TaskCancellationService(remoteTransport)); + Settings.Builder builder = Settings.builder(); + builder.putList("cluster.remote.cluster1.seeds", remoteTransport.getLocalDiscoNode().getAddress().toString()); + try ( + MockTransportService localService = MockTransportService.createNewService( + builder.build(), + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + null + ) + ) { + // the TaskCancellationService references the same TransportService instance + // this is identically to how it works in the Node constructor + localService.getTaskManager().setTaskCancellationService(new TaskCancellationService(localService)); + localService.start(); + localService.acceptIncomingRequests(); + + SearchShardsRequest searchShardsRequest = new SearchShardsRequest( + new String[] { "test-index" }, + IndicesOptions.strictExpandOpen(), + new MatchAllQueryBuilder(), + null, + "index_not_found", // this request must fail + randomBoolean(), + null + ); + Task parentTask = localService.getTaskManager().register("test_type", "test_action", searchShardsRequest); + TaskId parentTaskId = new TaskId("test-mock-node-id", parentTask.getId()); + searchShardsRequest.setParentTask(parentTaskId); + var client = new RemoteClusterAwareClient( + localService, + "cluster1", + threadPool.executor(TEST_THREAD_POOL_NAME), + randomBoolean() + ); + + CountDownLatch cancelChildReceived = new CountDownLatch(1); + remoteTransport.addRequestHandlingBehavior( + TaskCancellationService.CANCEL_CHILD_ACTION_NAME, + (handler, request, 
channel, task) -> { + handler.messageReceived(request, channel, task); + cancelChildReceived.countDown(); + } + ); + AtomicLong searchShardsRequestId = new AtomicLong(-1); + CountDownLatch cancelChildSent = new CountDownLatch(1); + localService.addSendBehavior(remoteTransport, (connection, requestId, action, request, options) -> { + connection.sendRequest(requestId, action, request, options); + if (action.equals("indices:admin/search/search_shards")) { + searchShardsRequestId.set(requestId); + } else if (action.equals(TaskCancellationService.CANCEL_CHILD_ACTION_NAME)) { + cancelChildSent.countDown(); + } + }); + + // assert original request failed + var future = new PlainActionFuture(); + client.execute(TransportSearchShardsAction.REMOTE_TYPE, searchShardsRequest, future); + ExecutionException e = expectThrows(ExecutionException.class, future::get); + assertThat(e.getCause(), instanceOf(RemoteTransportException.class)); + + // assert remote task is cancelled + safeAwait(cancelChildSent); + safeAwait(cancelChildReceived); + verify(remoteTransport.getTaskManager()).cancelChildLocal(eq(parentTaskId), eq(searchShardsRequestId.get()), anyString()); + } + } + } + public void testSearchShards() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try ( diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index 599868ab7f1f..68f1f74b23c0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -39,6 +39,12 @@ public class MockTaskManager extends TaskManager { Property.NodeScope ); + public static final Setting SPY_TASK_MANAGER_SETTING = Setting.boolSetting( + "tests.spy.taskmanager.enabled", + false, + Property.NodeScope + ); + private final Collection listeners = new CopyOnWriteArrayList<>(); public 
MockTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 93c9f63fc5e6..51893e551ba8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -80,6 +80,7 @@ import java.util.function.Function; import java.util.function.Supplier; import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.spy; /** * A mock delegate service that allows to simulate different network topology failures. @@ -102,7 +103,7 @@ public class MockTransportService extends TransportService { public static class TestPlugin extends Plugin { @Override public List> getSettings() { - return List.of(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING); + return List.of(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING, MockTaskManager.SPY_TASK_MANAGER_SETTING); } } @@ -310,7 +311,15 @@ public class MockTransportService extends TransportService { return transportAddresses.toArray(new TransportAddress[transportAddresses.size()]); } - private static TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { + public static TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { + if (MockTaskManager.SPY_TASK_MANAGER_SETTING.get(settings)) { + return spy(createMockTaskManager(settings, threadPool, taskHeaders, tracer)); + } else { + return createMockTaskManager(settings, threadPool, taskHeaders, tracer); + } + } + + private static TaskManager createMockTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { return new MockTaskManager(settings, 
threadPool, taskHeaders); } else { From 3d0c65d0c5566ef3454b11b985780b2954da013b Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 7 Jun 2024 18:01:42 +0200 Subject: [PATCH 12/58] ES|QL: add tests for COALESCE() function on VERSION type (#109468) --- .../functions/kibana/definition/coalesce.json | 18 ++++++++++++++++++ .../esql/functions/types/coalesce.asciidoc | 1 + .../src/main/resources/meta.csv-spec | 6 +++--- .../function/scalar/nulls/Coalesce.java | 9 ++++++--- .../function/scalar/nulls/CoalesceTests.java | 15 +++++++++++++++ 5 files changed, 43 insertions(+), 6 deletions(-) diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index d9659fa03e80..f00f471e63ec 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -261,6 +261,24 @@ ], "variadic" : true, "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "Expression to evaluate." + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "Other expression to evaluate." 
+ } + ], + "variadic" : true, + "returnType" : "version" } ], "examples" : [ diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index a5d8f85aa564..841d836f6837 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -21,4 +21,5 @@ long | long | long long | | long text | text | text text | | text +version | version | version |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 4ab10f78c312..2cdd5c1dfd93 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -14,7 +14,7 @@ synopsis:keyword "double cbrt(number:double|integer|long|unsigned_long)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" "boolean cidr_match(ip:ip, blockX...:keyword|text)" -"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text coalesce(first:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text, ?rest...:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text)" +"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version coalesce(first:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version, ?rest...:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version)" "keyword concat(string1:keyword|text, string2...:keyword|text)" "double cos(angle:double|integer|long|unsigned_long)" "double cosh(angle:double|integer|long|unsigned_long)" @@ -131,7 +131,7 @@ case |[condition, trueValue] |[boolean, "boolean|cartesian cbrt |number |"double|integer|long|unsigned_long" 
|"Numeric expression. If `null`, the function returns `null`." ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text" |Expression to evaluate. +coalesce |first |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version" |Expression to evaluate. concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. @@ -368,7 +368,7 @@ case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword cbrt |double |false |false |false ceil |"double|integer|long|unsigned_long" |false |false |false cidr_match |boolean |[false, false] |true |false -coalesce |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text" |false |true |false +coalesce |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version" |false |true |false concat |keyword |[false, false] |true |false cos |double |false |false |false cosh |double |false |false |false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index a367e2f75b7e..ff7cd83eedbe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -54,7 +54,8 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "ip", "keyword", "long", - "text" }, + "text", + "version" }, description = "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`.", examples = { @Example(file = "null", tag = "coalesce") } ) @@ -73,7 +74,8 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "ip", "keyword", "long", - "text" }, + "text", + "version" }, description = "Expression to evaluate." ) Expression first, @Param( @@ -89,7 +91,8 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "ip", "keyword", "long", - "text" }, + "text", + "version" }, description = "Other expression to evaluate.", optional = true ) List rest diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index 77a129c8eb62..42022099ceac 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -71,6 +71,21 @@ public class CoalesceTests extends AbstractFunctionTestCase { equalTo(first == null ? second : first) ); })); + suppliers.add(new TestCaseSupplier(List.of(DataType.VERSION, DataType.VERSION), () -> { + var first = randomBoolean() + ? null + : EsqlDataTypeConverter.stringToVersion(randomInt(10) + "." + randomInt(10) + "." + randomInt(10)); + var second = EsqlDataTypeConverter.stringToVersion(randomInt(10) + "." + randomInt(10) + "." 
+ randomInt(10)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(first, DataType.VERSION, "first"), + new TestCaseSupplier.TypedData(second, DataType.VERSION, "second") + ), + "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + DataType.VERSION, + equalTo(first == null ? second : first) + ); + })); suppliers.add(new TestCaseSupplier(List.of(DataType.DATETIME, DataType.DATETIME), () -> { Long firstDate = randomBoolean() ? null : ZonedDateTime.parse("2023-12-04T10:15:30Z").toInstant().toEpochMilli(); Long secondDate = ZonedDateTime.parse("2023-12-05T10:45:00Z").toInstant().toEpochMilli(); From 1080425a65d826df1fae1a45b15fd163c49ff172 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 7 Jun 2024 09:21:22 -0700 Subject: [PATCH 13/58] Enable fallback synthetic source by default (#109370) --- docs/changelog/109370.yaml | 6 ++ .../test/rank_feature/30_synthetic_source.yml | 49 ++++++++++ .../rank_features/20_synthetic_source.yml | 56 ++++++++++++ modules/parent-join/build.gradle | 2 +- .../test/60_synthetic_source.yml | 46 +++++++++- modules/percolator/build.gradle | 2 +- .../resources/rest-api-spec/test/10_basic.yml | 38 ++++++++ qa/ccs-common-rest/build.gradle | 2 +- .../test/get/100_synthetic_source.yml | 32 +++++++ .../test/search.vectors/90_sparse_vector.yml | 89 +++++++++++++++++++ .../index/mapper/DocumentParser.java | 13 +++ .../index/mapper/FieldMapper.java | 29 +++--- 12 files changed, 347 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/109370.yaml create mode 100644 modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml create mode 100644 modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml diff --git a/docs/changelog/109370.yaml b/docs/changelog/109370.yaml new file mode 100644 index 000000000000..32b190d1a1c9 --- /dev/null +++ b/docs/changelog/109370.yaml @@ -0,0 +1,6 
@@ +pr: 109370 +summary: Enable fallback synthetic source by default +area: Mapping +type: feature +issues: + - 106460 diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml new file mode 100644 index 000000000000..1e0b90ebb9e0 --- /dev/null +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml @@ -0,0 +1,49 @@ +setup: + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + pagerank: + type: rank_feature + +--- +"synthetic source sanity test": + - do: + index: + index: test + id: "1" + body: + pagerank: 10 + + - do: + index: + index: test + id: "2" + body: + pagerank: null + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.pagerank: 10 } + + - do: + get: + index: test + id: "2" + + - match: { _source.pagerank: null } + diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml new file mode 100644 index 000000000000..c64e35cc2cea --- /dev/null +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml @@ -0,0 +1,56 @@ +setup: + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + tags: + type: rank_features + +--- +"synthetic source sanity test": + - do: + index: + index: test + id: "1" + body: + tags: + foo: 3 + bar: 5 + + - do: + index: + index: test + id: "2" + 
body: + tags: [] + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: + _source: + tags: + foo: 3 + bar: 5 + + - do: + get: + index: test + id: "2" + + - match: { _source.tags: [] } + + diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index 903192e6ce25..844478c83e7c 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -16,7 +16,7 @@ esplugin { restResources { restApi { - include '_common', 'bulk', 'cluster', 'nodes', 'indices', 'index', 'search' + include '_common', 'bulk', 'cluster', 'get', 'nodes', 'indices', 'index', 'search' } } diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml index 4ebc6cf4e9d6..12d0f1bbae6c 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml @@ -1,10 +1,9 @@ -unsupported: +supported: - requires: - cluster_features: ["gte_v8.3.0"] - reason: introduced in 8.3.0 + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 - do: - catch: bad_request indices.create: index: test body: @@ -16,3 +15,42 @@ unsupported: type: join relations: parent: child + + - do: + index: + index: test + id: "1" + body: {"foo": "bar", "join_field": {"name" : "parent"} } + + - do: + index: + index: test + id: "2" + routing: "1" + body: {"zab": "baz", "join_field": { "name" : "child", "parent": "1"} } + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: + _source: + foo: "bar" + join_field: + name: "parent" + + - do: + get: + index: test + id: "2" + + - match: + _source: + join_field: + name: "child" + parent: "1" + zab: "baz" diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index 
a871056539d3..b9b257a42e05 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -20,7 +20,7 @@ dependencies { restResources { restApi { - include '_common', 'indices', 'index', 'search', 'msearch' + include '_common', 'get', 'indices', 'index', 'search', 'msearch' } } diff --git a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml index 11c2993f4d34..a5576d203314 100644 --- a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml +++ b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml @@ -126,3 +126,41 @@ document: foo.bar: value - match: { hits.total.value: 1 } + +--- +"Synthetic source": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: queries_index + body: + mappings: + _source: + mode: synthetic + properties: + query: + type: percolator + + - do: + index: + index: queries_index + id: test_percolator + body: + query: + match_all: {} + + - do: + indices.refresh: {} + + - do: + get: + index: queries_index + id: "test_percolator" + + - match: + _source: + query: + match_all: {} diff --git a/qa/ccs-common-rest/build.gradle b/qa/ccs-common-rest/build.gradle index 41dba06649ea..e5e8c5a489d5 100644 --- a/qa/ccs-common-rest/build.gradle +++ b/qa/ccs-common-rest/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { - include '_common', 'bulk', 'count', 'cluster', 'field_caps', 'knn_search', 'index', 'indices', 'msearch', + include '_common', 'bulk', 'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', 'search', 'async_search', 'graph', '*_point_in_time', 'info', 'scroll', 'clear_scroll', 'search_mvt', 'eql', 'sql' } restTests { diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml index fc8df138f94a..b2b9e1b90cb3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml @@ -1108,3 +1108,35 @@ flattened field with ignore_above: key7: "key7" - is_false: fields + +--- +completion: + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + completion: + type: completion + + - do: + index: + index: test + id: 1 + refresh: true + body: + completion: "the quick brown fox" + + - do: + get: + index: test + id: 1 + + - match: { _source.completion: "the quick brown fox" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml index fa89a4356176..00cea2e330d8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml @@ -209,3 +209,92 @@ query: exists: field: ml.tokens + +--- +"sparse_vector synthetic source": + + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + ml.tokens: + type: sparse_vector + + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + body: + ml: + tokens: + running: 2.4097164 + good: 2.170997 + run: 2.052153 + race: 1.4575411 + for: 1.1908325 + + - match: { result: "created" } + + - 
do: + index: + index: test + id: "2" + body: + ml: + tokens: [] + + - match: { result: "created" } + + - do: + index: + index: test + id: "3" + body: + ml: + tokens: {} + + - match: { result: "created" } + + - do: + indices.refresh: { } + + - do: + get: + index: test + id: "1" + + - match: + _source: + ml: + tokens: + running: 2.4097164 + good: 2.170997 + run: 2.052153 + race: 1.4575411 + for: 1.1908325 + + - do: + get: + index: test + id: "2" + + - match: + _source.ml.tokens: [] + + - do: + get: + index: test + id: "3" + + - match: + _source.ml.tokens: {} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index a89a89472a67..3d4f0823bb1c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -965,6 +965,19 @@ public final class DocumentParser { protected String contentType() { throw new UnsupportedOperationException(); } + + @Override + protected SyntheticSourceMode syntheticSourceMode() { + // Opt out of fallback synthetic source implementation + // since there is custom logic in #parseCreateField() + return SyntheticSourceMode.NATIVE; + } + + @Override + public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { + // Handled via IgnoredSourceFieldMapper infrastructure + return SourceLoader.SyntheticFieldLoader.NOTHING; + } }; private static class NoOpObjectMapper extends ObjectMapper { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 040963b0f8ba..4338a62d79ab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -443,27 +443,36 @@ public abstract class FieldMapper extends Mapper { * Specifies the mode of synthetic source 
support by the mapper. * *
-     * {@link NATIVE} - mapper natively supports synthetic source, f.e. by constructing it from doc values.
+     * {@link SyntheticSourceMode#NATIVE} - mapper natively supports synthetic source, f.e. by constructing it from doc values.
      *
-     * {@link FALLBACK} - mapper does not have native support but uses fallback implementation.
-     * This is a temporary variant that exists in order to roll out fallback implementation on a per field basis.
-     *
-     * {@link NOT_SUPPORTED} - synthetic source is not supported.
+     * {@link SyntheticSourceMode#FALLBACK} - mapper does not have native support and uses generic fallback implementation
+     * that stores raw input source data as is.
      * 
*/ protected enum SyntheticSourceMode { NATIVE, - FALLBACK, - NOT_SUPPORTED + FALLBACK } /** + *

* Specifies the mode of synthetic source support by the mapper. - * + *
+ * This is used to determine if a field mapper has support for + * constructing synthetic source. + * In case it doesn't (meaning {@link SyntheticSourceMode#FALLBACK}), + * we will store raw source data for this field as is + * and then use it for synthetic source. + *

+ *

+ * Field mappers must override this method if they provide + * a custom implementation of {@link #syntheticFieldLoader()} + * in order to use a more efficient field-specific implementation. + *

* @return {@link SyntheticSourceMode} */ protected SyntheticSourceMode syntheticSourceMode() { - return SyntheticSourceMode.NOT_SUPPORTED; + return SyntheticSourceMode.FALLBACK; } /** @@ -476,7 +485,7 @@ public abstract class FieldMapper extends Mapper { @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { // If mapper supports synthetic source natively, it overrides this method, - // /so we won't see those here. + // so we won't see those here. if (syntheticSourceMode() == SyntheticSourceMode.FALLBACK) { if (copyTo.copyToFields().isEmpty() != true) { throw new IllegalArgumentException( From e5b051a69669446390afe9257743d2b89f253d00 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 7 Jun 2024 18:35:20 +0200 Subject: [PATCH 14/58] ES|QL: fix logging of async queries (#109485) Just a quick fix to the logs (it logged EQL instead of ESQL), but probably it will need some more refactoring. --- .../esql/core/async/AsyncTaskManagementService.java | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java index deb5cae1172f..94bac95b9150 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java @@ -43,7 +43,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; /** - * Service for managing EQL requests + * Service for managing ESQL requests */ public class AsyncTaskManagementService< Request extends TaskAwareRequest, @@ -84,7 +84,7 @@ public class AsyncTaskManagementService< } /** - * Wrapper for EqlSearchRequest that creates an async version of EqlSearchTask + * Wrapper 
for EsqlQueryRequest that creates an async version of EsqlQueryTask */ private class AsyncRequestWrapper implements TaskAwareRequest { private final Request request; @@ -278,8 +278,7 @@ public class AsyncTaskManagementService< ActionListener.wrap( // We should only unregister after the result is saved resp -> { - // TODO: generalize the logging, not just eql - logger.trace(() -> "stored eql search results for [" + searchTask.getExecutionId().getEncoded() + "]"); + logger.trace(() -> "stored ESQL search results for [" + searchTask.getExecutionId().getEncoded() + "]"); taskManager.unregister(searchTask); if (storedResponse.getException() != null) { searchTask.onFailure(storedResponse.getException()); @@ -297,8 +296,7 @@ public class AsyncTaskManagementService< if (cause instanceof DocumentMissingException == false && cause instanceof VersionConflictEngineException == false) { logger.error( - // TODO: generalize the logging, not just eql - () -> format("failed to store eql search results for [%s]", searchTask.getExecutionId().getEncoded()), + () -> format("failed to store ESQL search results for [%s]", searchTask.getExecutionId().getEncoded()), exc ); } @@ -311,7 +309,7 @@ public class AsyncTaskManagementService< } catch (Exception exc) { taskManager.unregister(searchTask); searchTask.onFailure(exc); - logger.error(() -> "failed to store eql search results for [" + searchTask.getExecutionId().getEncoded() + "]", exc); + logger.error(() -> "failed to store ESQL search results for [" + searchTask.getExecutionId().getEncoded() + "]", exc); } } From a5b4f1fa6121b8c853a5c82efa17501ce7c75d45 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 7 Jun 2024 19:24:04 +0200 Subject: [PATCH 15/58] ES|QL: vectorize eval (#109332) Use VectorFixedBuilders for better optimization and to facilitate JIT auto-vectorization --- .../compute/operator/EvalBenchmark.java | 42 ++++++++++++++++++- docs/changelog/109332.yaml | 5 +++ .../compute/gen/EvaluatorImplementer.java | 25 
+++++++---- .../compute/aggregation/DoubleArrayState.java | 7 ++-- .../compute/aggregation/IntArrayState.java | 6 +-- .../compute/aggregation/LongArrayState.java | 7 ++-- .../aggregation/RateDoubleAggregator.java | 6 +-- .../aggregation/RateIntAggregator.java | 6 +-- .../aggregation/RateLongAggregator.java | 6 +-- .../compute/data/BooleanVector.java | 5 ++- .../data/BooleanVectorFixedBuilder.java | 21 ++++++---- .../compute/data/DoubleVector.java | 5 ++- .../data/DoubleVectorFixedBuilder.java | 21 ++++++---- .../elasticsearch/compute/data/IntVector.java | 5 ++- .../compute/data/IntVectorFixedBuilder.java | 21 ++++++---- .../compute/data/LongVector.java | 5 ++- .../compute/data/LongVectorFixedBuilder.java | 21 ++++++---- .../compute/aggregation/X-ArrayState.java.st | 8 ++-- .../aggregation/X-RateAggregator.java.st | 6 +-- .../blockhash/BooleanBlockHash.java | 2 +- .../blockhash/BytesRef3BlockHash.java | 4 +- .../blockhash/LongLongBlockHash.java | 2 +- .../blockhash/TimeSeriesBlockHash.java | 4 +- .../AscendingSequenceRowInTableLookup.java | 4 +- .../compute/data/X-Vector.java.st | 5 ++- .../compute/data/X-VectorFixedBuilder.java.st | 21 ++++++---- .../operator/OrdinalsGroupingOperator.java | 6 +-- .../SequenceBooleanBlockSourceOperator.java | 4 +- .../operator/logical/NotEvaluator.java | 4 +- .../conditional/GreatestBooleanEvaluator.java | 4 +- .../conditional/GreatestDoubleEvaluator.java | 4 +- .../conditional/GreatestIntEvaluator.java | 4 +- .../conditional/GreatestLongEvaluator.java | 4 +- .../conditional/LeastBooleanEvaluator.java | 4 +- .../conditional/LeastDoubleEvaluator.java | 4 +- .../scalar/conditional/LeastIntEvaluator.java | 4 +- .../conditional/LeastLongEvaluator.java | 4 +- .../date/DateExtractConstantEvaluator.java | 4 +- .../scalar/date/DateTruncEvaluator.java | 4 +- .../function/scalar/date/NowEvaluator.java | 4 +- .../scalar/ip/CIDRMatchEvaluator.java | 4 +- .../scalar/math/AbsDoubleEvaluator.java | 4 +- 
.../function/scalar/math/AbsIntEvaluator.java | 4 +- .../scalar/math/AbsLongEvaluator.java | 4 +- .../function/scalar/math/Atan2Evaluator.java | 4 +- .../function/scalar/math/AtanEvaluator.java | 4 +- .../scalar/math/CastIntToDoubleEvaluator.java | 4 +- .../scalar/math/CastIntToLongEvaluator.java | 4 +- .../math/CastIntToUnsignedLongEvaluator.java | 4 +- .../math/CastLongToDoubleEvaluator.java | 4 +- .../math/CastLongToUnsignedLongEvaluator.java | 4 +- .../CastUnsignedLongToDoubleEvaluator.java | 4 +- .../math/CbrtUnsignedLongEvaluator.java | 4 +- .../scalar/math/CeilDoubleEvaluator.java | 4 +- .../function/scalar/math/CosEvaluator.java | 4 +- .../scalar/math/FloorDoubleEvaluator.java | 4 +- .../scalar/math/RoundDoubleEvaluator.java | 4 +- .../math/RoundDoubleNoDecimalsEvaluator.java | 4 +- .../scalar/math/RoundIntEvaluator.java | 4 +- .../scalar/math/RoundLongEvaluator.java | 4 +- .../math/RoundUnsignedLongEvaluator.java | 4 +- .../scalar/math/SignumDoubleEvaluator.java | 4 +- .../scalar/math/SignumIntEvaluator.java | 4 +- .../scalar/math/SignumLongEvaluator.java | 4 +- .../math/SignumUnsignedLongEvaluator.java | 4 +- .../function/scalar/math/SinEvaluator.java | 4 +- .../math/SqrtUnsignedLongEvaluator.java | 4 +- .../function/scalar/math/TanEvaluator.java | 4 +- .../function/scalar/math/TanhEvaluator.java | 4 +- ...esianPointDocValuesAndSourceEvaluator.java | 4 +- ...esianPointDocValuesAndSourceEvaluator.java | 4 +- ...esianPointDocValuesAndSourceEvaluator.java | 4 +- ...esianPointDocValuesAndSourceEvaluator.java | 4 +- .../scalar/string/AutomataMatchEvaluator.java | 4 +- .../scalar/string/EndsWithEvaluator.java | 4 +- .../scalar/string/LengthEvaluator.java | 4 +- .../scalar/string/LocateEvaluator.java | 4 +- .../scalar/string/LocateNoStartEvaluator.java | 4 +- .../scalar/string/StartsWithEvaluator.java | 4 +- .../arithmetic/AddDoublesEvaluator.java | 4 +- .../arithmetic/MulDoublesEvaluator.java | 4 +- .../arithmetic/NegDoublesEvaluator.java | 4 +- 
.../arithmetic/SubDoublesEvaluator.java | 4 +- .../comparison/EqualsBoolsEvaluator.java | 4 +- .../comparison/EqualsDoublesEvaluator.java | 4 +- .../comparison/EqualsGeometriesEvaluator.java | 4 +- .../comparison/EqualsIntsEvaluator.java | 4 +- .../comparison/EqualsKeywordsEvaluator.java | 4 +- .../comparison/EqualsLongsEvaluator.java | 4 +- .../GreaterThanDoublesEvaluator.java | 4 +- .../comparison/GreaterThanIntsEvaluator.java | 4 +- .../GreaterThanKeywordsEvaluator.java | 4 +- .../comparison/GreaterThanLongsEvaluator.java | 4 +- .../GreaterThanOrEqualDoublesEvaluator.java | 4 +- .../GreaterThanOrEqualIntsEvaluator.java | 4 +- .../GreaterThanOrEqualKeywordsEvaluator.java | 4 +- .../GreaterThanOrEqualLongsEvaluator.java | 4 +- .../InsensitiveEqualsConstantEvaluator.java | 4 +- .../InsensitiveEqualsEvaluator.java | 4 +- .../comparison/LessThanDoublesEvaluator.java | 4 +- .../comparison/LessThanIntsEvaluator.java | 4 +- .../comparison/LessThanKeywordsEvaluator.java | 4 +- .../comparison/LessThanLongsEvaluator.java | 4 +- .../LessThanOrEqualDoublesEvaluator.java | 4 +- .../LessThanOrEqualIntsEvaluator.java | 4 +- .../LessThanOrEqualKeywordsEvaluator.java | 4 +- .../LessThanOrEqualLongsEvaluator.java | 4 +- .../comparison/NotEqualsBoolsEvaluator.java | 4 +- .../comparison/NotEqualsDoublesEvaluator.java | 4 +- .../NotEqualsGeometriesEvaluator.java | 4 +- .../comparison/NotEqualsIntsEvaluator.java | 4 +- .../NotEqualsKeywordsEvaluator.java | 4 +- .../comparison/NotEqualsLongsEvaluator.java | 4 +- .../xpack/esql/evaluator/EvalMapper.java | 6 +-- 114 files changed, 360 insertions(+), 266 deletions(-) create mode 100644 docs/changelog/109332.yaml diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 7aedbfa9ee99..7335dfbd8f23 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -15,6 +15,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -84,7 +86,17 @@ public class EvalBenchmark { } @Param( - { "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending", "rlike" } + { + "abs", + "add", + "add_double", + "date_trunc", + "equal_to_const", + "long_equal_to_long", + "long_equal_to_int", + "mv_min", + "mv_min_ascending", + "rlike" } ) public String operation; @@ -105,6 +117,13 @@ public class EvalBenchmark { layout(longField) ).get(driverContext); } + case "add_double" -> { + FieldAttribute doubleField = doubleField(); + yield EvalMapper.toEvaluator( + new Add(Source.EMPTY, doubleField, new Literal(Source.EMPTY, 1D, DataType.DOUBLE)), + layout(doubleField) + ).get(driverContext); + } case "date_trunc" -> { FieldAttribute timestamp = new FieldAttribute( Source.EMPTY, @@ -150,6 +169,10 @@ public class EvalBenchmark { return new FieldAttribute(Source.EMPTY, "long", new EsField("long", DataType.LONG, Map.of(), true)); } + private static FieldAttribute doubleField() { + return new FieldAttribute(Source.EMPTY, "double", new EsField("double", DataType.DOUBLE, Map.of(), true)); + } + private static FieldAttribute intField() { return new FieldAttribute(Source.EMPTY, "int", new EsField("int", DataType.INTEGER, Map.of(), true)); } @@ -182,6 +205,16 @@ public class EvalBenchmark { } } } + case "add_double" -> { + DoubleVector v = actual.getBlock(1).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + 
if (v.getDouble(i) != i * 100_000 + 1D) { + throw new AssertionError( + "[" + operation + "] expected [" + (i * 100_000 + 1D) + "] but was [" + v.getDouble(i) + "]" + ); + } + } + } case "date_trunc" -> { LongVector v = actual.getBlock(1).asVector(); long oneDay = TimeValue.timeValueHours(24).millis(); @@ -239,6 +272,13 @@ public class EvalBenchmark { } yield new Page(builder.build()); } + case "add_double" -> { + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); + for (int i = 0; i < BLOCK_LENGTH; i++) { + builder.appendDouble(i * 100_000D); + } + yield new Page(builder.build()); + } case "long_equal_to_long" -> { var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); var rhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); diff --git a/docs/changelog/109332.yaml b/docs/changelog/109332.yaml new file mode 100644 index 000000000000..3d03523fd518 --- /dev/null +++ b/docs/changelog/109332.yaml @@ -0,0 +1,5 @@ +pr: 109332 +summary: "ES|QL: vectorize eval" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 2d3c59d3f885..e1456328e7f6 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -52,6 +52,8 @@ import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.WARNINGS; import static org.elasticsearch.compute.gen.Types.blockType; import static org.elasticsearch.compute.gen.Types.builderType; +import static org.elasticsearch.compute.gen.Types.elementType; +import static org.elasticsearch.compute.gen.Types.vectorFixedBuilderType; import static org.elasticsearch.compute.gen.Types.vectorType; public class EvaluatorImplementer { @@ 
-167,19 +169,25 @@ public class EvaluatorImplementer { builder.addModifiers(Modifier.PUBLIC).returns(resultDataType); builder.addParameter(TypeName.INT, "positionCount"); - processFunction.args.stream().forEach(a -> { - if (a.paramName(blockStyle) != null) { - builder.addParameter(a.dataType(blockStyle), a.paramName(blockStyle)); - } - }); + boolean vectorize = false; + if (blockStyle == false && processFunction.warnExceptions.isEmpty() && processOutputsMultivalued == false) { + ClassName type = processFunction.resultDataType(false); + vectorize = type.simpleName().startsWith("BytesRef") == false; + } - TypeName builderType = builderType(resultDataType); + TypeName builderType = vectorize ? vectorFixedBuilderType(elementType(resultDataType)) : builderType(resultDataType); builder.beginControlFlow( "try($T result = driverContext.blockFactory().$L(positionCount))", builderType, buildFromFactory(builderType) ); { + processFunction.args.stream().forEach(a -> { + if (a.paramName(blockStyle) != null) { + builder.addParameter(a.dataType(blockStyle), a.paramName(blockStyle)); + } + }); + processFunction.args.stream().forEach(a -> a.createScratch(builder)); builder.beginControlFlow("position: for (int p = 0; p < positionCount; p++)"); @@ -226,7 +234,7 @@ public class EvaluatorImplementer { pattern.append(")"); String builtPattern; if (processFunction.builderArg == null) { - builtPattern = "result.$L(" + pattern + ")"; + builtPattern = vectorize ? 
"result.$L(p, " + pattern + ")" : "result.$L(" + pattern + ")"; args.add(0, appendMethod(resultDataType)); } else { builtPattern = pattern.toString(); @@ -249,8 +257,9 @@ public class EvaluatorImplementer { } builder.endControlFlow(); builder.addStatement("return result.build()"); - builder.endControlFlow(); } + builder.endControlFlow(); + return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java index 5b82950c7de3..953b7172a286 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; @@ -59,9 +58,9 @@ final class DoubleArrayState extends AbstractArrayState implements GroupingAggre Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (DoubleVector.Builder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.appendDouble(values.get(selected.getInt(i))); + builder.appendDouble(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -107,7 +106,7 @@ final 
class DoubleArrayState extends AbstractArrayState implements GroupingAggre } else { valuesBuilder.appendDouble(0); // TODO can we just use null? } - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java index 0234f36f6675..034ed72d08c1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java @@ -58,9 +58,9 @@ final class IntArrayState extends AbstractArrayState implements GroupingAggregat Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (IntVector.Builder builder = driverContext.blockFactory().newIntVectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().newIntVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.appendInt(values.get(selected.getInt(i))); + builder.appendInt(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -106,7 +106,7 @@ final class IntArrayState extends AbstractArrayState implements GroupingAggregat } else { valuesBuilder.appendInt(0); // TODO can we just use null? 
} - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java index 860bf43eaad8..9ff7e3f53648 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.util.LongArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; @@ -65,9 +64,9 @@ final class LongArrayState extends AbstractArrayState implements GroupingAggrega Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (LongVector.Builder builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.appendLong(values.get(selected.getInt(i))); + builder.appendLong(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -113,7 +112,7 @@ final class LongArrayState extends AbstractArrayState implements GroupingAggrega } else { valuesBuilder.appendLong(0); // TODO can we just use null? 
} - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java index 2dc5b441ca00..cbd20f15c651 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -272,7 +272,7 @@ public class RateDoubleAggregator { try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); DoubleBlock.Builder values = blockFactory.newDoubleBlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -290,11 +290,11 @@ public class RateDoubleAggregator { } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java index 1ba8b9264c24..01c3e3d7fb8e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -273,7 +273,7 @@ public class RateIntAggregator { try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); IntBlock.Builder values = blockFactory.newIntBlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -291,11 +291,11 @@ public class RateIntAggregator { } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java index 846c6f0cc273..c84985b703ae 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -272,7 +272,7 @@ public class RateLongAggregator { try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); LongBlock.Builder values = blockFactory.newLongBlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -290,11 +290,11 @@ public class RateLongAggregator { } 
values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index c8921a7c9f02..5cf900cfc4a7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -116,7 +116,7 @@ public sealed interface BooleanVector extends Vector permits ConstantBooleanVect private static BooleanVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newBooleanVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendBoolean(in.readBoolean()); + builder.appendBoolean(i, in.readBoolean()); } return builder.build(); } @@ -150,5 +150,8 @@ public sealed interface BooleanVector extends Vector permits ConstantBooleanVect */ @Override FixedBuilder appendBoolean(boolean value); + + FixedBuilder appendBoolean(int index, boolean value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index 4cc2ec17b6ad..8b952ee0d951 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder 
{ */ private int nextIndex; + private boolean closed; + BooleanVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { return this; } + @Override + public BooleanVectorFixedBuilder appendBoolean(int idx, boolean value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? ConstantBooleanVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { @Override public BooleanVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; BooleanVector vector; if (values.length == 1) { vector = blockFactory.newConstantBooleanBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 3d93043f93d8..10d4f4abe5f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -117,7 +117,7 @@ public sealed 
interface DoubleVector extends Vector permits ConstantDoubleVector private static DoubleVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newDoubleVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendDouble(in.readDouble()); + builder.appendDouble(i, in.readDouble()); } return builder.build(); } @@ -151,5 +151,8 @@ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector */ @Override FixedBuilder appendDouble(double value); + + FixedBuilder appendDouble(int index, double value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index 42cdd0f5667f..ff363b36e44b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + DoubleVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { return this; } + @Override + public DoubleVectorFixedBuilder appendDouble(int idx, double value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantDoubleVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { @Override public DoubleVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; DoubleVector vector; if (values.length == 1) { vector = blockFactory.newConstantDoubleBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index b1a2d1b80a41..384d5813d575 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -126,7 +126,7 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA private static IntVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(in.readInt()); + builder.appendInt(i, in.readInt()); } return builder.build(); } @@ -169,5 +169,8 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA */ @Override FixedBuilder appendInt(int 
value); + + FixedBuilder appendInt(int index, int value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 77e3511a5cb5..9ab01d019252 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + IntVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { return this; } + @Override + public IntVectorFixedBuilder appendInt(int idx, int value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantIntVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { @Override public IntVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; IntVector vector; if (values.length == 1) { vector = blockFactory.newConstantIntBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index e2f53d1ee07f..a74146b692e3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -117,7 +117,7 @@ public sealed interface LongVector extends Vector permits ConstantLongVector, Lo private static LongVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newLongVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendLong(in.readLong()); + builder.appendLong(i, in.readLong()); } return builder.build(); } @@ -151,5 +151,8 @@ public sealed interface LongVector extends Vector permits ConstantLongVector, Lo */ @Override FixedBuilder appendLong(long value); + + 
FixedBuilder appendLong(int index, long value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 2ad259198bf1..77dd0a87dfb2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + LongVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { return this; } + @Override + public LongVectorFixedBuilder appendLong(int idx, long value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantLongVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { @Override public LongVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; LongVector vector; if (values.length == 1) { vector = blockFactory.newConstantLongBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st index e81af4841d1a..246aebe2c08e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st @@ -14,7 +14,9 @@ $if(long)$ import org.elasticsearch.compute.data.IntVector; $endif$ import org.elasticsearch.compute.data.$Type$Block; +$if(int)$ import org.elasticsearch.compute.data.$Type$Vector; +$endif$ $if(double)$ import org.elasticsearch.compute.data.IntVector; $endif$ @@ -72,9 +74,9 @@ $endif$ Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try ($Type$Vector.Builder builder = driverContext.blockFactory().new$Type$VectorFixedBuilder(selected.getPositionCount())) { 
+ try (var builder = driverContext.blockFactory().new$Type$VectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.append$Type$(values.get(selected.getInt(i))); + builder.append$Type$(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -120,7 +122,7 @@ $endif$ } else { valuesBuilder.append$Type$(0); // TODO can we just use null? } - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st index ad305809c665..212a017cb300 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -275,7 +275,7 @@ public class Rate$Type$Aggregator { try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); $Type$Block.Builder values = blockFactory.new$Type$BlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -293,11 +293,11 @@ public class Rate$Type$Aggregator { } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 17aa5afbe3ad..4c2817588904 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -64,7 +64,7 @@ final class BooleanBlockHash extends BlockHash { int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(MultivalueDedupeBoolean.hashOrd(everSeen, vector.getBoolean(i))); + builder.appendInt(i, MultivalueDedupeBoolean.hashOrd(everSeen, vector.getBoolean(i))); } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java index ce11d1bb6414..626c5bb910ce 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java @@ -84,12 +84,12 @@ final class BytesRef3BlockHash extends BlockHash { private void addVectors(BytesRefVector v1, BytesRefVector v2, BytesRefVector v3, GroupingAggregatorFunction.AddInput addInput) { final int positionCount = v1.getPositionCount(); - try (IntVector.Builder ordsBuilder = blockFactory.newIntVectorFixedBuilder(positionCount)) { + try (IntVector.FixedBuilder ordsBuilder = blockFactory.newIntVectorFixedBuilder(positionCount)) { // TODO: enable ordinal vectors in BytesRefBlockHash try (IntVector k1 = hash1.add(v1); IntVector k2 = 
hash2.add(v2); IntVector k3 = hash3.add(v3)) { for (int p = 0; p < positionCount; p++) { long ord = hashOrdToGroup(finalHash.add(k1.getInt(p), k2.getInt(p), k3.getInt(p))); - ordsBuilder.appendInt(Math.toIntExact(ord)); + ordsBuilder.appendInt(p, Math.toIntExact(ord)); } } try (IntVector ords = ordsBuilder.build()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index 11423539db39..3be4db702a93 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -67,7 +67,7 @@ final class LongLongBlockHash extends BlockHash { int positions = vector1.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))))); + builder.appendInt(i, Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))))); } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java index 09b1022200b6..7cbc7cc4c25d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java @@ -96,13 +96,13 @@ public final class TimeSeriesBlockHash extends BlockHash { LongVector timestampIntervals = null; try ( BytesRefVector.Builder tsidHashesBuilder = 
blockFactory.newBytesRefVectorBuilder(positions); - LongVector.Builder timestampIntervalsBuilder = blockFactory.newLongVectorFixedBuilder(positions) + LongVector.FixedBuilder timestampIntervalsBuilder = blockFactory.newLongVectorFixedBuilder(positions) ) { BytesRef scratch = new BytesRef(); for (long i = 0; i < positions; i++) { BytesRef key1 = this.tsidHashes.get(intervalHash.getKey1(i), scratch); tsidHashesBuilder.appendBytesRef(key1); - timestampIntervalsBuilder.appendLong(intervalHash.getKey2(i)); + timestampIntervalsBuilder.appendLong((int) i, intervalHash.getKey2(i)); } tsidHashes = tsidHashesBuilder.build(); timestampIntervals = timestampIntervalsBuilder.build(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java index bcb245146c2c..b8a02642450c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java @@ -51,9 +51,9 @@ public final class AscendingSequenceRowInTableLookup extends RowInTableLookup { } private IntVector lookupVectorInRange(IntVector vector) { - try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(vector.getPositionCount())) { + try (IntVector.FixedBuilder builder = blockFactory.newIntVectorFixedBuilder(vector.getPositionCount())) { for (int i = 0; i < vector.getPositionCount(); i++) { - builder.appendInt(vector.getInt(i) - min); + builder.appendInt(i, vector.getInt(i) - min); } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 
628ee93ed757..0113f4940adb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -185,7 +185,7 @@ $endif$ private static $Type$Vector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.new$Type$Vector$if(BytesRef)$$else$Fixed$endif$Builder(positions)) { for (int i = 0; i < positions; i++) { - builder.append$Type$(in.read$Type$()); + builder.append$Type$($if(BytesRef)$$else$i, $endif$in.read$Type$()); } return builder.build(); } @@ -243,6 +243,9 @@ $else$ */ @Override FixedBuilder append$Type$($type$ value); + + FixedBuilder append$Type$(int index, $type$ value); + } $endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index af783a243525..a8876c512009 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -25,6 +25,8 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { */ private int nextIndex; + private boolean closed; + $Type$VectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { return this; } + @Override + public $Type$VectorFixedBuilder append$Type$(int idx, $type$ value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
Constant$Type$Vector.RAM_BYTES_USED @@ -53,13 +61,10 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { @Override public $Type$Vector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; $Type$Vector vector; if (values.length == 1) { vector = blockFactory.newConstant$Type$BlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 2aea504db1ac..b5ae35bfc8d7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -611,12 +611,12 @@ public class OrdinalsGroupingOperator implements Operator { @Override IntBlock readOrdinalsAdded1(IntVector docs) throws IOException { final int positionCount = docs.getPositionCount(); - try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(positionCount)) { + try (IntVector.FixedBuilder builder = blockFactory.newIntVectorFixedBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { if (sortedDocValues.advanceExact(docs.getInt(p))) { - 
builder.appendInt(sortedDocValues.ordValue() + 1); + builder.appendInt(p, sortedDocValues.ordValue() + 1); } else { - builder.appendInt(0); + builder.appendInt(p, 0); } } return builder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java index b92c6d01e507..9665590940af 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java @@ -37,9 +37,9 @@ public class SequenceBooleanBlockSourceOperator extends AbstractBlockSourceOpera @Override protected Page createPage(int positionOffset, int length) { - try (BooleanVector.Builder builder = blockFactory.newBooleanVectorFixedBuilder(length)) { + try (BooleanVector.FixedBuilder builder = blockFactory.newBooleanVectorFixedBuilder(length)) { for (int i = 0; i < length; i++) { - builder.appendBoolean(values[positionOffset + i]); + builder.appendBoolean(i, values[positionOffset + i]); } currentPosition += length; return new Page(builder.build().asBlock()); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index 1ca9ddcb4574..76e09389c7ad 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -67,9 +67,9 @@ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { } public BooleanVector eval(int 
positionCount, BooleanVector vVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Not.process(vVector.getBoolean(p))); + result.appendBoolean(p, Not.process(vVector.getBoolean(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index 3109687b8c57..4c8988bbf603 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -84,14 +84,14 @@ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEv } public BooleanVector eval(int positionCount, BooleanVector[] valuesVectors) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getBoolean(p); } - result.appendBoolean(Greatest.process(valuesValues)); + result.appendBoolean(p, Greatest.process(valuesValues)); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index 0ab8c6c3588a..20121bd3727a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -84,14 +84,14 @@ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEva } public DoubleVector eval(int positionCount, DoubleVector[] valuesVectors) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getDouble(p); } - result.appendDouble(Greatest.process(valuesValues)); + result.appendDouble(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 0969c7f70820..85268a83b159 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -84,14 +84,14 @@ 
public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvalua } public IntVector eval(int positionCount, IntVector[] valuesVectors) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getInt(p); } - result.appendInt(Greatest.process(valuesValues)); + result.appendInt(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 3bcafa572c9e..98e45ea0fe7b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -84,14 +84,14 @@ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvalu } public LongVector eval(int positionCount, LongVector[] valuesVectors) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getLong(p); } - 
result.appendLong(Greatest.process(valuesValues)); + result.appendLong(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index fe46511f70a9..2dce335fc442 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -84,14 +84,14 @@ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvalu } public BooleanVector eval(int positionCount, BooleanVector[] valuesVectors) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getBoolean(p); } - result.appendBoolean(Least.process(valuesValues)); + result.appendBoolean(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index f174cbc3561a..eb605876045f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -84,14 +84,14 @@ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvalua } public DoubleVector eval(int positionCount, DoubleVector[] valuesVectors) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getDouble(p); } - result.appendDouble(Least.process(valuesValues)); + result.appendDouble(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index 888e84ce7bea..3a69293b66cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -84,14 +84,14 @@ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator } public IntVector eval(int positionCount, IntVector[] valuesVectors) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack 
valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getInt(p); } - result.appendInt(Least.process(valuesValues)); + result.appendInt(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index f270cad20738..00494374236e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -84,14 +84,14 @@ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluato } public LongVector eval(int positionCount, LongVector[] valuesVectors) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getLong(p); } - result.appendLong(Least.process(valuesValues)); + result.appendLong(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index 6b7c794810f1..abff711e5c19 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -75,9 +75,9 @@ public final class DateExtractConstantEvaluator implements EvalOperator.Expressi } public LongVector eval(int positionCount, LongVector valueVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(DateExtract.process(valueVector.getLong(p), chronoField, zone)); + result.appendLong(p, DateExtract.process(valueVector.getLong(p), chronoField, zone)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index 7c027fc1143d..b72203ce0de3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -71,9 +71,9 @@ public final class DateTruncEvaluator implements EvalOperator.ExpressionEvaluato } public LongVector eval(int positionCount, LongVector fieldValVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(DateTrunc.process(fieldValVector.getLong(p), rounding)); + 
result.appendLong(p, DateTrunc.process(fieldValVector.getLong(p), rounding)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index ef3f08b3d2b7..1894d19d7b08 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -37,9 +37,9 @@ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { } public LongVector eval(int positionCount) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Now.process(now)); + result.appendLong(p, Now.process(now)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index 070d0a5a07ed..8782e547c383 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -113,7 +113,7 @@ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluato public BooleanVector eval(int positionCount, BytesRefVector ipVector, BytesRefVector[] cidrsVectors) { - try(BooleanVector.Builder result = 
driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef ipScratch = new BytesRef(); BytesRef[] cidrsValues = new BytesRef[cidrs.length]; BytesRef[] cidrsScratch = new BytesRef[cidrs.length]; @@ -125,7 +125,7 @@ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluato for (int i = 0; i < cidrsVectors.length; i++) { cidrsValues[i] = cidrsVectors[i].getBytesRef(p, cidrsScratch[i]); } - result.appendBoolean(CIDRMatch.process(ipVector.getBytesRef(p, ipScratch), cidrsValues)); + result.appendBoolean(p, CIDRMatch.process(ipVector.getBytesRef(p, ipScratch), cidrsValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index 235182f5e6ec..330ee39d4990 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -67,9 +67,9 @@ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluato } public DoubleVector eval(int positionCount, DoubleVector fieldValVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Abs.process(fieldValVector.getDouble(p))); + result.appendDouble(p, Abs.process(fieldValVector.getDouble(p))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index bec60e7bc870..c453fbd08267 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -67,9 +67,9 @@ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { } public IntVector eval(int positionCount, IntVector fieldValVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Abs.process(fieldValVector.getInt(p))); + result.appendInt(p, Abs.process(fieldValVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index 46f2f18c07ec..3e75e955b258 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -67,9 +67,9 @@ public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator } public LongVector eval(int positionCount, LongVector fieldValVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = 
driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Abs.process(fieldValVector.getLong(p))); + result.appendLong(p, Abs.process(fieldValVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index e5abbfc6c7c7..0c3bb4933336 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -87,9 +87,9 @@ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { } public DoubleVector eval(int positionCount, DoubleVector yVector, DoubleVector xVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Atan2.process(yVector.getDouble(p), xVector.getDouble(p))); + result.appendDouble(p, Atan2.process(yVector.getDouble(p), xVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index 364e94bd20f7..0902d138620a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -67,9 +67,9 @@ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Atan.process(valVector.getDouble(p))); + result.appendDouble(p, Atan.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index 8e05a9f01fc4..12e074f1d504 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -69,9 +69,9 @@ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEv } public DoubleVector eval(int positionCount, IntVector vVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cast.castIntToDouble(vVector.getInt(p))); + result.appendDouble(p, Cast.castIntToDouble(vVector.getInt(p))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index 7dc73231f109..29e28c305a16 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -69,9 +69,9 @@ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEval } public LongVector eval(int positionCount, IntVector vVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Cast.castIntToLong(vVector.getInt(p))); + result.appendLong(p, Cast.castIntToLong(vVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index 8a98dd61c6b8..61d19f02c4cb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -69,9 +69,9 @@ public final class CastIntToUnsignedLongEvaluator implements EvalOperator.Expres } public LongVector eval(int positionCount, IntVector vVector) { - try(LongVector.Builder result = 
driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Cast.castIntToUnsignedLong(vVector.getInt(p))); + result.appendLong(p, Cast.castIntToUnsignedLong(vVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 230ed8cbdd53..fdfc31b471d8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -69,9 +69,9 @@ public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionE } public DoubleVector eval(int positionCount, LongVector vVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cast.castLongToDouble(vVector.getLong(p))); + result.appendDouble(p, Cast.castLongToDouble(vVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index 050205ceede7..4198062c2ecf 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -67,9 +67,9 @@ public final class CastLongToUnsignedLongEvaluator implements EvalOperator.Expre } public LongVector eval(int positionCount, LongVector vVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Cast.castLongToUnsignedLong(vVector.getLong(p))); + result.appendLong(p, Cast.castLongToUnsignedLong(vVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index 7d76e27e426f..3ae66262f9b0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -69,9 +69,9 @@ public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.Exp } public DoubleVector eval(int positionCount, LongVector vVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - 
result.appendDouble(Cast.castUnsignedLongToDouble(vVector.getLong(p))); + result.appendDouble(p, Cast.castUnsignedLongToDouble(vVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java index eb833043c4d6..843d8f0d58c3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java @@ -69,9 +69,9 @@ public final class CbrtUnsignedLongEvaluator implements EvalOperator.ExpressionE } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cbrt.processUnsignedLong(valVector.getLong(p))); + result.appendDouble(p, Cbrt.processUnsignedLong(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index 4ab5481af167..6ee809c683f7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -67,9 +67,9 @@ public final class CeilDoubleEvaluator 
implements EvalOperator.ExpressionEvaluat } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Ceil.process(valVector.getDouble(p))); + result.appendDouble(p, Ceil.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index e78dcbbfbb1c..7d2833dc025d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -67,9 +67,9 @@ public final class CosEvaluator implements EvalOperator.ExpressionEvaluator { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cos.process(valVector.getDouble(p))); + result.appendDouble(p, Cos.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index 0ae93dddeb5a..fb3bbb34bf72 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -67,9 +67,9 @@ public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvalua } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Floor.process(valVector.getDouble(p))); + result.appendDouble(p, Floor.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index 40ac314983db..1fe09cdd7079 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -89,9 +89,9 @@ public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvalua } public DoubleVector eval(int positionCount, DoubleVector valVector, LongVector decimalsVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Round.process(valVector.getDouble(p), decimalsVector.getLong(p))); + 
result.appendDouble(p, Round.process(valVector.getDouble(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index b6476e867fdf..c9b3c778139c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -67,9 +67,9 @@ public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.Expres } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Round.process(valVector.getDouble(p))); + result.appendDouble(p, Round.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index cac749b070f6..75886d8fb5ac 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -89,9 +89,9 @@ public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator } public IntVector 
eval(int positionCount, IntVector valVector, LongVector decimalsVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Round.process(valVector.getInt(p), decimalsVector.getLong(p))); + result.appendInt(p, Round.process(valVector.getInt(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index 86c44ed9fef5..3c37fab209a4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -87,9 +87,9 @@ public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluato } public LongVector eval(int positionCount, LongVector valVector, LongVector decimalsVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Round.process(valVector.getLong(p), decimalsVector.getLong(p))); + result.appendLong(p, Round.process(valVector.getLong(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 731bfd8f5989..2826feeea29b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -87,9 +87,9 @@ public final class RoundUnsignedLongEvaluator implements EvalOperator.Expression } public LongVector eval(int positionCount, LongVector valVector, LongVector decimalsVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Round.processUnsignedLong(valVector.getLong(p), decimalsVector.getLong(p))); + result.appendLong(p, Round.processUnsignedLong(valVector.getLong(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java index cb7add4a6727..c1f184afc588 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java @@ -67,9 +67,9 @@ public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvalu } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + 
try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.process(valVector.getDouble(p))); + result.appendDouble(p, Signum.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java index 3e6f0e52347c..68b603cd98a0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java @@ -69,9 +69,9 @@ public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluato } public DoubleVector eval(int positionCount, IntVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.process(valVector.getInt(p))); + result.appendDouble(p, Signum.process(valVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java index 3b05ca81d787..b66532789a57 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java @@ -69,9 +69,9 @@ public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluat } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.process(valVector.getLong(p))); + result.appendDouble(p, Signum.process(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java index c080d5c9d1c9..2fa03ed2cf44 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java @@ -69,9 +69,9 @@ public final class SignumUnsignedLongEvaluator implements EvalOperator.Expressio } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.processUnsignedLong(valVector.getLong(p))); + result.appendDouble(p, Signum.processUnsignedLong(valVector.getLong(p))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index 50f10aaefa99..23df0d539b63 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -67,9 +67,9 @@ public final class SinEvaluator implements EvalOperator.ExpressionEvaluator { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sin.process(valVector.getDouble(p))); + result.appendDouble(p, Sin.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index 868342a59fbe..eba1d041e673 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -69,9 +69,9 @@ public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionE } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + 
try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sqrt.processUnsignedLong(valVector.getLong(p))); + result.appendDouble(p, Sqrt.processUnsignedLong(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 9a4d71d8b8c5..de602995cc32 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -67,9 +67,9 @@ public final class TanEvaluator implements EvalOperator.ExpressionEvaluator { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Tan.process(valVector.getDouble(p))); + result.appendDouble(p, Tan.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java index 5e4046d0a8c4..80a1448820cc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -67,9 +67,9 @@ public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Tanh.process(valVector.getDouble(p))); + result.appendDouble(p, Tanh.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java index b5e353d64100..1d9da890a1b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java @@ -96,10 +96,10 @@ public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator impl public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - 
result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialContains.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java index 96c72ee13780..4630ef9b01b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -96,10 +96,10 @@ public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator impl public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java index 0d4a5aa196f4..a38dacc1e04b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -96,10 +96,10 @@ public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator im public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java index 1a6e8a1cd172..dae80c04b43d 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java @@ -96,10 +96,10 @@ public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implem public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialWithin.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index e8859b70e658..21491b4272ea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -78,10 +78,10 @@ public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEval } public BooleanVector eval(int positionCount, BytesRefVector inputVector) { - 
try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); + result.appendBoolean(p, AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index a644b097d18f..e075cdcff882 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -93,11 +93,11 @@ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator public BooleanVector eval(int positionCount, BytesRefVector strVector, BytesRefVector suffixVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef suffixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(EndsWith.process(strVector.getBytesRef(p, strScratch), suffixVector.getBytesRef(p, suffixScratch))); + result.appendBoolean(p, EndsWith.process(strVector.getBytesRef(p, strScratch), suffixVector.getBytesRef(p, suffixScratch))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index d45b28e7b7a3..5d71fa5a4d70 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -71,10 +71,10 @@ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { } public IntVector eval(int positionCount, BytesRefVector valVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Length.process(valVector.getBytesRef(p, valScratch))); + result.appendInt(p, Length.process(valVector.getBytesRef(p, valScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java index 82cbd88f0db0..17430f8fc572 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -115,11 +115,11 @@ public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector, IntVector startVector) { - try(IntVector.Builder 
result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef substrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); + result.appendInt(p, Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java index a6c9fc92ef66..9f206426a348 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -92,11 +92,11 @@ public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEval } public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef substrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); + result.appendInt(p, Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); } 
return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 92e015642ce7..a932e449f650 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -93,11 +93,11 @@ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluat public BooleanVector eval(int positionCount, BytesRefVector strVector, BytesRefVector prefixVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef prefixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(StartsWith.process(strVector.getBytesRef(p, strScratch), prefixVector.getBytesRef(p, prefixScratch))); + result.appendBoolean(p, StartsWith.process(strVector.getBytesRef(p, strScratch), prefixVector.getBytesRef(p, prefixScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index fa5b631accb7..fbf25c5fec39 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -87,9 +87,9 @@ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluat } public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Add.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendDouble(p, Add.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 4b27514cfda8..9f3d5aa6d8b1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -87,9 +87,9 @@ public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluat } public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Mul.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendDouble(p, 
Mul.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index edd9db230cd0..24f04a23ebb4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -67,9 +67,9 @@ public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluat } public DoubleVector eval(int positionCount, DoubleVector vVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Neg.processDoubles(vVector.getDouble(p))); + result.appendDouble(p, Neg.processDoubles(vVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 9fb0dbc0c5df..291cb5648e21 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -87,9 +87,9 @@ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluat } 
public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sub.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendDouble(p, Sub.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java index 10f51451752b..e39a9482215f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -87,9 +87,9 @@ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvalua } public BooleanVector eval(int positionCount, BooleanVector lhsVector, BooleanVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Equals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p))); + result.appendBoolean(p, Equals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java index bbf78fb01aa4..cdf84e050621 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -89,9 +89,9 @@ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEval } public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Equals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendBoolean(p, Equals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java index ab7464e45e57..ad942e63c6f4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java @@ -92,11 +92,11 @@ public final class EqualsGeometriesEvaluator implements 
EvalOperator.ExpressionE } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Equals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, Equals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java index c40087ed6901..d60efd0edded 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -89,9 +89,9 @@ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluat } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Equals.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, Equals.processInts(lhsVector.getInt(p), 
rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 95cfd5a03de9..e28dcaeba31d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -92,11 +92,11 @@ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEva } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Equals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, Equals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java index bf2479f50d9a..504422e59071 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -89,9 +89,9 @@ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvalua } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Equals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, Equals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index d962ae071590..c1e0fcd09f17 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -89,9 +89,9 @@ public final class GreaterThanDoublesEvaluator implements EvalOperator.Expressio } public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThan.processDoubles(lhsVector.getDouble(p), 
rhsVector.getDouble(p))); + result.appendBoolean(p, GreaterThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java index a080d435d324..721310c8a751 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -89,9 +89,9 @@ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEv } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, GreaterThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 3f02de949387..1edb13c789a9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -92,11 +92,11 @@ public final class GreaterThanKeywordsEvaluator implements EvalOperator.Expressi } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, GreaterThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java index 1ad7026f2c03..79bc2b646b2f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -89,9 +89,9 @@ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionE } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = 
driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, GreaterThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index a80be6b77707..9b39defdf744 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -89,9 +89,9 @@ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.Ex } public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendBoolean(p, GreaterThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index 5b181e7bcea7..c6aa1e89c199 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -89,9 +89,9 @@ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.Expre } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, GreaterThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index 46db71e878e9..baddf53e4d74 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -92,11 +92,11 @@ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.E } public BooleanVector eval(int positionCount, 
BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, GreaterThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index 1967e9b29edd..d2291d175263 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -89,9 +89,9 @@ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.Expr } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(GreaterThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, 
GreaterThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java index c431e37bd98d..4a1737f01a24 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java @@ -75,10 +75,10 @@ public final class InsensitiveEqualsConstantEvaluator implements EvalOperator.Ex } public BooleanVector eval(int positionCount, BytesRefVector lhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), rhs)); + result.appendBoolean(p, InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), rhs)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java index 7a0f76ff6597..9dc408311b15 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java @@ -92,11 +92,11 @@ public final class InsensitiveEqualsEvaluator implements EvalOperator.Expression } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(InsensitiveEquals.process(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, InsensitiveEquals.process(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java index e2c11e3ce095..922c95b2bb55 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -89,9 +89,9 @@ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEv } public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = 
driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendBoolean(p, LessThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java index fb8bf746f0d1..f8d7b716b337 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -89,9 +89,9 @@ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvalu } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, LessThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java index b826a87d8f86..af31709cc957 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -92,11 +92,11 @@ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionE } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, LessThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java index f75b43e6ec7f..8911398202ce 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -89,9 +89,9 @@ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEval } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = 
driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, LessThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index 5c03b488ff47..ea2097bead16 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -89,9 +89,9 @@ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.Expre } public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendBoolean(p, LessThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index 6fe1996a52f2..01a46e011d34 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -89,9 +89,9 @@ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.Expressi } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, LessThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index 4572929d0f6f..d30033733130 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -92,11 +92,11 @@ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.Expr } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, 
BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, LessThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index 42642205c451..3c1a03006a84 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -89,9 +89,9 @@ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.Express } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(LessThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, LessThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return 
result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index 46357ab39032..0dc80fdbcf16 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -87,9 +87,9 @@ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEva } public BooleanVector eval(int positionCount, BooleanVector lhsVector, BooleanVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p))); + result.appendBoolean(p, NotEquals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index 0004f7bc23a3..f439ec0e94d9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -89,9 +89,9 @@ public final class 
NotEqualsDoublesEvaluator implements EvalOperator.ExpressionE } public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendBoolean(p, NotEquals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java index aec5629c3b7f..7553a5667f4a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java @@ -92,11 +92,11 @@ public final class NotEqualsGeometriesEvaluator implements EvalOperator.Expressi } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + 
result.appendBoolean(p, NotEquals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 69a7ffdb3963..19098d89be46 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -89,9 +89,9 @@ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEval } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, NotEquals.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 6ca5d8ddc0b6..124674540475 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -92,11 +92,11 @@ public final class NotEqualsKeywordsEvaluator implements EvalOperator.Expression } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, NotEquals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java index a0b176952acf..25f95af9266e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -89,9 +89,9 @@ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEva } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = 
driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, NotEquals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index a788a7abcc8c..c8074d29e057 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -127,7 +127,7 @@ public final class EvalMapper { int positionCount = lhs.getPositionCount(); try (var result = lhs.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - result.appendBoolean(bl.function().apply(lhs.getBoolean(p), rhs.getBoolean(p))); + result.appendBoolean(p, bl.function().apply(lhs.getBoolean(p), rhs.getBoolean(p))); } return result.build().asBlock(); } @@ -264,7 +264,7 @@ public final class EvalMapper { } try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) { for (int p = 0; p < page.getPositionCount(); p++) { - builder.appendBoolean(fieldBlock.isNull(p)); + builder.appendBoolean(p, fieldBlock.isNull(p)); } return builder.build().asBlock(); } @@ -313,7 +313,7 @@ public final class EvalMapper { } try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) { for (int p = 0; p < page.getPositionCount(); p++) { - builder.appendBoolean(fieldBlock.isNull(p) == false); + builder.appendBoolean(p, fieldBlock.isNull(p) == false); } return builder.build().asBlock(); } From 89945db31c86820573af47745fcb3d427acf9652 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 
7 Jun 2024 20:08:23 +0200 Subject: [PATCH 16/58] Fork freeing search/scroll contexts to GENERIC pool (#109481) While unlikely, freeing these things may result in closing `Store` instances which itself may block. => we shouldn't do this on the transport threads ever => added some assertions to that effect and made all the free type actions fork to generic where these block times don't hurt the same way This is motivated by seeing a significant number of slow inbound handler warnings for freeing scroll contexts. --- docs/changelog/109481.yaml | 5 +++++ .../elasticsearch/action/search/SearchTransportService.java | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/109481.yaml diff --git a/docs/changelog/109481.yaml b/docs/changelog/109481.yaml new file mode 100644 index 000000000000..e8251788a90b --- /dev/null +++ b/docs/changelog/109481.yaml @@ -0,0 +1,5 @@ +pr: 109481 +summary: Fork freeing search/scroll contexts to GENERIC pool +area: Search +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index d627da9b0e33..873c644725ab 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -439,7 +439,7 @@ public class SearchTransportService { }; transportService.registerRequestHandler( FREE_CONTEXT_SCROLL_ACTION_NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, + transportService.getThreadPool().generic(), ScrollFreeContextRequest::new, instrumentedHandler(FREE_CONTEXT_SCROLL_ACTION_METRIC, transportService, searchTransportMetrics, freeContextHandler) ); @@ -447,7 +447,7 @@ public class SearchTransportService { transportService.registerRequestHandler( FREE_CONTEXT_ACTION_NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, + transportService.getThreadPool().generic(), 
SearchFreeContextRequest::new, instrumentedHandler(FREE_CONTEXT_ACTION_METRIC, transportService, searchTransportMetrics, freeContextHandler) ); @@ -455,7 +455,7 @@ public class SearchTransportService { transportService.registerRequestHandler( CLEAR_SCROLL_CONTEXTS_ACTION_NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, + transportService.getThreadPool().generic(), TransportRequest.Empty::new, instrumentedHandler(CLEAR_SCROLL_CONTEXTS_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { searchService.freeAllScrollContexts(); From b8c774422b80f9f0668b266cc55dfef2d60f1402 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Fri, 7 Jun 2024 21:44:53 +0200 Subject: [PATCH 17/58] Fix #109005 (#109491) --- x-pack/plugin/inference/build.gradle | 2 +- .../action/filter/ShardBulkInferenceActionFilterIT.java | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index f1f131119643..f4378d8ab5b7 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -41,7 +41,7 @@ dependencies { } if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { + tasks.withType(Test).configureEach { systemProperty 'es.semantic_text_feature_flag_enabled', 'true' } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 1602aa3af5e9..300c0d2c471d 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java 
@@ -54,7 +54,6 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { return Arrays.asList(Utils.TestInferencePlugin.class); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109005") public void testBulkOperations() throws Exception { Map shardsSettings = Collections.singletonMap(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)); indicesAdmin().prepareCreate(INDEX_NAME) From 37a6010b898dbba832e2acfd789cfa041cdc0006 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Jun 2024 15:57:57 -0400 Subject: [PATCH 18/58] ESQL: Reenable test (#109490) We disabled this test because it failed once but it wasn't reproducable and the test logs didn't give us any hints of what happened. So this reenables is. If we're lucky it'll fail again with useful logs. If we're not lucky it'll never fail again and we'll blame cosmic rays. Closes #109477 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 96c72bd60e3d..5b9478f1e452 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -62,9 +62,6 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppendTests method: testEvaluateBlockWithoutNulls {TestCase=, } issue: https://github.com/elastic/elasticsearch/issues/109409 -- class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlClientYamlIT" - issue: "https://github.com/elastic/elasticsearch/issues/109477" - method: "test {p0=esql/150_lookup/multivalued keys}" - class: "org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109478" method: "test {yaml=reference/esql/processing-commands/lookup/line_31}" From 96e297bcf6318a6848627fd48ae9d6ed1afe6c64 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sat, 8 Jun 2024 20:29:20 +0100 Subject: [PATCH 19/58] Avoid invalid command in `FieldCapabilitiesIT` (#109507) `FieldCapabilitiesIT#testRelocation` will sometimes try and move a shard in 
an invalid way, either from a node which no longer has a copy of the shard due to rebalancing, or to a node which already has a copy of the shard. The reroute command then throws an exception which is ultimately ignored. This commit tightens the test up by disabling rebalancing and choosing the target node more carefully. --- .../search/fieldcaps/FieldCapabilitiesIT.java | 50 ++++++++++++------- 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index 09633a0ea1b3..ae3347dafd55 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -25,8 +25,8 @@ import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -187,6 +187,14 @@ public class FieldCapabilitiesIT extends ESIntegTestCase { return List.of(TestMapperPlugin.class, ExceptionOnRewriteQueryPlugin.class, BlockingOnRewriteQueryPlugin.class); } + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) + 
.build(); + } + @Override protected boolean addMockHttpTransport() { return false; // enable http @@ -529,23 +537,31 @@ public class FieldCapabilitiesIT extends ESIntegTestCase { closeShardNoCheck(indexShard, randomBoolean()); } else if (randomBoolean()) { final ShardId shardId = indexShard.shardId(); - final String[] nodeNames = internalCluster().getNodeNames(); - final String newNodeName = randomValueOtherThanMany(n -> nodeName.equals(n) == false, () -> randomFrom(nodeNames)); - DiscoveryNode fromNode = null; - DiscoveryNode toNode = null; - for (DiscoveryNode node : clusterService().state().nodes()) { - if (node.getName().equals(nodeName)) { - fromNode = node; - } - if (node.getName().equals(newNodeName)) { - toNode = node; + + final var targetNodes = new ArrayList(); + for (final var targetIndicesService : internalCluster().getInstances(IndicesService.class)) { + final var targetNode = targetIndicesService.clusterService().localNode(); + if (targetNode.canContainData() && targetIndicesService.getShardOrNull(shardId) == null) { + targetNodes.add(targetNode.getId()); } } - assertNotNull(fromNode); - assertNotNull(toNode); - clusterAdmin().prepareReroute() - .add(new MoveAllocationCommand(shardId.getIndexName(), shardId.id(), fromNode.getId(), toNode.getId())) - .get(); + + if (targetNodes.isEmpty()) { + continue; + } + + safeGet( + clusterAdmin().prepareReroute() + .add( + new MoveAllocationCommand( + shardId.getIndexName(), + shardId.id(), + indicesService.clusterService().localNode().getId(), + randomFrom(targetNodes) + ) + ) + .execute() + ); } } } @@ -570,7 +586,7 @@ public class FieldCapabilitiesIT extends ESIntegTestCase { if (randomBoolean()) { request.indexFilter(QueryBuilders.rangeQuery("timestamp").gte("2020-01-01")); } - final FieldCapabilitiesResponse response = client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet(); + final FieldCapabilitiesResponse response = safeGet(client().execute(TransportFieldCapabilitiesAction.TYPE, 
request)); assertThat(response.getIndices(), arrayContainingInAnyOrder("log-index-1", "log-index-2")); assertThat(response.getField("field1"), aMapWithSize(2)); assertThat(response.getField("field1"), hasKey("long")); From 78587ab41a6c0ea2f2a03bb036b612652505d09c Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 10 Jun 2024 07:41:02 +0100 Subject: [PATCH 20/58] Suppress stack traces from failed snapshot clones (#109495) Extends the behaviour introduced in #105622 to cover clone-snapshot operations as well as create-snapshot ones. --- .../snapshots/SnapshotsService.java | 43 +++++++++++-------- .../snapshots/SnapshotResiliencyTests.java | 28 +++++++++++- 2 files changed, 51 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index b4c15e7b77b7..7ca92ebfdcf3 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -395,7 +395,7 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement @Override public void onFailure(Exception e) { initializingClones.remove(snapshot); - logger.warn(() -> format("[%s][%s] failed to clone snapshot", repositoryName, snapshotName), e); + logSnapshotFailure("clone", snapshot, e); listener.onFailure(e); } @@ -3845,28 +3845,33 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement @Override public void onFailure(Exception e) { - final var logLevel = snapshotFailureLogLevel(e); - if (logLevel == Level.INFO && logger.isDebugEnabled() == false) { - // suppress stack trace at INFO unless extra verbosity is configured - logger.info( - format( - "[%s][%s] failed to create snapshot: %s", - snapshot.getRepository(), - snapshot.getSnapshotId().getName(), - e.getMessage() - ) - ); - } else { - logger.log( - logLevel, - () -> format("[%s][%s] failed to 
create snapshot", snapshot.getRepository(), snapshot.getSnapshotId().getName()), - e - ); - } + logSnapshotFailure("create", snapshot, e); listener.onFailure(e); } } + private static void logSnapshotFailure(String operation, Snapshot snapshot, Exception e) { + final var logLevel = snapshotFailureLogLevel(e); + if (logLevel == Level.INFO && logger.isDebugEnabled() == false) { + // suppress stack trace at INFO unless extra verbosity is configured + logger.info( + format( + "[%s][%s] failed to %s snapshot: %s", + snapshot.getRepository(), + snapshot.getSnapshotId().getName(), + operation, + e.getMessage() + ) + ); + } else { + logger.log( + logLevel, + () -> format("[%s][%s] failed to %s snapshot", snapshot.getRepository(), snapshot.getSnapshotId().getName(), operation), + e + ); + } + } + private static Level snapshotFailureLogLevel(Exception e) { if (MasterService.isPublishFailureException(e)) { // no action needed, the new master will take things from here diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 6419759ab596..40064e2b68ed 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1495,6 +1495,25 @@ public class SnapshotResiliencyTests extends ESTestCase { fail("snapshot should not have started"); } + @Override + public void onFailure(Exception e) { + assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(SnapshotNameAlreadyInUseException.class)); + l.onResponse(null); + } + }) + ) + // attempt to clone snapshot + .andThen( + (l, ignored) -> client().admin() + .cluster() + .prepareCloneSnapshot(repoName, snapshotName, snapshotName) + .setIndices("*") + .execute(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + fail("snapshot should not have started"); + } 
+ @Override public void onFailure(Exception e) { assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(SnapshotNameAlreadyInUseException.class)); @@ -1503,6 +1522,7 @@ public class SnapshotResiliencyTests extends ESTestCase { }) ); + final var expectedMessage = Strings.format("Invalid snapshot name [%s], snapshot with the same name already exists", snapshotName); MockLog.assertThatLogger(() -> { deterministicTaskQueue.runAllRunnableTasks(); assertTrue("executed all runnable tasks but test steps are still incomplete", testListener.isDone()); @@ -1513,7 +1533,13 @@ public class SnapshotResiliencyTests extends ESTestCase { "INFO log", SnapshotsService.class.getCanonicalName(), Level.INFO, - Strings.format("*failed to create snapshot*Invalid snapshot name [%s]*", snapshotName) + Strings.format("*failed to create snapshot*%s", expectedMessage) + ), + new MockLog.SeenEventExpectation( + "INFO log", + SnapshotsService.class.getCanonicalName(), + Level.INFO, + Strings.format("*failed to clone snapshot*%s", expectedMessage) ) ); } From 683245e41e5be562dd7e9bdafe80a627d9edb992 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 10 Jun 2024 08:47:40 +0100 Subject: [PATCH 21/58] Detect long-running tasks on network threads (#109204) This commit introduces a watchdog timer to monitor for long-running tasks on network threads. If a network thread is active and has not made progress for two consecutive ticks of the timer then the watchdog logs a warning and a thread dump. 
--- docs/changelog/109204.yaml | 5 + .../modules/network/threading.asciidoc | 53 ++- .../netty4/Netty4HttpPipeliningHandler.java | 42 ++- .../netty4/Netty4HttpServerTransport.java | 13 +- .../netty4/Netty4MessageInboundHandler.java | 13 +- .../transport/netty4/Netty4Transport.java | 14 +- .../Netty4HttpPipeliningHandlerTests.java | 41 ++- .../common/network/ThreadWatchdogIT.java | 163 ++++++++++ .../common/network/NetworkService.java | 5 + .../common/network/ThreadWatchdog.java | 280 ++++++++++++++++ .../common/settings/ClusterSettings.java | 3 + .../common/network/ThreadWatchdogTests.java | 305 ++++++++++++++++++ .../common/network/ThreadWatchdogHelper.java | 18 ++ .../AbstractSimpleTransportTestCase.java | 59 ++++ 14 files changed, 986 insertions(+), 28 deletions(-) create mode 100644 docs/changelog/109204.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java create mode 100644 server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java create mode 100644 server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java create mode 100644 test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java diff --git a/docs/changelog/109204.yaml b/docs/changelog/109204.yaml new file mode 100644 index 000000000000..b5b22ef1a06f --- /dev/null +++ b/docs/changelog/109204.yaml @@ -0,0 +1,5 @@ +pr: 109204 +summary: Detect long-running tasks on network threads +area: Network +type: enhancement +issues: [] diff --git a/docs/reference/modules/network/threading.asciidoc b/docs/reference/modules/network/threading.asciidoc index abf00b521b5c..832ffc0c1588 100644 --- a/docs/reference/modules/network/threading.asciidoc +++ b/docs/reference/modules/network/threading.asciidoc @@ -109,10 +109,49 @@ the `transport_worker` threads are too busy. It is more reliable to use profiling trace. These tools are independent of any work the JVM is performing. 
It may also be possible to identify some reasons for delays from the server -logs, particularly looking at warnings from -`org.elasticsearch.transport.InboundHandler` and -`org.elasticsearch.transport.OutboundHandler`. Warnings about long processing -times from the `InboundHandler` are particularly indicative of incorrect -threading behaviour, whereas the transmission time reported by the -`OutboundHandler` includes time spent waiting for network congestion and the -`transport_worker` thread is free to do other work during this time. +logs. See for instance the following loggers: + +`org.elasticsearch.transport.InboundHandler`:: This logger reports a warning if +processing an inbound message occupies a network thread for unreasonably long, +which is almost certainly a bug. The warning includes some information which +can be used to identify the message that took unreasonably long to process. + +`org.elasticsearch.transport.OutboundHandler`:: This logger reports a warning +if sending an outbound message takes longer than expected. This duration +includes time spent waiting for network congestion to clear, and time spent +processing other work on the same network thread, so does not always indicate +the presence of a bug related to the outbound message specified in the log +entry. + +`org.elasticsearch.common.network.ThreadWatchdog`:: This logger reports a +warning and a thread dump when it notices that a network thread has not made +progress between two consecutive checks, which is almost certainly a bug: ++ +-- +[source,text] +---- +[WARN ][o.e.c.n.ThreadWatchdog ] the following threads are active but did not make progress in the preceding [5s]: [elasticsearch[instance-0000000004][transport_worker][T#1]]] +[WARN ][o.e.c.n.ThreadWatchdog ] hot threads dump due to active threads not making progress [part 1]: H4sIAAAAAAAA/+1aa2/bOBb93l8hYLUYFWgYvWw5AQbYpEkn6STZbJyiwAwGA1qiY8US6ZJUHvPr90qk/JJky41TtDMuUIci... 
+[WARN ][o.e.c.n.ThreadWatchdog ] hot threads dump due to active threads not making progress [part 2]: LfXL/x70a3eL8ve6Ral74ZBrp5x7HmUD9KXQz1MaXUNfFC6SeEysxSw1cNXL9JXYl3AigAE7ywbm/AZ+ll3Ox4qXJHNjVr6h... +[WARN ][o.e.c.n.ThreadWatchdog ] hot threads dump due to active threads not making progress (gzip compressed, base64-encoded, and split into 2 parts on preceding log lines; ... +---- + +To reconstruct the thread dump, base64-decode the data and decompress it using `gzip`. For instance, on Unix-like systems: + +[source,sh] +---- +cat watchdog.log | sed -e 's/.*://' | base64 --decode | gzip --decompress +---- + +This mechanism can be controlled with the following settings: + +`network.thread.watchdog.interval`::: +(<>, <>) +Defines the interval between watchdog checks. Defaults to `5s`. Set to `0` to +disable the network thread watchdog. + +`network.thread.watchdog.quiet_time`::: +(<>, <>) +Defines the interval between watchdog warnings. Defaults to `10m`. + +-- diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java index 9cf210c2a8aa..cfbd9ad68a31 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java @@ -31,6 +31,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; @@ -56,6 +57,7 @@ public class Netty4HttpPipeliningHandler extends ChannelDuplexHandler { private static final Logger logger = 
LogManager.getLogger(Netty4HttpPipeliningHandler.class); private final int maxEventsHeld; + private final ThreadWatchdog.ActivityTracker activityTracker; private final PriorityQueue> outboundHoldingQueue; private record ChunkedWrite(PromiseCombiner combiner, ChannelPromise onDone, ChunkedRestResponseBodyPart responseBodyPart) {} @@ -90,31 +92,41 @@ public class Netty4HttpPipeliningHandler extends ChannelDuplexHandler { * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel connection; this is * required as events cannot queue up indefinitely */ - public Netty4HttpPipeliningHandler(final int maxEventsHeld, final Netty4HttpServerTransport serverTransport) { + public Netty4HttpPipeliningHandler( + final int maxEventsHeld, + final Netty4HttpServerTransport serverTransport, + final ThreadWatchdog.ActivityTracker activityTracker + ) { this.maxEventsHeld = maxEventsHeld; + this.activityTracker = activityTracker; this.outboundHoldingQueue = new PriorityQueue<>(1, Comparator.comparingInt(t -> t.v1().getSequence())); this.serverTransport = serverTransport; } @Override public void channelRead(final ChannelHandlerContext ctx, final Object msg) { - assert msg instanceof FullHttpRequest : "Should have fully aggregated message already but saw [" + msg + "]"; - final FullHttpRequest fullHttpRequest = (FullHttpRequest) msg; - final Netty4HttpRequest netty4HttpRequest; - if (fullHttpRequest.decoderResult().isFailure()) { - final Throwable cause = fullHttpRequest.decoderResult().cause(); - final Exception nonError; - if (cause instanceof Error) { - ExceptionsHelper.maybeDieOnAnotherThread(cause); - nonError = new Exception(cause); + activityTracker.startActivity(); + try { + assert msg instanceof FullHttpRequest : "Should have fully aggregated message already but saw [" + msg + "]"; + final FullHttpRequest fullHttpRequest = (FullHttpRequest) msg; + final Netty4HttpRequest netty4HttpRequest; + if 
(fullHttpRequest.decoderResult().isFailure()) { + final Throwable cause = fullHttpRequest.decoderResult().cause(); + final Exception nonError; + if (cause instanceof Error) { + ExceptionsHelper.maybeDieOnAnotherThread(cause); + nonError = new Exception(cause); + } else { + nonError = (Exception) cause; + } + netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest, nonError); } else { - nonError = (Exception) cause; + netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest); } - netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest, nonError); - } else { - netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest); + handlePipelinedRequest(ctx, netty4HttpRequest); + } finally { + activityTracker.stopActivity(); } - handlePipelinedRequest(ctx, netty4HttpRequest); } // protected so tests can override it diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 277015e240d5..f48a3143fd01 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -38,6 +38,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -94,6 +95,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private final TLSConfig tlsConfig; private final AcceptChannelHandler.AcceptPredicate acceptChannelPredicate; private 
final HttpValidator httpValidator; + private final ThreadWatchdog threadWatchdog; private final int readTimeoutMillis; private final int maxCompositeBufferComponents; @@ -130,6 +132,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { this.tlsConfig = tlsConfig; this.acceptChannelPredicate = acceptChannelPredicate; this.httpValidator = httpValidator; + this.threadWatchdog = networkService.getThreadWatchdog(); this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); @@ -381,7 +384,15 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { if (handlingSettings.compression()) { ch.pipeline().addLast("encoder_compress", new HttpContentCompressor(handlingSettings.compressionLevel())); } - ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(transport.pipeliningMaxEvents, transport)); + ch.pipeline() + .addLast( + "pipelining", + new Netty4HttpPipeliningHandler( + transport.pipeliningMaxEvents, + transport, + transport.threadWatchdog.getActivityTrackerForCurrentThread() + ) + ); transport.serverAcceptedChannel(nettyHttpChannel); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java index 8924bc1924ad..e39a60e0efd5 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java @@ -15,6 +15,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; import 
org.elasticsearch.transport.InboundPipeline; @@ -30,9 +31,16 @@ public class Netty4MessageInboundHandler extends ChannelInboundHandlerAdapter { private final InboundPipeline pipeline; - public Netty4MessageInboundHandler(Netty4Transport transport, InboundPipeline inboundPipeline) { + private final ThreadWatchdog.ActivityTracker activityTracker; + + public Netty4MessageInboundHandler( + Netty4Transport transport, + InboundPipeline inboundPipeline, + ThreadWatchdog.ActivityTracker activityTracker + ) { this.transport = transport; this.pipeline = inboundPipeline; + this.activityTracker = activityTracker; } @Override @@ -44,8 +52,11 @@ public class Netty4MessageInboundHandler extends ChannelInboundHandlerAdapter { final ByteBuf buffer = (ByteBuf) msg; Netty4TcpChannel channel = ctx.channel().attr(Netty4Transport.CHANNEL_KEY).get(); final BytesReference wrapped = Netty4Utils.toBytesReference(buffer); + activityTracker.startActivity(); try (ReleasableBytesReference reference = new ReleasableBytesReference(wrapped, new ByteBufRefCounted(buffer))) { pipeline.handleBytes(channel, reference); + } finally { + activityTracker.stopActivity(); } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 6d8f950ef1cf..d85bf32da263 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -30,6 +30,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.unit.ByteSizeValue; @@ -78,6 +79,8 @@ public class Netty4Transport extends TcpTransport { private volatile SharedGroupFactory.SharedGroup sharedGroup; protected final boolean remoteClusterPortEnabled; + private final ThreadWatchdog threadWatchdog; + public Netty4Transport( Settings settings, TransportVersion version, @@ -92,6 +95,7 @@ public class Netty4Transport extends TcpTransport { Netty4Utils.setAvailableProcessors(EsExecutors.allocatedProcessors(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); this.sharedGroupFactory = sharedGroupFactory; + this.threadWatchdog = networkService.getThreadWatchdog(); // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one this.receivePredictorMin = Netty4Plugin.NETTY_RECEIVE_PREDICTOR_MIN.get(settings); @@ -125,6 +129,7 @@ public class Netty4Transport extends TcpTransport { bindServer(profileSettings); } } + threadWatchdog.run(settings, threadPool, lifecycle); success = true; } finally { if (success == false) { @@ -354,7 +359,14 @@ public class Netty4Transport extends TcpTransport { pipeline.addLast("logging", ESLoggingHandler.INSTANCE); } pipeline.addLast("chunked_writer", new Netty4WriteThrottlingHandler(getThreadPool().getThreadContext())); - pipeline.addLast("dispatcher", new Netty4MessageInboundHandler(this, getInboundPipeline(ch, isRemoteClusterServerChannel))); + pipeline.addLast( + "dispatcher", + new Netty4MessageInboundHandler( + this, + getInboundPipeline(ch, isRemoteClusterServerChannel), + threadWatchdog.getActivityTrackerForCurrentThread() + ) + ); } protected InboundPipeline getInboundPipeline(Channel ch, boolean isRemoteClusterServerChannel) { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 
4dca3d17bf07..b2158384fa1c 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -34,6 +34,8 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.bytes.ZeroBytesReference; +import org.elasticsearch.common.network.ThreadWatchdog; +import org.elasticsearch.common.network.ThreadWatchdogHelper; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.http.HttpResponse; import org.elasticsearch.rest.ChunkedRestResponseBodyPart; @@ -53,11 +55,14 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import java.util.stream.IntStream; import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -120,7 +125,7 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase { } private EmbeddedChannel makeEmbeddedChannelWithSimulatedWork(int numberOfRequests) { - return new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests, null) { + return new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests, null, new ThreadWatchdog.ActivityTracker()) { @Override protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { ctx.fireChannelRead(pipelinedRequest); @@ 
-186,7 +191,9 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase { public void testPipeliningRequestsAreReleased() { final int numberOfRequests = 10; - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests + 1, null)); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel( + new Netty4HttpPipeliningHandler(numberOfRequests + 1, null, new ThreadWatchdog.ActivityTracker()) + ); for (int i = 0; i < numberOfRequests; i++) { embeddedChannel.writeInbound(createHttpRequest("/" + i)); @@ -473,6 +480,30 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase { assertThat(messagesSeen.get(1), instanceOf(DefaultHttpContent.class)); } + public void testActivityTracking() { + final var watchdog = new ThreadWatchdog(); + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + final var requestHandled = new AtomicBoolean(); + final var handler = new Netty4HttpPipeliningHandler(Integer.MAX_VALUE, mock(Netty4HttpServerTransport.class), activityTracker) { + @Override + protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { + // thread is not idle while handling the request + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), empty()); + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), equalTo(List.of(Thread.currentThread().getName()))); + ctx.fireChannelRead(pipelinedRequest); + assertTrue(requestHandled.compareAndSet(false, true)); + } + }; + + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new ChannelDuplexHandler(), handler); + embeddedChannel.writeInbound(createHttpRequest("/test")); + assertTrue(requestHandled.get()); + + // thread is now idle + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), empty()); + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), empty()); + } + // assert that a message of the given number of repeated chunks is found at the given 
index in the list and each chunk is equal to // the given BytesReference private static void assertChunkedMessageAtIndex(List messagesSeen, int index, int chunks, BytesReference chunkBytes) { @@ -494,7 +525,11 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase { } private Netty4HttpPipeliningHandler getTestHttpHandler() { - return new Netty4HttpPipeliningHandler(Integer.MAX_VALUE, mock(Netty4HttpServerTransport.class)) { + return new Netty4HttpPipeliningHandler( + Integer.MAX_VALUE, + mock(Netty4HttpServerTransport.class), + new ThreadWatchdog.ActivityTracker() + ) { @Override protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { ctx.fireChannelRead(pipelinedRequest); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java new file mode 100644 index 000000000000..4bd56e2276d1 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.network; + +import org.apache.logging.log4j.core.LogEvent; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.RunOnce; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class ThreadWatchdogIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int 
nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), "100ms") + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME.getKey(), "0") + .build(); + } + + @SuppressWarnings("unchecked") + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopyNoNullElements( + super.nodePlugins(), + SlowRequestProcessingPlugin.class, + MockTransportService.TestPlugin.class + ); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public static class SlowRequestProcessingPlugin extends Plugin implements ActionPlugin { + + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + return List.of(new RestHandler() { + @Override + public List routes() { + return List.of(Route.builder(RestRequest.Method.POST, "_slow").build()); + } + + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + blockAndWaitForWatchdogLogs(); + new RestToXContentListener<>(channel).onResponse((b, p) -> b.startObject().endObject()); + } + }); + } + } + + private static void blockAndWaitForWatchdogLogs() { + final var threadName = Thread.currentThread().getName(); + final var logsSeenLatch = new CountDownLatch(2); + final var warningSeen = new RunOnce(logsSeenLatch::countDown); + final var threadDumpSeen = new RunOnce(logsSeenLatch::countDown); + MockLog.assertThatLogger(() -> safeAwait(logsSeenLatch), ThreadWatchdog.class, new MockLog.LoggingExpectation() { + @Override + public void match(LogEvent event) { + final var formattedMessage = 
event.getMessage().getFormattedMessage(); + if (formattedMessage.contains("the following threads are active but did not make progress in the preceding [100ms]:") + && formattedMessage.contains(threadName)) { + warningSeen.run(); + } + if (formattedMessage.contains("hot threads dump due to active threads not making progress")) { + threadDumpSeen.run(); + } + } + + @Override + public void assertMatched() {} + }); + } + + public void testThreadWatchdogHttpLogging() throws IOException { + ESRestTestCase.assertOK(getRestClient().performRequest(new Request("POST", "_slow"))); + } + + public void testThreadWatchdogTransportLogging() { + internalCluster().ensureAtLeastNumDataNodes(2); + final var transportServiceIterator = internalCluster().getInstances(TransportService.class).iterator(); + final var sourceTransportService = transportServiceIterator.next(); + final var targetTransportService = transportServiceIterator.next(); + + targetTransportService.registerRequestHandler( + "internal:slow", + EsExecutors.DIRECT_EXECUTOR_SERVICE, + TransportRequest.Empty::new, + (request, channel, task) -> { + blockAndWaitForWatchdogLogs(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + ); + + safeAwait( + SubscribableListener.newForked( + l -> sourceTransportService.sendRequest( + targetTransportService.getLocalNode(), + "internal:slow", + new TransportRequest.Empty(), + new ActionListenerResponseHandler( + l, + in -> TransportResponse.Empty.INSTANCE, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) + ) + ) + ); + } + +} diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java index b1a01553ef1b..f5a52cd1c4ce 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -85,6 +85,7 @@ public final class NetworkService { private final List customNameResolvers; 
private final HandlingTimeTracker handlingTimeTracker = new HandlingTimeTracker(); + private final ThreadWatchdog threadWatchdog = new ThreadWatchdog(); public NetworkService(List customNameResolvers) { this.customNameResolvers = Objects.requireNonNull(customNameResolvers, "customNameResolvers must be non null"); @@ -94,6 +95,10 @@ public final class NetworkService { return handlingTimeTracker; } + public ThreadWatchdog getThreadWatchdog() { + return threadWatchdog; + } + /** * Resolves {@code bindHosts} to a list of internet addresses. The list will * not contain duplicate addresses. diff --git a/server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java b/server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java new file mode 100644 index 000000000000..90d4d2493de8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java @@ -0,0 +1,280 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.network; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.ReferenceDocs; +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.monitor.jvm.HotThreads; +import org.elasticsearch.threadpool.ThreadPool; + +import java.lang.ref.WeakReference; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Watchdog mechanism for making sure that no transport thread spends too long blocking the event loop. + */ +// Today we only use this to track activity processing reads on network threads. Tracking time when we're busy processing writes is a little +// trickier because that code is more re-entrant, both within the network layer and also it may complete a listener from the wider codebase +// that ends up calling back into the network layer again. But also we don't see many network threads blocking for ages on the write path, +// so we focus on reads for now. 
+public class ThreadWatchdog { + + public static final Setting NETWORK_THREAD_WATCHDOG_INTERVAL = Setting.timeSetting( + "network.thread.watchdog.interval", + TimeValue.timeValueSeconds(5), + Setting.Property.NodeScope + ); + + public static final Setting NETWORK_THREAD_WATCHDOG_QUIET_TIME = Setting.timeSetting( + "network.thread.watchdog.quiet_time", + TimeValue.timeValueMinutes(10), + Setting.Property.NodeScope + ); + + private static final Logger logger = LogManager.getLogger(ThreadWatchdog.class); + + /** + * Activity tracker for the current thread. Thread-locals are only retained by the owning thread so these will be GCd after thread exit. + */ + private final ThreadLocal activityTrackerThreadLocal = new ThreadLocal<>(); + + /** + * Collection of known activity trackers to be scanned for stuck threads. Uses {@link WeakReference} so that we don't prevent trackers + * from being GCd if a thread exits. There aren't many such trackers, O(#cpus), and they almost never change, so an {@link ArrayList} + * with explicit synchronization is fine. + */ + private final List> knownTrackers = new ArrayList<>(); + + /** + * @return an activity tracker for activities on the current thread. 
+ */ + public ActivityTracker getActivityTrackerForCurrentThread() { + var result = activityTrackerThreadLocal.get(); + if (result == null) { + // this is a previously-untracked thread; thread creation is assumed to be very rare, no need to optimize this path at all + result = new ActivityTracker(); + synchronized (knownTrackers) { + knownTrackers.add(new WeakReference<>(result)); + } + activityTrackerThreadLocal.set(result); + } + return result; + } + + // exposed for testing + List getStuckThreadNames() { + List stuckThreadNames = null; + // this is not called very often, and only on a single thread, with almost no contention on this mutex since thread creation is rare + synchronized (knownTrackers) { + final var iterator = knownTrackers.iterator(); + while (iterator.hasNext()) { + final var tracker = iterator.next().get(); + if (tracker == null) { + // tracker was GCd because its thread exited - very rare, no need to optimize this case + iterator.remove(); + } else if (tracker.isIdleOrMakingProgress() == false) { + if (stuckThreadNames == null) { + stuckThreadNames = new ArrayList<>(); + } + stuckThreadNames.add(tracker.getTrackedThreadName()); + } + } + } + if (stuckThreadNames == null) { + return List.of(); + } else { + stuckThreadNames.sort(Comparator.naturalOrder()); + return stuckThreadNames; + } + } + + /** + * Per-thread class which keeps track of activity on that thread, represented as a {@code long} which is incremented every time an + * activity starts or stops. Thus the parity of its value indicates whether the thread is idle or not. Crucially, the activity tracking + * is very lightweight (on the tracked thread). 
+ */ + public static final class ActivityTracker extends AtomicLong { + + private final Thread trackedThread; + private long lastObservedValue; + + public ActivityTracker() { + this.trackedThread = Thread.currentThread(); + } + + String getTrackedThreadName() { + return trackedThread.getName(); + } + + public void startActivity() { + assert trackedThread == Thread.currentThread() : trackedThread.getName() + " vs " + Thread.currentThread().getName(); + final var prevValue = getAndIncrement(); + assert isIdle(prevValue) : "thread [" + trackedThread.getName() + "] was already active"; + } + + public void stopActivity() { + assert trackedThread == Thread.currentThread() : trackedThread.getName() + " vs " + Thread.currentThread().getName(); + final var prevValue = getAndIncrement(); + assert isIdle(prevValue) == false : "thread [" + trackedThread.getName() + "] was already idle"; + } + + boolean isIdleOrMakingProgress() { + final var value = get(); + if (isIdle(value)) { + return true; + } + if (value == lastObservedValue) { + // no change since last check + return false; + } else { + // made progress since last check + lastObservedValue = value; + return true; + } + } + + private static boolean isIdle(long value) { + // the parity of the value indicates the idle state: initially zero (idle), so active == odd + return (value & 1) == 0; + } + } + + public void run(Settings settings, ThreadPool threadPool, Lifecycle lifecycle) { + new Checker(threadPool, NETWORK_THREAD_WATCHDOG_INTERVAL.get(settings), NETWORK_THREAD_WATCHDOG_QUIET_TIME.get(settings), lifecycle) + .run(); + } + + /** + * Action which runs itself periodically, calling {@link #getStuckThreadNames} to check for active threads that didn't make progress + * since the last call, and if it finds any then it dispatches {@link #threadDumper} to log the current hot threads. 
+ */ + private final class Checker extends AbstractRunnable { + private final ThreadPool threadPool; + private final TimeValue interval; + private final TimeValue quietTime; + private final Lifecycle lifecycle; + + Checker(ThreadPool threadPool, TimeValue interval, TimeValue quietTime, Lifecycle lifecycle) { + this.threadPool = threadPool; + this.interval = interval; + this.quietTime = quietTime.compareTo(interval) <= 0 ? interval : quietTime; + this.lifecycle = lifecycle; + assert this.interval.millis() <= this.quietTime.millis(); + } + + @Override + protected void doRun() { + if (isRunning() == false) { + return; + } + + boolean rescheduleImmediately = true; + try { + final var stuckThreadNames = getStuckThreadNames(); + if (stuckThreadNames.isEmpty() == false) { + logger.warn( + "the following threads are active but did not make progress in the preceding [{}]: {}", + interval, + stuckThreadNames + ); + rescheduleImmediately = false; + threadPool.generic().execute(threadDumper); + } + } finally { + if (rescheduleImmediately) { + scheduleNext(interval); + } + } + } + + @Override + public boolean isForceExecution() { + return true; + } + + private boolean isRunning() { + return 0 < interval.millis() && lifecycle.stoppedOrClosed() == false; + } + + private void scheduleNext(TimeValue delay) { + if (isRunning()) { + threadPool.scheduleUnlessShuttingDown(delay, EsExecutors.DIRECT_EXECUTOR_SERVICE, Checker.this); + } + } + + private final AbstractRunnable threadDumper = new AbstractRunnable() { + @Override + protected void doRun() { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.GENERIC); + if (isRunning()) { + HotThreads.logLocalHotThreads( + logger, + Level.WARN, + "hot threads dump due to active threads not making progress", + ReferenceDocs.NETWORK_THREADING_MODEL + ); + } + } + + @Override + public boolean isForceExecution() { + return true; + } + + @Override + public void onFailure(Exception e) { + Checker.this.onFailure(e); + } + + @Override + 
public void onRejection(Exception e) { + Checker.this.onRejection(e); + } + + @Override + public void onAfter() { + scheduleNext(quietTime); + } + + @Override + public String toString() { + return "ThreadWatchDog$Checker#threadDumper"; + } + }; + + @Override + public void onFailure(Exception e) { + logger.error("exception in ThreadWatchDog$Checker", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + logger.debug("ThreadWatchDog$Checker execution rejected", e); + assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e; + } + + @Override + public String toString() { + return "ThreadWatchDog$Checker"; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 2ea0cc60b5af..90609adabfbc 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -60,6 +60,7 @@ import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -421,6 +422,8 @@ public final class ClusterSettings extends AbstractScopedSettings { NetworkService.TCP_REUSE_ADDRESS, NetworkService.TCP_SEND_BUFFER_SIZE, NetworkService.TCP_RECEIVE_BUFFER_SIZE, + ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL, + ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME, IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, ScriptService.SCRIPT_CACHE_SIZE_SETTING, diff --git 
a/server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java b/server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java new file mode 100644 index 000000000000..6ffbfd65dc45 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java @@ -0,0 +1,305 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.network; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.core.TimeValue.timeValueMillis; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; + +public class ThreadWatchdogTests extends ESTestCase { + + public void testSimpleActivityTracking() throws InterruptedException { + final var watchdog = new ThreadWatchdog(); + final var barrier = new CyclicBarrier(2); + final var threadName = "watched-thread"; + final var thread = new Thread(() -> { + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + + assertEquals(0L, activityTracker.get()); + if (randomBoolean()) { + 
// ensure overflow is no problem + activityTracker.set(Long.MAX_VALUE - randomFrom(1, 3, 5)); + } + + safeAwait(barrier); + // step 1: thread is idle + safeAwait(barrier); + + activityTracker.startActivity(); + + safeAwait(barrier); + // step 2: thread is active + safeAwait(barrier); + + for (int i = between(1, 10); i > 0; i--) { + activityTracker.stopActivity(); + activityTracker.startActivity(); + } + + safeAwait(barrier); + // step 3: thread still active, but made progress + safeAwait(barrier); + + activityTracker.stopActivity(); + + safeAwait(barrier); + // step 4: thread is idle again + safeAwait(barrier); + + }, threadName); + thread.start(); + + safeAwait(barrier); + + // step 1: thread is idle + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(), watchdog.getStuckThreadNames()); + + safeAwait(barrier); + safeAwait(barrier); + + // step 2: thread is active + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); // just to check it's still reported as stuck + + safeAwait(barrier); + safeAwait(barrier); + + // step 3: thread still active, but made progress + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); // just to check it's still reported as stuck + + safeAwait(barrier); + safeAwait(barrier); + + // step 4: thread is idle again + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(), watchdog.getStuckThreadNames()); + + safeAwait(barrier); + + thread.join(); + } + + public void testMultipleBlockedThreads() throws InterruptedException { + final var threadNames = randomList(2, 10, ESTestCase::randomIdentifier); + + final var watchdog = new ThreadWatchdog(); + final var barrier = new CyclicBarrier(threadNames.size() + 
1); + final var threads = new Thread[threadNames.size()]; + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + safeAwait(barrier); + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + activityTracker.startActivity(); + safeAwait(barrier); + // wait for main test thread + safeAwait(barrier); + activityTracker.stopActivity(); + }, threadNames.get(i)); + threads[i].start(); + } + + safeAwait(barrier); + safeAwait(barrier); + + try { + assertEquals(List.of(), watchdog.getStuckThreadNames()); + threadNames.sort(Comparator.naturalOrder()); // stuck threads are sorted by name + assertEquals(threadNames, watchdog.getStuckThreadNames()); + assertEquals(threadNames, watchdog.getStuckThreadNames()); // just to check they're all still reported as stuck + } finally { + safeAwait(barrier); + for (final var thread : threads) { + thread.join(); + } + } + } + + public void testConcurrency() throws Exception { + final var keepGoing = new AtomicBoolean(true); + final var watchdog = new ThreadWatchdog(); + final var threads = new Thread[between(1, 5)]; + final var semaphoresByThreadName = new HashMap(); + final var warmUpLatches = new CountDownLatch[threads.length]; + try { + for (int i = 0; i < threads.length; i++) { + final var threadName = "watched-thread-" + i; + final var semaphore = new Semaphore(1); + final var warmUpLatch = new CountDownLatch(20); + warmUpLatches[i] = warmUpLatch; + semaphoresByThreadName.put(threadName, semaphore); + threads[i] = new Thread(() -> { + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + while (keepGoing.get()) { + activityTracker.startActivity(); + try { + safeAcquire(semaphore); + Thread.yield(); + semaphore.release(); + Thread.yield(); + } finally { + activityTracker.stopActivity(); + warmUpLatch.countDown(); + } + } + }, threadName); + threads[i].start(); + } + + for (final var warmUpLatch : warmUpLatches) { + safeAwait(warmUpLatch); + } + + final var 
threadToBlock = randomFrom(semaphoresByThreadName.keySet()); + final var semaphore = semaphoresByThreadName.get(threadToBlock); + safeAcquire(semaphore); + assertBusy(() -> assertThat(watchdog.getStuckThreadNames(), hasItem(threadToBlock))); + semaphore.release(); + assertBusy(() -> assertThat(watchdog.getStuckThreadNames(), not(hasItem(threadToBlock)))); + } finally { + keepGoing.set(false); + for (final var thread : threads) { + thread.join(); + } + } + } + + /** + * This logger is mentioned in the docs by name, so we cannot rename it without adjusting the docs. Thus we fix the expected logger + * name in this string constant rather than using {@code ThreadWatchdog.class.getCanonicalName()}. + */ + private static final String LOGGER_NAME = "org.elasticsearch.common.network.ThreadWatchdog"; + + public void testLoggingAndScheduling() { + final var watchdog = new ThreadWatchdog(); + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + + final var settings = Settings.builder(); + final var lifecycle = new Lifecycle(); + assertTrue(lifecycle.moveToStarted()); + + final long checkIntervalMillis; + if (randomBoolean()) { + checkIntervalMillis = ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.get(Settings.EMPTY).millis(); + } else { + checkIntervalMillis = between(1, 100000); + settings.put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), timeValueMillis(checkIntervalMillis)); + } + + final long quietTimeMillis; + if (randomBoolean()) { + quietTimeMillis = ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME.get(Settings.EMPTY).millis(); + } else { + quietTimeMillis = between(1, 100000); + settings.put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME.getKey(), timeValueMillis(quietTimeMillis)); + } + + watchdog.run(settings.build(), deterministicTaskQueue.getThreadPool(), lifecycle); + + for (int i = 0; i < 3; i++) { + assertAdvanceTime(deterministicTaskQueue, 
checkIntervalMillis); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", LOGGER_NAME, Level.WARN, "*") + ); + } + + activityTracker.startActivity(); + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", LOGGER_NAME, Level.WARN, "*") + ); + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.SeenEventExpectation( + "stuck threads logging", + LOGGER_NAME, + Level.WARN, + Strings.format( + "the following threads are active but did not make progress in the preceding [%s]: [%s]", + TimeValue.timeValueMillis(checkIntervalMillis), + Thread.currentThread().getName() + ) + ), + new MockLog.SeenEventExpectation( + "thread dump", + LOGGER_NAME, + Level.WARN, + "hot threads dump due to active threads not making progress (gzip compressed*base64-encoded*" + ) + ); + assertAdvanceTime(deterministicTaskQueue, Math.max(quietTimeMillis, checkIntervalMillis)); + activityTracker.stopActivity(); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", LOGGER_NAME, Level.WARN, "*") + ); + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + deterministicTaskQueue.scheduleNow(lifecycle::moveToStopped); + deterministicTaskQueue.runAllTasksInTimeOrder(); // ensures that the rescheduling stops + } + + public void testDisableWithZeroInterval() { + final var watchdog = new ThreadWatchdog(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + final var lifecycle = new Lifecycle(); + assertTrue(lifecycle.moveToStarted()); + + watchdog.run( + Settings.builder() + 
.put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), randomFrom(TimeValue.ZERO, TimeValue.MINUS_ONE)) + .build(), + deterministicTaskQueue.getThreadPool(), + lifecycle + ); + assertFalse(deterministicTaskQueue.hasAnyTasks()); + + watchdog.run( + Settings.builder().put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), timeValueMillis(between(1, 100000))).build(), + deterministicTaskQueue.getThreadPool(), + lifecycle + ); + assertTrue(deterministicTaskQueue.hasDeferredTasks()); + lifecycle.moveToStopped(); + deterministicTaskQueue.runAllTasksInTimeOrder(); // ensures that the rescheduling stops + } + + private static void assertAdvanceTime(DeterministicTaskQueue deterministicTaskQueue, long expectedMillis) { + final var currentTimeMillis = deterministicTaskQueue.getCurrentTimeMillis(); + deterministicTaskQueue.advanceTime(); + assertEquals(expectedMillis, deterministicTaskQueue.getCurrentTimeMillis() - currentTimeMillis); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java b/test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java new file mode 100644 index 000000000000..7658a37c1df7 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.network; + +import java.util.List; + +public class ThreadWatchdogHelper { + // exposes this package-private method to tests + public static List getStuckThreadNames(ThreadWatchdog watchdog) { + return watchdog.getStuckThreadNames(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 6ced86156d00..b6a8bc343687 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -18,6 +18,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.UnsafePlainActionFuture; @@ -33,6 +34,7 @@ import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -41,6 +43,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import 
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.IOUtils; @@ -226,6 +229,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { ) { Settings updatedSettings = Settings.builder() .put(TransportSettings.PORT.getKey(), getPortRange()) + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), TimeValue.ZERO) // suppress watchdog running concurrently .put(settings) .put(Node.NODE_NAME_SETTING.getKey(), name) .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) // suppress assertions to test production error-handling @@ -3348,6 +3352,61 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } } + public void testWatchdogLogging() { + final var watchdog = networkService.getThreadWatchdog(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + watchdog.run(Settings.EMPTY, deterministicTaskQueue.getThreadPool(), new Lifecycle()); + + final var barrier = new CyclicBarrier(2); + final var threadNameFuture = new PlainActionFuture(); + final var actionName = "internal:action"; + serviceA.registerRequestHandler( + actionName, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + TransportRequest.Empty::new, + (request, channel, task) -> { + threadNameFuture.onResponse(Thread.currentThread().getName()); + safeAwait(barrier); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + ); + + final var responseLatch = new CountDownLatch(1); + submitRequest( + serviceB, + nodeA, + actionName, + new TransportRequest.Empty(), + new ActionListenerResponseHandler( + ActionTestUtils.assertNoFailureListener(t -> responseLatch.countDown()), + in -> TransportResponse.Empty.INSTANCE, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) + ); + + final var threadName = safeGet(threadNameFuture); + assertFalse(deterministicTaskQueue.hasRunnableTasks()); + deterministicTaskQueue.advanceTime(); + MockLog.assertThatLogger( + 
deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", ThreadWatchdog.class.getCanonicalName(), Level.WARN, "*") + ); + deterministicTaskQueue.advanceTime(); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.SeenEventExpectation( + "stuck threads logging", + ThreadWatchdog.class.getCanonicalName(), + Level.WARN, + "the following threads are active but did not make progress in the preceding [5s]: [" + threadName + "]" + ) + ); + safeAwait(barrier); + safeAwait(responseLatch); + } + private static long[] getConstantMessageSizeHistogram(int count, long size) { final var histogram = new long[29]; int bucket = 0; From 8b759a1a70b26117d898f7f74e8ff3b322a8c444 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 10 Jun 2024 08:48:18 +0100 Subject: [PATCH 22/58] Fix trappy timeouts in security settings APIs (#109233) Relates #107984 --- docs/changelog/109233.yaml | 5 ++ .../api/security.get_settings.json | 7 +- .../api/security.update_settings.json | 11 +++- .../org/elasticsearch/TransportVersions.java | 2 + .../settings/GetSecuritySettingsAction.java | 37 +++++++---- .../UpdateSecuritySettingsAction.java | 56 ++++++++++++---- .../privilege/ClusterPrivilegeResolver.java | 2 +- .../UpdateSecuritySettingsActionTests.java | 64 ++++++++++++++++--- .../authz/privilege/PrivilegeTests.java | 4 +- .../TransportGetSecuritySettingsAction.java | 4 +- ...TransportUpdateSecuritySettingsAction.java | 4 +- .../RestGetSecuritySettingsAction.java | 3 +- .../RestUpdateSecuritySettingsAction.java | 14 +++- 13 files changed, 169 insertions(+), 44 deletions(-) create mode 100644 docs/changelog/109233.yaml diff --git a/docs/changelog/109233.yaml b/docs/changelog/109233.yaml new file mode 100644 index 000000000000..36010273c80d --- /dev/null +++ b/docs/changelog/109233.yaml @@ -0,0 +1,5 @@ +pr: 109233 +summary: Fix trappy timeouts in security settings APIs +area: 
Security +type: bug +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json index 09cc8e322f5c..6339d8a6dee9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json @@ -18,6 +18,11 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Timeout for connection to master" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json index fb76ca28f821..998548408c5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json @@ -18,7 +18,16 @@ } ] }, - "params":{}, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Timeout for connection to master" + }, + "timeout":{ + "type":"time", + "description":"Timeout for acknowledgements from all nodes" + } + }, "body":{ "description": "An object with the new settings for each index, if any", "required": true diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0d9844241d69..e6b98971ff8c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -186,6 +186,8 @@ public class TransportVersions { public static final TransportVersion ML_CHUNK_INFERENCE_OPTION = def(8_677_00_0); public static final TransportVersion RANK_FEATURE_PHASE_ADDED = def(8_678_00_0); public static final TransportVersion RANK_DOC_IN_SHARD_FETCH_REQUEST = def(8_679_00_0); + public static final TransportVersion SECURITY_SETTINGS_REQUEST_TIMEOUTS = 
def(8_680_00_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java index 7623a7f65af3..4cea7526bce8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.security.action.settings; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -14,6 +15,8 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,27 +26,39 @@ import static org.elasticsearch.xpack.core.security.action.settings.UpdateSecuri import static org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction.PROFILES_INDEX_NAME; import static org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction.TOKENS_INDEX_NAME; -public class GetSecuritySettingsAction extends ActionType { +public class GetSecuritySettingsAction { - public static final GetSecuritySettingsAction INSTANCE = new GetSecuritySettingsAction(); - public static final String NAME = 
"cluster:admin/xpack/security/settings/get"; + public static final ActionType INSTANCE = new ActionType<>( + "cluster:admin/xpack/security/settings/get" + ); - public GetSecuritySettingsAction() { - super(NAME); - } + private GetSecuritySettingsAction() {/* no instances */} public static class Request extends MasterNodeReadRequest { - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterNodeTimeout) { + super(masterNodeTimeout); } - public Request(StreamInput in) throws IOException { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + @UpdateForV9 // no need for bwc any more, this can be inlined + public static Request readFrom(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + return new Request(in); + } else { + return new Request(TimeValue.THIRTY_SECONDS); + } + } + + private Request(StreamInput in) throws IOException { + super(in); } @Override - public void writeTo(StreamOutput out) throws IOException {} + public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + super.writeTo(out); + } + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 3cce133749e4..2d59911ec7ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.security.action.settings; +import org.elasticsearch.TransportVersions; import 
org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ValidateActions; @@ -16,6 +17,8 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -28,9 +31,9 @@ import java.util.Set; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class UpdateSecuritySettingsAction extends ActionType { - public static final UpdateSecuritySettingsAction INSTANCE = new UpdateSecuritySettingsAction(); - public static final String NAME = "cluster:admin/xpack/security/settings/update"; +public class UpdateSecuritySettingsAction { + + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/xpack/security/settings/update"); // The names here are separate constants for 2 reasons: // 1. 
Keeping the names defined here helps ensure REST compatibility, even if the internal aliases of these indices change, @@ -44,9 +47,7 @@ public class UpdateSecuritySettingsAction extends ActionType { @@ -54,11 +55,19 @@ public class UpdateSecuritySettingsAction extends ActionType tokensIndexSettings; private final Map profilesIndexSettings; + public interface Factory { + Request create( + Map mainIndexSettings, + Map tokensIndexSettings, + Map profilesIndexSettings + ); + } + @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "update_security_settings_request", false, - a -> new Request((Map) a[0], (Map) a[1], (Map) a[2]) + (a, factory) -> factory.create((Map) a[0], (Map) a[1], (Map) a[2]) ); static { @@ -68,18 +77,36 @@ public class UpdateSecuritySettingsAction extends ActionType mainIndexSettings, Map tokensIndexSettings, Map profilesIndexSettings ) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + super(masterNodeTimeout, ackTimeout); this.mainIndexSettings = Objects.requireNonNullElse(mainIndexSettings, Collections.emptyMap()); this.tokensIndexSettings = Objects.requireNonNullElse(tokensIndexSettings, Collections.emptyMap()); this.profilesIndexSettings = Objects.requireNonNullElse(profilesIndexSettings, Collections.emptyMap()); } - public Request(StreamInput in) throws IOException { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + @UpdateForV9 // no need for bwc any more, this can be inlined + public static Request readFrom(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + return new Request(in); + } else { + return new Request(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS, in); + } + } + + private Request(StreamInput in) throws IOException { + super(in); + 
this.mainIndexSettings = in.readGenericMap(); + this.tokensIndexSettings = in.readGenericMap(); + this.profilesIndexSettings = in.readGenericMap(); + } + + private Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, StreamInput in) throws IOException { + super(masterNodeTimeout, ackTimeout); this.mainIndexSettings = in.readGenericMap(); this.tokensIndexSettings = in.readGenericMap(); this.profilesIndexSettings = in.readGenericMap(); @@ -87,13 +114,16 @@ public class UpdateSecuritySettingsAction extends ActionType mainIndexSettings() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 5b3829a00468..4465d7d08318 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -269,7 +269,7 @@ public class ClusterPrivilegeResolver { ActionTypes.QUERY_USER_ACTION.name(), GetUserPrivilegesAction.NAME, // normally authorized under the "same-user" authz check, but added here for uniformity HasPrivilegesAction.NAME, - GetSecuritySettingsAction.NAME + GetSecuritySettingsAction.INSTANCE.name() ) ); public static final NamedClusterPrivilege MANAGE_SAML = new ActionClusterPrivilege("manage_saml", MANAGE_SAML_PATTERN); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java index 7ad647075f52..893f7474c3e6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java @@ -28,7 +28,13 @@ import static org.hamcrest.Matchers.nullValue; public class UpdateSecuritySettingsActionTests extends ESTestCase { public void testValidateSettingsEmpty() { - var req = new UpdateSecuritySettingsAction.Request(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap() + ); var ex = req.validate(); assertThat(ex, notNullValue()); assertThat(ex.getMessage(), containsString("No settings given to update")); @@ -40,17 +46,41 @@ public class UpdateSecuritySettingsActionTests extends ESTestCase { for (String allowedSetting : ALLOWED_SETTING_KEYS) { Map allowedSettingMap = Map.of(allowedSetting, randomAlphaOfLength(5)); allAllowedSettingsMap.put(allowedSetting, randomAlphaOfLength(5)); - var req = new UpdateSecuritySettingsAction.Request(allowedSettingMap, Collections.emptyMap(), Collections.emptyMap()); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + allowedSettingMap, + Collections.emptyMap(), + Collections.emptyMap() + ); assertThat(req.validate(), nullValue()); - req = new UpdateSecuritySettingsAction.Request(Collections.emptyMap(), allowedSettingMap, Collections.emptyMap()); + req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + Collections.emptyMap(), + allowedSettingMap, + Collections.emptyMap() + ); assertThat(req.validate(), nullValue()); - req = new UpdateSecuritySettingsAction.Request(Collections.emptyMap(), Collections.emptyMap(), allowedSettingMap); + req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + Collections.emptyMap(), + Collections.emptyMap(), + allowedSettingMap + ); 
assertThat(req.validate(), nullValue()); } - var req = new UpdateSecuritySettingsAction.Request(allAllowedSettingsMap, allAllowedSettingsMap, allAllowedSettingsMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + allAllowedSettingsMap, + allAllowedSettingsMap, + allAllowedSettingsMap + ); assertThat(req.validate(), nullValue()); } @@ -63,7 +93,13 @@ public class UpdateSecuritySettingsActionTests extends ESTestCase { Map.of(randomFrom(ALLOWED_SETTING_KEYS), randomAlphaOfLength(5)) ); { - var req = new UpdateSecuritySettingsAction.Request(validOrEmptySettingMap, disallowedSettingMap, validOrEmptySettingMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + validOrEmptySettingMap, + disallowedSettingMap, + validOrEmptySettingMap + ); List errors = req.validate().validationErrors(); assertThat(errors, hasSize(1)); for (String errorMsg : errors) { @@ -81,7 +117,13 @@ public class UpdateSecuritySettingsActionTests extends ESTestCase { } { - var req = new UpdateSecuritySettingsAction.Request(disallowedSettingMap, validOrEmptySettingMap, disallowedSettingMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + disallowedSettingMap, + validOrEmptySettingMap, + disallowedSettingMap + ); List errors = req.validate().validationErrors(); assertThat(errors, hasSize(2)); for (String errorMsg : errors) { @@ -101,7 +143,13 @@ public class UpdateSecuritySettingsActionTests extends ESTestCase { } { - var req = new UpdateSecuritySettingsAction.Request(disallowedSettingMap, disallowedSettingMap, disallowedSettingMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + disallowedSettingMap, + disallowedSettingMap, + disallowedSettingMap + ); List errors = req.validate().validationErrors(); assertThat(errors, hasSize(3)); for (String errorMsg : errors) { diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index ad73944f4c64..1ade22179ab5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -285,7 +285,7 @@ public class PrivilegeTests extends ESTestCase { ActionTypes.QUERY_USER_ACTION.name(), HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME, - GetSecuritySettingsAction.NAME + GetSecuritySettingsAction.INSTANCE.name() ); verifyClusterActionAllowed( ClusterPrivilegeResolver.READ_SECURITY, @@ -321,7 +321,7 @@ public class PrivilegeTests extends ESTestCase { ActivateProfileAction.NAME, SetProfileEnabledAction.NAME, UpdateProfileDataAction.NAME, - UpdateSecuritySettingsAction.NAME + UpdateSecuritySettingsAction.INSTANCE.name() ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java index 8b883b01bd16..73abfffcd3a2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java @@ -47,12 +47,12 @@ public class TransportGetSecuritySettingsAction extends TransportMasterNodeActio IndexNameExpressionResolver indexNameExpressionResolver ) { super( - GetSecuritySettingsAction.NAME, + GetSecuritySettingsAction.INSTANCE.name(), transportService, clusterService, threadPool, actionFilters, - GetSecuritySettingsAction.Request::new, + 
GetSecuritySettingsAction.Request::readFrom, indexNameExpressionResolver, GetSecuritySettingsAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java index fc38bf16da8c..20bab85a5092 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java @@ -57,12 +57,12 @@ public class TransportUpdateSecuritySettingsAction extends TransportMasterNodeAc IndexNameExpressionResolver indexNameExpressionResolver ) { super( - UpdateSecuritySettingsAction.NAME, + UpdateSecuritySettingsAction.INSTANCE.name(), transportService, clusterService, threadPool, actionFilters, - UpdateSecuritySettingsAction.Request::new, + UpdateSecuritySettingsAction.Request::readFrom, indexNameExpressionResolver, AcknowledgedResponse::readFrom, EsExecutors.DIRECT_EXECUTOR_SERVICE diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java index 033f692d7b1e..0b4ced0a2044 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import 
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.security.action.settings.GetSecuritySettingsAction; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; @@ -36,7 +37,7 @@ public class RestGetSecuritySettingsAction extends SecurityBaseRestHandler { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - GetSecuritySettingsAction.Request req = new GetSecuritySettingsAction.Request(); + final var req = new GetSecuritySettingsAction.Request(RestUtils.getMasterNodeTimeout(request)); return restChannel -> client.execute(GetSecuritySettingsAction.INSTANCE, req, new RestToXContentListener<>(restChannel)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java index b2e8719b25c2..27ed6d2475d2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; @@ -36,9 +37,18 @@ public class RestUpdateSecuritySettingsAction extends SecurityBaseRestHandler 
{ @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - UpdateSecuritySettingsAction.Request req; + final UpdateSecuritySettingsAction.Request req; try (var parser = request.contentParser()) { - req = UpdateSecuritySettingsAction.Request.parse(parser); + req = UpdateSecuritySettingsAction.Request.parse( + parser, + (mainIndexSettings, tokensIndexSettings, profilesIndexSettings) -> new UpdateSecuritySettingsAction.Request( + RestUtils.getMasterNodeTimeout(request), + RestUtils.getAckTimeout(request), + mainIndexSettings, + tokensIndexSettings, + profilesIndexSettings + ) + ); } return restChannel -> client.execute(UpdateSecuritySettingsAction.INSTANCE, req, new RestToXContentListener<>(restChannel)); } From 4d3f9f2fb93c3f9e0646dcd88e3fe4a74d1a7598 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 10 Jun 2024 09:59:13 +0200 Subject: [PATCH 23/58] Fix RRF example for semantic query (#109516) Follow up to https://github.com/elastic/elasticsearch/pull/109433, fix appropriately this time the semantic query example with RRF. 
--- docs/reference/query-dsl/semantic-query.asciidoc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/reference/query-dsl/semantic-query.asciidoc b/docs/reference/query-dsl/semantic-query.asciidoc index f3eb4b6ea68d..23bcb4a52ef3 100644 --- a/docs/reference/query-dsl/semantic-query.asciidoc +++ b/docs/reference/query-dsl/semantic-query.asciidoc @@ -101,9 +101,11 @@ GET my-index/_search }, { "standard": { - "semantic": { - "field": "semantic_field", - "query": "shoes" + "query": { + "semantic": { + "field": "semantic_field", + "query": "shoes" + } } } } From 9f8d28463bc0b708484b9717f13c548b0587d660 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 10 Jun 2024 11:32:28 +0300 Subject: [PATCH 24/58] Enable failure store feature flag ilm in unit tests (#109463) Fixes https://github.com/elastic/elasticsearch/issues/109457 and https://github.com/elastic/elasticsearch/issues/109458 by enabling the feature flag in release runs. --- x-pack/plugin/core/build.gradle | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 116f864695e1..0c65c7e4b6d2 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -176,6 +176,12 @@ testClusters.configureEach { requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.15.0") } +if (BuildParams.isSnapshotBuild() == false) { + tasks.withType(Test).configureEach { + systemProperty 'es.failure_store_feature_flag_enabled', 'true' + } +} + if (BuildParams.inFipsJvm) { // Test clusters run with security disabled tasks.named("javaRestTest").configure { enabled = false } From 3cd35079bba5511739d95a38e44b33861473560e Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 10 Jun 2024 12:11:55 +0300 Subject: [PATCH 25/58] [TEST] Assert that the ILM policy gets created in TimeSeriesRestDriver (#109510) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit We still see errors [like](https://gradle-enterprise.elastic.co/s/gvt5metawa44g/tests/task/:x-pack:plugin:ilm:qa:multi-node:javaRestTest/details/org.elasticsearch.xpack.ilm.actions.DownsampleActionIT/testRollupIndex?top-execution=1): ``` [2024-06-07T07:01:01,493][WARN ][o.e.x.i.a.DownsampleActionIT] [testRollupIndex] This test is running on the legacy test framework; historical features from production code will not be available. You need to port the test to the new test plugins in order to use historical features from production code. If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as org.elasticsearch.test.rest.RestTestLegacyFeatures. |     | [2024-06-07T07:01:01,493][INFO ][o.e.x.i.a.DownsampleActionIT] [testRollupIndex] --> running [testRollupIndex] with index [index-vaqoveopxt], data stream [ds-svdndfstvk], alias [alias-YLpBP] and policy [policy-NxqyM] |     | [2024-06-07T07:03:02,224][WARN ][o.e.x.i.a.DownsampleActionIT] [testRollupIndex] --> original index name is [index-vaqoveopxt], rollup index name is NULL, possible explanation: {index=index-vaqoveopxt, managed=true, policy=policy-NxqyM, index_creation_date_millis=1717754461596, time_since_index_creation=2.01m, step_info={type=illegal_argument_exception, reason=policy [policy-NxqyM] does not exist}} |   ``` For some reason, the policy is reported as not existent. Adding an assert during creation to catch errors earlier. 
Related to #105437 --- .../java/org/elasticsearch/xpack/TimeSeriesRestDriver.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java index c46d4d334cd0..3949139db033 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java @@ -161,7 +161,7 @@ public final class TimeSeriesRestDriver { final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); Request request = new Request("PUT", "_ilm/policy/" + policyName); request.setEntity(entity); - client.performRequest(request); + assertOK(client.performRequest(request)); } public static void createComposableTemplate(RestClient client, String templateName, String indexPattern, Template template) From d81166d86e2fd13e23ab0e483a844ed29d0c90df Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 10 Jun 2024 11:41:22 +0200 Subject: [PATCH 26/58] Clean up role resolution with `CrossClusterApiKeyRoleReference` (#108707) This PR cleans up role resolution for cross-cluster API key role references. Previously, these were handled as generic API key role references but the flows are now different enough to use a new, standalone class instead. 
Relates: https://github.com/elastic/elasticsearch/pull/108600 --- .../xpack/core/security/authc/Subject.java | 4 +- .../security/authz/store/RoleReference.java | 75 ++++++++++++++----- .../authz/store/RoleReferenceResolver.java | 5 ++ .../core/security/authc/SubjectTests.java | 16 ++-- .../authz/store/RoleReferenceTests.java | 16 ++++ .../authz/store/RoleDescriptorStore.java | 39 ++++++---- 6 files changed, 117 insertions(+), 38 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java index 703cc18c66f4..39173be73f19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java @@ -272,7 +272,7 @@ public class Subject { } // Package private for testing - RoleReference.ApiKeyRoleReference buildRoleReferenceForCrossClusterApiKey() { + RoleReference.CrossClusterApiKeyRoleReference buildRoleReferenceForCrossClusterApiKey() { assert version.onOrAfter(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY); final String apiKeyId = (String) metadata.get(AuthenticationField.API_KEY_ID_KEY); assert ApiKey.Type.CROSS_CLUSTER == getApiKeyType() : "cross cluster access must use cross-cluster API keys"; @@ -283,7 +283,7 @@ public class Subject { final BytesReference limitedByRoleDescriptorsBytes = (BytesReference) metadata.get(API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY); assert isEmptyRoleDescriptorsBytes(limitedByRoleDescriptorsBytes) : "cross cluster API keys must have empty limited-by role descriptors"; - return new RoleReference.ApiKeyRoleReference(apiKeyId, roleDescriptorsBytes, RoleReference.ApiKeyRoleType.ASSIGNED, true); + return new RoleReference.CrossClusterApiKeyRoleReference(apiKeyId, roleDescriptorsBytes); } private RoleReferenceIntersection buildRoleReferencesForCrossClusterAccess() { diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java index 3e631c251696..3f7a53f7a2dd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java @@ -81,36 +81,29 @@ public interface RoleReference { private final BytesReference roleDescriptorsBytes; private final ApiKeyRoleType roleType; private RoleKey id = null; - private final boolean checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess; public ApiKeyRoleReference(String apiKeyId, BytesReference roleDescriptorsBytes, ApiKeyRoleType roleType) { - this(apiKeyId, roleDescriptorsBytes, roleType, false); - } - - public ApiKeyRoleReference( - String apiKeyId, - BytesReference roleDescriptorsBytes, - ApiKeyRoleType roleType, - boolean checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess - ) { this.apiKeyId = apiKeyId; this.roleDescriptorsBytes = roleDescriptorsBytes; this.roleType = roleType; - this.checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess = checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess; } @Override public RoleKey id() { // Hashing can be expensive. memorize the result in case the method is called multiple times. 
if (id == null) { - final String roleDescriptorsHash = MessageDigests.toHexString( - MessageDigests.digest(roleDescriptorsBytes, MessageDigests.sha256()) - ); - id = new RoleKey(Set.of("apikey:" + roleDescriptorsHash), "apikey_" + roleType); + id = computeRoleKey(roleDescriptorsBytes, roleType); } return id; } + private static RoleKey computeRoleKey(BytesReference roleDescriptorsBytes, ApiKeyRoleType roleType) { + final String roleDescriptorsHash = MessageDigests.toHexString( + MessageDigests.digest(roleDescriptorsBytes, MessageDigests.sha256()) + ); + return new RoleKey(Set.of("apikey:" + roleDescriptorsHash), "apikey_" + roleType); + } + @Override public void resolve(RoleReferenceResolver resolver, ActionListener listener) { resolver.resolveApiKeyRoleReference(this, listener); @@ -127,12 +120,60 @@ public interface RoleReference { public ApiKeyRoleType getRoleType() { return roleType; } + } - public boolean checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess() { - return checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess; + /** + * Represents the role descriptors of the cross-cluster API key underlying an API key authentication based remote cluster connection. + * This captures the permissions of the cross-cluster API key on the fulfilling cluster and is intersected with the permissions of the + * query-cluster-side user entity making the cross cluster request (see {@link CrossClusterAccessRoleReference}). + */ + final class CrossClusterApiKeyRoleReference implements RoleReference { + + private final String apiKeyId; + private final BytesReference roleDescriptorsBytes; + private final ApiKeyRoleType roleType; + private RoleKey id = null; + + public CrossClusterApiKeyRoleReference(String apiKeyId, BytesReference roleDescriptorsBytes) { + this.apiKeyId = apiKeyId; + this.roleDescriptorsBytes = roleDescriptorsBytes; + this.roleType = ApiKeyRoleType.ASSIGNED; + } + + @Override + public RoleKey id() { + // Hashing can be expensive. 
memorize the result in case the method is called multiple times. + if (id == null) { + // Note: the role key is the same as for ApiKeyRoleReference, to maximize cache utilization + id = ApiKeyRoleReference.computeRoleKey(roleDescriptorsBytes, roleType); + } + return id; + } + + @Override + public void resolve(RoleReferenceResolver resolver, ActionListener listener) { + resolver.resolveCrossClusterApiKeyRoleReference(this, listener); + } + + public String getApiKeyId() { + return apiKeyId; + } + + public BytesReference getRoleDescriptorsBytes() { + return roleDescriptorsBytes; + } + + public ApiKeyRoleType getRoleType() { + return roleType; } } + /** + * Represents the role descriptors sent from the querying cluster to the fulfilling cluster as part of API key authentication based + * cross cluster operations. This captures the permissions of the user entity on the querying cluster and is intersected with the + * fulfilling-cluster-side permissions of the cross-cluster API key underlying the connection + * (see {@link CrossClusterApiKeyRoleReference}). 
+ */ final class CrossClusterAccessRoleReference implements RoleReference { private final CrossClusterAccessSubjectInfo.RoleDescriptorsBytes roleDescriptorsBytes; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java index 21e4a3f73a9b..bac9a210fa7a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java @@ -30,4 +30,9 @@ public interface RoleReferenceResolver { RoleReference.CrossClusterAccessRoleReference crossClusterAccessRoleReference, ActionListener listener ); + + void resolveCrossClusterApiKeyRoleReference( + RoleReference.CrossClusterApiKeyRoleReference crossClusterApiKeyRoleReference, + ActionListener listener + ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java index 1c4592c33108..625feca39cdb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java @@ -46,6 +46,7 @@ import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.CR import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.CROSS_CLUSTER_ACCESS_REALM_TYPE; import static org.elasticsearch.xpack.core.security.authc.Subject.FLEET_SERVER_ROLE_DESCRIPTOR_BYTES_V_7_14; import static org.elasticsearch.xpack.core.security.authz.store.RoleReference.CrossClusterAccessRoleReference; +import static org.elasticsearch.xpack.core.security.authz.store.RoleReference.CrossClusterApiKeyRoleReference; import 
static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -182,7 +183,7 @@ public class SubjectTests extends ESTestCase { authMetadata ); - final ApiKeyRoleReference roleReference = subject.buildRoleReferenceForCrossClusterApiKey(); + final CrossClusterApiKeyRoleReference roleReference = subject.buildRoleReferenceForCrossClusterApiKey(); assertThat(roleReference.getApiKeyId(), equalTo(apiKeyId)); assertThat(roleReference.getRoleDescriptorsBytes(), equalTo(authMetadata.get(API_KEY_ROLE_DESCRIPTORS_KEY))); } @@ -233,26 +234,29 @@ public class SubjectTests extends ESTestCase { contains( isA(CrossClusterAccessRoleReference.class), isA(CrossClusterAccessRoleReference.class), - isA(ApiKeyRoleReference.class) + isA(CrossClusterApiKeyRoleReference.class) ) ); expectCrossClusterAccessReferenceAtIndex(0, roleReferences, crossClusterAccessSubjectInfo); expectCrossClusterAccessReferenceAtIndex(1, roleReferences, crossClusterAccessSubjectInfo); - final ApiKeyRoleReference roleReference = (ApiKeyRoleReference) roleReferences.get(2); + final CrossClusterApiKeyRoleReference roleReference = (CrossClusterApiKeyRoleReference) roleReferences.get(2); assertThat(roleReference.getApiKeyId(), equalTo(apiKeyId)); assertThat(roleReference.getRoleDescriptorsBytes(), equalTo(authMetadata.get(API_KEY_ROLE_DESCRIPTORS_KEY))); } else { if (isInternalUser) { - assertThat(roleReferences, contains(isA(FixedRoleReference.class), isA(ApiKeyRoleReference.class))); + assertThat(roleReferences, contains(isA(FixedRoleReference.class), isA(CrossClusterApiKeyRoleReference.class))); expectFixedReferenceAtIndex(0, roleReferences); } else { - assertThat(roleReferences, contains(isA(CrossClusterAccessRoleReference.class), isA(ApiKeyRoleReference.class))); + assertThat( + roleReferences, + contains(isA(CrossClusterAccessRoleReference.class), isA(CrossClusterApiKeyRoleReference.class)) + ); expectCrossClusterAccessReferenceAtIndex(0, 
roleReferences, crossClusterAccessSubjectInfo); } - final ApiKeyRoleReference roleReference = (ApiKeyRoleReference) roleReferences.get(1); + final CrossClusterApiKeyRoleReference roleReference = (CrossClusterApiKeyRoleReference) roleReferences.get(1); assertThat(roleReference.getApiKeyId(), equalTo(apiKeyId)); assertThat(roleReference.getRoleDescriptorsBytes(), equalTo(authMetadata.get(API_KEY_ROLE_DESCRIPTORS_KEY))); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java index 74c8e6addf24..bc94cabab187 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java @@ -72,6 +72,22 @@ public class RoleReferenceTests extends ESTestCase { assertThat(roleKey.getSource(), equalTo("apikey_" + apiKeyRoleType)); } + public void testCrossClusterApiKeyRoleReference() { + final String apiKeyId = randomAlphaOfLength(20); + final BytesArray roleDescriptorsBytes = new BytesArray(randomAlphaOfLength(50)); + final RoleReference.CrossClusterApiKeyRoleReference apiKeyRoleReference = new RoleReference.CrossClusterApiKeyRoleReference( + apiKeyId, + roleDescriptorsBytes + ); + + final RoleKey roleKey = apiKeyRoleReference.id(); + assertThat( + roleKey.getNames(), + hasItem("apikey:" + MessageDigests.toHexString(MessageDigests.digest(roleDescriptorsBytes, MessageDigests.sha256()))) + ); + assertThat(roleKey.getSource(), equalTo("apikey_" + RoleReference.ApiKeyRoleType.ASSIGNED)); + } + public void testCrossClusterAccessRoleReference() { final var roleDescriptorsBytes = new CrossClusterAccessSubjectInfo.RoleDescriptorsBytes(new BytesArray(randomAlphaOfLength(50))); final var crossClusterAccessRoleReference = new 
RoleReference.CrossClusterAccessRoleReference("user", roleDescriptorsBytes); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java index 50a4658c27ee..ac8d84d95fd1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java @@ -108,19 +108,6 @@ public class RoleDescriptorStore implements RoleReferenceResolver { || (apiKeyRoleReference.getRoleType() == RoleReference.ApiKeyRoleType.LIMITED_BY && rolesRetrievalResult.getRoleDescriptors().stream().noneMatch(RoleDescriptor::hasRestriction)) : "there should be zero limited-by role descriptors with restriction and no more than one assigned"; - // TODO we need unit tests for edge-cases here, for instance, we need to test the REST API keys are never checked for invalid legacy - // role descriptors - if (apiKeyRoleReference.checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess()) { - try { - CrossClusterApiKeyRoleDescriptorBuilder.checkForInvalidLegacyRoleDescriptors( - apiKeyRoleReference.getApiKeyId(), - roleDescriptors - ); - } catch (IllegalArgumentException e) { - listener.onFailure(e); - return; - } - } listener.onResponse(rolesRetrievalResult); } @@ -182,6 +169,32 @@ public class RoleDescriptorStore implements RoleReferenceResolver { listener.onResponse(rolesRetrievalResult); } + @Override + public void resolveCrossClusterApiKeyRoleReference( + RoleReference.CrossClusterApiKeyRoleReference crossClusterApiKeyRoleReference, + ActionListener listener + ) { + final List roleDescriptors = apiKeyService.parseRoleDescriptorsBytes( + crossClusterApiKeyRoleReference.getApiKeyId(), + crossClusterApiKeyRoleReference.getRoleDescriptorsBytes(), + 
crossClusterApiKeyRoleReference.getRoleType() + ); + final RolesRetrievalResult rolesRetrievalResult = new RolesRetrievalResult(); + rolesRetrievalResult.addDescriptors(Set.copyOf(roleDescriptors)); + assert rolesRetrievalResult.getRoleDescriptors().stream().noneMatch(RoleDescriptor::hasRestriction) + : "there should be no role descriptors with restriction"; + try { + CrossClusterApiKeyRoleDescriptorBuilder.checkForInvalidLegacyRoleDescriptors( + crossClusterApiKeyRoleReference.getApiKeyId(), + roleDescriptors + ); + } catch (IllegalArgumentException e) { + listener.onFailure(e); + return; + } + listener.onResponse(rolesRetrievalResult); + } + private void resolveRoleNames(Set roleNames, ActionListener listener) { roleDescriptors(roleNames, ActionListener.wrap(rolesRetrievalResult -> { logDeprecatedRoles(rolesRetrievalResult.getRoleDescriptors()); From 44ae540fd7ce7482c242b33fc971cf6984cf38a2 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Mon, 10 Jun 2024 11:48:22 +0200 Subject: [PATCH 27/58] Provide the DocumentSizeReporter with index mode (#108947) in order to decided what logic in to apply when reporting a document size we need to know if an index is a time_series mode. This information is in indexSettings.mode. 
--- docs/changelog/108947.yaml | 5 +++++ .../internal/DocumentSizeObserverWithPipelinesIT.java | 7 ++++++- .../plugins/internal/DocumentSizeObserverIT.java | 8 +++++++- .../plugins/internal/DocumentParsingProvider.java | 8 +++++++- 4 files changed, 25 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/108947.yaml diff --git a/docs/changelog/108947.yaml b/docs/changelog/108947.yaml new file mode 100644 index 000000000000..8aa429324298 --- /dev/null +++ b/docs/changelog/108947.yaml @@ -0,0 +1,5 @@ +pr: 108947 +summary: Provide the `DocumentSizeReporter` with index mode +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java index aa63c2949ac3..1621a235187a 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; @@ -99,7 +100,11 @@ public class DocumentSizeObserverWithPipelinesIT extends ESIntegTestCase { } @Override - public DocumentSizeReporter newDocumentSizeReporter(String indexName, DocumentSizeAccumulator documentSizeAccumulator) { + public DocumentSizeReporter newDocumentSizeReporter( + String indexName, + IndexMode indexMode, + DocumentSizeAccumulator 
documentSizeAccumulator + ) { return DocumentSizeReporter.EMPTY_INSTANCE; } }; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java index 69f30720533d..bf6c59a4c0a9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.plugins.internal; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngine; @@ -103,6 +104,7 @@ public class DocumentSizeObserverIT extends ESIntegTestCase { DocumentSizeReporter documentParsingReporter = documentParsingProvider.newDocumentSizeReporter( shardId.getIndexName(), + IndexMode.STANDARD, DocumentSizeAccumulator.EMPTY_INSTANCE ); documentParsingReporter.onIndexingCompleted(index.parsedDoc()); @@ -132,7 +134,11 @@ public class DocumentSizeObserverIT extends ESIntegTestCase { } @Override - public DocumentSizeReporter newDocumentSizeReporter(String indexName, DocumentSizeAccumulator documentSizeAccumulator) { + public DocumentSizeReporter newDocumentSizeReporter( + String indexName, + IndexMode indexMode, + DocumentSizeAccumulator documentSizeAccumulator + ) { return new TestDocumentSizeReporter(indexName); } }; diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java index da12c97281de..0e404ca03707 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java +++ 
b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java @@ -8,6 +8,8 @@ package org.elasticsearch.plugins.internal; +import org.elasticsearch.index.IndexMode; + /** * An interface to provide instances of document parsing observer and reporter */ @@ -32,7 +34,11 @@ public interface DocumentParsingProvider { /** * @return an instance of a reporter to use when parsing has been completed and indexing successful */ - default DocumentSizeReporter newDocumentSizeReporter(String indexName, DocumentSizeAccumulator documentSizeAccumulator) { + default DocumentSizeReporter newDocumentSizeReporter( + String indexName, + IndexMode indexMode, + DocumentSizeAccumulator documentSizeAccumulator + ) { return DocumentSizeReporter.EMPTY_INSTANCE; } From 7c7405c11bfecfcf051b3c2f1f02744a2d5276b6 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Mon, 10 Jun 2024 12:17:15 +0200 Subject: [PATCH 28/58] Test switching datastreams `index.mode` (#109242) Add a few integration tests where we try switching `index.mode`. --- .../datastreams/LogsDataStreamIT.java | 406 ++++++++++++++++++ .../datastreams/LogsDataStreamRestIT.java | 293 +++++++++++++ 2 files changed, 699 insertions(+) create mode 100644 modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java create mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java new file mode 100644 index 000000000000..8a343ff9cf85 --- /dev/null +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -0,0 +1,406 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.core.Strings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.indices.InvalidIndexTemplateException; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import 
org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.is; + +public class LogsDataStreamIT extends ESSingleNodeTestCase { + + private static final String LOGS_OR_STANDARD_MAPPING = """ + { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip_address": { + "type": "ip" + } + } + }"""; + + private static final String TIME_SERIES_MAPPING = """ + { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword", + "time_series_dimension": "true" + }, + "pid": { + "type": "long", + "time_series_dimension": "true" + }, + "method": { + "type": "keyword" + }, + "ip_address": { + "type": "ip" + }, + "cpu_usage": { + "type": "float", + "time_series_metric": "gauge" + } + } + }"""; + + private static final String LOG_DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "hostname": "%s", + "pid": "%d", + "method": "%s", + "message": "%s", + "ip_address": "%s" + } + """; + + private static final String TIME_SERIES_DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "hostname": "%s", + "pid": "%d", + "method": "%s", + "ip_address": "%s", + "cpu_usage": "%f" + } + """; + + private static String toIsoTimestamp(final Instant instant) { + return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); + } + + private static String createLogDocument( + final Instant 
timestamp, + final String hostname, + long pid, + final String method, + final String message, + final String ipAddress + ) { + return Strings.format(LOG_DOC_TEMPLATE, toIsoTimestamp(timestamp), hostname, pid, method, message, ipAddress); + } + + private static String createTimeSeriesDocument( + final Instant timestamp, + final String hostname, + long pid, + final String method, + final String ipAddress, + double cpuUsage + ) { + return Strings.format(TIME_SERIES_DOC_TEMPLATE, toIsoTimestamp(timestamp), hostname, pid, method, ipAddress, cpuUsage); + } + + @Override + protected Collection> getPlugins() { + return List.of(DataStreamsPlugin.class, InternalSettingsPlugin.class); + } + + public void testLogsIndexModeDataStreamIndexing() throws IOException, ExecutionException, InterruptedException { + putComposableIndexTemplate( + client(), + "logs-composable-template", + LOGS_OR_STANDARD_MAPPING, + Map.of("index.mode", "logs"), + List.of("logs-*-*") + ); + final String dataStreamName = generateDataStreamName("logs"); + createDataStream(client(), dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + rolloverDataStream(dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + } + + public void testIndexModeLogsAndStandardSwitching() throws IOException, ExecutionException, InterruptedException { + final List indexModes = new ArrayList<>(); + final String dataStreamName = generateDataStreamName("logs"); + indexModes.add(IndexMode.STANDARD); + putComposableIndexTemplate( + client(), + "logs-composable-template", + LOGS_OR_STANDARD_MAPPING, + Map.of("index.mode", "standard"), + List.of("logs-*-*") + ); + createDataStream(client(), dataStreamName); + for (int i = 0; i < randomIntBetween(5, 10); i++) { + final IndexMode indexMode = i % 2 == 0 ? 
IndexMode.LOGS : IndexMode.STANDARD; + indexModes.add(indexMode); + updateComposableIndexTemplate( + client(), + "logs-composable-template", + LOGS_OR_STANDARD_MAPPING, + Map.of("index.mode", indexMode.getName()), + List.of("logs-*-*") + ); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + rolloverDataStream(dataStreamName); + } + assertDataStreamBackingIndicesModes(dataStreamName, indexModes); + } + + public void testIndexModeLogsAndTimeSeriesSwitching() throws IOException, ExecutionException, InterruptedException { + final String dataStreamName = generateDataStreamName("custom"); + final List indexPatterns = List.of("custom-*-*"); + final Map logsSettings = Map.of("index.mode", "logs"); + final Map timeSeriesSettings = Map.of("index.mode", "time_series", "index.routing_path", "hostname"); + + putComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns); + createDataStream(client(), dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + + updateComposableIndexTemplate(client(), "custom-composable-template", TIME_SERIES_MAPPING, timeSeriesSettings, indexPatterns); + rolloverDataStream(dataStreamName); + indexTimeSeriesDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + + updateComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns); + rolloverDataStream(dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + + assertDataStreamBackingIndicesModes(dataStreamName, List.of(IndexMode.LOGS, IndexMode.TIME_SERIES, IndexMode.LOGS)); + } + + public void testInvalidIndexModeTimeSeriesSwitchWithoutROutingPath() throws IOException, ExecutionException, InterruptedException { + final String dataStreamName = 
generateDataStreamName("custom"); + final List indexPatterns = List.of("custom-*-*"); + final Map logsSettings = Map.of("index.mode", "logs"); + final Map timeSeriesSettings = Map.of("index.mode", "time_series"); + + putComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns); + createDataStream(client(), dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + + expectThrows( + InvalidIndexTemplateException.class, + () -> updateComposableIndexTemplate( + client(), + "custom-composable-template", + LOGS_OR_STANDARD_MAPPING, + timeSeriesSettings, + indexPatterns + ) + ); + } + + public void testInvalidIndexModeTimeSeriesSwitchWithoutDimensions() throws IOException, ExecutionException, InterruptedException { + final String dataStreamName = generateDataStreamName("custom"); + final List indexPatterns = List.of("custom-*-*"); + final Map logsSettings = Map.of("index.mode", "logs"); + final Map timeSeriesSettings = Map.of("index.mode", "time_series", "index.routing_path", "hostname"); + + putComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns); + createDataStream(client(), dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + + final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + updateComposableIndexTemplate( + client(), + "custom-composable-template", + LOGS_OR_STANDARD_MAPPING, + timeSeriesSettings, + indexPatterns + ); + + }); + assertThat( + exception.getCause().getCause().getMessage(), + Matchers.equalTo( + "All fields that match routing_path must be configured with [time_series_dimension: true] or flattened fields with " + + "a list of dimensions in [time_series_dimensions] and without the [script] parameter. [hostname] was not a dimension." 
+ ) + ); + } + + private void assertDataStreamBackingIndicesModes(final String dataStreamName, final List modes) { + final GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + final GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + final DataStream dataStream = getDataStreamResponse.getDataStreams().get(0).getDataStream(); + final DataStream.DataStreamIndices backingIndices = dataStream.getBackingIndices(); + final Iterator indexModesIterator = modes.iterator(); + assertThat(backingIndices.getIndices().size(), Matchers.equalTo(modes.size())); + for (final Index index : backingIndices.getIndices()) { + final GetSettingsResponse getSettingsResponse = indicesAdmin().getSettings( + new GetSettingsRequest().indices(index.getName()).includeDefaults(true) + ).actionGet(); + final Settings settings = getSettingsResponse.getIndexToSettings().get(index.getName()); + assertThat(settings.get("index.mode"), Matchers.equalTo(indexModesIterator.next().getName())); + } + } + + final String generateDataStreamName(final String prefix) { + return String.format(Locale.ROOT, "%s-%s-%s", prefix, randomFrom("apache", "nginx", "system"), randomFrom("dev", "qa", "prod")); + } + + private void rolloverDataStream(final String dataStreamName) { + assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); + } + + private void indexLogOrStandardDocuments( + final Client client, + int numBulkRequests, + int numDocsPerBulkRequest, + final String dataStreamName + ) { + { + for (int i = 0; i < numBulkRequests; i++) { + BulkRequest bulkRequest = new BulkRequest(dataStreamName); + for (int j = 0; j < numDocsPerBulkRequest; j++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = createLogDocument( + Instant.now(), + randomAlphaOfLength(7), + 
randomIntBetween(100, 200), + randomFrom("POST", "PUT", "GET"), + randomAlphaOfLengthBetween(256, 512), + InetAddresses.toAddrString(randomIp(randomBoolean())) + ); + indexRequest.source(doc, XContentType.JSON); + bulkRequest.add(indexRequest); + } + final BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); + assertThat(bulkResponse.hasFailures(), is(false)); + } + final BroadcastResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(dataStreamName)).actionGet(); + assertThat(refreshResponse.getStatus(), is(RestStatus.OK)); + } + } + + private void indexTimeSeriesDocuments( + final Client client, + int numBulkRequests, + int numDocsPerBulkRequest, + final String dataStreamName + ) { + { + for (int i = 0; i < numBulkRequests; i++) { + BulkRequest bulkRequest = new BulkRequest(dataStreamName); + for (int j = 0; j < numDocsPerBulkRequest; j++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = createTimeSeriesDocument( + Instant.now(), + randomAlphaOfLength(12), + randomIntBetween(100, 200), + randomFrom("POST", "PUT", "GET"), + InetAddresses.toAddrString(randomIp(randomBoolean())), + randomDoubleBetween(0.0D, 1.0D, false) + ); + indexRequest.source(doc, XContentType.JSON); + bulkRequest.add(indexRequest); + } + final BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); + assertThat(bulkResponse.hasFailures(), is(false)); + } + final BroadcastResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(dataStreamName)).actionGet(); + assertThat(refreshResponse.getStatus(), is(RestStatus.OK)); + } + } + + private void createDataStream(final Client client, final String dataStreamName) throws InterruptedException, ExecutionException { + final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName); + final AcknowledgedResponse createDataStreamResponse = 
client.execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + .get(); + assertThat(createDataStreamResponse.isAcknowledged(), is(true)); + } + + private static void updateComposableIndexTemplate( + final Client client, + final String templateName, + final String mapping, + final Map settings, + final List indexPatterns + ) throws IOException { + putComposableIndexTemplate(client, templateName, mapping, settings, indexPatterns); + } + + private static void putComposableIndexTemplate( + final Client client, + final String templateName, + final String mapping, + final Map settings, + final List indexPatterns + ) throws IOException { + final Settings.Builder templateSettings = Settings.builder(); + for (Map.Entry setting : settings.entrySet()) { + templateSettings.put(setting.getKey(), setting.getValue()); + } + final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest = + new TransportPutComposableIndexTemplateAction.Request(templateName); + putComposableTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder() + .indexPatterns(indexPatterns) + .template(new Template(templateSettings.build(), new CompressedXContent(mapping), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() + ); + final AcknowledgedResponse putComposableTemplateResponse = client.execute( + TransportPutComposableIndexTemplateAction.TYPE, + putComposableTemplateRequest + ).actionGet(); + assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); + } +} diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java new file mode 100644 index 000000000000..c18bcf750242 --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java @@ -0,0 +1,293 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.net.InetAddress; +import java.time.Instant; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class LogsDataStreamRestIT extends ESRestTestCase { + + private static final String DATA_STREAM_NAME = "logs-apache-dev"; + private RestClient client; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + public void setup() throws Exception { + client = client(); + waitForLogs(client); + } + + private static void waitForLogs(RestClient client) throws Exception { + assertBusy(() -> { + try { + Request request = new Request("GET", "_index_template/logs"); + assertOK(client.performRequest(request)); + } catch (ResponseException e) { + fail(e.getMessage()); + } + }); + } + + 
private static final String LOGS_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 201, + "composed_of": [ "logs@mappings", "logs@settings" ], + "template": { + "settings": { + "index": { + "mode": "logs" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip_address": { + "type": "ip" + } + } + } + } + }"""; + + private static final String STANDARD_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 201, + "template": { + "settings": { + "index": { + "mode": "standard" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword", + "time_series_dimension": "true" + }, + "pid": { + "type": "long", + "time_series_dimension": "true" + }, + "method": { + "type": "keyword" + }, + "ip_address": { + "type": "ip" + } + } + } + } + }"""; + + private static final String DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "hostname": "%s", + "pid": "%d", + "method": "%s", + "message": "%s", + "ip_address": "%s" + } + """; + + public void testLogsIndexing() throws IOException { + putTemplate(client, "custom-template", LOGS_TEMPLATE); + createDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 0); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 
1); + } + + public void testLogsStandardIndexModeSwitch() throws IOException { + putTemplate(client, "custom-template", LOGS_TEMPLATE); + createDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 0); + + putTemplate(client, "custom-template", STANDARD_TEMPLATE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(64), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("standard", 1); + + putTemplate(client, "custom-template", LOGS_TEMPLATE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 2); + } + + private void assertDataStreamBackingIndexMode(final String indexMode, int backingIndex) throws IOException { + assertThat(getSettings(client, getWriteBackingIndex(client, DATA_STREAM_NAME, backingIndex)).get("index.mode"), is(indexMode)); + } + + private String document( + final Instant timestamp, + final String hostname, + long pid, + final String method, + final String message, + final InetAddress ipAddress + ) { + return String.format( + Locale.ROOT, + DOC_TEMPLATE, + DateFormatter.forPattern(FormatNames.DATE.getName()).format(timestamp), + hostname, + pid, + method, + message, + InetAddresses.toAddrString(ipAddress) + ); + } + + private static void createDataStream(final RestClient client, final String dataStreamName) throws IOException { + 
Request request = new Request("PUT", "_data_stream/" + dataStreamName); + assertOK(client.performRequest(request)); + } + + private static void putTemplate(final RestClient client, final String templateName, final String mappings) throws IOException { + final Request request = new Request("PUT", "/_index_template/" + templateName); + request.setJsonEntity(mappings); + assertOK(client.performRequest(request)); + } + + private static void indexDocument(final RestClient client, String dataStreamName, String doc) throws IOException { + final Request request = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + request.setJsonEntity(doc); + assertOK(client.performRequest(request)); + } + + private static void rolloverDataStream(final RestClient client, final String dataStreamName) throws IOException { + final Request request = new Request("POST", "/" + dataStreamName + "/_rollover"); + final Response response = client.performRequest(request); + assertOK(response); + assertThat(entityAsMap(response).get("rolled_over"), is(true)); + } + + @SuppressWarnings("unchecked") + private static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException { + final Request request = new Request("GET", "_data_stream/" + dataStreamName); + final List dataStreams = (List) entityAsMap(client.performRequest(request)).get("data_streams"); + final Map dataStream = (Map) dataStreams.get(0); + final List> backingIndices = (List>) dataStream.get("indices"); + return backingIndices.get(backingIndex).get("index_name"); + } + + @SuppressWarnings("unchecked") + private static Map getSettings(final RestClient client, final String indexName) throws IOException { + final Request request = new Request("GET", "/" + indexName + "/_settings?flat_settings"); + return ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get("settings"); + } +} From 501f78701d03760852b2aae2194027ea17b3e44d Mon Sep 17 00:00:00 2001 
From: Pat Whelan Date: Mon, 10 Jun 2024 07:47:36 -0400 Subject: [PATCH 29/58] [Transform] Reset max page size to settings value (#109449) When a circuit breaker exception causes the transform checkpoint to fail, the transform will reduce the max page size to the value specified by the circuit breaker. Transform will retry the checkpoint until the search succeeds. When the search succeeds, the Transform will now reset the max search size to the value specified in the Transform settings rather than the now-deprecated value in the Latest/Pivot config. Fix #109308 --- docs/changelog/109449.yaml | 6 + .../transforms/TransformIndexer.java | 14 +- .../transforms/TransformIndexerTests.java | 169 +++++++++++++++++- 3 files changed, 183 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/109449.yaml diff --git a/docs/changelog/109449.yaml b/docs/changelog/109449.yaml new file mode 100644 index 000000000000..90cb908227f1 --- /dev/null +++ b/docs/changelog/109449.yaml @@ -0,0 +1,6 @@ +pr: 109449 +summary: Reset max page size to settings value +area: Transform +type: bug +issues: + - 109308 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 201f20ac1eb0..712a95ded207 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper; +import org.elasticsearch.xpack.transform.Transform; import 
org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; @@ -570,9 +571,7 @@ public abstract class TransformIndexer extends AsyncTwoPhaseIndexer listener) { try { // reset the page size, so we do not memorize a low page size forever - if (function != null) { - context.setPageSize(function.getInitialPageSize()); - } + resetPageSize(); // reset the changed bucket to free memory if (changeCollector != null) { changeCollector.clear(); @@ -1234,12 +1233,17 @@ public abstract class TransformIndexer extends AsyncTwoPhaseIndexer 0) { context.setPageSize(initialConfiguredPageSize); - } else { + } else if (function != null) { context.setPageSize(function.getInitialPageSize()); + } else { + // we should never be in a state where both initialConfiguredPageSize and function are null, but just in case... + context.setPageSize(Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index abad10b148f2..1c268174f5be 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -35,6 +37,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfigTests; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -43,6 +46,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPositio import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; @@ -59,7 +63,9 @@ import org.junit.Before; import java.time.Clock; import java.util.Collections; import java.util.Map; +import java.util.concurrent.BlockingDeque; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -107,9 +113,13 @@ public class TransformIndexerTests extends ESTestCase { private CountDownLatch searchLatch; private CountDownLatch doProcessLatch; private CountDownLatch doSaveStateLatch; + private CountDownLatch afterFinishOrFailureLatch; private AtomicBoolean saveStateInProgress = new AtomicBoolean(false); + private BlockingDeque searchExceptions = new LinkedBlockingDeque<>(); + private 
BlockingDeque runBeforeOnFinish = new LinkedBlockingDeque<>(); + // how many loops to execute until reporting done private int numberOfLoops; @@ -211,7 +221,11 @@ public class TransformIndexerTests extends ESTestCase { throw new IllegalStateException(e); } } - threadPool.generic().execute(() -> nextPhase.onResponse(ONE_HIT_SEARCH_RESPONSE)); + if (searchExceptions.isEmpty() == false) { + nextPhase.onFailure(searchExceptions.poll()); + } else { + threadPool.generic().execute(() -> nextPhase.onResponse(ONE_HIT_SEARCH_RESPONSE)); + } } @Override @@ -261,6 +275,22 @@ public class TransformIndexerTests extends ESTestCase { listener.onResponse(null); } + @Override + protected void onFinish(ActionListener listener) { + while (runBeforeOnFinish.isEmpty() == false) { + runBeforeOnFinish.poll().run(); + } + super.onFinish(listener); + } + + @Override + protected void afterFinishOrFailure() { + super.afterFinishOrFailure(); + if (afterFinishOrFailureLatch != null) { + afterFinishOrFailureLatch.countDown(); + } + } + public boolean waitingForNextSearch() { return super.getScheduledNextSearch() != null; } @@ -278,6 +308,14 @@ public class TransformIndexerTests extends ESTestCase { void validate(ActionListener listener) { listener.onResponse(null); } + + public void addAfterFinishOrFailureLatch() { + afterFinishOrFailureLatch = new CountDownLatch(1); + } + + public void waitForAfterFinishOrFailureLatch(long timeout, TimeUnit unit) throws InterruptedException { + assertTrue(afterFinishOrFailureLatch.await(timeout, unit)); + } } @Before @@ -439,6 +477,135 @@ public class TransformIndexerTests extends ESTestCase { assertBusy(() -> assertEquals(IndexerState.STOPPED, indexer.getState()), 5, TimeUnit.SECONDS); } + public void testMaxPageSearchSizeIsResetToDefaultValue() throws Exception { + TransformConfig config = new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1)), + 
null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + null, + null, + null, + null, + null + ); + AtomicReference state = new AtomicReference<>(IndexerState.STARTED); + + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); + final MockedTransformIndexer indexer = createMockIndexer( + 1, + config, + state, + null, + threadPool, + auditor, + new TransformIndexerStats(), + context + ); + + // add latches + CountDownLatch searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + + indexer.start(); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertEquals(indexer.getState(), IndexerState.INDEXING); + + // set circuit breaker to 50% + indexer.searchExceptions.offer(new CircuitBreakingException("hello", 2, 1, CircuitBreaker.Durability.TRANSIENT)); + indexer.runBeforeOnFinish.offer(() -> { + assertEquals(Math.round(Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE / 2.0), context.getPageSize()); + }); + assertFalse(indexer.runBeforeOnFinish.isEmpty()); + + // run and wait + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // rerun, don't throw an exception this time + searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + assertBusy(() -> assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()))); + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // verify that we checked the pageSize decreased + assertTrue(indexer.runBeforeOnFinish.isEmpty()); + // verify that the pageSize reset + assertEquals(Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE.intValue(), context.getPageSize()); + } + + public void testMaxPageSearchSizeIsResetToConfiguredValue() throws Exception { + TransformConfig config = new TransformConfig( + randomAlphaOfLength(10), + 
randomSourceConfig(), + randomDestConfig(), + null, + new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1)), + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + null, + null, + null, + null, + null + ); + AtomicReference state = new AtomicReference<>(IndexerState.STARTED); + + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); + final MockedTransformIndexer indexer = createMockIndexer( + 1, + config, + state, + null, + threadPool, + auditor, + new TransformIndexerStats(), + context + ); + + // add latches + CountDownLatch searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + + indexer.start(); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertEquals(indexer.getState(), IndexerState.INDEXING); + + var configuredMaxPageSearchSize = 20_000; + indexer.applyNewSettings( + new SettingsConfig.Builder(SettingsConfig.EMPTY).setMaxPageSearchSize(configuredMaxPageSearchSize).build() + ); + + // set circuit breaker to 50% + indexer.searchExceptions.offer(new CircuitBreakingException("hello", 2, 1, CircuitBreaker.Durability.TRANSIENT)); + indexer.runBeforeOnFinish.offer(() -> { assertEquals(Math.round(configuredMaxPageSearchSize / 2.0), context.getPageSize()); }); + assertFalse(indexer.runBeforeOnFinish.isEmpty()); + + // run and wait + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // rerun, don't throw an exception this time + searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + assertBusy(() -> assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()))); + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // verify that we checked the pageSize decreased + assertTrue(indexer.runBeforeOnFinish.isEmpty()); + // verify that the 
pageSize reset + assertEquals(configuredMaxPageSearchSize, context.getPageSize()); + } + private MockedTransformIndexer createMockIndexer( int numberOfLoops, TransformConfig config, From 28faacd8690ed89fe7088cd63454100745d14941 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Mon, 10 Jun 2024 14:03:49 +0200 Subject: [PATCH 30/58] [Transform] Introduce `_transform/_node_stats` API (#107279) --- docs/changelog/107279.yaml | 5 + .../api/transform.get_node_stats.json | 23 +++ .../action/GetTransformNodeStatsAction.java | 149 ++++++++++++++++++ .../transforms/TransformSchedulerStats.java | 40 +++++ ...odeStatsActionNodesStatsResponseTests.java | 64 ++++++++ .../xpack/security/operator/Constants.java | 1 + .../common/TransformCommonRestTestCase.java | 9 ++ .../transform/integration/TransformIT.java | 5 + .../integration/TransformNodeStatsIT.java | 100 ++++++++++++ .../xpack/transform/Transform.java | 7 +- .../TransportGetTransformNodeStatsAction.java | 84 ++++++++++ .../RestGetTransformNodeStatsAction.java | 42 +++++ .../TransformScheduledTaskQueue.java | 9 ++ .../scheduling/TransformScheduler.java | 19 +++ .../TransformScheduledTaskQueueTests.java | 12 ++ .../scheduling/TransformSchedulerTests.java | 13 ++ 16 files changed, 581 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/107279.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java create mode 100644 
x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java create mode 100644 x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java create mode 100644 x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java diff --git a/docs/changelog/107279.yaml b/docs/changelog/107279.yaml new file mode 100644 index 000000000000..a2940ecc9ba2 --- /dev/null +++ b/docs/changelog/107279.yaml @@ -0,0 +1,5 @@ +pr: 107279 +summary: Introduce _transform/_node_stats API +area: Transform +type: feature +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json new file mode 100644 index 000000000000..ca3fde65f636 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json @@ -0,0 +1,23 @@ +{ + "transform.get_node_stats":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-node-stats.html", + "description":"Retrieves transform usage information for transform nodes." + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_transform/_node_stats", + "methods":[ + "GET" + ] + } + ] + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java new file mode 100644 index 000000000000..6cadefbe206f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats.REGISTERED_TRANSFORM_COUNT_FIELD_NAME; + +public class GetTransformNodeStatsAction extends ActionType { + + public static final GetTransformNodeStatsAction INSTANCE = new GetTransformNodeStatsAction(); + public static final String NAME = "cluster:admin/transform/node_stats"; + + private static final String SCHEDULER_STATS_FIELD_NAME = "scheduler"; + + private GetTransformNodeStatsAction() { + super(NAME); + } + + public static class NodesStatsRequest extends BaseNodesRequest { + + public NodesStatsRequest() { + super(Strings.EMPTY_ARRAY); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
TransportAction.localOnly(); + } + } + + public static class NodesStatsResponse extends BaseNodesResponse implements ToXContentObject { + + private static final String TOTAL_FIELD_NAME = "total"; + + public int getTotalRegisteredTransformCount() { + int totalRegisteredTransformCount = 0; + for (var nodeResponse : getNodes()) { + totalRegisteredTransformCount += nodeResponse.schedulerStats().registeredTransformCount(); + } + return totalRegisteredTransformCount; + } + + public NodesStatsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + public RestStatus status() { + return this.hasFailures() ? RestStatus.INTERNAL_SERVER_ERROR : RestStatus.OK; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + for (var nodeEntry : getNodesMap().entrySet()) { + String nodeName = nodeEntry.getKey(); + NodeStatsResponse nodeResponse = nodeEntry.getValue(); + builder.field(nodeName); + nodeResponse.toXContent(builder, params); + } + builder.startObject(TOTAL_FIELD_NAME); + builder.startObject(SCHEDULER_STATS_FIELD_NAME); + builder.field(REGISTERED_TRANSFORM_COUNT_FIELD_NAME, getTotalRegisteredTransformCount()); + builder.endObject(); + builder.endObject(); + return builder.endObject(); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return TransportAction.localOnly(); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + TransportAction.localOnly(); + } + } + + public static class NodeStatsRequest extends TransportRequest { + + public NodeStatsRequest() {} + + public NodeStatsRequest(StreamInput in) throws IOException { + super(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + } + } + + public static class NodeStatsResponse extends BaseNodeResponse implements ToXContentObject { + + private final 
TransformSchedulerStats schedulerStats; + + public NodeStatsResponse(DiscoveryNode node, TransformSchedulerStats schedulerStats) { + super(node); + this.schedulerStats = schedulerStats; + } + + public NodeStatsResponse(StreamInput in) throws IOException { + super(in); + this.schedulerStats = in.readOptionalWriteable(TransformSchedulerStats::new); + } + + TransformSchedulerStats schedulerStats() { + return schedulerStats; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(schedulerStats); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(SCHEDULER_STATS_FIELD_NAME, schedulerStats); + return builder.endObject(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java new file mode 100644 index 000000000000..ab6e9d587cb8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.transforms; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public record TransformSchedulerStats(int registeredTransformCount, String peekTransformName) implements ToXContent, Writeable { + + public static final String REGISTERED_TRANSFORM_COUNT_FIELD_NAME = "registered_transform_count"; + public static final String PEEK_TRANSFORM_FIELD_NAME = "peek_transform"; + + public TransformSchedulerStats(StreamInput in) throws IOException { + this(in.readVInt(), in.readOptionalString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.registeredTransformCount); + out.writeOptionalString(this.peekTransformName); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(REGISTERED_TRANSFORM_COUNT_FIELD_NAME, this.registeredTransformCount); + builder.field(PEEK_TRANSFORM_FIELD_NAME, this.peekTransformName); + return builder.endObject(); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java new file mode 100644 index 000000000000..b50895659ddf --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodeStatsResponse; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsResponse; +import org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats; + +import java.util.List; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class GetTransformNodeStatsActionNodesStatsResponseTests extends ESTestCase { + + private static final ClusterName CLUSTER_NAME = new ClusterName("my-cluster"); + + public void testEmptyResponse() { + var nodesStatsResponse = new NodesStatsResponse(CLUSTER_NAME, List.of(), List.of()); + assertThat(nodesStatsResponse.getNodes(), is(empty())); + assertThat(nodesStatsResponse.failures(), is(empty())); + assertThat(nodesStatsResponse.getTotalRegisteredTransformCount(), is(equalTo(0))); + } + + public void testResponse() { + var nodeA = new NodeStatsResponse(createNode("node-A"), new TransformSchedulerStats(7, null)); + var nodeB = new NodeStatsResponse(createNode("node-B"), new TransformSchedulerStats(0, null)); + var nodeC = new NodeStatsResponse(createNode("node-C"), new TransformSchedulerStats(4, null)); + + var nodesStatsResponse = new NodesStatsResponse(CLUSTER_NAME, List.of(nodeA, nodeB, nodeC), List.of()); + 
assertThat(nodesStatsResponse.getNodes(), containsInAnyOrder(nodeA, nodeB, nodeC)); + assertThat(nodesStatsResponse.failures(), is(empty())); + assertThat(nodesStatsResponse.getTotalRegisteredTransformCount(), is(equalTo(11))); + } + + public void testResponseWithFailure() { + var nodeA = new NodeStatsResponse(createNode("node-A"), new TransformSchedulerStats(7, null)); + var nodeB = new NodeStatsResponse(createNode("node-B"), new TransformSchedulerStats(0, null)); + var nodeC = new FailedNodeException("node-C", "node C failed", null); + + var nodesStatsResponse = new NodesStatsResponse(CLUSTER_NAME, List.of(nodeA, nodeB), List.of(nodeC)); + assertThat(nodesStatsResponse.getNodes(), containsInAnyOrder(nodeA, nodeB)); + assertThat(nodesStatsResponse.failures(), contains(nodeC)); + assertThat(nodesStatsResponse.getTotalRegisteredTransformCount(), is(equalTo(7))); + } + + private static DiscoveryNode createNode(String name) { + return DiscoveryNodeUtils.builder(UUIDs.randomBase64UUID(random())).name(name).build(); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5561e14da980..33503bc55879 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -99,6 +99,7 @@ public class Constants { "cluster:admin/features/reset", "cluster:admin/tasks/cancel", "cluster:admin/transform/delete", + "cluster:admin/transform/node_stats", "cluster:admin/transform/preview", "cluster:admin/transform/put", "cluster:admin/transform/reset", diff --git 
a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java index 98cf817d6c01..97d38807f5c1 100644 --- a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -77,6 +77,15 @@ public abstract class TransformCommonRestTestCase extends ESRestTestCase { return tasks.stream().map(t -> (String) t.get("id")).filter(transformId::equals).toList(); } + protected Response getNodeStats() throws IOException { + return adminClient().performRequest(new Request("GET", "/_transform/_node_stats")); + } + + protected int getTotalRegisteredTransformCount() throws IOException { + Response response = getNodeStats(); + return (int) XContentMapValues.extractValue(entityAsMap(response), "total", "scheduler", "registered_transform_count"); + } + @SuppressWarnings("unchecked") protected void logAudits() throws Exception { logger.info("writing audit messages to the log"); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 4db0d0d8baaf..ab478dc16f22 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -245,16 +245,19 @@ public class TransformIT extends TransformRestTestCase { putTransform(transformId, config, 
RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(0))); startTransform(transformId, RequestOptions.DEFAULT); // There is 1 transform task after start. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(1))); Thread.sleep(sleepAfterStartMillis); // There should still be 1 transform task as the transform is continuous. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(1))); // Stop the transform with force set randomly. stopTransform(transformId, true, null, false, force); @@ -268,6 +271,7 @@ public class TransformIT extends TransformRestTestCase { } // After the transform is stopped, there should be no transform task left in the cluster state. 
assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(0))); // Delete the transform deleteTransform(transformId); @@ -586,6 +590,7 @@ public class TransformIT extends TransformRestTestCase { deleteTransform(config.getId()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109101") public void testStartTransform_GivenTimeout_Returns408() throws Exception { String indexName = "start-transform-timeout-index"; String transformId = "start-transform-timeout"; diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java new file mode 100644 index 000000000000..04483873a2af --- /dev/null +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.integration; + +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; +import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; +import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource; +import org.junit.After; + +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; + +public class TransformNodeStatsIT extends TransformRestTestCase { + + private static final int NUM_USERS = 28; + + static Integer getUserIdForRow(int row) { + return row % NUM_USERS; + } + + static String getDateStringForRow(int row) { + int day = (11 + (row / 100)) % 28; + int hour = 10 + (row % 13); + int min = 10 + (row % 49); + int sec = 10 + (row % 49); + return "2017-01-" + (day < 10 ? 
"0" + day : day) + "T" + hour + ":" + min + ":" + sec + "Z"; + } + + @After + public void cleanTransforms() throws Exception { + cleanUp(); + } + + @SuppressWarnings("unchecked") + public void testTransformNodeStats() throws Exception { + var transformId = "transform-node-stats"; + createTransform("basic-stats-reviews", transformId); + + var nodesInfo = getNodesInfo(adminClient()); + assertThat("Nodes were: " + nodesInfo, nodesInfo.size(), is(equalTo(3))); + + var response = entityAsMap(getNodeStats()); + assertThat(response, hasKey("total")); + assertThat( + "Response was: " + response, + (int) XContentMapValues.extractValue(response, "total", "scheduler", "registered_transform_count"), + is(equalTo(1)) + ); + for (String nodeId : nodesInfo.keySet()) { + assertThat(response, hasKey(nodeId)); + assertThat( + "Response was: " + response, + (int) XContentMapValues.extractValue(response, nodeId, "scheduler", "registered_transform_count"), + is(greaterThanOrEqualTo(0)) + ); + } + } + + private void createTransform(String indexName, String transformId) throws Exception { + createReviewsIndex(indexName, 100, NUM_USERS, TransformNodeStatsIT::getUserIdForRow, TransformNodeStatsIT::getDateStringForRow); + + var groups = Map.of( + "by-day", + createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null), + "by-user", + new TermsGroupSource("user_id", null, false), + "by-business", + new TermsGroupSource("business_id", null, false) + ); + + var aggs = AggregatorFactories.builder() + .addAggregator(AggregationBuilders.avg("review_score").field("stars")) + .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); + + var config = createTransformConfigBuilder(transformId, "reviews-by-user-business-day", QueryConfig.matchAll(), indexName) + .setPivotConfig(createPivotConfig(groups, aggs)) + .setSyncConfig(new TimeSyncConfig("timestamp", null)) + .build(); + + putTransform(transformId, Strings.toString(config), 
RequestOptions.DEFAULT); + startTransform(config.getId(), RequestOptions.DEFAULT); + + waitUntilCheckpoint(config.getId(), 1L); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index e788a85562e8..ab4652c562e2 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction; import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; @@ -74,6 +75,7 @@ import org.elasticsearch.xpack.transform.action.TransportDeleteTransformAction; import org.elasticsearch.xpack.transform.action.TransportGetCheckpointAction; import org.elasticsearch.xpack.transform.action.TransportGetCheckpointNodeAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformAction; +import org.elasticsearch.xpack.transform.action.TransportGetTransformNodeStatsAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformStatsAction; import org.elasticsearch.xpack.transform.action.TransportPreviewTransformAction; import org.elasticsearch.xpack.transform.action.TransportPutTransformAction; @@ -93,6 +95,7 @@ import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import 
org.elasticsearch.xpack.transform.rest.action.RestCatTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestDeleteTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformAction; +import org.elasticsearch.xpack.transform.rest.action.RestGetTransformNodeStatsAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformStatsAction; import org.elasticsearch.xpack.transform.rest.action.RestPreviewTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestPutTransformAction; @@ -191,7 +194,8 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa new RestCatTransformAction(), new RestUpgradeTransformsAction(), new RestResetTransformAction(), - new RestScheduleNowTransformAction() + new RestScheduleNowTransformAction(), + new RestGetTransformNodeStatsAction() ); } @@ -211,6 +215,7 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa new ActionHandler<>(UpgradeTransformsAction.INSTANCE, TransportUpgradeTransformsAction.class), new ActionHandler<>(ResetTransformAction.INSTANCE, TransportResetTransformAction.class), new ActionHandler<>(ScheduleNowTransformAction.INSTANCE, TransportScheduleNowTransformAction.class), + new ActionHandler<>(GetTransformNodeStatsAction.INSTANCE, TransportGetTransformNodeStatsAction.class), // internal, no rest endpoint new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java new file mode 100644 index 000000000000..bbe8f6ea05b4 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.action; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodeStatsRequest; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodeStatsResponse; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsRequest; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsResponse; +import org.elasticsearch.xpack.transform.TransformServices; +import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; + +import java.io.IOException; +import java.util.List; + +/** + * {@link TransportGetTransformNodeStatsAction} class fetches transform-related metrics from all the nodes and aggregates these metrics. 
+ */ +public class TransportGetTransformNodeStatsAction extends TransportNodesAction< + NodesStatsRequest, + NodesStatsResponse, + NodeStatsRequest, + NodeStatsResponse> { + + private final TransportService transportService; + private final TransformScheduler scheduler; + + @Inject + public TransportGetTransformNodeStatsAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + TransformServices transformServices + ) { + super( + GetTransformNodeStatsAction.NAME, + clusterService, + transportService, + actionFilters, + NodeStatsRequest::new, + threadPool.executor(ThreadPool.Names.MANAGEMENT) + ); + this.transportService = transportService; + this.scheduler = transformServices.scheduler(); + } + + @Override + protected NodesStatsResponse newResponse(NodesStatsRequest request, List nodes, List failures) { + return new NodesStatsResponse(clusterService.getClusterName(), nodes, failures); + } + + @Override + protected NodeStatsRequest newNodeRequest(NodesStatsRequest request) { + return new NodeStatsRequest(); + } + + @Override + protected NodeStatsResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new NodeStatsResponse(in); + } + + @Override + protected NodeStatsResponse nodeOperation(NodeStatsRequest request, Task task) { + var localNode = transportService.getLocalNode(); + var schedulerStats = scheduler.getStats(); + return new NodeStatsResponse(localNode, schedulerStats); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java new file mode 100644 index 000000000000..30d3b6dbdcaa --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java @@ -0,0 +1,42 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.rest.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsRequest; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +@ServerlessScope(Scope.PUBLIC) +public class RestGetTransformNodeStatsAction extends BaseRestHandler { + + @Override + public List routes() { + return List.of(new Route(GET, TransformField.REST_BASE_PATH_TRANSFORMS + "_node_stats")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + NodesStatsRequest request = new NodesStatsRequest(); + return channel -> client.execute(GetTransformNodeStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "transform_get_transform_node_stats_action"; + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java index e11da6af1c28..cd3630a095ed 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java @@ -108,6 +108,15 @@ class TransformScheduledTaskQueue { return task; } + /** + * Returns the current queue size. + * + * @return the current queue size + */ + public synchronized int size() { + return tasks.size(); + } + // Visible for testing /** * @return the set of all the transform ids diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java index a02f2aac956e..9c7afa38a5c5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import org.elasticsearch.xpack.transform.Transform; @@ -21,6 +22,7 @@ import java.time.Duration; import java.time.Instant; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -270,6 +272,23 @@ public final class TransformScheduler { scheduledTasks.remove(transformId); } + public TransformSchedulerStats getStats() { + return new TransformSchedulerStats( + scheduledTasks.size(), + 
Optional.ofNullable(scheduledTasks.first()).map(TransformScheduledTask::getTransformId).orElse(null) + ); + } + + // Visible for testing + /** + * Returns the number of transforms currently in the queue. + * + * @return number of transforms currently in the queue + */ + int getRegisteredTransformCount() { + return scheduledTasks.size(); + } + // Visible for testing /** * @return queue current contents diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java index 5030d42f9c17..6c032e752613 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java @@ -54,6 +54,7 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { public void testNonEmptyQueue() { queue.add(createTask("task-1", 5)); assertThat(queue.first(), is(notNullValue())); + assertThat(queue.size(), is(equalTo(1))); } public void testAddAndRemove() { @@ -63,6 +64,7 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2", "task-3")); assertThat(queue.first(), is(equalTo(createTask("task-2", 1)))); + assertThat(queue.size(), is(equalTo(3))); queue.remove("task-1"); queue.remove("task-2"); @@ -86,6 +88,7 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { } assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), hasSize(100)); + assertThat(queue.size(), is(equalTo(100))); { Set removedTaskIds = new HashSet<>(); @@ -107,11 +110,13 @@ public class TransformScheduledTaskQueueTests 
extends ESTestCase { public void testAddNoOp() { queue.add(createTask("task-1", 5)); assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); // Try adding a task with a duplicate key queue.add(createTask("task-1", 6)); // Verify that the add operation had no effect assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); } public void testRemoveNoOp() { @@ -121,6 +126,7 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1")); assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); } public void testUpdateNoOp() { @@ -130,6 +136,7 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1")); assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); } public void testUpdateModifiesId() { @@ -154,6 +161,7 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { containsInAnyOrder("task-1", "task-2", "task-3", "task-4", "task-5", "task-6", "task-7", "task-8", "task-9") ); assertThat(queue.first(), is(equalTo(createTask("task-7", 0)))); + assertThat(queue.size(), is(equalTo(9))); List tasksByPriority = new ArrayList<>(); while (queue.first() != null) { @@ -184,15 +192,18 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { queue.add(createTask("task-3", 9)); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2", "task-3")); assertThat(queue.first(), is(equalTo(createTask("task-2", 1)))); + assertThat(queue.size(), is(equalTo(3))); queue.update("task-3", task -> createTask(task.getTransformId(), -999)); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", 
"task-2", "task-3")); assertThat(queue.first(), is(equalTo(createTask("task-3", -999)))); + assertThat(queue.size(), is(equalTo(3))); queue.update("task-1", task -> createTask(task.getTransformId(), 0)); queue.remove("task-3"); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2")); assertThat(queue.first(), is(equalTo(createTask("task-1", 0)))); + assertThat(queue.size(), is(equalTo(2))); } private static TransformScheduledTask createTask(String transformId, long nextScheduledTimeMillis) { @@ -213,5 +224,6 @@ public class TransformScheduledTaskQueueTests extends ESTestCase { private void assertThatQueueIsEmpty() { assertThat(queue.first(), is(nullValue())); assertThat(queue.getTransformIds(), is(empty())); + assertThat(queue.size(), is(equalTo(0))); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java index 8d3220a5b4de..06fdfd7b538b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java @@ -80,7 +80,9 @@ public class TransformSchedulerTests extends ESTestCase { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, minFrequency); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(1))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(new TransformScheduledTask(transformId, fiveSeconds, 0L, 0, 5000, listener)) @@ -125,6 +127,7 @@ public class 
TransformSchedulerTests extends ESTestCase { assertThat(events.get(2), is(equalTo(new TransformScheduler.Event(transformId, 10005, 10010)))); transformScheduler.deregisterTransform(transformId); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); assertThat(transformScheduler.getTransformScheduledTasks(), is(empty())); transformScheduler.stop(); @@ -139,7 +142,9 @@ public class TransformSchedulerTests extends ESTestCase { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(1))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(new TransformScheduledTask(transformId, frequency, 0L, 0, 60 * 60 * 1000, listener)) @@ -177,6 +182,7 @@ public class TransformSchedulerTests extends ESTestCase { ); transformScheduler.deregisterTransform(transformId); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); assertThat(transformScheduler.getTransformScheduledTasks(), is(empty())); transformScheduler.stop(); @@ -191,7 +197,9 @@ public class TransformSchedulerTests extends ESTestCase { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(1))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(new TransformScheduledTask(transformId, frequency, 0L, 0, 60 * 60 * 1000, listener)) @@ -226,6 +234,7 @@ public class TransformSchedulerTests 
extends ESTestCase { assertThat(events.get(2), is(equalTo(new TransformScheduler.Event(transformId, 31 * 60 * 1000, 31 * 60 * 1000)))); transformScheduler.deregisterTransform(transformId); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); assertThat(transformScheduler.getTransformScheduledTasks(), is(empty())); transformScheduler.stop(); @@ -402,9 +411,11 @@ public class TransformSchedulerTests extends ESTestCase { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams1, listener); transformScheduler.registerTransform(transformTaskParams2, listener); transformScheduler.registerTransform(transformTaskParams3, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(3))); assertThat( transformScheduler.getTransformScheduledTasks(), contains( @@ -432,9 +443,11 @@ public class TransformSchedulerTests extends ESTestCase { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams1, listener); transformScheduler.registerTransform(transformTaskParams2, listener); transformScheduler.registerTransform(transformTaskParams3, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(3))); assertThat( transformScheduler.getTransformScheduledTasks(), contains( From 02439e321d00eb904c4e1413c49abf6ea631d55e Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 10 Jun 2024 14:26:43 +0200 Subject: [PATCH 31/58] Update Gradle gradleEnterprise plugin (develocity now) (#109443) * Update Gradle gradleEnterprise plugin 
(develocity now) * Fix imports in build scan script * Fix build scan api usage * Dependency cleanup and API fix * Fix API update for BuildResult in Build scans * Fix buildkite buildscan annotations based on gradle failures --- .ci/init.gradle | 4 +- .../groovy/elasticsearch.build-scan.gradle | 299 +++++++++--------- gradle/build.versions.toml | 2 +- gradle/verification-metadata.xml | 6 +- plugins/examples/settings.gradle | 2 +- settings.gradle | 2 +- 6 files changed, 163 insertions(+), 152 deletions(-) diff --git a/.ci/init.gradle b/.ci/init.gradle index 4b2cbd1907ca..3e1f23804cf9 100644 --- a/.ci/init.gradle +++ b/.ci/init.gradle @@ -91,8 +91,8 @@ if (USE_ARTIFACTORY) { } gradle.settingsEvaluated { settings -> - settings.pluginManager.withPlugin("com.gradle.enterprise") { - settings.gradleEnterprise { + settings.pluginManager.withPlugin("com.gradle.develocity") { + settings.develocity { server = 'https://gradle-enterprise.elastic.co' } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 67123119f7cd..f0a7b1a6d0b1 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -12,168 +12,179 @@ import java.time.LocalDateTime; import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.internal.info.BuildParams -import org.gradle.initialization.BuildRequestMetaData -buildScan { - URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null - String buildKiteUrl = System.getenv('BUILDKITE_BUILD_URL') ? 
System.getenv('BUILDKITE_BUILD_URL') : null +import java.lang.management.ManagementFactory +import java.time.LocalDateTime - // Automatically publish scans from Elasticsearch CI - if (jenkinsUrl?.host?.endsWith('elastic.co') || jenkinsUrl?.host?.endsWith('elastic.dev') || System.getenv('BUILDKITE') == 'true') { - publishAlways() - buildScan.server = 'https://gradle-enterprise.elastic.co' - } +develocity { - background { - tag OS.current().name() - tag Architecture.current().name() + buildScan { + URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null + String buildKiteUrl = System.getenv('BUILDKITE_BUILD_URL') ? System.getenv('BUILDKITE_BUILD_URL') : null - // Tag if this build is run in FIPS mode - if (BuildParams.inFipsJvm) { - tag 'FIPS' + // Automatically publish scans from Elasticsearch CI + if (jenkinsUrl?.host?.endsWith('elastic.co') || jenkinsUrl?.host?.endsWith('elastic.dev') || System.getenv('BUILDKITE') == 'true') { + publishing.onlyIf { true } + server = 'https://gradle-enterprise.elastic.co' } - // Jenkins-specific build scan metadata - if (jenkinsUrl) { - // Disable async upload in CI to ensure scan upload completes before CI agent is terminated - uploadInBackground = false - String buildNumber = System.getenv('BUILD_NUMBER') - String buildUrl = System.getenv('BUILD_URL') - String jobName = System.getenv('JOB_NAME') - String nodeName = System.getenv('NODE_NAME') - String jobBranch = System.getenv('ghprbTargetBranch') ?: System.getenv('JOB_BRANCH') + background { + tag OS.current().name() + tag Architecture.current().name() - // Link to Jenkins worker logs and system metrics - if (nodeName) { - link 'System logs', "https://ci-stats.elastic.co/app/infra#/logs?&logFilter=(expression:'host.name:${nodeName}',kind:kuery)" - buildFinished { - link 'System metrics', "https://ci-stats.elastic.co/app/metrics/detail/host/${nodeName}" - } + // Tag if this build is run in FIPS mode + if (BuildParams.inFipsJvm) { + tag 'FIPS' } 
- // Parse job name in the case of matrix builds - // Matrix job names come in the form of "base-job-name/matrix_param1=value1,matrix_param2=value2" - def splitJobName = jobName.split('/') - if (splitJobName.length > 1 && splitJobName.last() ==~ /^([a-zA-Z0-9_\-]+=[a-zA-Z0-9_\-&\.]+,?)+$/) { - def baseJobName = splitJobName.dropRight(1).join('/') - tag baseJobName - tag splitJobName.last() - value 'Job Name', baseJobName - def matrixParams = splitJobName.last().split(',') - matrixParams.collect { it.split('=') }.each { param -> - value "MATRIX_${param[0].toUpperCase()}", param[1] + // Jenkins-specific build scan metadata + if (jenkinsUrl) { + // Disable async upload in CI to ensure scan upload completes before CI agent is terminated + uploadInBackground = false + + String buildNumber = System.getenv('BUILD_NUMBER') + String buildUrl = System.getenv('BUILD_URL') + String jobName = System.getenv('JOB_NAME') + String nodeName = System.getenv('NODE_NAME') + String jobBranch = System.getenv('ghprbTargetBranch') ?: System.getenv('JOB_BRANCH') + + // Link to Jenkins worker logs and system metrics + if (nodeName) { + link 'System logs', "https://ci-stats.elastic.co/app/infra#/logs?&logFilter=(expression:'host.name:${nodeName}',kind:kuery)" + buildFinished { + link 'System metrics', "https://ci-stats.elastic.co/app/metrics/detail/host/${nodeName}" + } } - } else { - tag jobName + + // Parse job name in the case of matrix builds + // Matrix job names come in the form of "base-job-name/matrix_param1=value1,matrix_param2=value2" + def splitJobName = jobName.split('/') + if (splitJobName.length > 1 && splitJobName.last() ==~ /^([a-zA-Z0-9_\-]+=[a-zA-Z0-9_\-&\.]+,?)+$/) { + def baseJobName = splitJobName.dropRight(1).join('/') + tag baseJobName + tag splitJobName.last() + value 'Job Name', baseJobName + def matrixParams = splitJobName.last().split(',') + matrixParams.collect { it.split('=') }.each { param -> + value "MATRIX_${param[0].toUpperCase()}", param[1] + } + } else { + 
tag jobName + value 'Job Name', jobName + } + + tag 'CI' + link 'CI Build', buildUrl + link 'GCP Upload', + "https://console.cloud.google.com/storage/browser/_details/elasticsearch-ci-artifacts/jobs/${URLEncoder.encode(jobName, "UTF-8")}/build/${buildNumber}.tar.bz2" + value 'Job Number', buildNumber + if (jobBranch) { + tag jobBranch + value 'Git Branch', jobBranch + } + + System.getenv().getOrDefault('NODE_LABELS', '').split(' ').each { + value 'Jenkins Worker Label', it + } + + // Add SCM information + def isPrBuild = System.getenv('ROOT_BUILD_CAUSE_GHPRBCAUSE') != null + if (isPrBuild) { + value 'Git Commit ID', System.getenv('ghprbActualCommit') + tag "pr/${System.getenv('ghprbPullId')}" + tag 'pull-request' + link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('ghprbActualCommit')}" + link 'Pull Request', System.getenv('ghprbPullLink') + } else { + value 'Git Commit ID', BuildParams.gitRevision + link 'Source', "https://github.com/elastic/elasticsearch/tree/${BuildParams.gitRevision}" + } + } else if (buildKiteUrl) { //Buildkite-specific build scan metadata + // Disable async upload in CI to ensure scan upload completes before CI agent is terminated + uploadInBackground = false + + def branch = System.getenv('BUILDKITE_PULL_REQUEST_BASE_BRANCH') ?: System.getenv('BUILDKITE_BRANCH') + def repoMatcher = System.getenv('BUILDKITE_REPO') =~ /(https:\/\/github\.com\/|git@github\.com:)(\S+)\.git/ + def repository = repoMatcher.matches() ? 
repoMatcher.group(2) : "" + def jobLabel = System.getenv('BUILDKITE_LABEL') ?: '' + def jobName = safeName(jobLabel) + + tag 'CI' + link 'CI Build', "${buildKiteUrl}#${System.getenv('BUILDKITE_JOB_ID')}" + value 'Job Number', System.getenv('BUILDKITE_BUILD_NUMBER') + value 'Build ID', System.getenv('BUILDKITE_BUILD_ID') + value 'Job ID', System.getenv('BUILDKITE_JOB_ID') + + value 'Pipeline', System.getenv('BUILDKITE_PIPELINE_SLUG') + tag System.getenv('BUILDKITE_PIPELINE_SLUG') + value 'Job Name', jobName - } - - tag 'CI' - link 'CI Build', buildUrl - link 'GCP Upload', "https://console.cloud.google.com/storage/browser/_details/elasticsearch-ci-artifacts/jobs/${URLEncoder.encode(jobName, "UTF-8")}/build/${buildNumber}.tar.bz2" - value 'Job Number', buildNumber - if (jobBranch) { - tag jobBranch - value 'Git Branch', jobBranch - } - - System.getenv().getOrDefault('NODE_LABELS', '').split(' ').each { - value 'Jenkins Worker Label', it - } - - // Add SCM information - def isPrBuild = System.getenv('ROOT_BUILD_CAUSE_GHPRBCAUSE') != null - if (isPrBuild) { - value 'Git Commit ID', System.getenv('ghprbActualCommit') - tag "pr/${System.getenv('ghprbPullId')}" - tag 'pull-request' - link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('ghprbActualCommit')}" - link 'Pull Request', System.getenv('ghprbPullLink') - } else { - value 'Git Commit ID', BuildParams.gitRevision - link 'Source', "https://github.com/elastic/elasticsearch/tree/${BuildParams.gitRevision}" - } - } else if (buildKiteUrl) { //Buildkite-specific build scan metadata - // Disable async upload in CI to ensure scan upload completes before CI agent is terminated - uploadInBackground = false - - def branch = System.getenv('BUILDKITE_PULL_REQUEST_BASE_BRANCH') ?: System.getenv('BUILDKITE_BRANCH') - def repoMatcher = System.getenv('BUILDKITE_REPO') =~ /(https:\/\/github\.com\/|git@github\.com:)(\S+)\.git/ - def repository = repoMatcher.matches() ? 
repoMatcher.group(2) : "" - def jobLabel = System.getenv('BUILDKITE_LABEL') ?: '' - def jobName = safeName(jobLabel) - - tag 'CI' - link 'CI Build', "${buildKiteUrl}#${System.getenv('BUILDKITE_JOB_ID')}" - value 'Job Number', System.getenv('BUILDKITE_BUILD_NUMBER') - value 'Build ID', System.getenv('BUILDKITE_BUILD_ID') - value 'Job ID', System.getenv('BUILDKITE_JOB_ID') - - value 'Pipeline', System.getenv('BUILDKITE_PIPELINE_SLUG') - tag System.getenv('BUILDKITE_PIPELINE_SLUG') - - value 'Job Name', jobName - tag jobName - if (jobLabel.contains("/")) { - jobLabel.split("/").collect {safeName(it) }.each {matrix -> - tag matrix + tag jobName + if (jobLabel.contains("/")) { + jobLabel.split("/").collect { safeName(it) }.each { matrix -> + tag matrix + } } - } - def uptime = ManagementFactory.getRuntimeMXBean().getUptime() / 1000; - def metricsStartTime = LocalDateTime.now().minusSeconds(uptime.longValue()).minusMinutes(15).toString() - def metricsEndTime = LocalDateTime.now().plusMinutes(15).toString() + def uptime = ManagementFactory.getRuntimeMXBean().getUptime() / 1000; + def metricsStartTime = LocalDateTime.now().minusSeconds(uptime.longValue()).minusMinutes(15).toString() + def metricsEndTime = LocalDateTime.now().plusMinutes(15).toString() - link 'Agent Metrics', "https://es-buildkite-agents.elastic.dev/app/metrics/detail/host/${System.getenv('BUILDKITE_AGENT_NAME')}?_a=(time:(from:%27${metricsStartTime}Z%27,interval:%3E%3D1m,to:%27${metricsEndTime}Z%27))" - link 'Agent Logs', "https://es-buildkite-agents.elastic.dev/app/logs/stream?logFilter=(filters:!(),query:(language:kuery,query:%27host.name:%20${System.getenv('BUILDKITE_AGENT_NAME')}%27),timeRange:(from:%27${metricsStartTime}Z%27,to:%27${metricsEndTime}Z%27))" + link 'Agent Metrics', + "https://es-buildkite-agents.elastic.dev/app/metrics/detail/host/${System.getenv('BUILDKITE_AGENT_NAME')}?_a=(time:(from:%27${metricsStartTime}Z%27,interval:%3E%3D1m,to:%27${metricsEndTime}Z%27))" + link 'Agent Logs', + 
"https://es-buildkite-agents.elastic.dev/app/logs/stream?logFilter=(filters:!(),query:(language:kuery,query:%27host.name:%20${System.getenv('BUILDKITE_AGENT_NAME')}%27),timeRange:(from:%27${metricsStartTime}Z%27,to:%27${metricsEndTime}Z%27))" - if (branch) { - tag branch - value 'Git Branch', branch - } - - // Add SCM information - def prId = System.getenv('BUILDKITE_PULL_REQUEST') - if (prId != 'false') { - def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git").replaceFirst("git://", "https://") - value 'Git Commit ID', System.getenv('BUILDKITE_COMMIT') - tag "pr/${prId}" - tag 'pull-request' - link 'Source', "${prBaseUrl}/tree/${System.getenv('BUILDKITE_COMMIT')}" - link 'Pull Request', "https://github.com/${repository}/pull/${prId}" - } else { - value 'Git Commit ID', BuildParams.gitRevision - link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" - } - - buildFinished { result -> - - buildScanPublished { scan -> - // Attach build scan link as build metadata - // See: https://buildkite.com/docs/pipelines/build-meta-data - new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") - .start() - .waitFor() - - // Add a build annotation - // See: https://buildkite.com/docs/agent/v3/cli-annotate - def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failure ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" - def process = [ - 'buildkite-agent', - 'annotate', - '--context', - result.failure ? 'gradle-build-scans-failed' : 'gradle-build-scans', - '--append', - '--style', - result.failure ? 'error' : 'info' - ].execute() - process.withWriter { it.write(body) } // passing the body in as an argument has issues on Windows, so let's use stdin of the process instead - process.waitFor() + if (branch) { + tag branch + value 'Git Branch', branch } + + // Add SCM information + def prId = System.getenv('BUILDKITE_PULL_REQUEST') + if (prId != 'false') { + def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git").replaceFirst("git://", "https://") + value 'Git Commit ID', System.getenv('BUILDKITE_COMMIT') + tag "pr/${prId}" + tag 'pull-request' + link 'Source', "${prBaseUrl}/tree/${System.getenv('BUILDKITE_COMMIT')}" + link 'Pull Request', "https://github.com/${repository}/pull/${prId}" + } else { + value 'Git Commit ID', BuildParams.gitRevision + link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" + } + + buildFinished { result -> + + buildScanPublished { scan + -> + // Attach build scan link as build metadata + // See: https://buildkite.com/docs/pipelines/build-meta-data + new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") + .start() + .waitFor() + + // Add a build annotation + // See: https://buildkite.com/docs/agent/v3/cli-annotate + def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" + def process = [ + 'buildkite-agent', + 'annotate', + '--context', + result.failures ? 'gradle-build-scans-failed' : 'gradle-build-scans', + '--append', + '--style', + result.failures ? 'error' : 'info' + ].execute() + process.withWriter { it.write(body) } + // passing the body in as an argument has issues on Windows, so let's use stdin of the process instead + process.waitFor() + } + } + } else { + tag 'LOCAL' } - } else { - tag 'LOCAL' } } } diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index ba8167312056..5a32d2e0a58c 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -17,7 +17,7 @@ commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" forbiddenApis = "de.thetaphi:forbiddenapis:3.6" -gradle-enterprise = "com.gradle:gradle-enterprise-gradle-plugin:3.16.2" +gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.17.4" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" httpclient = "org.apache.httpcomponents:httpclient:4.5.14" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 532112d0138d..29e0afe51364 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -741,9 +741,9 @@ - - - + + + diff --git a/plugins/examples/settings.gradle b/plugins/examples/settings.gradle index af2596fdbafe..09abbfa6b586 100644 --- a/plugins/examples/settings.gradle +++ b/plugins/examples/settings.gradle @@ -7,7 +7,7 @@ */ plugins { - id "com.gradle.enterprise" version "3.16.2" + id "com.gradle.develocity" version "3.17.4" } // Include all subdirectories as example projects diff --git a/settings.gradle b/settings.gradle index ef758a7205cd..a75c66001659 100644 --- a/settings.gradle +++ b/settings.gradle @@ -17,7 +17,7 @@ pluginManagement { } plugins { - id "com.gradle.enterprise" version "3.16.2" + id 
"com.gradle.develocity" version "3.17.4" id 'elasticsearch.java-toolchain' } From 335c03afa9a2d05c63a174b18aa65294e6330a9d Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Mon, 10 Jun 2024 08:56:40 -0400 Subject: [PATCH 32/58] Handle unmatching remote cluster wildcards properly for IndicesRequest.SingleIndexNoWildcards requests (#109185) When security is enabled and a IndicesRequest.SingleIndexNoWildcards request uses a wildcard for remote clusters that does not match (for example "*:myindex" when there are no configured remotes), RBACEngine#authorizeIndexAction throws an assert error because it is given an empty list of indices. This fix checks the IndicesRequest.SingleIndexNoWildcards special case in IndicesAndAliasesResolver.resolveIndicesAndAliasesWithoutWildcards and if splitLocalAndRemoteIndexNames filters out all indexes due to a non-matching remote cluster wildcard, that code now just throws the NoSuchRemoteClusterException (which is what RBACEngine does when asserts are disabled). This handles painless/execute requests to remote clusters. 
Added note to painless/execute API docs about wildcards not being allowed --- docs/changelog/109185.yaml | 6 ++ .../painless-execute-script.asciidoc | 4 + .../authz/IndicesAndAliasesResolver.java | 14 ++- .../authz/IndicesAndAliasesResolverTests.java | 98 +++++++++++++++++++ 4 files changed, 121 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/109185.yaml diff --git a/docs/changelog/109185.yaml b/docs/changelog/109185.yaml new file mode 100644 index 000000000000..4da72c4b20ff --- /dev/null +++ b/docs/changelog/109185.yaml @@ -0,0 +1,6 @@ +pr: 109185 +summary: Handle unmatching remote cluster wildcards properly for `IndicesRequest.SingleIndexNoWildcards` + requests +area: Authorization +type: bug +issues: [] diff --git a/docs/painless/painless-guide/painless-execute-script.asciidoc b/docs/painless/painless-guide/painless-execute-script.asciidoc index 1c7ec61ffa75..4417daeb63ef 100644 --- a/docs/painless/painless-guide/painless-execute-script.asciidoc +++ b/docs/painless/painless-guide/painless-execute-script.asciidoc @@ -123,6 +123,10 @@ alias. For example, `remote1:my_index` indicates that you want to execute the painless script against the "my_index" index on the "remote1" cluster. This request will be forwarded to the "remote1" cluster if you have {ref}/remote-clusters-connect.html[configured a connection] to that remote cluster. + +NOTE: Wildcards are not accepted in the index expression for this endpoint. The +expression `*:myindex` will return the error "No such remote cluster" and the +expression `logs*` or `remote1:logs*` will return the error "index not found". 
==== `params`:: (`Map`, read-only) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index bf1bf7b7d3ce..42a1d89a9aa0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteConnectionStrategy; import org.elasticsearch.transport.TransportRequest; @@ -164,6 +165,18 @@ class IndicesAndAliasesResolver { final ResolvedIndices split; if (indicesRequest instanceof IndicesRequest.SingleIndexNoWildcards single && single.allowsRemoteIndices()) { split = remoteClusterResolver.splitLocalAndRemoteIndexNames(indicesRequest.indices()); + // all indices can come back empty when the remote index expression included a cluster alias with a wildcard + // and no remote clusters are configured that match it + if (split.getLocal().isEmpty() && split.getRemote().isEmpty()) { + for (String indexExpression : indices) { + String[] clusterAndIndex = indexExpression.split(":", 2); + if (clusterAndIndex.length == 2) { + if (clusterAndIndex[0].contains("*")) { + throw new NoSuchRemoteClusterException(clusterAndIndex[0]); + } + } + } + } } else { split = new ResolvedIndices(Arrays.asList(indicesRequest.indices()), List.of()); } @@ -473,5 +486,4 @@ class IndicesAndAliasesResolver { return new ResolvedIndices(local == null ? 
List.of() : local, remote); } } - } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index be0516ab180c..82ac95a21086 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; @@ -104,6 +105,7 @@ import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.RESTRICTED_INDICES; import static org.elasticsearch.xpack.security.authz.AuthorizedIndicesTests.getRequestInfo; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.arrayContaining; @@ -455,6 +457,102 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { ); } + public void testResolveIndicesAndAliasesWithoutWildcardsWithSingleIndexNoWildcardsRequest() { + // test 1: matching local index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + 
TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "index10" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(0)); + assertThat(resolvedIndices.getLocal().size(), equalTo(1)); + assertThat(resolvedIndices.getLocal().get(0), equalTo("index10")); + } + + // test 2: matching remote index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "remote:indexName" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:indexName")); + assertThat(resolvedIndices.getLocal().size(), equalTo(0)); + } + + // test 3: missing local index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "zzz_no_such_index_zzz" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(0)); + assertThat(resolvedIndices.getLocal().size(), equalTo(1)); + assertThat(resolvedIndices.getLocal().get(0), equalTo("zzz_no_such_index_zzz")); + } + + // test 4: missing remote index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "remote:zzz_no_such_index_zzz" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:zzz_no_such_index_zzz")); + assertThat(resolvedIndices.getLocal().size(), equalTo(0)); + } + + // test 5: both local and remote indexes + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new 
String[] { "index10", "remote:indexName" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:indexName")); + assertThat(resolvedIndices.getLocal().size(), equalTo(1)); + assertThat(resolvedIndices.getLocal().get(0), equalTo("index10")); + } + + // test 6: remote cluster name with wildcards that does not match any configured remotes + { + NoSuchRemoteClusterException exception = expectThrows( + NoSuchRemoteClusterException.class, + () -> defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "x*x:test" }) + ) + ); + assertThat(exception.getMessage(), containsString("no such remote cluster: [x*x]")); + } + + // test 7: mix and test 2 and test 6 - should not result in exception (wildcard without matches has no effect) + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "x*x:test", "remote:indexName" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:indexName")); + assertThat(resolvedIndices.getLocal().size(), equalTo(0)); + } + } + + private static IndicesRequest.SingleIndexNoWildcards createSingleIndexNoWildcardsRequest(String[] indexExpression) { + IndicesRequest.SingleIndexNoWildcards singleIndexNoWildcardsRequest = new IndicesRequest.SingleIndexNoWildcards() { + @Override + public String[] indices() { + return indexExpression; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.DEFAULT; + } + }; + return singleIndexNoWildcardsRequest; + } + public void testExplicitDashIndices() { SearchRequest request = new SearchRequest("-index10", "-index20"); List indices = resolveIndices(request, 
buildAuthorizedIndices(userDashIndices, TransportSearchAction.TYPE.name())) From eedc2b9354b556799073de655e3904d599017aaa Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Mon, 10 Jun 2024 06:24:05 -0700 Subject: [PATCH 33/58] Fix typo in TSDB documentation (#109504) --- docs/reference/data-streams/tsds.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index e943a25f1fb0..2e81e5b7e384 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -285,7 +285,7 @@ field values that are older than 2 hours (the `index.look_back_time` default). A TSDS is designed to ingest current metrics data. When the TSDS is first created the initial backing index has: -* an `index.time_series.start_time` value set to `now - index.look_ahead_time` +* an `index.time_series.start_time` value set to `now - index.look_back_time` * an `index.time_series.end_time` value set to `now + index.look_ahead_time` Only data that falls inside that range can be indexed. From bf19072247c61c05ff7f307be57c5cea31df182c Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Mon, 10 Jun 2024 09:50:28 -0400 Subject: [PATCH 34/58] [ESQL] move optimizer rules to a package (#109408) This moves (most of) the Optimizer Rules from being public static sub-classes of OptimizerRules to being public top level classes in the optimizer.rules package. The main benefit of this is to reduce git conflicts when touching rules, since they will no longer be all in the same file. It also improves readability, as short files are easier to read in general. I would like to move the tests for these rules into class-specific test files, but there's some more general test refactoring that I'd like to get done first, so that'll be in a follow-up PR. 
--------- Co-authored-by: Elastic Machine --- .../esql/optimizer/LogicalPlanOptimizer.java | 9 +- .../xpack/esql/optimizer/OptimizerRules.java | 458 ------------------ .../BooleanFunctionEqualsElimination.java | 47 ++ .../rules/CombineDisjunctionsToIn.java | 101 ++++ .../esql/optimizer/rules/PropagateEquals.java | 348 +++++++++++++ .../esql/optimizer/OptimizerRulesTests.java | 73 +-- 6 files changed, 540 insertions(+), 496 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 951c97d7e69f..4e2cb2c8223e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -59,8 +59,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.optimizer.rules.BooleanFunctionEqualsElimination; +import org.elasticsearch.xpack.esql.optimizer.rules.CombineDisjunctionsToIn; import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; +import 
org.elasticsearch.xpack.esql.optimizer.rules.PropagateEquals; import org.elasticsearch.xpack.esql.optimizer.rules.PruneLiteralsInOrderBy; import org.elasticsearch.xpack.esql.optimizer.rules.SetAsOptimized; import org.elasticsearch.xpack.esql.optimizer.rules.SimplifyComparisonsArithmetics; @@ -159,10 +162,10 @@ public class LogicalPlanOptimizer extends ParameterizedRuleExecutor { - CombineDisjunctionsToIn() { - super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); - } - - protected In createIn(Expression key, List values, ZoneId zoneId) { - return new In(key.source(), key, values); - } - - protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { - return new Equals(k.source(), k, v.iterator().next(), finalZoneId); - } - - @Override - protected Expression rule(Or or) { - Expression e = or; - // look only at equals and In - List exps = splitOr(e); - - Map> found = new LinkedHashMap<>(); - ZoneId zoneId = null; - List ors = new LinkedList<>(); - - for (Expression exp : exps) { - if (exp instanceof Equals eq) { - // consider only equals against foldables - if (eq.right().foldable()) { - found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); - } else { - ors.add(exp); - } - if (zoneId == null) { - zoneId = eq.zoneId(); - } - } else if (exp instanceof In in) { - found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list()); - if (zoneId == null) { - zoneId = in.zoneId(); - } - } else { - ors.add(exp); - } - } - - if (found.isEmpty() == false) { - // combine equals alongside the existing ors - final ZoneId finalZoneId = zoneId; - found.forEach( - (k, v) -> { ors.add(v.size() == 1 ? 
createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); } - ); - - // TODO: this makes a QL `or`, not an ESQL `or` - Expression combineOr = combineOr(ors); - // check the result semantically since the result might different in order - // but be actually the same which can trigger a loop - // e.g. a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle - if (e.semanticEquals(combineOr) == false) { - e = combineOr; - } - } - - return e; - } - } - - /** - * This rule must always be placed after {@link LiteralsOnTheRight} - * since it looks at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. - */ - public static final class BooleanFunctionEqualsElimination extends - org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { - - BooleanFunctionEqualsElimination() { - super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); - } - - @Override - protected Expression rule(BinaryComparison bc) { - if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { - // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant - - // TODO: Replace use of QL Not with ESQL Not - if (TRUE.equals(bc.right())) { - return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left()); - } - if (FALSE.equals(bc.right())) { - return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left(); - } - } - - return bc; - } - } - - /** - * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. - * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. - * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. 
- * - */ - public static final class PropagateEquals extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule< - BinaryLogic> { - - PropagateEquals() { - super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN); - } - - public Expression rule(BinaryLogic e) { - if (e instanceof And) { - return propagate((And) e); - } else if (e instanceof Or) { - return propagate((Or) e); - } - return e; - } - - // combine conjunction - private static Expression propagate(And and) { - List ranges = new ArrayList<>(); - // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; - // the others go into the general 'exps'. - // TODO: In 105217, this should change to EsqlBinaryComparison, but it doesn't exist in this branch yet - List equals = new ArrayList<>(); - List notEquals = new ArrayList<>(); - List inequalities = new ArrayList<>(); - List exps = new ArrayList<>(); - - boolean changed = false; - - for (Expression ex : Predicates.splitAnd(and)) { - if (ex instanceof Range) { - ranges.add((Range) ex); - } else if (ex instanceof Equals otherEq) { - // equals on different values evaluate to FALSE - // ignore date/time fields as equality comparison might actually be a range check - if (otherEq.right().foldable() && DataType.isDateTime(otherEq.left().dataType()) == false) { - for (BinaryComparison eq : equals) { - if (otherEq.left().semanticEquals(eq.left())) { - Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); - if (comp != null) { - // var cannot be equal to two different values at the same time - if (comp != 0) { - return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); - } - } - } - } - equals.add(otherEq); - } else { - exps.add(otherEq); - } - } else if (ex instanceof GreaterThan - || ex instanceof GreaterThanOrEqual - || ex instanceof LessThan - || ex instanceof LessThanOrEqual) { - BinaryComparison bc = (BinaryComparison) ex; 
- if (bc.right().foldable()) { - inequalities.add(bc); - } else { - exps.add(ex); - } - } else if (ex instanceof NotEquals otherNotEq) { - if (otherNotEq.right().foldable()) { - notEquals.add(otherNotEq); - } else { - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - // check - for (BinaryComparison eq : equals) { - Object eqValue = eq.right().fold(); - - for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { - Range range = iterator.next(); - - if (range.value().semanticEquals(eq.left())) { - // if equals is outside the interval, evaluate the whole expression to FALSE - if (range.lower().foldable()) { - Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); - if (compare != null && ( - // eq outside the lower boundary - compare > 0 || - // eq matches the boundary but should not be included - (compare == 0 && range.includeLower() == false))) { - return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); - } - } - if (range.upper().foldable()) { - Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); - if (compare != null && ( - // eq outside the upper boundary - compare < 0 || - // eq matches the boundary but should not be included - (compare == 0 && range.includeUpper() == false))) { - return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); - } - } - - // it's in the range and thus, remove it - iterator.remove(); - changed = true; - } - } - - // evaluate all NotEquals against the Equal - for (Iterator iter = notEquals.iterator(); iter.hasNext();) { - NotEquals neq = iter.next(); - if (eq.left().semanticEquals(neq.left())) { - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); - if (comp != null) { - if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 - return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); - } else { // clashing and redundant: a = 1 AND a != 2 - iter.remove(); - changed = true; - } - } - } - } - - // evaluate all inequalities 
against the Equal - for (Iterator iter = inequalities.iterator(); iter.hasNext();) { - BinaryComparison bc = iter.next(); - if (eq.left().semanticEquals(bc.left())) { - Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); - if (compare != null) { - if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a />= ? - if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2 - compare < 0) { // a = 2 AND a >/>= 3 - return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); - } - } - - iter.remove(); - changed = true; - } - } - } - } - - return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and; - } - - // combine disjunction: - // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1 - // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop - // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop - // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 - private static Expression propagate(Or or) { - List exps = new ArrayList<>(); - List equals = new ArrayList<>(); // foldable right term Equals - List notEquals = new ArrayList<>(); // foldable right term NotEquals - List ranges = new ArrayList<>(); - List inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparision - - // split expressions by type - for (Expression ex : Predicates.splitOr(or)) { - if (ex instanceof Equals eq) { - if (eq.right().foldable()) { - equals.add(eq); - } else { - exps.add(ex); - } - } else if (ex instanceof NotEquals neq) { - if (neq.right().foldable()) { - notEquals.add(neq); - } else { - exps.add(ex); - } - } else if (ex instanceof Range) { - ranges.add((Range) ex); - } else if (ex instanceof BinaryComparison bc) { - if (bc.right().foldable()) { - inequalities.add(bc); - } else { - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - boolean updated = false; // has the expression been modified? 
- - // evaluate the impact of each Equal over the different types of Expressions - for (Iterator iterEq = equals.iterator(); iterEq.hasNext();) { - Equals eq = iterEq.next(); - Object eqValue = eq.right().fold(); - boolean removeEquals = false; - - // Equals OR NotEquals - for (NotEquals neq : notEquals) { - if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); - if (comp != null) { - if (comp == 0) { // a = 2 OR a != 2 -> TRUE - return TRUE; - } else { // a = 2 OR a != 5 -> a != 5 - removeEquals = true; - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - continue; - } - - // Equals OR Range - for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop - Range range = ranges.get(i); - if (eq.left().semanticEquals(range.value())) { - Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; - Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; - - if (lowerComp != null && lowerComp == 0) { - if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - true, - range.upper(), - range.includeUpper(), - range.zoneId() - ) - ); - } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ? - removeEquals = true; // update range with lower equality instead or simply superfluous - break; - } else if (upperComp != null && upperComp == 0) { - if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ? < a <= 2 - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - range.includeLower(), - range.upper(), - true, - range.zoneId() - ) - ); - } // else : a = 2 OR ? < a <= 2 -> ? 
< a <= 2 - removeEquals = true; // update range with upper equality instead - break; - } else if (lowerComp != null && upperComp != null) { - if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3 - removeEquals = true; // equality is superfluous - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - continue; - } - - // Equals OR Inequality - for (int i = 0; i < inequalities.size(); i++) { - BinaryComparison bc = inequalities.get(i); - if (eq.left().semanticEquals(bc.left())) { - Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); - if (comp != null) { - if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { - if (comp < 0) { // a = 1 OR a > 2 -> nop - continue; - } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2 - inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else (0 < comp || bc instanceof GreaterThanOrEqual) : - // a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2 - - removeEquals = true; // update range with equality instead or simply superfluous - break; - } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { - if (comp > 0) { // a = 2 OR a < 1 -> nop - continue; - } - if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2 - inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2 - removeEquals = true; // update range with equality instead or simply superfluous - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - } - } - - return updated ? 
Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or; - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java new file mode 100644 index 000000000000..cf62f9219f3c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; +import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; + +/** + * This rule must always be placed after {@link LiteralsOnTheRight} + * since it looks at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. 
+ */ +public final class BooleanFunctionEqualsElimination extends + org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { + + public BooleanFunctionEqualsElimination() { + super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); + } + + @Override + public Expression rule(BinaryComparison bc) { + if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { + // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant + + // TODO: Replace use of QL Not with ESQL Not + if (TRUE.equals(bc.right())) { + return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left()); + } + if (FALSE.equals(bc.right())) { + return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left(); + } + } + + return bc; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java new file mode 100644 index 000000000000..5cc3184d9ea7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; + +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.combineOr; +import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitOr; + +/** + * Combine disjunctions on the same field into an In expression. + * This rule looks for both simple equalities: + * 1. a == 1 OR a == 2 becomes a IN (1, 2) + * and combinations of In + * 2. a == 1 OR a IN (2) becomes a IN (1, 2) + * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) + *

+ * This rule does NOT check for type compatibility as that phase has been + * already be verified in the analyzer. + */ +public class CombineDisjunctionsToIn extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { + public CombineDisjunctionsToIn() { + super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); + } + + protected In createIn(Expression key, List values, ZoneId zoneId) { + return new In(key.source(), key, values); + } + + protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { + return new Equals(k.source(), k, v.iterator().next(), finalZoneId); + } + + @Override + public Expression rule(Or or) { + Expression e = or; + // look only at equals and In + List exps = splitOr(e); + + Map> found = new LinkedHashMap<>(); + ZoneId zoneId = null; + List ors = new LinkedList<>(); + + for (Expression exp : exps) { + if (exp instanceof Equals eq) { + // consider only equals against foldables + if (eq.right().foldable()) { + found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); + } else { + ors.add(exp); + } + if (zoneId == null) { + zoneId = eq.zoneId(); + } + } else if (exp instanceof In in) { + found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list()); + if (zoneId == null) { + zoneId = in.zoneId(); + } + } else { + ors.add(exp); + } + } + + if (found.isEmpty() == false) { + // combine equals alongside the existing ors + final ZoneId finalZoneId = zoneId; + found.forEach( + (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); } + ); + + // TODO: this makes a QL `or`, not an ESQL `or` + Expression combineOr = combineOr(ors); + // check the result semantically since the result might different in order + // but be actually the same which can trigger a loop + // e.g. 
a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle + if (e.semanticEquals(combineOr) == false) { + e = combineOr; + } + } + + return e; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java new file mode 100644 index 000000000000..5f08363abdba --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java @@ -0,0 +1,348 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.core.expression.predicate.Range; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; +import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; + +/** + * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. + * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. + * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. + */ +public final class PropagateEquals extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { + + public PropagateEquals() { + super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN); + } + + public Expression rule(BinaryLogic e) { + if (e instanceof And) { + return propagate((And) e); + } else if (e instanceof Or) { + return propagate((Or) e); + } + return e; + } + + // combine conjunction + private static Expression propagate(And and) { + List ranges = new ArrayList<>(); + // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; + // the others go into the general 'exps'. 
+ // TODO: In 105217, this should change to EsqlBinaryComparison, but it doesn't exist in this branch yet + List equals = new ArrayList<>(); + List notEquals = new ArrayList<>(); + List inequalities = new ArrayList<>(); + List exps = new ArrayList<>(); + + boolean changed = false; + + for (Expression ex : Predicates.splitAnd(and)) { + if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof Equals otherEq) { + // equals on different values evaluate to FALSE + // ignore date/time fields as equality comparison might actually be a range check + if (otherEq.right().foldable() && DataType.isDateTime(otherEq.left().dataType()) == false) { + for (BinaryComparison eq : equals) { + if (otherEq.left().semanticEquals(eq.left())) { + Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); + if (comp != null) { + // var cannot be equal to two different values at the same time + if (comp != 0) { + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + } + } + equals.add(otherEq); + } else { + exps.add(otherEq); + } + } else if (ex instanceof GreaterThan + || ex instanceof GreaterThanOrEqual + || ex instanceof LessThan + || ex instanceof LessThanOrEqual) { + BinaryComparison bc = (BinaryComparison) ex; + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals otherNotEq) { + if (otherNotEq.right().foldable()) { + notEquals.add(otherNotEq); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + } + + // check + for (BinaryComparison eq : equals) { + Object eqValue = eq.right().fold(); + + for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { + Range range = iterator.next(); + + if (range.value().semanticEquals(eq.left())) { + // if equals is outside the interval, evaluate the whole expression to FALSE + if (range.lower().foldable()) { + Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); + if (compare 
!= null && ( + // eq outside the lower boundary + compare > 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeLower() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + if (range.upper().foldable()) { + Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); + if (compare != null && ( + // eq outside the upper boundary + compare < 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeUpper() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + + // it's in the range and thus, remove it + iterator.remove(); + changed = true; + } + } + + // evaluate all NotEquals against the Equal + for (Iterator iter = notEquals.iterator(); iter.hasNext();) { + NotEquals neq = iter.next(); + if (eq.left().semanticEquals(neq.left())) { + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } else { // clashing and redundant: a = 1 AND a != 2 + iter.remove(); + changed = true; + } + } + } + } + + // evaluate all inequalities against the Equal + for (Iterator iter = inequalities.iterator(); iter.hasNext();) { + BinaryComparison bc = iter.next(); + if (eq.left().semanticEquals(bc.left())) { + Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); + if (compare != null) { + if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a />= ? + if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2 + compare < 0) { // a = 2 AND a >/>= 3 + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + + iter.remove(); + changed = true; + } + } + } + } + + return changed ? 
Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and; + } + + // combine disjunction: + // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1 + // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop + // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop + // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 + private static Expression propagate(Or or) { + List exps = new ArrayList<>(); + List equals = new ArrayList<>(); // foldable right term Equals + List notEquals = new ArrayList<>(); // foldable right term NotEquals + List ranges = new ArrayList<>(); + List inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparision + + // split expressions by type + for (Expression ex : Predicates.splitOr(or)) { + if (ex instanceof Equals eq) { + if (eq.right().foldable()) { + equals.add(eq); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals neq) { + if (neq.right().foldable()) { + notEquals.add(neq); + } else { + exps.add(ex); + } + } else if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof BinaryComparison bc) { + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + } + + boolean updated = false; // has the expression been modified? + + // evaluate the impact of each Equal over the different types of Expressions + for (Iterator iterEq = equals.iterator(); iterEq.hasNext();) { + Equals eq = iterEq.next(); + Object eqValue = eq.right().fold(); + boolean removeEquals = false; + + // Equals OR NotEquals + for (NotEquals neq : notEquals) { + if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... 
+ Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // a = 2 OR a != 2 -> TRUE + return TRUE; + } else { // a = 2 OR a != 5 -> a != 5 + removeEquals = true; + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + continue; + } + + // Equals OR Range + for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop + Range range = ranges.get(i); + if (eq.left().semanticEquals(range.value())) { + Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; + Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; + + if (lowerComp != null && lowerComp == 0) { + if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? + ranges.set( + i, + new Range( + range.source(), + range.value(), + range.lower(), + true, + range.upper(), + range.includeUpper(), + range.zoneId() + ) + ); + } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ? + removeEquals = true; // update range with lower equality instead or simply superfluous + break; + } else if (upperComp != null && upperComp == 0) { + if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ? < a <= 2 + ranges.set( + i, + new Range( + range.source(), + range.value(), + range.lower(), + range.includeLower(), + range.upper(), + true, + range.zoneId() + ) + ); + } // else : a = 2 OR ? < a <= 2 -> ? 
< a <= 2 + removeEquals = true; // update range with upper equality instead + break; + } else if (lowerComp != null && upperComp != null) { + if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3 + removeEquals = true; // equality is superfluous + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + continue; + } + + // Equals OR Inequality + for (int i = 0; i < inequalities.size(); i++) { + BinaryComparison bc = inequalities.get(i); + if (eq.left().semanticEquals(bc.left())) { + Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); + if (comp != null) { + if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { + if (comp < 0) { // a = 1 OR a > 2 -> nop + continue; + } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2 + inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); + } // else (0 < comp || bc instanceof GreaterThanOrEqual) : + // a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2 + + removeEquals = true; // update range with equality instead or simply superfluous + break; + } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { + if (comp > 0) { // a = 2 OR a < 1 -> nop + continue; + } + if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2 + inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); + } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2 + removeEquals = true; // update range with equality instead or simply superfluous + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + } + } + + return updated ? 
Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index fb4f35b7173c..a829808ee040 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -50,8 +50,11 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Les import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.optimizer.rules.BooleanFunctionEqualsElimination; +import org.elasticsearch.xpack.esql.optimizer.rules.CombineDisjunctionsToIn; import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; +import org.elasticsearch.xpack.esql.optimizer.rules.PropagateEquals; import java.util.List; @@ -189,7 +192,7 @@ public class OptimizerRulesTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -200,7 +203,7 @@ public class OptimizerRulesTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new 
CombineDisjunctionsToIn().rule(or); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); @@ -211,7 +214,7 @@ public class OptimizerRulesTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, List.of(TWO))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -222,7 +225,7 @@ public class OptimizerRulesTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -234,7 +237,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals equals = equalsOf(fa, ONE); Or or = new Or(EMPTY, equals, new In(EMPTY, fa, List.of(ONE))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(equals, e); } @@ -243,7 +246,7 @@ public class OptimizerRulesTests extends ESTestCase { And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); Filter dummy = new Filter(EMPTY, relation(), and); - LogicalPlan transformed = new OptimizerRules.CombineDisjunctionsToIn().apply(dummy); + LogicalPlan transformed = new CombineDisjunctionsToIn().apply(dummy); assertSame(dummy, transformed); assertEquals(and, ((Filter) transformed).condition()); } @@ -253,7 +256,7 @@ public class OptimizerRulesTests extends ESTestCase { FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + 
Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(or, e); } @@ -262,7 +265,7 @@ public class OptimizerRulesTests extends ESTestCase { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TWO))); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + Expression e = new CombineDisjunctionsToIn().rule(secondOr); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -274,7 +277,7 @@ public class OptimizerRulesTests extends ESTestCase { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), lessThanOf(fa, TWO)); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + Expression e = new CombineDisjunctionsToIn().rule(secondOr); assertEquals(Or.class, e.getClass()); Or or = (Or) e; assertEquals(or.left(), firstOr.right()); @@ -286,7 +289,7 @@ public class OptimizerRulesTests extends ESTestCase { // Test BooleanFunctionEqualsElimination public void testBoolEqualsSimplificationOnExpressions() { - OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); + BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), new Literal(EMPTY, 0, DataType.INTEGER), null); assertEquals(exp, s.rule(new Equals(EMPTY, exp, TRUE))); @@ -295,7 +298,7 @@ public class OptimizerRulesTests extends ESTestCase { } public void testBoolEqualsSimplificationOnFields() { - OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); + BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); FieldAttribute field = getFieldAttribute(); @@ -323,7 +326,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq1 = equalsOf(fa, 
ONE); Equals eq2 = equalsOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); assertEquals(FALSE, exp); } @@ -334,7 +337,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq1 = equalsOf(fa, new Literal(EMPTY, 10, DataType.INTEGER)); Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataType.INTEGER), false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq1, r)); assertEquals(FALSE, exp); } @@ -345,7 +348,7 @@ public class OptimizerRulesTests extends ESTestCase { NotEquals neq = notEqualsOf(fa, THREE); Equals eq = equalsOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, neq, eq)); assertEquals(FALSE, exp); } @@ -356,7 +359,7 @@ public class OptimizerRulesTests extends ESTestCase { NotEquals neq = notEqualsOf(fa, FOUR); Equals eq = equalsOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, neq, eq)); assertEquals(Equals.class, exp.getClass()); assertEquals(eq, exp); @@ -368,7 +371,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThan lt = lessThanOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, lt)); assertEquals(FALSE, exp); } @@ -379,7 +382,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThanOrEqual lt = lessThanOrEqualOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new 
OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, lt)); assertEquals(eq, exp); } @@ -390,7 +393,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThanOrEqual lt = lessThanOrEqualOf(fa, ONE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, lt)); assertEquals(FALSE, exp); } @@ -401,7 +404,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, gt)); assertEquals(FALSE, exp); } @@ -412,7 +415,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, gte)); assertEquals(eq, exp); } @@ -423,7 +426,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, gt)); assertEquals(FALSE, exp); } @@ -436,7 +439,7 @@ public class OptimizerRulesTests extends ESTestCase { GreaterThan gt = greaterThanOf(fa, ONE); NotEquals neq = notEqualsOf(fa, FOUR); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression and = Predicates.combineAnd(asList(eq, lt, gt, neq)); Expression exp = rule.rule((And) and); assertEquals(eq, exp); @@ 
-450,7 +453,7 @@ public class OptimizerRulesTests extends ESTestCase { GreaterThan gt = greaterThanOf(fa, new Literal(EMPTY, 0, DataType.INTEGER)); NotEquals neq = notEqualsOf(fa, FOUR); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression and = Predicates.combineAnd(asList(eq, range, gt, neq)); Expression exp = rule.rule((And) and); assertEquals(eq, exp); @@ -462,7 +465,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, ONE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, gt)); assertEquals(gt, exp); } @@ -473,7 +476,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, gt)); assertEquals(GreaterThanOrEqual.class, exp.getClass()); GreaterThanOrEqual gte = (GreaterThanOrEqual) exp; @@ -486,7 +489,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThan lt = lessThanOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, lt)); assertEquals(lt, exp); } @@ -497,7 +500,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, THREE); LessThan lt = lessThanOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, lt)); assertEquals(LessThanOrEqual.class, exp.getClass()); LessThanOrEqual lte = (LessThanOrEqual) exp; @@ -510,7 
+513,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, ONE, false, THREE, false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, range)); assertEquals(range, exp); } @@ -521,7 +524,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, TWO, false, THREE, false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, range)); assertEquals(Range.class, exp.getClass()); Range r = (Range) exp; @@ -537,7 +540,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, THREE); Range range = rangeOf(fa, TWO, false, THREE, false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, range)); assertEquals(Range.class, exp.getClass()); Range r = (Range) exp; @@ -553,7 +556,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, neq)); assertEquals(TRUE, exp); } @@ -564,7 +567,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); NotEquals neq = notEqualsOf(fa, FIVE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, neq)); assertEquals(NotEquals.class, exp.getClass()); NotEquals ne = (NotEquals) exp; @@ -579,7 +582,7 @@ public class OptimizerRulesTests extends ESTestCase { 
GreaterThan gt = greaterThanOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt))); assertEquals(TRUE, exp); } @@ -591,7 +594,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq2 = equalsOf(fa, TWO); And and = new And(EMPTY, eq1, eq2); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(and); assertEquals(and, exp); } @@ -602,7 +605,7 @@ public class OptimizerRulesTests extends ESTestCase { Equals eq1 = equalsOf(fa, ONE); Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataType.INTEGER), false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq1, r)); assertEquals(eq1, exp); } From 50592ec2f5b6abdde012a74d2a588b2a0decc339 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 10 Jun 2024 14:53:28 +0100 Subject: [PATCH 35/58] Add `@UpdateForV9` to `IndexSettingDeprecatedInV7AndRemovedInV8` (#109531) This setting property is not applicable in v9, but we'll need an equivalent one for the 8-to-9 transition. This commit adds the `@UpdateForV9` annotation as a reminder. 
--- .../src/main/java/org/elasticsearch/common/settings/Setting.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 71882e0c6819..e96de685381e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -149,6 +149,7 @@ public class Setting implements ToXContentObject { * Indicates that this index-level setting was deprecated in {@link Version#V_7_17_0} and is * forbidden in indices created from {@link Version#V_8_0_0} onwards. */ + @UpdateForV9 // introduce IndexSettingDeprecatedInV8AndRemovedInV9 to replace this constant IndexSettingDeprecatedInV7AndRemovedInV8, /** From 7cba6c8c166def80134b2f215eea8f49b31698b7 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Mon, 10 Jun 2024 15:57:56 +0200 Subject: [PATCH 36/58] Docs: Fix available update by query operations (#109486) --- docs/reference/docs/update-by-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index bc63fa4e33d0..d470080fc602 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -431,7 +431,7 @@ The update by query operation skips updating the document and increments the `n Set `ctx.op = "delete"` if your script decides that the document should be deleted. The update by query operation deletes the document and increments the `deleted` counter. -Update by query only supports `update`, `noop`, and `delete`. +Update by query only supports `index`, `noop`, and `delete`. Setting `ctx.op` to anything else is an error. Setting any other field in `ctx` is an error. This API only enables you to modify the source of matching documents, you cannot move them. 
From beec35813c4cabf462611dd88c236bebdc0f85f2 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Mon, 10 Jun 2024 07:53:15 -0700 Subject: [PATCH 37/58] Remove explicit references to SyntheticSourceMode.FALLBACK (#109503) --- .../index/mapper/extras/SearchAsYouTypeFieldMapper.java | 5 ----- .../index/mapper/extras/TokenCountFieldMapper.java | 5 ----- .../index/mapper/GeoShapeWithDocValuesFieldMapper.java | 5 ----- .../xpack/spatial/index/mapper/PointFieldMapper.java | 5 ----- .../xpack/spatial/index/mapper/ShapeFieldMapper.java | 5 ----- 5 files changed, 25 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index 878e22312d64..a5e011d5772f 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -762,11 +762,6 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper { return subfieldsAndMultifieldsIterator(); } - @Override - protected SyntheticSourceMode syntheticSourceMode() { - return SyntheticSourceMode.FALLBACK; - } - /** * An analyzer wrapper to add a shingle token filter, an edge ngram token filter or both to its wrapped analyzer. 
When adding an edge * ngrams token filter, it also adds a {@link TrailingShingleTokenFilter} to add extra position increments at the end of the stream diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java index c538c7641a01..831306a8e859 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java @@ -215,9 +215,4 @@ public class TokenCountFieldMapper extends FieldMapper { public FieldMapper.Builder getMergeBuilder() { return new Builder(simpleName()).init(this); } - - @Override - protected SyntheticSourceMode syntheticSourceMode() { - return SyntheticSourceMode.FALLBACK; - } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 44cfa99c003d..a8f437f476ad 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -482,11 +482,6 @@ public class GeoShapeWithDocValuesFieldMapper extends AbstractShapeGeometryField super.checkIncomingMergeType(mergeWith); } - @Override - protected SyntheticSourceMode syntheticSourceMode() { - return SyntheticSourceMode.FALLBACK; - } - public static class GeoShapeDocValuesField extends AbstractScriptFieldFactory implements Field, diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java 
b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 2a2a6ad16467..1657a3bf7fbc 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -180,11 +180,6 @@ public class PointFieldMapper extends AbstractPointGeometryFieldMapper implements ShapeQueryable { private final ShapeQueryPointProcessor queryProcessor; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 4d6a909f96c3..83e434f82959 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -249,11 +249,6 @@ public class ShapeFieldMapper extends AbstractShapeGeometryFieldMapper return (ShapeFieldType) super.fieldType(); } - @Override - protected SyntheticSourceMode syntheticSourceMode() { - return SyntheticSourceMode.FALLBACK; - } - public static class CartesianShapeDocValuesField extends AbstractScriptFieldFactory implements Field, From a9bc30d66e03c076a3f0909e745818d76d240455 Mon Sep 17 00:00:00 2001 From: Jean-Fabrice Bobo <2949987+jeanfabrice@users.noreply.github.com> Date: Mon, 10 Jun 2024 17:51:55 +0200 Subject: [PATCH 38/58] Fix misleading repository-s3 type (#109347) in 8.x, `repository-s3` type has been replaced by `s3` type. Fixing remaining reference to `repository-s3` in the documentation.
--- docs/reference/snapshot-restore/repository-s3.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index d0a1d36e5560..d757a74110ca 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -539,7 +539,7 @@ VPC's internet gateway and not be bandwidth limited by the VPC's NAT instance. ==== S3-compatible services There are a number of storage systems that provide an S3-compatible API, and -the `repository-s3` type allows you to use these systems in place of AWS S3. +the `s3` repository type allows you to use these systems in place of AWS S3. To do so, you should set the `s3.client.CLIENT_NAME.endpoint` setting to the system's endpoint. This setting accepts IP addresses and hostnames and may include a port. For example, the endpoint may be `172.17.0.2` or @@ -552,7 +552,7 @@ you wish to use unsecured HTTP communication instead of HTTPS, set `s3.client.CLIENT_NAME.protocol` to `http`. https://minio.io[MinIO] is an example of a storage system that provides an -S3-compatible API. The `repository-s3` type allows {es} to work with +S3-compatible API. The `s3` repository type allows {es} to work with MinIO-backed repositories as well as repositories stored on AWS S3. Other S3-compatible storage systems may also work with {es}, but these are not covered by the {es} test suite. @@ -562,7 +562,7 @@ which claim to offer an S3-compatible API despite failing to emulate S3's behaviour in full. If you are using such a system for your snapshots, consider using a <> based on a standardized protocol such as NFS to access your storage system instead. -The `repository-s3` type requires full compatibility with S3. In particular it +The `s3` repository type requires full compatibility with S3. 
In particular it must support the same set of API endpoints, with the same parameters, return the same errors in case of failures, and offer consistency and performance at least as good as S3 even when accessed concurrently by multiple nodes. You will From 6dbcd7908a19ee776b98b8ffb97130ee8b21b04f Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Mon, 10 Jun 2024 10:52:21 -0500 Subject: [PATCH 39/58] Fix test by removing unecessary mTLS (#109324) fixes: #108774 --- muted-tests.yml | 3 --- .../xpack/core/ssl/SSLConfigurationReloaderTests.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 5b9478f1e452..d82c823f664b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -5,9 +5,6 @@ tests: - class: "org.elasticsearch.cluster.coordination.CoordinatorVotingConfigurationTests" issue: "https://github.com/elastic/elasticsearch/issues/108729" method: "testClusterUUIDLogging" -- class: "org.elasticsearch.xpack.core.ssl.SSLConfigurationReloaderTests" - issue: "https://github.com/elastic/elasticsearch/issues/108774" - method: "testReloadingKeyStore" - class: "org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT" issue: "https://github.com/elastic/elasticsearch/issues/108808" method: "test {k8s-metrics.MetricsWithAggs}" diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 02f9a58d7947..7b19d53663a0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -130,7 +130,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { // Load HTTPClient only once. 
Client uses the same store as a truststore try (CloseableHttpClient client = getSSLClient(keystorePath, "testnode")) { final Consumer keyMaterialPreChecks = (context) -> { - try (MockWebServer server = new MockWebServer(context, true)) { + try (MockWebServer server = new MockWebServer(context, false)) { server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); server.start(); privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close()); From 832029a1b547c984afe8834770f974dc2041d1ed Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 10 Jun 2024 17:53:41 +0200 Subject: [PATCH 40/58] Use BytesRef instead of Term on o.e.i.engine.Engine.Operation (#109508) No need to have the `Term` objects here. Creating a `Term` forces copying the id bytes (in addition to the cost of creating the object) and we do not need a term here to begin with. This saves GBs of allocations when running the http_logs indexing benchmark and sets up further simplifications around the id value. 
--- .../uid/PerThreadIDVersionAndSeqNoLookup.java | 13 ++-- .../lucene/uid/VersionsAndSeqNoResolver.java | 38 +++++------- .../elasticsearch/index/engine/Engine.java | 44 +++++-------- .../index/engine/InternalEngine.java | 46 ++++++-------- .../org/elasticsearch/index/mapper/Uid.java | 2 +- .../elasticsearch/index/shard/IndexShard.java | 10 +-- .../common/lucene/uid/VersionLookupTests.java | 16 ++--- .../common/lucene/uid/VersionsTests.java | 46 ++++++-------- .../elasticsearch/index/IndexModuleTests.java | 3 +- .../index/IndexingSlowLogTests.java | 5 +- .../index/engine/InternalEngineTests.java | 61 ++++++++++++------- .../engine/LuceneChangesSnapshotTests.java | 5 +- .../index/engine/NoOpEngineTests.java | 3 +- .../index/engine/ReadOnlyEngineTests.java | 3 +- .../shard/IndexingOperationListenerTests.java | 5 +- .../index/shard/RefreshListenersTests.java | 6 +- .../index/translog/TranslogTests.java | 6 +- .../index/engine/EngineTestCase.java | 12 ++-- .../index/engine/FollowingEngineTests.java | 4 +- 19 files changed, 144 insertions(+), 184 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java b/server/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java index 52ddaa9a8758..43924eb86f12 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo; import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion; +import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; @@ -45,8 +46,6 @@ final 
class PerThreadIDVersionAndSeqNoLookup { // TODO: do we really need to store all this stuff? some if it might not speed up anything. // we keep it around for now, to reduce the amount of e.g. hash lookups by field and stuff - /** terms enum for uid field */ - final String uidField; private final TermsEnum termsEnum; /** Reused for iteration (when the term exists) */ @@ -62,10 +61,8 @@ final class PerThreadIDVersionAndSeqNoLookup { /** * Initialize lookup for the provided segment */ - PerThreadIDVersionAndSeqNoLookup(LeafReader reader, String uidField, boolean trackReaderKey, boolean loadTimestampRange) - throws IOException { - this.uidField = uidField; - final Terms terms = reader.terms(uidField); + PerThreadIDVersionAndSeqNoLookup(LeafReader reader, boolean trackReaderKey, boolean loadTimestampRange) throws IOException { + final Terms terms = reader.terms(IdFieldMapper.NAME); if (terms == null) { // If a segment contains only no-ops, it does not have _uid but has both _soft_deletes and _tombstone fields. final NumericDocValues softDeletesDV = reader.getNumericDocValues(Lucene.SOFT_DELETES_FIELD); @@ -107,8 +104,8 @@ final class PerThreadIDVersionAndSeqNoLookup { } } - PerThreadIDVersionAndSeqNoLookup(LeafReader reader, String uidField, boolean loadTimestampRange) throws IOException { - this(reader, uidField, true, loadTimestampRange); + PerThreadIDVersionAndSeqNoLookup(LeafReader reader, boolean loadTimestampRange) throws IOException { + this(reader, true, loadTimestampRange); } /** Return null if id is not found. 
diff --git a/server/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java b/server/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java index 56c0869992ba..1743343b44bf 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java @@ -11,7 +11,7 @@ package org.elasticsearch.common.lucene.uid; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -20,7 +20,6 @@ import org.elasticsearch.core.Assertions; import java.io.IOException; import java.util.Base64; import java.util.List; -import java.util.Objects; import java.util.concurrent.ConcurrentMap; /** Utility class to resolve the Lucene doc ID, version, seqNo and primaryTerms for a given uid. 
*/ @@ -37,8 +36,7 @@ public final class VersionsAndSeqNoResolver { } }; - private static PerThreadIDVersionAndSeqNoLookup[] getLookupState(IndexReader reader, String uidField, boolean loadTimestampRange) - throws IOException { + private static PerThreadIDVersionAndSeqNoLookup[] getLookupState(IndexReader reader, boolean loadTimestampRange) throws IOException { // We cache on the top level // This means cache entries have a shorter lifetime, maybe as low as 1s with the // default refresh interval and a steady indexing rate, but on the other hand it @@ -63,7 +61,7 @@ public final class VersionsAndSeqNoResolver { if (lookupState == null) { lookupState = new PerThreadIDVersionAndSeqNoLookup[reader.leaves().size()]; for (LeafReaderContext leaf : reader.leaves()) { - lookupState[leaf.ord] = new PerThreadIDVersionAndSeqNoLookup(leaf.reader(), uidField, loadTimestampRange); + lookupState[leaf.ord] = new PerThreadIDVersionAndSeqNoLookup(leaf.reader(), loadTimestampRange); } ctl.set(lookupState); } else { @@ -87,12 +85,6 @@ public final class VersionsAndSeqNoResolver { throw new AssertionError("Mismatched numbers of leaves: " + lookupState.length + " != " + reader.leaves().size()); } - if (lookupState.length > 0 && Objects.equals(lookupState[0].uidField, uidField) == false) { - throw new AssertionError( - "Index does not consistently use the same uid field: [" + uidField + "] != [" + lookupState[0].uidField + "]" - ); - } - return lookupState; } @@ -136,15 +128,15 @@ public final class VersionsAndSeqNoResolver { *

  • a doc ID and a version otherwise * */ - public static DocIdAndVersion timeSeriesLoadDocIdAndVersion(IndexReader reader, Term term, boolean loadSeqNo) throws IOException { - PerThreadIDVersionAndSeqNoLookup[] lookups = getLookupState(reader, term.field(), false); + public static DocIdAndVersion timeSeriesLoadDocIdAndVersion(IndexReader reader, BytesRef term, boolean loadSeqNo) throws IOException { + PerThreadIDVersionAndSeqNoLookup[] lookups = getLookupState(reader, false); List leaves = reader.leaves(); // iterate backwards to optimize for the frequently updated documents // which are likely to be in the last segments for (int i = leaves.size() - 1; i >= 0; i--) { final LeafReaderContext leaf = leaves.get(i); PerThreadIDVersionAndSeqNoLookup lookup = lookups[leaf.ord]; - DocIdAndVersion result = lookup.lookupVersion(term.bytes(), loadSeqNo, leaf); + DocIdAndVersion result = lookup.lookupVersion(term, loadSeqNo, leaf); if (result != null) { return result; } @@ -168,7 +160,7 @@ public final class VersionsAndSeqNoResolver { * returning null if no document was found for the specified id * @throws IOException In case of an i/o related failure */ - public static DocIdAndVersion timeSeriesLoadDocIdAndVersion(IndexReader reader, Term uid, String id, boolean loadSeqNo) + public static DocIdAndVersion timeSeriesLoadDocIdAndVersion(IndexReader reader, BytesRef uid, String id, boolean loadSeqNo) throws IOException { byte[] idAsBytes = Base64.getUrlDecoder().decode(id); assert idAsBytes.length == 20; @@ -176,7 +168,7 @@ public final class VersionsAndSeqNoResolver { // @timestamp) long timestamp = ByteUtils.readLongBE(idAsBytes, 12); - PerThreadIDVersionAndSeqNoLookup[] lookups = getLookupState(reader, uid.field(), true); + PerThreadIDVersionAndSeqNoLookup[] lookups = getLookupState(reader, true); List leaves = reader.leaves(); // iterate in default order, the segments should be sorted by DataStream#TIMESERIES_LEAF_READERS_SORTER long prevMaxTimestamp = Long.MAX_VALUE; @@ 
-190,7 +182,7 @@ public final class VersionsAndSeqNoResolver { if (timestamp > lookup.maxTimestamp) { return null; } - DocIdAndVersion result = lookup.lookupVersion(uid.bytes(), loadSeqNo, leaf); + DocIdAndVersion result = lookup.lookupVersion(uid, loadSeqNo, leaf); if (result != null) { return result; } @@ -199,12 +191,12 @@ public final class VersionsAndSeqNoResolver { return null; } - public static DocIdAndVersion loadDocIdAndVersionUncached(IndexReader reader, Term term, boolean loadSeqNo) throws IOException { + public static DocIdAndVersion loadDocIdAndVersionUncached(IndexReader reader, BytesRef term, boolean loadSeqNo) throws IOException { List leaves = reader.leaves(); for (int i = leaves.size() - 1; i >= 0; i--) { final LeafReaderContext leaf = leaves.get(i); - PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(leaf.reader(), term.field(), false, false); - DocIdAndVersion result = lookup.lookupVersion(term.bytes(), loadSeqNo, leaf); + PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(leaf.reader(), false, false); + DocIdAndVersion result = lookup.lookupVersion(term, loadSeqNo, leaf); if (result != null) { return result; } @@ -216,15 +208,15 @@ public final class VersionsAndSeqNoResolver { * Loads the internal docId and sequence number of the latest copy for a given uid from the provided reader. * The result is either null or the live and latest version of the given uid. 
*/ - public static DocIdAndSeqNo loadDocIdAndSeqNo(IndexReader reader, Term term) throws IOException { - final PerThreadIDVersionAndSeqNoLookup[] lookups = getLookupState(reader, term.field(), false); + public static DocIdAndSeqNo loadDocIdAndSeqNo(IndexReader reader, BytesRef term) throws IOException { + final PerThreadIDVersionAndSeqNoLookup[] lookups = getLookupState(reader, false); final List leaves = reader.leaves(); // iterate backwards to optimize for the frequently updated documents // which are likely to be in the last segments for (int i = leaves.size() - 1; i >= 0; i--) { final LeafReaderContext leaf = leaves.get(i); final PerThreadIDVersionAndSeqNoLookup lookup = lookups[leaf.ord]; - final DocIdAndSeqNo result = lookup.lookupSeqNo(term.bytes(), leaf); + final DocIdAndSeqNo result = lookup.lookupSeqNo(term, leaf); if (result != null) { return result; } diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index c219e16659c9..4f461a5d51c7 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -23,13 +23,13 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentReader; -import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -59,7 +59,6 @@ import org.elasticsearch.core.UpdateForV9; import 
org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.DocumentParser; -import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; @@ -1496,7 +1495,7 @@ public abstract class Engine implements Closeable { } } - private final Term uid; + private final BytesRef uid; private final long version; private final long seqNo; private final long primaryTerm; @@ -1504,7 +1503,7 @@ public abstract class Engine implements Closeable { private final Origin origin; private final long startTime; - public Operation(Term uid, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime) { + public Operation(BytesRef uid, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime) { this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; @@ -1534,7 +1533,7 @@ public abstract class Engine implements Closeable { return this.origin; } - public Term uid() { + public BytesRef uid() { return this.uid; } @@ -1577,7 +1576,7 @@ public abstract class Engine implements Closeable { private final long ifPrimaryTerm; public Index( - Term uid, + BytesRef uid, ParsedDocument doc, long seqNo, long primaryTerm, @@ -1603,11 +1602,11 @@ public abstract class Engine implements Closeable { this.ifPrimaryTerm = ifPrimaryTerm; } - public Index(Term uid, long primaryTerm, ParsedDocument doc) { + public Index(BytesRef uid, long primaryTerm, ParsedDocument doc) { this(uid, primaryTerm, doc, Versions.MATCH_ANY); } // TEST ONLY - Index(Term uid, long primaryTerm, ParsedDocument doc, long version) { + Index(BytesRef uid, long primaryTerm, ParsedDocument doc, long version) { this( uid, doc, @@ -1689,7 +1688,7 @@ public abstract class Engine implements Closeable { public Delete( String id, - Term uid, + BytesRef uid, 
long seqNo, long primaryTerm, long version, @@ -1710,7 +1709,7 @@ public abstract class Engine implements Closeable { this.ifPrimaryTerm = ifPrimaryTerm; } - public Delete(String id, Term uid, long primaryTerm) { + public Delete(String id, BytesRef uid, long primaryTerm) { this( id, uid, @@ -1725,21 +1724,6 @@ public abstract class Engine implements Closeable { ); } - public Delete(Delete template, VersionType versionType) { - this( - template.id(), - template.uid(), - template.seqNo(), - template.primaryTerm(), - template.version(), - versionType, - template.origin(), - template.startTime(), - UNASSIGNED_SEQ_NO, - 0 - ); - } - @Override public String id() { return this.id; @@ -1752,7 +1736,7 @@ public abstract class Engine implements Closeable { @Override public int estimatedSizeInBytes() { - return (uid().field().length() + uid().text().length()) * 2 + 20; + return uid().length * 2 + 20; } public long getIfSeqNo() { @@ -1778,7 +1762,7 @@ public abstract class Engine implements Closeable { } @Override - public Term uid() { + public BytesRef uid() { throw new UnsupportedOperationException(); } @@ -1811,7 +1795,7 @@ public abstract class Engine implements Closeable { public static class Get { private final boolean realtime; - private final Term uid; + private final BytesRef uid; private final String id; private final boolean readFromTranslog; private long version = Versions.MATCH_ANY; @@ -1822,7 +1806,7 @@ public abstract class Engine implements Closeable { public Get(boolean realtime, boolean readFromTranslog, String id) { this.realtime = realtime; this.id = id; - this.uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); + this.uid = Uid.encodeId(id); this.readFromTranslog = readFromTranslog; } @@ -1834,7 +1818,7 @@ public abstract class Engine implements Closeable { return id; } - public Term uid() { + public BytesRef uid() { return uid; } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java 
b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 58bffa0369f4..245cef2d97b2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -838,7 +838,6 @@ public class InternalEngine extends Engine { DocumentParser documentParser, Function searcherWrapper ) { - assert assertGetUsesIdField(get); try (var ignored = acquireEnsureOpenRef()) { if (get.realtime()) { var result = realtimeGetUnderLock(get, mappingLookup, documentParser, searcherWrapper, true); @@ -858,7 +857,6 @@ public class InternalEngine extends Engine { DocumentParser documentParser, Function searcherWrapper ) { - assert assertGetUsesIdField(get); try (var ignored = acquireEnsureOpenRef()) { return realtimeGetUnderLock(get, mappingLookup, documentParser, searcherWrapper, false); } @@ -878,9 +876,9 @@ public class InternalEngine extends Engine { assert isDrainedForClose() == false; assert get.realtime(); final VersionValue versionValue; - try (Releasable ignore = versionMap.acquireLock(get.uid().bytes())) { + try (Releasable ignore = versionMap.acquireLock(get.uid())) { // we need to lock here to access the version map to do this truly in RT - versionValue = getVersionFromMap(get.uid().bytes()); + versionValue = getVersionFromMap(get.uid()); } try { boolean getFromSearcherIfNotInTranslog = getFromSearcher; @@ -982,7 +980,7 @@ public class InternalEngine extends Engine { private OpVsLuceneDocStatus compareOpToLuceneDocBasedOnSeqNo(final Operation op) throws IOException { assert op.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO : "resolving ops based on seq# but no seqNo is found"; final OpVsLuceneDocStatus status; - VersionValue versionValue = getVersionFromMap(op.uid().bytes()); + VersionValue versionValue = getVersionFromMap(op.uid()); assert incrementVersionLookup(); if (versionValue != null) { status = compareOpToVersionMapOnSeqNo(op.id(), op.seqNo(), op.primaryTerm(), 
versionValue); @@ -1010,7 +1008,7 @@ public class InternalEngine extends Engine { /** resolves the current version of the document, returning null if not found */ private VersionValue resolveDocVersion(final Operation op, boolean loadSeqNo) throws IOException { assert incrementVersionLookup(); // used for asserting in tests - VersionValue versionValue = getVersionFromMap(op.uid().bytes()); + VersionValue versionValue = getVersionFromMap(op.uid()); if (versionValue == null) { assert incrementIndexVersionLookup(); // used for asserting in tests final VersionsAndSeqNoResolver.DocIdAndVersion docIdAndVersion; @@ -1139,13 +1137,12 @@ public class InternalEngine extends Engine { @Override public IndexResult index(Index index) throws IOException { - assert Objects.equals(index.uid().field(), IdFieldMapper.NAME) : index.uid().field(); final boolean doThrottle = index.origin().isRecovery() == false; try (var ignored1 = acquireEnsureOpenRef()) { assert assertIncomingSequenceNumber(index.origin(), index.seqNo()); int reservedDocs = 0; try ( - Releasable ignored = versionMap.acquireLock(index.uid().bytes()); + Releasable ignored = versionMap.acquireLock(index.uid()); Releasable indexThrottle = doThrottle ? throttle.acquireThrottle() : () -> {} ) { lastWriteNanos = index.startTime(); @@ -1245,7 +1242,7 @@ public class InternalEngine extends Engine { if (plan.indexIntoLucene && indexResult.getResultType() == Result.Type.SUCCESS) { final Translog.Location translogLocation = trackTranslogLocation.get() ? 
indexResult.getTranslogLocation() : null; versionMap.maybePutIndexUnderLock( - index.uid().bytes(), + index.uid(), new IndexVersionValue(translogLocation, plan.versionForIndexing, index.seqNo(), index.primaryTerm()) ); } @@ -1569,7 +1566,7 @@ public class InternalEngine extends Engine { private boolean assertDocDoesNotExist(final Index index, final boolean allowDeleted) throws IOException { // NOTE this uses direct access to the version map since we are in the assertion code where we maintain a secondary // map in the version map such that we don't need to refresh if we are unsafe; - final VersionValue versionValue = versionMap.getVersionForAssert(index.uid().bytes()); + final VersionValue versionValue = versionMap.getVersionForAssert(index.uid()); if (versionValue != null) { if (versionValue.isDelete() == false || allowDeleted == false) { throw new AssertionError("doc [" + index.id() + "] exists in version map (version " + versionValue + ")"); @@ -1577,7 +1574,7 @@ public class InternalEngine extends Engine { } else { try (Searcher searcher = acquireSearcher("assert doc doesn't exist", SearcherScope.INTERNAL)) { searcher.setQueryCache(null); // so that it does not interfere with tests that check caching behavior - final long docsWithId = searcher.count(new TermQuery(index.uid())); + final long docsWithId = searcher.count(new TermQuery(new Term(IdFieldMapper.NAME, index.uid()))); if (docsWithId > 0) { throw new AssertionError("doc [" + index.id() + "] exists [" + docsWithId + "] times in index"); } @@ -1586,11 +1583,12 @@ public class InternalEngine extends Engine { return true; } - private void updateDocs(final Term uid, final List docs, final IndexWriter indexWriter) throws IOException { + private void updateDocs(final BytesRef uid, final List docs, final IndexWriter indexWriter) throws IOException { + final Term uidTerm = new Term(IdFieldMapper.NAME, uid); if (docs.size() > 1) { - indexWriter.softUpdateDocuments(uid, docs, softDeletesField); + 
indexWriter.softUpdateDocuments(uidTerm, docs, softDeletesField); } else { - indexWriter.softUpdateDocument(uid, docs.get(0), softDeletesField); + indexWriter.softUpdateDocument(uidTerm, docs.get(0), softDeletesField); } numDocUpdates.inc(docs.size()); } @@ -1598,12 +1596,11 @@ public class InternalEngine extends Engine { @Override public DeleteResult delete(Delete delete) throws IOException { versionMap.enforceSafeAccess(); - assert Objects.equals(delete.uid().field(), IdFieldMapper.NAME) : delete.uid().field(); assert assertIncomingSequenceNumber(delete.origin(), delete.seqNo()); final DeleteResult deleteResult; int reservedDocs = 0; // NOTE: we don't throttle this when merges fall behind because delete-by-id does not create new segments: - try (var ignored = acquireEnsureOpenRef(); Releasable ignored2 = versionMap.acquireLock(delete.uid().bytes())) { + try (var ignored = acquireEnsureOpenRef(); Releasable ignored2 = versionMap.acquireLock(delete.uid())) { lastWriteNanos = delete.startTime(); final DeletionStrategy plan = deletionStrategyForOperation(delete); reservedDocs = plan.reservedDocs; @@ -1647,7 +1644,7 @@ public class InternalEngine extends Engine { if (plan.deleteFromLucene) { numDocDeletes.inc(); versionMap.putDeleteUnderLock( - delete.uid().bytes(), + delete.uid(), new DeleteVersionValue( plan.versionOfDeletion, delete.seqNo(), @@ -1812,7 +1809,7 @@ public class InternalEngine extends Engine { if (plan.addStaleOpToLucene || plan.currentlyDeleted) { indexWriter.addDocument(doc); } else { - indexWriter.softUpdateDocument(delete.uid(), doc, softDeletesField); + indexWriter.softUpdateDocument(new Term(IdFieldMapper.NAME, delete.uid()), doc, softDeletesField); } return new DeleteResult( plan.versionOfDeletion, @@ -3011,7 +3008,7 @@ public class InternalEngine extends Engine { if (op.operationType() == Operation.TYPE.NO_OP) { assert noOpKeyedLock.isHeldByCurrentThread(op.seqNo()); } else { - assert 
versionMap.assertKeyedLockHeldByCurrentThread(op.uid().bytes()); + assert versionMap.assertKeyedLockHeldByCurrentThread(op.uid()); } } return localCheckpointTracker.hasProcessed(op.seqNo()); @@ -3266,11 +3263,11 @@ public class InternalEngine extends Engine { this.maxSeqNoOfUpdatesOrDeletes.accumulateAndGet(maxSeqNoOfUpdatesOnPrimary, Math::max); } - private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean allowDeleted, boolean relaxIfGapInSeqNo) { + private boolean assertMaxSeqNoOfUpdatesIsAdvanced(BytesRef id, long seqNo, boolean allowDeleted, boolean relaxIfGapInSeqNo) { final long maxSeqNoOfUpdates = getMaxSeqNoOfUpdatesOrDeletes(); // We treat a delete on the tombstones on replicas as a regular document, then use updateDocument (not addDocument). if (allowDeleted) { - final VersionValue versionValue = versionMap.getVersionForAssert(id.bytes()); + final VersionValue versionValue = versionMap.getVersionForAssert(id); if (versionValue != null && versionValue.isDelete()) { return true; } @@ -3320,7 +3317,7 @@ public class InternalEngine extends Engine { assert dv.isTombstone(docId); continue; } - final BytesRef uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)).bytes(); + final BytesRef uid = Uid.encodeId(id); try (Releasable ignored = versionMap.acquireLock(uid)) { final VersionValue curr = versionMap.getUnderLock(uid); if (curr == null || compareOpToVersionMapOnSeqNo(id, seqNo, primaryTerm, curr) == OpVsLuceneDocStatus.OP_NEWER) { @@ -3391,11 +3388,6 @@ public class InternalEngine extends Engine { return versionMap; } - private static boolean assertGetUsesIdField(Get get) { - assert Objects.equals(get.uid().field(), IdFieldMapper.NAME) : get.uid().field(); - return true; - } - protected long getPreCommitSegmentGeneration() { return preCommitSegmentGeneration.get(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Uid.java b/server/src/main/java/org/elasticsearch/index/mapper/Uid.java index 05593ee3e99c..84b3a5cb4e73 
100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Uid.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Uid.java @@ -112,7 +112,7 @@ public final class Uid { } private static BytesRef encodeUtf8Id(String id) { - byte[] b = new byte[1 + UnicodeUtil.maxUTF8Length(id.length())]; + byte[] b = new byte[1 + UnicodeUtil.calcUTF16toUTF8Length(id, 0, id.length())]; // Prepend a byte that indicates that the content is an utf8 string b[0] = (byte) UTF8; int length = UnicodeUtil.UTF16toUTF8(id, 0, id.length(), b, 1); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 9c16f62335d5..39044720bea1 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -18,7 +18,6 @@ import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.index.Term; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; @@ -101,7 +100,6 @@ import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.get.ShardGetService; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; @@ -1045,9 +1043,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl // whether mappings were provided or not. 
doc.addDynamicMappingsUpdate(mapping); } - Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(doc.id())); return new Engine.Index( - uid, + Uid.encodeId(doc.id()), doc, seqNo, primaryTerm, @@ -1210,7 +1207,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl final Engine.DeleteResult result; try { if (logger.isTraceEnabled()) { - logger.trace("delete [{}] (seq no [{}])", delete.uid().text(), delete.seqNo()); + logger.trace("delete [{}] (seq no [{}])", delete.uid(), delete.seqNo()); } result = engine.delete(delete); } catch (Exception e) { @@ -1235,8 +1232,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl long ifPrimaryTerm ) { long startTime = System.nanoTime(); - final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); - return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm); + return new Engine.Delete(id, Uid.encodeId(id), seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm); } public Engine.GetResult get(Engine.Get get) { diff --git a/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java b/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java index b463a5ddf11a..b05bdedef7b6 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java @@ -55,7 +55,7 @@ public class VersionLookupTests extends ESTestCase { writer.addDocument(new Document()); DirectoryReader reader = DirectoryReader.open(writer); LeafReaderContext segment = reader.leaves().get(0); - PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, false); + PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false); // found doc DocIdAndVersion result = 
lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment); assertNotNull(result); @@ -68,7 +68,7 @@ public class VersionLookupTests extends ESTestCase { reader.close(); reader = DirectoryReader.open(writer); segment = reader.leaves().get(0); - lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, false); + lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false); assertNull(lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment)); reader.close(); writer.close(); @@ -91,7 +91,7 @@ public class VersionLookupTests extends ESTestCase { writer.addDocument(new Document()); DirectoryReader reader = DirectoryReader.open(writer); LeafReaderContext segment = reader.leaves().get(0); - PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, false); + PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false); // return the last doc when there are duplicates DocIdAndVersion result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment); assertNotNull(result); @@ -102,7 +102,7 @@ public class VersionLookupTests extends ESTestCase { reader.close(); reader = DirectoryReader.open(writer); segment = reader.leaves().get(0); - lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, false); + lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false); result = lookup.lookupVersion(new BytesRef("6"), randomBoolean(), segment); assertNotNull(result); assertEquals(87, result.version); @@ -112,7 +112,7 @@ public class VersionLookupTests extends ESTestCase { reader.close(); reader = DirectoryReader.open(writer); segment = reader.leaves().get(0); - lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, false); + lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false); assertNull(lookup.lookupVersion(new BytesRef("6"), 
randomBoolean(), segment)); reader.close(); writer.close(); @@ -139,12 +139,12 @@ public class VersionLookupTests extends ESTestCase { DirectoryReader reader = DirectoryReader.open(writer); LeafReaderContext segment = reader.leaves().get(0); - PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, true); + PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), true); assertTrue(lookup.loadedTimestampRange); assertEquals(lookup.minTimestamp, 1_000L); assertEquals(lookup.maxTimestamp, 1_000_000L); - lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, false); + lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), false); assertFalse(lookup.loadedTimestampRange); assertEquals(lookup.minTimestamp, 0L); assertEquals(lookup.maxTimestamp, Long.MAX_VALUE); @@ -160,7 +160,7 @@ public class VersionLookupTests extends ESTestCase { writer.addDocument(ParsedDocument.deleteTombstone("_id").docs().get(0)); DirectoryReader reader = DirectoryReader.open(writer); LeafReaderContext segment = reader.leaves().get(0); - PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), IdFieldMapper.NAME, true); + PerThreadIDVersionAndSeqNoLookup lookup = new PerThreadIDVersionAndSeqNoLookup(segment.reader(), true); assertTrue(lookup.loadedTimestampRange); assertEquals(lookup.minTimestamp, 0L); assertEquals(lookup.maxTimestamp, Long.MAX_VALUE); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java index 011a23ddb051..cc1a677f2e2f 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java @@ -57,7 +57,7 @@ public class VersionsTests extends ESTestCase { Directory dir = newDirectory(); 
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()), nullValue()); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()), nullValue()); Document doc = new Document(); doc.add(new StringField(IdFieldMapper.NAME, "1", Field.Store.YES)); @@ -66,7 +66,7 @@ public class VersionsTests extends ESTestCase { doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE))); writer.updateDocument(new Term(IdFieldMapper.NAME, "1"), doc); directoryReader = reopen(directoryReader); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()).version, equalTo(1L)); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()).version, equalTo(1L)); doc = new Document(); Field uid = new StringField(IdFieldMapper.NAME, "1", Field.Store.YES); @@ -77,7 +77,7 @@ public class VersionsTests extends ESTestCase { doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE))); writer.updateDocument(new Term(IdFieldMapper.NAME, "1"), doc); directoryReader = reopen(directoryReader); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()).version, equalTo(2L)); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()).version, equalTo(2L)); // test reuse of uid field doc = new Document(); @@ -89,11 +89,11 @@ public class VersionsTests extends ESTestCase { writer.updateDocument(new Term(IdFieldMapper.NAME, "1"), doc); directoryReader = reopen(directoryReader); - 
assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()).version, equalTo(3L)); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()).version, equalTo(3L)); writer.deleteDocuments(new Term(IdFieldMapper.NAME, "1")); directoryReader = reopen(directoryReader); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()), nullValue()); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()), nullValue()); directoryReader.close(); writer.close(); dir.close(); @@ -121,18 +121,18 @@ public class VersionsTests extends ESTestCase { writer.updateDocuments(new Term(IdFieldMapper.NAME, "1"), docs); DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()).version, equalTo(5L)); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()).version, equalTo(5L)); version.setLongValue(6L); writer.updateDocuments(new Term(IdFieldMapper.NAME, "1"), docs); version.setLongValue(7L); writer.updateDocuments(new Term(IdFieldMapper.NAME, "1"), docs); directoryReader = reopen(directoryReader); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()).version, equalTo(7L)); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()).version, equalTo(7L)); writer.deleteDocuments(new Term(IdFieldMapper.NAME, "1")); directoryReader = reopen(directoryReader); - assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()), nullValue()); + assertThat(timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), randomBoolean()), nullValue()); 
directoryReader.close(); writer.close(); dir.close(); @@ -152,10 +152,10 @@ public class VersionsTests extends ESTestCase { writer.addDocument(doc); DirectoryReader reader = DirectoryReader.open(writer); // should increase cache size by 1 - assertEquals(87, timeSeriesLoadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "6"), randomBoolean()).version); + assertEquals(87, timeSeriesLoadDocIdAndVersion(reader, new BytesRef("6"), randomBoolean()).version); assertEquals(size + 1, VersionsAndSeqNoResolver.lookupStates.size()); // should be cache hit - assertEquals(87, timeSeriesLoadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "6"), randomBoolean()).version); + assertEquals(87, timeSeriesLoadDocIdAndVersion(reader, new BytesRef("6"), randomBoolean()).version); assertEquals(size + 1, VersionsAndSeqNoResolver.lookupStates.size()); reader.close(); @@ -178,11 +178,11 @@ public class VersionsTests extends ESTestCase { doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, randomLongBetween(1, Long.MAX_VALUE))); writer.addDocument(doc); DirectoryReader reader = DirectoryReader.open(writer); - assertEquals(87, timeSeriesLoadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "6"), randomBoolean()).version); + assertEquals(87, timeSeriesLoadDocIdAndVersion(reader, new BytesRef("6"), randomBoolean()).version); assertEquals(size + 1, VersionsAndSeqNoResolver.lookupStates.size()); // now wrap the reader DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", "_na_", 5)); - assertEquals(87, timeSeriesLoadDocIdAndVersion(wrapped, new Term(IdFieldMapper.NAME, "6"), randomBoolean()).version); + assertEquals(87, timeSeriesLoadDocIdAndVersion(wrapped, new BytesRef("6"), randomBoolean()).version); // same size map: core cache key is shared assertEquals(size + 1, VersionsAndSeqNoResolver.lookupStates.size()); @@ -199,7 +199,7 @@ public class VersionsTests extends ESTestCase { DirectoryReader directoryReader = 
ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); String id = createTSDBId(1000L); assertThat( - VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), id, randomBoolean()), + VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), id, randomBoolean()), nullValue() ); @@ -221,23 +221,11 @@ public class VersionsTests extends ESTestCase { directoryReader = reopen(directoryReader); id = createTSDBId(randomLongBetween(1000, 10000)); - assertThat( - VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), id, true), - notNullValue() - ); - assertThat( - VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "2"), id, true), - notNullValue() - ); + assertThat(VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), id, true), notNullValue()); + assertThat(VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("2"), id, true), notNullValue()); id = createTSDBId(randomBoolean() ? 
randomLongBetween(0, 999) : randomLongBetween(10001, Long.MAX_VALUE)); - assertThat( - VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), id, true), - nullValue() - ); - assertThat( - VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "2"), id, true), - nullValue() - ); + assertThat(VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("1"), id, true), nullValue()); + assertThat(VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(directoryReader, new BytesRef("2"), id, true), nullValue()); directoryReader.close(); writer.close(); diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index c3c94c273036..d753d268e45d 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.IndexCommit; -import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.TermStatistics; @@ -383,7 +382,7 @@ public class IndexModuleTests extends ESTestCase { assertSame(listener, indexService.getIndexOperationListeners().get(1)); ParsedDocument doc = EngineTestCase.createParsedDoc("1", null); - Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong(), doc); + Engine.Index index = new Engine.Index(Uid.encodeId(doc.id()), randomNonNegativeLong(), doc); ShardId shardId = new ShardId(new Index("foo", "bar"), 0); for (IndexingOperationListener l : indexService.getIndexOperationListeners()) { l.preIndex(shardId, 
index); diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index be10fea3779b..c743a83208a2 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -13,7 +13,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LoggerContext; import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.index.Term; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; @@ -79,7 +78,7 @@ public class IndexingSlowLogTests extends ESTestCase { IndexingSlowLog log = new IndexingSlowLog(settings, mock(SlowLogFieldProvider.class)); ParsedDocument doc = EngineTestCase.createParsedDoc("1", null); - Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId("doc_id")), randomNonNegativeLong(), doc); + Engine.Index index = new Engine.Index(Uid.encodeId("doc_id"), randomNonNegativeLong(), doc); Engine.IndexResult result = Mockito.mock(Engine.IndexResult.class);// (0, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, false); Mockito.when(result.getResultType()).thenReturn(Engine.Result.Type.SUCCESS); @@ -153,7 +152,7 @@ public class IndexingSlowLogTests extends ESTestCase { IndexingSlowLog log2 = new IndexingSlowLog(index2Settings, mock(SlowLogFieldProvider.class)); ParsedDocument doc = EngineTestCase.createParsedDoc("1", null); - Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId("doc_id")), randomNonNegativeLong(), doc); + Engine.Index index = new Engine.Index(Uid.encodeId("doc_id"), randomNonNegativeLong(), doc); Engine.IndexResult result = Mockito.mock(Engine.IndexResult.class); Mockito.when(result.getResultType()).thenReturn(Engine.Result.Type.SUCCESS); diff --git 
a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 175ce085994e..00de132f9200 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1105,7 +1105,10 @@ public class InternalEngineTests extends EngineTestCase { new Engine.Get(true, true, "1"), mappingLookup, documentParser, - searcher -> SearcherHelper.wrapSearcher(searcher, reader -> new MatchingDirectoryReader(reader, new TermQuery(newUid("1")))) + searcher -> SearcherHelper.wrapSearcher( + searcher, + reader -> new MatchingDirectoryReader(reader, new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("1")))) + ) ) ) { assertTrue(get.exists()); @@ -1119,7 +1122,10 @@ public class InternalEngineTests extends EngineTestCase { new Engine.Get(true, true, "1"), mappingLookup, documentParser, - searcher -> SearcherHelper.wrapSearcher(searcher, reader -> new MatchingDirectoryReader(reader, new TermQuery(newUid("2")))) + searcher -> SearcherHelper.wrapSearcher( + searcher, + reader -> new MatchingDirectoryReader(reader, new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2")))) + ) ) ) { assertFalse(get.exists()); @@ -1604,7 +1610,7 @@ public class InternalEngineTests extends EngineTestCase { writer.forceMerge(1); try (DirectoryReader reader = DirectoryReader.open(writer)) { assertEquals(1, reader.leaves().size()); - assertNull(VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "1"), false)); + assertNull(VersionsAndSeqNoResolver.timeSeriesLoadDocIdAndVersion(reader, new BytesRef("1"), false)); } } } @@ -1658,7 +1664,8 @@ public class InternalEngineTests extends EngineTestCase { for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null); if (randomBoolean()) { - 
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); + String id = doc.id(); + engine.delete(new Engine.Delete(doc.id(), Uid.encodeId(id), primaryTerm.get())); liveDocs.remove(doc.id()); } if (randomBoolean()) { @@ -1739,7 +1746,8 @@ public class InternalEngineTests extends EngineTestCase { boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime; ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null, useRecoverySource); if (randomBoolean()) { - engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); + String id = doc.id(); + engine.delete(new Engine.Delete(doc.id(), Uid.encodeId(id), primaryTerm.get())); liveDocs.remove(doc.id()); liveDocsWithSource.remove(doc.id()); } @@ -2373,7 +2381,7 @@ public class InternalEngineTests extends EngineTestCase { final long finalReplicaSeqNo = lastReplicaOp.seqNo(); assertOpsOnReplica(replicaOps, replicaEngine, true, logger); final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); - final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, lastReplicaOp.uid().text())).v1(); + final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, Term.toString(lastReplicaOp.uid()))).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.search(new MatchAllDocsQuery(), collector); @@ -2428,7 +2436,7 @@ public class InternalEngineTests extends EngineTestCase { final AtomicInteger idGenerator = new AtomicInteger(); final Queue history = ConcurrentCollections.newQueue(); ParsedDocument doc = testParsedDocument("1", null, testDocument(), bytesArray(""), null); - final Term uidTerm = newUid(doc); + final BytesRef uidTerm = newUid(doc); engine.index(indexForDoc(doc)); for (int i = 0; i < thread.length; i++) { thread[i] 
= new Thread(() -> { @@ -2721,7 +2729,7 @@ public class InternalEngineTests extends EngineTestCase { id = randomFrom(indexedIds); final Engine.Delete delete = new Engine.Delete( id, - newUid(id), + Uid.encodeId(id), UNASSIGNED_SEQ_NO, primaryTerm.get(), rarely() ? 100 : Versions.MATCH_ANY, @@ -3063,7 +3071,7 @@ public class InternalEngineTests extends EngineTestCase { engine.delete( new Engine.Delete( "2", - newUid("2"), + Uid.encodeId("2"), UNASSIGNED_SEQ_NO, 0, 10, @@ -3855,7 +3863,10 @@ public class InternalEngineTests extends EngineTestCase { try (InternalEngine engine = createEngine(indexWriterFactory, null, null, config)) { final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); - expectThrows(IllegalStateException.class, () -> engine.delete(new Engine.Delete("1", newUid("1"), primaryTerm.get()))); + expectThrows( + IllegalStateException.class, + () -> engine.delete(new Engine.Delete("1", Uid.encodeId("1"), primaryTerm.get())) + ); assertTrue(engine.isClosed.get()); assertSame(tragicException, engine.failedEngine.get()); } @@ -4730,7 +4741,10 @@ public class InternalEngineTests extends EngineTestCase { assertThat(getDocIds(engine, true).stream().collect(Collectors.toMap(e -> e.id(), e -> e.seqNo())), equalTo(liveOps)); for (String id : latestOps.keySet()) { String msg = "latestOps=" + latestOps + " op=" + id; - DocIdAndSeqNo docIdAndSeqNo = VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), newUid(id)); + DocIdAndSeqNo docIdAndSeqNo = VersionsAndSeqNoResolver.loadDocIdAndSeqNo( + searcher.getIndexReader(), + Uid.encodeId(id) + ); if (liveOps.containsKey(id) == false) { assertNull(msg, docIdAndSeqNo); } else { @@ -4739,7 +4753,7 @@ public class InternalEngineTests extends EngineTestCase { } } String notFoundId = randomValueOtherThanMany(liveOps::containsKey, () -> Long.toString(randomNonNegativeLong())); - 
assertNull(VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), newUid(notFoundId))); + assertNull(VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), Uid.encodeId(notFoundId))); } }; for (Engine.Operation op : operations) { @@ -4880,7 +4894,7 @@ public class InternalEngineTests extends EngineTestCase { document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); return testParsedDocument("1", null, document, B_1, null); }; - final Term uid = newUid("1"); + final BytesRef uid = Uid.encodeId("1"); final BiFunction searcherFactory = engine::acquireSearcher; for (int i = 0; i < numberOfOperations; i++) { if (randomBoolean()) { @@ -4970,12 +4984,12 @@ public class InternalEngineTests extends EngineTestCase { */ public void testVersionConflictIgnoreDeletedDoc() throws IOException { ParsedDocument doc = testParsedDocument("1", null, testDocument(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); - engine.delete(new Engine.Delete("1", newUid("1"), 1)); + engine.delete(new Engine.Delete("1", Uid.encodeId("1"), 1)); for (long seqNo : new long[] { 0, 1, randomNonNegativeLong() }) { assertDeletedVersionConflict( engine.index( new Engine.Index( - newUid("1"), + Uid.encodeId("1"), doc, UNASSIGNED_SEQ_NO, 1, @@ -4996,7 +5010,7 @@ public class InternalEngineTests extends EngineTestCase { engine.delete( new Engine.Delete( "1", - newUid("1"), + Uid.encodeId("1"), UNASSIGNED_SEQ_NO, 1, Versions.MATCH_ANY, @@ -5539,7 +5553,7 @@ public class InternalEngineTests extends EngineTestCase { ); final Engine.Index index = new Engine.Index( - new Term("_id", parsedDocument.id()), + BytesRef.deepCopyOf(new BytesRef(parsedDocument.id())), parsedDocument, UNASSIGNED_SEQ_NO, randomIntBetween(1, 8), @@ -5558,7 +5572,7 @@ public class InternalEngineTests extends EngineTestCase { final Engine.Delete delete = new Engine.Delete( id, - new Term("_id", parsedDocument.id()), + 
BytesRef.deepCopyOf(new BytesRef(parsedDocument.id())), UNASSIGNED_SEQ_NO, randomIntBetween(1, 8), Versions.MATCH_ANY, @@ -6547,7 +6561,8 @@ public class InternalEngineTests extends EngineTestCase { ); } } else { - Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); + String id = doc.id(); + Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.id(), Uid.encodeId(id), primaryTerm.get())); liveDocIds.remove(doc.id()); assertThat( "delete operations on primary must advance max_seq_no_of_updates", @@ -6895,7 +6910,7 @@ public class InternalEngineTests extends EngineTestCase { index(engine, i); } engine.forceMerge(true, 1, false, UUIDs.randomBase64UUID()); - engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get())); + engine.delete(new Engine.Delete("0", Uid.encodeId("0"), primaryTerm.get())); engine.refresh("test"); // now we have 2 segments since we now added a tombstone plus the old segment with the delete try (Engine.Searcher searcher = engine.acquireSearcher("test")) { @@ -7106,8 +7121,8 @@ public class InternalEngineTests extends EngineTestCase { iw.set(new ThrowingIndexWriter(dir, iwc)); return iw.get(); }, null, null, config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) { - engine.index(new Engine.Index(newUid("0"), primaryTerm.get(), InternalEngineTests.createParsedDoc("0", null))); - final Engine.Delete op = new Engine.Delete("0", newUid("0"), primaryTerm.get()); + engine.index(new Engine.Index(Uid.encodeId("0"), primaryTerm.get(), InternalEngineTests.createParsedDoc("0", null))); + final Engine.Delete op = new Engine.Delete("0", Uid.encodeId("0"), primaryTerm.get()); consumer.accept(engine, op); iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal")); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op)); @@ -7385,7 +7400,7 @@ public class InternalEngineTests extends EngineTestCase { if 
(randomBoolean()) { operations.add(indexForDoc(createParsedDoc(id, null))); } else { - operations.add(new Engine.Delete(id, newUid(id), primaryTerm.get())); + operations.add(new Engine.Delete(id, Uid.encodeId(id), primaryTerm.get())); } } for (int i = 0; i < numDocs; i++) { diff --git a/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java index 8ed162f8cda8..6f568ecf347c 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.SnapshotMatchers; import org.elasticsearch.index.translog.Translog; @@ -65,7 +66,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase { if (randomBoolean()) { engine.index(indexForDoc(doc)); } else { - engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), Uid.encodeId(doc.id()), primaryTerm.get())); } if (rarely()) { if (randomBoolean()) { @@ -270,7 +271,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase { if (randomBoolean()) { op = new Engine.Index(newUid(doc), primaryTerm.get(), doc); } else { - op = new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()); + op = new Engine.Delete(doc.id(), Uid.encodeId(doc.id()), primaryTerm.get()); } } else { if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java index 5efdd4c79940..aa298955fc08 100644 
--- a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.DocsStats; @@ -117,7 +118,7 @@ public class NoOpEngineTests extends EngineTestCase { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { String delId = Integer.toString(i); - Engine.DeleteResult result = engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get())); + Engine.DeleteResult result = engine.delete(new Engine.Delete(delId, Uid.encodeId(delId), primaryTerm.get())); assertTrue(result.isFound()); engine.syncTranslog(); // advance persisted local checkpoint globalCheckpoint.set(engine.getPersistedLocalCheckpoint()); diff --git a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java index 5bb51f99dfb1..cfc7e82fddab 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.store.Store; @@ -94,7 +95,7 @@ public class ReadOnlyEngineTests extends EngineTestCase { for (int i 
= 0; i < numDocs; i++) { if (randomBoolean()) { String delId = Integer.toString(i); - engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get())); + engine.delete(new Engine.Delete(delId, Uid.encodeId(delId), primaryTerm.get())); } if (rarely()) { engine.flush(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java index 4c05486f9799..99a85f7479dd 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.index.shard; -import org.apache.lucene.index.Term; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineTestCase; @@ -127,8 +126,8 @@ public class IndexingOperationListenerTests extends ESTestCase { logger ); ParsedDocument doc = EngineTestCase.createParsedDoc("1", null); - Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong()); - Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong(), doc); + Engine.Delete delete = new Engine.Delete("1", Uid.encodeId(doc.id()), randomNonNegativeLong()); + Engine.Index index = new Engine.Index(Uid.encodeId(doc.id()), randomNonNegativeLong(), doc); compositeListener.postDelete( randomShardId, delete, diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index aaf0f40a7300..7f22c9f9ccc2 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -13,10 +13,10 @@ import 
org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.common.bytes.BytesArray; @@ -547,10 +547,10 @@ public class RefreshListenersTests extends ESTestCase { } private Engine.IndexResult index(String id, String testFieldValue) throws IOException { - final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); + final BytesRef uid = Uid.encodeId(id); LuceneDocument document = new LuceneDocument(); document.add(new TextField("test", testFieldValue, Field.Store.YES)); - Field idField = new StringField(uid.field(), uid.bytes(), Field.Store.YES); + Field idField = new StringField(IdFieldMapper.NAME, uid, Field.Store.YES); Field versionField = new NumericDocValuesField("_version", Versions.MATCH_ANY); SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); document.add(idField); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index aa6ff7073a21..6aaeabdc175d 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -16,7 +16,6 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; import 
org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataOutput; @@ -25,6 +24,7 @@ import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.apache.lucene.tests.util.LineFileDocs; import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -961,8 +961,8 @@ public class TranslogTests extends ESTestCase { } } - private Term newUid(ParsedDocument doc) { - return new Term("_id", Uid.encodeId(doc.id())); + private static BytesRef newUid(ParsedDocument doc) { + return Uid.encodeId(doc.id()); } public void testVerifyTranslogIsNotDeleted() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 52927432f535..3a7a31e761e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -914,12 +914,8 @@ public abstract class EngineTestCase extends ESTestCase { return new BytesArray(string.getBytes(Charset.defaultCharset())); } - public static Term newUid(String id) { - return new Term("_id", Uid.encodeId(id)); - } - - public static Term newUid(ParsedDocument doc) { - return newUid(doc.id()); + public static BytesRef newUid(ParsedDocument doc) { + return Uid.encodeId(doc.id()); } protected Engine.Get newGet(boolean realtime, ParsedDocument doc) { @@ -950,7 +946,7 @@ public abstract class EngineTestCase extends ESTestCase { protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { return new Engine.Delete( id, - newUid(id), + Uid.encodeId(id), seqNo, 1, version, @@ -987,7 +983,7 @@ public abstract 
class EngineTestCase extends ESTestCase { ) { final int numOfOps = randomIntBetween(minOpCount, maxOpCount); final List ops = new ArrayList<>(); - final Term id = newUid(docId); + final BytesRef id = Uid.encodeId(docId); final int startWithSeqNo = 0; final String valuePrefix = (forReplica ? "r_" : "p_") + docId + "_"; final boolean incrementTermWhenIntroducingSeqNo = randomBoolean(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index 15e358b68e64..7c9b1b5efbde 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.ccr.index.engine; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.CheckedBiConsumer; @@ -177,7 +177,7 @@ public class FollowingEngineTests extends ESTestCase { final String id = "id"; final Engine.Delete delete = new Engine.Delete( id, - new Term("_id", id), + BytesRef.deepCopyOf(new BytesRef(id)), seqNo, primaryTerm.get(), randomNonNegativeLong(), From f4d87e0f2570a6d80f58c90fbbce7e340c6d38f1 Mon Sep 17 00:00:00 2001 From: Karen Metts <35154725+karenzone@users.noreply.github.com> Date: Mon, 10 Jun 2024 12:24:23 -0400 Subject: [PATCH 41/58] [DOCS] Add note that Logstash sets up data streams (#109502) --- .../data-streams/set-up-a-data-stream.asciidoc | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git 
a/docs/reference/data-streams/set-up-a-data-stream.asciidoc b/docs/reference/data-streams/set-up-a-data-stream.asciidoc index 57388a1199f5..a8cbbeac0607 100644 --- a/docs/reference/data-streams/set-up-a-data-stream.asciidoc +++ b/docs/reference/data-streams/set-up-a-data-stream.asciidoc @@ -13,9 +13,16 @@ To set up a data stream, follow these steps: You can also <>. -IMPORTANT: If you use {fleet} or {agent}, skip this tutorial. {fleet} and -{agent} set up data streams for you. See {fleet}'s -{fleet-guide}/data-streams.html[data streams] documentation. +[IMPORTANT] +-- +If you use {fleet}, {agent}, or {ls}, skip this tutorial. +They all set up data streams for you. + +For {fleet} and {agent}, check out this {fleet-guide}/data-streams.html[data streams documentation]. +For {ls}, check out the +{logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-data_stream[data streams settings] +for the `elasticsearch output` plugin. +-- [discrete] [[create-index-lifecycle-policy]] From 7e7f8a379a1dc2c302d71b7fa90f609250425527 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Mon, 10 Jun 2024 18:39:02 +0200 Subject: [PATCH 42/58] Make dense vector field type updatable (#106591) --- docs/changelog/106591.yaml | 5 + .../upgrades/DenseVectorMappingUpdateIT.java | 160 ++ .../180_update_dense_vector_type.yml | 1363 +++++++++++++++++ .../vectors/DenseVectorFieldMapper.java | 48 +- .../vectors/DenseVectorFieldMapperTests.java | 102 +- .../index/mapper/MapperTestCase.java | 6 +- 6 files changed, 1677 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/106591.yaml create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml diff --git a/docs/changelog/106591.yaml b/docs/changelog/106591.yaml new file mode 100644 index 000000000000..6a7814cb9ced --- /dev/null +++ 
b/docs/changelog/106591.yaml @@ -0,0 +1,5 @@ +pr: 106591 +summary: Make dense vector field type updatable +area: Search +type: enhancement +issues: [] diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java new file mode 100644 index 000000000000..99cbef655693 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.function.Predicate; + +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; + +/** + * This IT indexes some dense vector on an old node, then update its mapping and, once upgraded, checks that KNN search still works + * before and after further data indexing. 
+ */ +public class DenseVectorMappingUpdateIT extends AbstractRollingUpgradeTestCase { + + private static final String BULK1 = """ + {"index": {"_id": "1"}} + {"embedding": [1, 1, 1, 1]} + {"index": {"_id": "2"}} + {"embedding": [1, 1, 1, 2]} + {"index": {"_id": "3"}} + {"embedding": [1, 1, 1, 3]} + {"index": {"_id": "4"}} + {"embedding": [1, 1, 1, 4]} + {"index": {"_id": "5"}} + {"embedding": [1, 1, 1, 5]} + {"index": {"_id": "6"}} + {"embedding": [1, 1, 1, 6]} + {"index": {"_id": "7"}} + {"embedding": [1, 1, 1, 7]} + {"index": {"_id": "8"}} + {"embedding": [1, 1, 1, 8]} + {"index": {"_id": "9"}} + {"embedding": [1, 1, 1, 9]} + {"index": {"_id": "10"}} + {"embedding": [1, 1, 1, 10]} + """; + + private static final String BULK2 = """ + {"index": {"_id": "11"}} + {"embedding": [1, 0, 1, 1]} + {"index": {"_id": "12"}} + {"embedding": [1, 2, 1, 1]} + {"index": {"_id": "13"}} + {"embedding": [1, 3, 1, 1]} + {"index": {"_id": "14"}} + {"embedding": [1, 4, 1, 1]} + {"index": {"_id": "15"}} + {"embedding": [1, 5, 1, 1]} + {"index": {"_id": "16"}} + {"embedding": [1, 6, 1, 1]} + {"index": {"_id": "17"}} + {"embedding": [1, 7, 1, 1]} + {"index": {"_id": "18"}} + {"embedding": [1, 8, 1, 1]} + {"index": {"_id": "19"}} + {"embedding": [1, 9, 1, 1]} + {"index": {"_id": "20"}} + {"embedding": [1, 10, 1, 1]} + """; + + public DenseVectorMappingUpdateIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testDenseVectorMappingUpdateOnOldCluster() throws IOException { + if (getOldClusterTestVersion().after(Version.V_8_7_0.toString())) { + String indexName = "test_index"; + if (isOldCluster()) { + Request createIndex = new Request("PUT", "/" + indexName); + XContentBuilder mappings = XContentBuilder.builder(XContentType.JSON.xContent()) + .startObject() + .startObject("mappings") + .startObject("properties") + .startObject("embedding") + .field("type", "dense_vector") + .field("dims", 4) + .startObject("index_options") + .field("type", "hnsw") + 
.endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + createIndex.setJsonEntity(Strings.toString(mappings)); + client().performRequest(createIndex); + Request index = new Request("POST", "/" + indexName + "/_bulk/"); + index.addParameter("refresh", "true"); + index.setJsonEntity(BULK1); + client().performRequest(index); + } + + int expectedCount = 10; + + assertCount("test_index", expectedCount); + + if (isUpgradedCluster() && clusterSupportsDenseVectorTypeUpdate()) { + Request updateMapping = new Request("PUT", "/" + indexName + "/_mapping"); + XContentBuilder mappings = XContentBuilder.builder(XContentType.JSON.xContent()) + .startObject() + .startObject("properties") + .startObject("embedding") + .field("type", "dense_vector") + .field("dims", 4) + .startObject("index_options") + .field("type", "int8_hnsw") + .endObject() + .endObject() + .endObject() + .endObject(); + updateMapping.setJsonEntity(Strings.toString(mappings)); + assertOK(client().performRequest(updateMapping)); + Request index = new Request("POST", "/" + indexName + "/_bulk/"); + index.addParameter("refresh", "true"); + index.setJsonEntity(BULK2); + assertOK(client().performRequest(index)); + expectedCount = 20; + assertCount("test_index", expectedCount); + } + } + } + + private void assertCount(String index, int count) throws IOException { + Request searchTestIndexRequest = new Request("POST", "/" + index + "/_search"); + searchTestIndexRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); + searchTestIndexRequest.addParameter("filter_path", "hits.total"); + Response searchTestIndexResponse = client().performRequest(searchTestIndexRequest); + assertEquals( + "{\"hits\":{\"total\":" + count + "}}", + EntityUtils.toString(searchTestIndexResponse.getEntity(), StandardCharsets.UTF_8) + ); + } + + private boolean clusterSupportsDenseVectorTypeUpdate() throws IOException { + Map response = entityAsMap(client().performRequest(new Request("GET", "_nodes"))); + Map nodes = (Map) 
response.get("nodes"); + + Predicate> nodeSupportsBulkApi = n -> Version.fromString(n.get("version").toString()).onOrAfter(Version.V_8_14_0); + + return nodes.values().stream().map(o -> (Map) o).allMatch(nodeSupportsBulkApi); + } + +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml new file mode 100644 index 000000000000..0780b789e92a --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml @@ -0,0 +1,1363 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: 'updatable dense vector field types was added in 8.15' +--- +"Test create and update dense vector mapping with per-doc indexing and flush": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: flat } + + - do: + index: + index: test_index + id: "1" + body: + embedding: [ 1, 1, 1, 1 ] + - do: + index: + index: test_index + id: "2" + body: + embedding: [ 1, 1, 1, 2 ] + - do: + index: + index: test_index + id: "3" + body: + embedding: [ 1, 1, 1, 3 ] + - do: + index: + index: test_index + id: "4" + body: + embedding: [ 1, 1, 1, 4 ] + - do: + index: + index: test_index + id: "5" + body: + embedding: [ 1, 1, 1, 5 ] + + - do: + indices.flush: { } + + - do: + index: + index: test_index + id: "6" + body: + embedding: [ 1, 1, 1, 6 ] + - do: + index: + index: test_index + id: "7" + body: + embedding: [ 1, 1, 1, 7 ] + - do: + index: + index: test_index + id: "8" + body: + embedding: [ 1, 1, 1, 8 ] + - do: + index: + 
index: test_index + id: "9" + body: + embedding: [ 1, 1, 1, 9 ] + - do: + index: + index: test_index + id: "10" + body: + embedding: [ 1, 1, 1, 10 ] + + - do: + indices.flush: { } + + - do: + indices.refresh: {} + + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [1, 1, 1, 1] + num_candidates: 10 + + - match: { hits.total.value: 10 } + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_flat } + + - do: + index: + index: test_index + id: "11" + body: + embedding: [ 2, 1, 1, 1 ] + - do: + index: + index: test_index + id: "12" + body: + embedding: [ 3, 1, 1, 2 ] + - do: + index: + index: test_index + id: "13" + body: + embedding: [ 4, 1, 1, 3 ] + - do: + index: + index: test_index + id: "14" + body: + embedding: [ 5, 1, 1, 4 ] + - do: + index: + index: test_index + id: "15" + body: + embedding: [ 6, 1, 1, 5 ] + + - do: + indices.flush: { } + + - do: + index: + index: test_index + id: "16" + body: + embedding: [ 7, 1, 1, 6 ] + - do: + index: + index: test_index + id: "17" + body: + embedding: [ 8, 1, 1, 7 ] + - do: + index: + index: test_index + id: "18" + body: + embedding: [ 9, 1, 1, 8 ] + - do: + index: + index: test_index + id: "19" + body: + embedding: [ 10, 1, 1, 9 ] + - do: + index: + index: test_index + id: "20" + body: + embedding: [ 1, 11, 1, 10 ] + + - do: + indices.flush: { } + + - do: + indices.refresh: {} + + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 20 + + - match: { 
hits.total.value: 20 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "11" } + - match: { hits.hits.2._id: "2" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + m: 3 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + + - do: + index: + index: test_index + id: "21" + body: + embedding: [ 1, 1, 2, 1 ] + - do: + index: + index: test_index + id: "22" + body: + embedding: [ 1, 1, 3, 1 ] + - do: + index: + index: test_index + id: "23" + body: + embedding: [ 1, 1, 4, 1 ] + - do: + index: + index: test_index + id: "24" + body: + embedding: [ 1, 1, 5, 1 ] + - do: + index: + index: test_index + id: "25" + body: + embedding: [ 1, 1, 6, 1 ] + + - do: + indices.flush: { } + + - do: + index: + index: test_index + id: "26" + body: + embedding: [ 1, 1, 7, 1 ] + - do: + index: + index: test_index + id: "27" + body: + embedding: [ 1, 1, 8, 1 ] + - do: + index: + index: test_index + id: "28" + body: + embedding: [ 1, 1, 9, 1 ] + - do: + index: + index: test_index + id: "29" + body: + embedding: [ 1, 1, 10, 1 ] + - do: + index: + index: test_index + id: "30" + body: + embedding: [ 1, 1, 11, 1 ] + + - do: + indices.flush: { } + + - do: + indices.refresh: {} + + - do: + search: + index: test_index + body: + size: 4 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 30 + + - match: { hits.total.value: 30 } + - length: { hits.hits: 4 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "11" } + - match: { hits.hits.2._id: "2" } + - match: { hits.hits.3._id: "21" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + ef_construction: 
200 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + + - do: + index: + index: test_index + id: "31" + body: + embedding: [ 1, 1, 1, 2 ] + - do: + index: + index: test_index + id: "32" + body: + embedding: [ 1, 1, 1, 3 ] + - do: + index: + index: test_index + id: "33" + body: + embedding: [ 1, 1, 1, 4 ] + - do: + index: + index: test_index + id: "34" + body: + embedding: [ 1, 1, 1, 5 ] + - do: + index: + index: test_index + id: "35" + body: + embedding: [ 1, 1, 1, 6 ] + + - do: + indices.flush: { } + + - do: + index: + index: test_index + id: "36" + body: + embedding: [ 1, 1, 1, 7 ] + - do: + index: + index: test_index + id: "37" + body: + embedding: [ 1, 1, 1, 8 ] + - do: + index: + index: test_index + id: "38" + body: + embedding: [ 1, 1, 1, 9 ] + - do: + index: + index: test_index + id: "39" + body: + embedding: [ 1, 1, 1, 10 ] + - do: + index: + index: test_index + id: "40" + body: + embedding: [ 1, 1, 1, 11 ] + + - do: + indices.flush: { } + + - do: + indices.refresh: {} + + - do: + search: + index: test_index + body: + size: 5 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 40 + + - match: { hits.total.value: 40 } + - length: { hits.hits: 5 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "11" } + - match: { hits.hits.2._id: "31" } + - match: { hits.hits.3._id: "2" } + - match: { hits.hits.4._id: "21" } + + +--- +"Test create and update dense vector mapping with bulk indexing": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { 
test_index.mappings.properties.embedding.index_options.type: flat } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "1"}}' + - '{"embedding": [1, 1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"embedding": [1, 1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"embedding": [1, 1, 1, 3]}' + - '{"index": {"_id": "4"}}' + - '{"embedding": [1, 1, 1, 4]}' + - '{"index": {"_id": "5"}}' + - '{"embedding": [1, 1, 1, 5]}' + - '{"index": {"_id": "6"}}' + - '{"embedding": [1, 1, 1, 6]}' + - '{"index": {"_id": "7"}}' + - '{"embedding": [1, 1, 1, 7]}' + - '{"index": {"_id": "8"}}' + - '{"embedding": [1, 1, 1, 8]}' + - '{"index": {"_id": "9"}}' + - '{"embedding": [1, 1, 1, 9]}' + - '{"index": {"_id": "10"}}' + - '{"embedding": [1, 1, 1, 10]}' + + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [1, 1, 1, 1] + num_candidates: 10 + + - match: { hits.total.value: 10 } + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_flat } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "11"}}' + - '{"embedding": [2, 1, 1, 1]}' + - '{"index": {"_id": "12"}}' + - '{"embedding": [3, 1, 1, 2]}' + - '{"index": {"_id": "13"}}' + - '{"embedding": [4, 1, 1, 3]}' + - '{"index": {"_id": "14"}}' + - '{"embedding": [5, 1, 1, 4]}' + - '{"index": {"_id": "15"}}' + - '{"embedding": [6, 1, 1, 5]}' + - '{"index": {"_id": "16"}}' + - '{"embedding": [7, 1, 1, 6]}' + - '{"index": {"_id": "17"}}' + - '{"embedding": [8, 1, 1, 7]}' + - '{"index": {"_id": 
"18"}}' + - '{"embedding": [9, 1, 1, 8]}' + - '{"index": {"_id": "19"}}' + - '{"embedding": [10, 1, 1, 9]}' + - '{"index": {"_id": "20"}}' + - '{"embedding": [1, 11, 1, 10]}' + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 20 + + - match: { hits.total.value: 20 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "11" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + m: 3 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "21"}}' + - '{"embedding": [1, 1, 2, 1]}' + - '{"index": {"_id": "22"}}' + - '{"embedding": [1, 1, 3, 1]}' + - '{"index": {"_id": "23"}}' + - '{"embedding": [1, 1, 4, 1]}' + - '{"index": {"_id": "24"}}' + - '{"embedding": [1, 1, 6, 1]}' + - '{"index": {"_id": "25"}}' + - '{"embedding": [1, 1, 7, 1]}' + - '{"index": {"_id": "26"}}' + - '{"embedding": [1, 1, 8, 1]}' + - '{"index": {"_id": "27"}}' + - '{"embedding": [1, 1, 9, 1]}' + - '{"index": {"_id": "28"}}' + - '{"embedding": [1, 1, 10, 1]}' + - '{"index": {"_id": "29"}}' + - '{"embedding": [1, 1, 11, 1]}' + - '{"index": {"_id": "30"}}' + - '{"embedding": [1, 1, 12, 1]}' + - do: + search: + index: test_index + body: + size: 4 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 30 + + - match: { hits.total.value: 30 } + - length: { hits.hits: 4 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "21" } + - match: { hits.hits.3._id: "11" } + + - do: + indices.put_mapping: + index: test_index + body: + 
properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + ef_construction: 200 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "31"}}' + - '{"embedding": [1, 1, 1, 2]}' + - '{"index": {"_id": "32"}}' + - '{"embedding": [1, 1, 1, 3]}' + - '{"index": {"_id": "33"}}' + - '{"embedding": [1, 1, 1, 4]}' + - '{"index": {"_id": "34"}}' + - '{"embedding": [1, 1, 1, 5]}' + - '{"index": {"_id": "35"}}' + - '{"embedding": [1, 1, 1, 6]}' + - '{"index": {"_id": "36"}}' + - '{"embedding": [1, 1, 1, 7]}' + - '{"index": {"_id": "37"}}' + - '{"embedding": [1, 1, 1, 8]}' + - '{"index": {"_id": "38"}}' + - '{"embedding": [1, 1, 1, 9]}' + - '{"index": {"_id": "39"}}' + - '{"embedding": [1, 1, 1, 10]}' + - '{"index": {"_id": "40"}}' + - '{"embedding": [1, 1, 1, 11]}' + - do: + search: + index: test_index + body: + size: 5 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 40 + + - match: { hits.total.value: 40 } + - length: { hits.hits: 5 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "21" } + - match: { hits.hits.3._id: "31" } + - match: { hits.hits.4._id: "11" } + +--- +"Index, update and merge": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: flat } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "1"}}' + - '{"embedding": [1, 1, 1, 
1]}' + - '{"index": {"_id": "2"}}' + - '{"embedding": [1, 1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"embedding": [1, 1, 1, 3]}' + - '{"index": {"_id": "4"}}' + - '{"embedding": [1, 1, 1, 4]}' + - '{"index": {"_id": "5"}}' + - '{"embedding": [1, 1, 1, 5]}' + - '{"index": {"_id": "6"}}' + - '{"embedding": [1, 1, 1, 6]}' + - '{"index": {"_id": "7"}}' + - '{"embedding": [1, 1, 1, 7]}' + - '{"index": {"_id": "8"}}' + - '{"embedding": [1, 1, 1, 8]}' + - '{"index": {"_id": "9"}}' + - '{"embedding": [1, 1, 1, 9]}' + - '{"index": {"_id": "10"}}' + - '{"embedding": [1, 1, 1, 10]}' + + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 10 + + - match: { hits.total.value: 10 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_flat } + + - do: + indices.forcemerge: + index: test_index + max_num_segments: 1 + + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 10 + + - match: { hits.total.value: 10 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "11"}}' + - '{"embedding": [2, 1, 1, 1]}' + - '{"index": {"_id": "12"}}' + - '{"embedding": [3, 1, 1, 2]}' + - '{"index": {"_id": "13"}}' + - '{"embedding": [4, 1, 1, 3]}' + - '{"index": {"_id": "14"}}' + - '{"embedding": [5, 1, 1, 4]}' + - 
'{"index": {"_id": "15"}}' + - '{"embedding": [6, 1, 1, 5]}' + - '{"index": {"_id": "16"}}' + - '{"embedding": [7, 1, 1, 6]}' + - '{"index": {"_id": "17"}}' + - '{"embedding": [8, 1, 1, 7]}' + - '{"index": {"_id": "18"}}' + - '{"embedding": [9, 1, 1, 8]}' + - '{"index": {"_id": "19"}}' + - '{"embedding": [10, 1, 1, 9]}' + - '{"index": {"_id": "20"}}' + - '{"embedding": [1, 11, 1, 10]}' + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 20 + + - match: { hits.total.value: 20 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "11" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + m: 3 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + + - do: + search: + index: test_index + body: + size: 3 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 20 + + - match: { hits.total.value: 20 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "11" } + + - do: + bulk: + refresh: true + index: test_index + body: + - '{"index": {"_id": "21"}}' + - '{"embedding": [1, 1, 2, 1]}' + - '{"index": {"_id": "22"}}' + - '{"embedding": [1, 1, 3, 1]}' + - '{"index": {"_id": "23"}}' + - '{"embedding": [1, 1, 4, 1]}' + - '{"index": {"_id": "24"}}' + - '{"embedding": [1, 1, 6, 1]}' + - '{"index": {"_id": "25"}}' + - '{"embedding": [1, 1, 7, 1]}' + - '{"index": {"_id": "26"}}' + - '{"embedding": [1, 1, 8, 1]}' + - '{"index": {"_id": "27"}}' + - '{"embedding": [1, 1, 9, 1]}' + - '{"index": {"_id": "28"}}' + - '{"embedding": [1, 1, 10, 1]}' + - '{"index": 
{"_id": "29"}}' + - '{"embedding": [1, 1, 11, 1]}' + - '{"index": {"_id": "30"}}' + - '{"embedding": [1, 1, 12, 1]}' + - do: + search: + index: test_index + body: + size: 4 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 30 + + - match: { hits.total.value: 30 } + - length: { hits.hits: 4 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "21" } + - match: { hits.hits.3._id: "11" } + + - do: + indices.forcemerge: + index: test_index + max_num_segments: 1 + + - do: + search: + index: test_index + body: + size: 4 + query: + knn: + field: embedding + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 30 + + - match: { hits.total.value: 30 } + - length: { hits.hits: 4 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "21" } + - match: { hits.hits.3._id: "11" } + + +--- +"Disallowed dense vector update path hnsw --> flat": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: flat + +--- +"Disallowed dense vector update path hnsw --> int8_flat": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { 
test_index.mappings.properties.embedding.index_options.type: hnsw } + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + +--- +"Disallowed dense vector update path int8_hnsw --> flat": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: flat + +--- +"Disallowed dense vector update path int8_hnsw --> int8_flat": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + +--- +"Disallowed dense vector update path int8_flat --> flat": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: 
dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_flat } + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: flat + +--- +"Allowed dense vector updates on same type but different other index_options, hnsw": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + m: 24 + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + - match: { test_index.mappings.properties.embedding.index_options.m: 24 } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + m: 24 + ef_construction: 200 + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: hnsw } + - match: { test_index.mappings.properties.embedding.index_options.m: 24 } + - match: { test_index.mappings.properties.embedding.index_options.ef_construction: 200 } + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: hnsw + m: 3 + ef_construction: 
200 + +--- +"Allowed dense vector updates on same type but different other index_options, int8_hnsw": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + m: 32 + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + - match: { test_index.mappings.properties.embedding.index_options.m: 32 } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + m: 32 + ef_construction: 200 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + - match: { test_index.mappings.properties.embedding.index_options.m: 32 } + - match: { test_index.mappings.properties.embedding.index_options.ef_construction: 200 } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + m: 32 + ef_construction: 200 + confidence_interval: 0.3 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_hnsw } + - match: { 
test_index.mappings.properties.embedding.index_options.m: 32 } + - match: { test_index.mappings.properties.embedding.index_options.ef_construction: 200 } + - match: { test_index.mappings.properties.embedding.index_options.confidence_interval: 0.3 } + + - do: + catch: /illegal_argument_exception/ # fails because m = 10 is less than the current value of 20 + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + ef_construction: 200 + m: 10 + confidence_interval: 0.3 + + - do: + catch: /illegal_argument_exception/ # fails because m = 16 by default, which is less than the current value of 20 + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_hnsw + ef_construction: 200 + confidence_interval: 0.3 + +--- +"Allowed dense vector updates on same type but different other index_options, int8_flat": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_flat } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + embedding: + type: dense_vector + dims: 4 + index_options: + type: int8_flat + confidence_interval: 0.3 + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.index_options.type: int8_flat } + - match: { test_index.mappings.properties.embedding.index_options.confidence_interval: 0.3 } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java 
b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index c00f360e94c7..97961228f768 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -154,7 +154,9 @@ public class DenseVectorFieldMapper extends FieldMapper { }, m -> toType(m).fieldType().dims, XContentBuilder::field, Object::toString).setSerializerCheck((id, ic, v) -> v != null) .setMergeValidator((previous, current, c) -> previous == null || Objects.equals(previous, current)); private final Parameter similarity; + private final Parameter indexOptions; + private final Parameter indexed; private final Parameter> meta = Parameter.metaParam(); @@ -179,7 +181,7 @@ public class DenseVectorFieldMapper extends FieldMapper { ).acceptsNull().setSerializerCheck((id, ic, v) -> v != null); this.indexOptions = new Parameter<>( "index_options", - false, + true, () -> defaultInt8Hnsw && elementType.getValue() != ElementType.BYTE && this.indexed.getValue() ? 
new Int8HnswIndexOptions( Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, @@ -201,7 +203,9 @@ public class DenseVectorFieldMapper extends FieldMapper { "[element_type] cannot be [" + elementType.getValue().toString() + "] when using index type [" + v.type + "]" ); } - }).acceptsNull(); + }) + .acceptsNull() + .setMergeValidator((previous, current, c) -> previous == null || current == null || previous.updatableTo(current)); if (defaultInt8Hnsw) { this.indexOptions.alwaysSerialize(); } @@ -865,6 +869,8 @@ public class DenseVectorFieldMapper extends FieldMapper { boolean supportsElementType(ElementType elementType) { return true; } + + abstract boolean updatableTo(IndexOptions update); } private enum VectorIndexType { @@ -981,6 +987,13 @@ public class DenseVectorFieldMapper extends FieldMapper { boolean supportsElementType(ElementType elementType) { return elementType != ElementType.BYTE; } + + @Override + boolean updatableTo(IndexOptions update) { + return update.type.equals(this.type) + || update.type.equals(VectorIndexType.HNSW.name) + || update.type.equals(VectorIndexType.INT8_HNSW.name); + } } private static class FlatIndexOptions extends IndexOptions { @@ -1002,6 +1015,11 @@ public class DenseVectorFieldMapper extends FieldMapper { return new ES813FlatVectorFormat(); } + @Override + boolean updatableTo(IndexOptions update) { + return true; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -1074,6 +1092,21 @@ public class DenseVectorFieldMapper extends FieldMapper { boolean supportsElementType(ElementType elementType) { return elementType != ElementType.BYTE; } + + @Override + boolean updatableTo(IndexOptions update) { + boolean updatable = update.type.equals(this.type); + if (updatable) { + Int8HnswIndexOptions int8HnswIndexOptions = (Int8HnswIndexOptions) update; + // fewer connections would break assumptions on max number of connections (based on largest previous graph) during merge + // quantization could not behave as expected with 
different confidence intervals (and quantiles) to be created + updatable = int8HnswIndexOptions.m >= this.m; + updatable &= confidenceInterval == null + || int8HnswIndexOptions.confidenceInterval != null + && confidenceInterval.equals(int8HnswIndexOptions.confidenceInterval); + } + return updatable; + } } private static class HnswIndexOptions extends IndexOptions { @@ -1091,6 +1124,17 @@ public class DenseVectorFieldMapper extends FieldMapper { return new Lucene99HnswVectorsFormat(m, efConstruction, 1, null); } + @Override + boolean updatableTo(IndexOptions update) { + boolean updatable = update.type.equals(this.type); + if (updatable) { + // fewer connections would break assumptions on max number of connections (based on largest previous graph) during merge + HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) update; + updatable = hnswIndexOptions.m >= this.m; + } + return updatable || (update.type.equals(VectorIndexType.INT8_HNSW.name) && ((Int8HnswIndexOptions) update).m >= m); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 4072e0e95bfe..8e3b4e7e2e06 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -158,18 +158,112 @@ public class DenseVectorFieldMapperTests extends MapperTestCase { .field("element_type", "float") ) ); + checker.registerUpdateCheck( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "flat") + .endObject(), + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + 
.startObject("index_options") + .field("type", "int8_flat") + .endObject(), + m -> assertTrue(m.toString().contains("\"type\":\"int8_flat\"")) + ); + checker.registerUpdateCheck( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "flat") + .endObject(), + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "hnsw") + .endObject(), + m -> assertTrue(m.toString().contains("\"type\":\"hnsw\"")) + ); + checker.registerUpdateCheck( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "flat") + .endObject(), + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .endObject(), + m -> assertTrue(m.toString().contains("\"type\":\"int8_hnsw\"")) + ); + checker.registerUpdateCheck( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "int8_flat") + .endObject(), + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "hnsw") + .endObject(), + m -> assertTrue(m.toString().contains("\"type\":\"hnsw\"")) + ); + checker.registerUpdateCheck( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "int8_flat") + .endObject(), + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .endObject(), + m -> assertTrue(m.toString().contains("\"type\":\"int8_hnsw\"")) + ); + checker.registerUpdateCheck( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "hnsw") + 
.endObject(), + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .endObject(), + m -> assertTrue(m.toString().contains("\"type\":\"int8_hnsw\"")) + ); checker.registerConflictCheck( "index_options", - fieldMapping(b -> b.field("type", "dense_vector").field("dims", 4).field("index", true).field("similarity", "dot_product")), fieldMapping( b -> b.field("type", "dense_vector") .field("dims", 4) .field("index", true) - .field("similarity", "dot_product") .startObject("index_options") .field("type", "hnsw") - .field("m", 5) - .field("ef_construction", 80) + .endObject() + ), + fieldMapping( + b -> b.field("type", "dense_vector") + .field("dims", 4) + .field("index", true) + .startObject("index_options") + .field("type", "flat") .endObject() ) ); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index b2c2afd9df42..a3f965d06a5c 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -712,7 +712,11 @@ public abstract class MapperTestCase extends MapperServiceTestCase { ); assertThat( e.getMessage(), - anyOf(containsString("Cannot update parameter [" + param + "]"), containsString("different [" + param + "]")) + anyOf( + containsString("Cannot update parameter [" + param + "]"), + containsString("different [" + param + "]"), + containsString("[" + param + "] cannot be ") + ) ); } assertParseMaximalWarnings(); From 7490f5fc1a8b8e43ec79d35d85291a63b51d2c89 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 10 Jun 2024 18:48:32 +0200 Subject: [PATCH 43/58] Make ErrorReportingTestListener Gradle configuration cache compatible (#109415) and make it a bit simpler too --- .../test/ErrorReportingTestListener.java | 17 
++++++----------- .../internal/test/rest/RestTestBasePlugin.java | 4 ++-- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java index e3149d63e5c5..4361349392de 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.gradle.internal.test; -import org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin; import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter; import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter; import org.gradle.api.logging.Logger; @@ -39,21 +38,24 @@ import java.util.concurrent.ConcurrentHashMap; public class ErrorReportingTestListener implements TestOutputListener, TestListener { private static final String REPRODUCE_WITH_PREFIX = "REPRODUCE WITH"; - private final Test testTask; private final TestExceptionFormatter formatter; private final File outputDirectory; private final Logger taskLogger; private Map eventWriters = new ConcurrentHashMap<>(); private Map> reproductionLines = new ConcurrentHashMap<>(); private Set failedTests = new LinkedHashSet<>(); + private boolean dumpOutputOnFailure = true; public ErrorReportingTestListener(Test testTask, File outputDirectory) { - this.testTask = testTask; this.formatter = new FullExceptionFormatter(testTask.getTestLogging()); this.taskLogger = testTask.getLogger(); this.outputDirectory = outputDirectory; } + public void setDumpOutputOnFailure(boolean dumpOutputOnFailure) { + this.dumpOutputOnFailure = dumpOutputOnFailure; + } + @Override public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) { 
TestDescriptor suite = testDescriptor.getParent(); @@ -83,7 +85,7 @@ public class ErrorReportingTestListener implements TestOutputListener, TestListe Descriptor descriptor = Descriptor.of(suite); try { - if (isDumpOutputEnabled()) { + if (dumpOutputOnFailure) { // if the test suite failed, report all captured output if (result.getResultType().equals(TestResult.ResultType.FAILURE)) { EventWriter eventWriter = eventWriters.get(descriptor); @@ -256,11 +258,4 @@ public class ErrorReportingTestListener implements TestOutputListener, TestListe outputFile.delete(); } } - - private boolean isDumpOutputEnabled() { - return (Boolean) testTask.getExtensions() - .getExtraProperties() - .getProperties() - .getOrDefault(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); - } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 6d43ad109c32..77af3445f530 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -18,9 +18,9 @@ import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes; import org.elasticsearch.gradle.internal.ElasticsearchJavaPlugin; -import org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin; import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin; import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener; import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin; import 
org.elasticsearch.gradle.plugin.PluginBuildPlugin; @@ -167,7 +167,7 @@ public class RestTestBasePlugin implements Plugin { nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks())); // Disable test failure reporting since this stuff is now captured in build scans - task.getExtensions().getExtraProperties().set(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, false); + task.getExtensions().getByType(ErrorReportingTestListener.class).setDumpOutputOnFailure(false); // Disable the security manager and syscall filter since the test framework needs to fork processes task.systemProperty("tests.security.manager", "false"); From a9f31bd2aa6f47376f3be7694ec30cbb2855c34f Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Mon, 10 Jun 2024 10:26:31 -0700 Subject: [PATCH 44/58] Support synthetic source for date fields when ignore_malformed is used (#109410) --- docs/changelog/109410.yaml | 5 + docs/reference/mapping/types/date.asciidoc | 3 +- .../index/mapper/DateFieldMapper.java | 23 ++- .../index/mapper/DateFieldMapperTests.java | 76 ++++++---- .../index/mapper/MapperTestCase.java | 10 +- ...AggregateDoubleMetricFieldMapperTests.java | 142 +++++++----------- 6 files changed, 137 insertions(+), 122 deletions(-) create mode 100644 docs/changelog/109410.yaml diff --git a/docs/changelog/109410.yaml b/docs/changelog/109410.yaml new file mode 100644 index 000000000000..e8c4dcdab42c --- /dev/null +++ b/docs/changelog/109410.yaml @@ -0,0 +1,5 @@ +pr: 109410 +summary: Support synthetic source for date fields when `ignore_malformed` is used +area: Mapping +type: enhancement +issues: [] diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index e3e800fa117f..a29db79167d2 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -242,8 +242,7 @@ of official GA features. 
`date` fields support <> in their default configuration. Synthetic `_source` cannot be used together with -<>, <> set to true -or with <> disabled. +<> or with <> disabled. Synthetic source always sorts `date` fields. For example: [source,console,id=synthetic-source-date-example] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 63045623c503..c817bed6e503 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -364,7 +364,16 @@ public final class DateFieldMapper extends FieldMapper { && ignoreMalformed.isConfigured() == false) { ignoreMalformed.setValue(false); } - return new DateFieldMapper(name(), ft, multiFieldsBuilder.build(this, context), copyTo, nullTimestamp, resolution, this); + return new DateFieldMapper( + name(), + ft, + multiFieldsBuilder.build(this, context), + copyTo, + nullTimestamp, + resolution, + context.isSourceSynthetic(), + this + ); } } @@ -850,6 +859,7 @@ public final class DateFieldMapper extends FieldMapper { private final Long nullValue; private final String nullValueAsString; private final Resolution resolution; + private final boolean isSourceSynthetic; private final boolean ignoreMalformedByDefault; private final IndexVersion indexCreatedVersion; @@ -865,6 +875,7 @@ public final class DateFieldMapper extends FieldMapper { CopyTo copyTo, Long nullValue, Resolution resolution, + boolean isSourceSynthetic, Builder builder ) { super(simpleName, mappedFieldType, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.get()); @@ -877,6 +888,7 @@ public final class DateFieldMapper extends FieldMapper { this.nullValueAsString = builder.nullValue.getValue(); this.nullValue = nullValue; this.resolution = resolution; + this.isSourceSynthetic = isSourceSynthetic; this.ignoreMalformedByDefault = 
builder.ignoreMalformed.getDefaultValue(); this.indexCreatedVersion = builder.indexCreatedVersion; this.script = builder.script.get(); @@ -915,6 +927,10 @@ public final class DateFieldMapper extends FieldMapper { } catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) { if (ignoreMalformed) { context.addIgnoredField(mappedFieldType.name()); + if (isSourceSynthetic) { + // Save a copy of the field so synthetic source can load it + context.doc().add(IgnoreMalformedStoredValues.storedField(name(), context.parser())); + } return; } else { throw e; @@ -976,11 +992,6 @@ public final class DateFieldMapper extends FieldMapper { "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it doesn't have doc values" ); } - if (ignoreMalformed) { - throw new IllegalArgumentException( - "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it ignores malformed dates" - ); - } if (copyTo.copyToFields().isEmpty() != true) { throw new IllegalArgumentException( "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it declares copy_to" diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 9e9437aa6b9d..d9894df9104a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Strings; -import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; @@ -35,6 +34,7 
@@ import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.function.Function; +import java.util.stream.Stream; import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.containsString; @@ -152,7 +152,13 @@ public class DateFieldMapperTests extends MapperTestCase { return List.of( exampleMalformedValue("2016-03-99").mapping(mappingWithFormat("strict_date_optional_time||epoch_millis")) .errorMatches("failed to parse date field [2016-03-99] with format [strict_date_optional_time||epoch_millis]"), - exampleMalformedValue("-522000000").mapping(mappingWithFormat("date_optional_time")).errorMatches("long overflow") + exampleMalformedValue("-522000000").mapping(mappingWithFormat("date_optional_time")).errorMatches("long overflow"), + exampleMalformedValue("2020").mapping(mappingWithFormat("strict_date")) + .errorMatches("failed to parse date field [2020] with format [strict_date]"), + exampleMalformedValue("hello world").mapping(mappingWithFormat("strict_date_optional_time")) + .errorMatches("failed to parse date field [hello world]"), + exampleMalformedValue("true").mapping(mappingWithFormat("strict_date_optional_time")) + .errorMatches("failed to parse date field [true]") ); } @@ -561,7 +567,6 @@ public class DateFieldMapperTests extends MapperTestCase { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - assumeFalse("synthetic _source for date and date_millis doesn't support ignore_malformed", ignoreMalformed); return new SyntheticSourceSupport() { private final DateFieldMapper.Resolution resolution = randomFrom(DateFieldMapper.Resolution.values()); private final Object nullValue = usually() @@ -577,36 +582,62 @@ public class DateFieldMapperTests extends MapperTestCase { @Override public SyntheticSourceExample example(int maxValues) { if (randomBoolean()) { - Tuple v = generateValue(); + Value v = generateValue(); + if 
(v.malformedOutput != null) { + return new SyntheticSourceExample(v.input, v.malformedOutput, null, this::mapping); + } + return new SyntheticSourceExample( - v.v1(), - v.v2(), - resolution.convert(Instant.from(formatter.parse(v.v2()))), + v.input, + v.output, + resolution.convert(Instant.from(formatter.parse(v.output))), this::mapping ); } - List> values = randomList(1, maxValues, this::generateValue); - List in = values.stream().map(Tuple::v1).toList(); - List outList = values.stream() + + List values = randomList(1, maxValues, this::generateValue); + List in = values.stream().map(Value::input).toList(); + + List outputFromDocValues = values.stream() + .filter(v -> v.malformedOutput == null) .sorted( - Comparator.comparing(v -> Instant.from(formatter.parse(v.v1() == null ? nullValue.toString() : v.v1().toString()))) + Comparator.comparing( + v -> Instant.from(formatter.parse(v.input == null ? nullValue.toString() : v.input.toString())) + ) ) - .map(Tuple::v2) + .map(Value::output) .toList(); + + Stream malformedOutput = values.stream().filter(v -> v.malformedOutput != null).map(Value::malformedOutput); + + // Malformed values are always last in the implementation. + List outList = Stream.concat(outputFromDocValues.stream(), malformedOutput).toList(); Object out = outList.size() == 1 ? outList.get(0) : outList; - List outBlockList = outList.stream().map(v -> resolution.convert(Instant.from(formatter.parse(v)))).toList(); + List outBlockList = outputFromDocValues.stream() + .map(v -> resolution.convert(Instant.from(formatter.parse(v)))) + .toList(); Object outBlock = outBlockList.size() == 1 ? 
outBlockList.get(0) : outBlockList; return new SyntheticSourceExample(in, out, outBlock, this::mapping); } - private Tuple generateValue() { + private record Value(Object input, String output, Object malformedOutput) {} + + private Value generateValue() { if (nullValue != null && randomBoolean()) { - return Tuple.tuple(null, outValue(nullValue)); + return new Value(null, outValue(nullValue), null); } + // Different malformed values are tested in #exampleMalformedValues(). + // Here we only verify behavior of arrays that contain malformed + // values since there are modifications specific to synthetic source. + if (ignoreMalformed && randomBoolean()) { + var malformedInput = randomAlphaOfLengthBetween(1, 10); + return new Value(malformedInput, null, malformedInput); + } + Object in = randomValue(); String out = outValue(in); - return Tuple.tuple(in, out); + return new Value(in, out, null); } private Object randomValue() { @@ -637,6 +668,9 @@ public class DateFieldMapperTests extends MapperTestCase { if (nullValue != null) { b.field("null_value", nullValue); } + if (ignoreMalformed) { + b.field("ignore_malformed", true); + } } @Override @@ -653,16 +687,6 @@ public class DateFieldMapperTests extends MapperTestCase { b -> b.field("type", fieldType).field("doc_values", false) ) ); - examples.add( - new SyntheticSourceInvalidExample( - equalTo( - "field [field] of type [" - + fieldType - + "] doesn't support synthetic source because it ignores malformed dates" - ), - b -> b.field("type", fieldType).field("ignore_malformed", true) - ) - ); } return examples; } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index a3f965d06a5c..4b0f45fcc230 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1129,8 +1129,16 @@ public 
abstract class MapperTestCase extends MapperServiceTestCase { public void testSyntheticSourceIgnoreMalformedExamples() throws IOException { assumeTrue("type doesn't support ignore_malformed", supportsIgnoreMalformed()); - CheckedConsumer mapping = syntheticSourceSupport(true).example(1).mapping(); + // We need to call this in order to hit the assumption inside so that + // it tells us when field supports ignore_malformed but doesn't support it together with synthetic source. + // E.g. `assumeFalse(ignoreMalformed)` + syntheticSourceSupport(true); + for (ExampleMalformedValue v : exampleMalformedValues()) { + CheckedConsumer mapping = b -> { + v.mapping.accept(b); + b.field("ignore_malformed", true); + }; assertSyntheticSource(new SyntheticSourceExample(v.value, v.value, v.value, mapping)); } } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java index 83e701486d93..f46508093c4e 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; @@ -34,7 +33,6 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import 
java.util.Map; -import java.util.function.Supplier; import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.IGNORE_MALFORMED; import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.METRICS; @@ -149,27 +147,75 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase { @Override protected List exampleMalformedValues() { + var min = randomDoubleBetween(-100, 100, false); + var max = randomDoubleBetween(min, 150, false); + var valueCount = randomIntBetween(1, Integer.MAX_VALUE); + + var randomString = randomAlphaOfLengthBetween(1, 10); + var randomLong = randomLong(); + var randomDouble = randomDouble(); + var randomBoolean = randomBoolean(); + return List.of( + // wrong input structure + exampleMalformedValue(b -> b.value(randomString)).errorMatches("Failed to parse object"), + exampleMalformedValue(b -> b.value(randomLong)).errorMatches("Failed to parse object"), + exampleMalformedValue(b -> b.value(randomDouble)).errorMatches("Failed to parse object"), + exampleMalformedValue(b -> b.value(randomBoolean)).errorMatches("Failed to parse object"), // no metrics exampleMalformedValue(b -> b.startObject().endObject()).errorMatches( "Aggregate metric field [field] must contain all metrics [min, max, value_count]" ), // unmapped metric exampleMalformedValue( - b -> b.startObject().field("min", -10.1).field("max", 50.0).field("value_count", 14).field("sum", 55).endObject() + b -> b.startObject() + .field("min", min) + .field("max", max) + .field("value_count", valueCount) + .field("sum", randomLong) + .endObject() ).errorMatches("Aggregate metric [sum] does not exist in the mapping of field [field]"), // missing metric - exampleMalformedValue(b -> b.startObject().field("min", -10.1).field("max", 50.0).endObject()).errorMatches( + exampleMalformedValue(b -> b.startObject().field("min", min).field("max", max).endObject()).errorMatches( "Aggregate metric field [field] 
must contain all metrics [min, max, value_count]" ), // invalid metric value - exampleMalformedValue(b -> b.startObject().field("min", "10.0").field("max", 50.0).field("value_count", 14).endObject()) + exampleMalformedValue(b -> b.startObject().field("min", "10.0").field("max", max).field("value_count", valueCount).endObject()) .errorMatches("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]"), + // Invalid metric value with additional data. + // `min` field triggers the error and all additional data should be preserved in synthetic source. + exampleMalformedValue( + b -> b.startObject() + .field("max", max) + .field("value_count", valueCount) + .field("min", "10.0") + .field("hello", randomString) + .startObject("object") + .field("hello", randomLong) + .endObject() + .array("list", randomString, randomString) + .endObject() + ).errorMatches("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]"), + // metric is an object + exampleMalformedValue( + b -> b.startObject() + .startObject("min") + .field("hello", "world") + .endObject() + .field("max", max) + .field("value_count", valueCount) + .endObject() + ).errorMatches("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]"), + // metric is an array + exampleMalformedValue( + b -> b.startObject().array("min", "hello", "world").field("max", max).field("value_count", valueCount).endObject() + ).errorMatches("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_ARRAY]"), // negative value count - exampleMalformedValue(b -> b.startObject().field("min", 10.0).field("max", 50.0).field("value_count", -14).endObject()) - .errorMatches("Aggregate metric [value_count] of field [field] cannot be a negative number"), + exampleMalformedValue( + b -> b.startObject().field("min", min).field("max", max).field("value_count", -1 * valueCount).endObject() + ).errorMatches("Aggregate metric 
[value_count] of field [field] cannot be a negative number"), // value count with decimal digits (whole numbers formatted as doubles are permitted, but non-whole numbers are not) - exampleMalformedValue(b -> b.startObject().field("min", 10.0).field("max", 50.0).field("value_count", 77.33).endObject()) + exampleMalformedValue(b -> b.startObject().field("min", min).field("max", max).field("value_count", 77.33).endObject()) .errorMatches("failed to parse [value_count] sub field: 77.33 cannot be converted to Integer without data loss") ); } @@ -472,18 +518,12 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase { return new AggregateDoubleMetricSyntheticSourceSupport(ignoreMalformed); } - @Override - public void testSyntheticSourceIgnoreMalformedExamples() { - assumeTrue("Scenarios are covered in scope of syntheticSourceSupport", false); - } - @Override protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); } protected final class AggregateDoubleMetricSyntheticSourceSupport implements SyntheticSourceSupport { - private final boolean malformedExample; private final EnumSet storedMetrics; @@ -499,79 +539,7 @@ public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase { return new SyntheticSourceExample(value, value, this::mapping); } - private Object randomAggregateMetric() { - if (malformedExample && randomBoolean()) { - return malformedValue(); - } - - return validMetrics(); - } - - private Object malformedValue() { - List> choices = List.of( - () -> randomAlphaOfLength(3), - ESTestCase::randomInt, - ESTestCase::randomLong, - ESTestCase::randomFloat, - ESTestCase::randomDouble, - ESTestCase::randomBoolean, - // no metrics - Map::of, - // unmapped metric - () -> { - var metrics = validMetrics(); - metrics.put("hello", "world"); - return metrics; - }, - // missing metric - () -> { - var metrics = validMetrics(); - metrics.remove(storedMetrics.stream().findFirst().get().name()); 
- return metrics; - }, - // invalid metric value - () -> { - var metrics = validMetrics(); - metrics.put(storedMetrics.stream().findFirst().get().name(), "boom"); - return metrics; - }, - // metric is an object - () -> { - var metrics = validMetrics(); - metrics.put(storedMetrics.stream().findFirst().get().name(), Map.of("hello", "world")); - return metrics; - }, - // invalid metric value with additional data - () -> { - var metrics = validMetrics(); - metrics.put(storedMetrics.stream().findFirst().get().name(), "boom"); - metrics.put("hello", "world"); - metrics.put("object", Map.of("hello", "world")); - metrics.put("list", List.of("hello", "world")); - return metrics; - }, - // negative value count - () -> { - var metrics = validMetrics(); - if (storedMetrics.contains(Metric.value_count.name())) { - metrics.put(Metric.value_count.name(), -100); - } - return metrics; - }, - // value count with decimal digits (whole numbers formatted as doubles are permitted, but non-whole numbers are not) - () -> { - var metrics = validMetrics(); - if (storedMetrics.contains(Metric.value_count.name())) { - metrics.put(Metric.value_count.name(), 10.5); - } - return metrics; - } - ); - - return randomFrom(choices).get(); - } - - private Map validMetrics() { + private Map randomAggregateMetric() { Map value = new LinkedHashMap<>(storedMetrics.size()); for (Metric m : storedMetrics) { if (Metric.value_count == m) { From 540d2b10a354c3b45b727f3488724fa008c5d6cf Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Mon, 10 Jun 2024 19:32:31 +0200 Subject: [PATCH 45/58] Add metrics@custom component template to metrics-*-* index template (#109540) This lets users customize the metrics data stream mappings, without having to override a managed component template that may get overridden. 
Fixes #109475 --- docs/changelog/109540.yaml | 6 + .../datastreams/AbstractDataStreamIT.java | 169 ++++++++++++++++++ .../datastreams/EcsLogsDataStreamIT.java | 4 +- .../datastreams/LogsDataStreamIT.java | 149 +-------------- .../datastreams/MetricsDataStreamIT.java | 101 +++++++++++ .../src/main/resources/metrics@template.json | 4 +- .../xpack/stack/StackTemplateRegistry.java | 2 +- 7 files changed, 286 insertions(+), 149 deletions(-) create mode 100644 docs/changelog/109540.yaml create mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java create mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MetricsDataStreamIT.java diff --git a/docs/changelog/109540.yaml b/docs/changelog/109540.yaml new file mode 100644 index 000000000000..722c60a30fb9 --- /dev/null +++ b/docs/changelog/109540.yaml @@ -0,0 +1,6 @@ +pr: 109540 +summary: Add metrics@custom component template to metrics-*-* index template +area: Data streams +type: enhancement +issues: + - 109475 diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java new file mode 100644 index 000000000000..ca33f0832453 --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java @@ -0,0 +1,169 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * This base class provides the boilerplate to simplify the development of integration tests. + * Aside from providing useful helper methods and disabling unnecessary plugins, + * it waits until an {@linkplain #indexTemplateName() index template} is installed, which happens asynchronously in StackTemplateRegistry. + * This avoids race conditions leading to flaky tests by ensuring the template has been installed before executing the tests. + */ +public abstract class AbstractDataStreamIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .setting("xpack.security.enabled", "false") + .setting("xpack.watcher.enabled", "false") + // Disable apm-data so the index templates it installs do not impact + // tests such as testIgnoreDynamicBeyondLimit. 
+ .setting("xpack.apm_data.enabled", "false") + .build(); + protected RestClient client; + + static void waitForIndexTemplate(RestClient client, String indexTemplate) throws Exception { + assertBusy(() -> { + try { + Request request = new Request("GET", "_index_template/" + indexTemplate); + assertOK(client.performRequest(request)); + } catch (ResponseException e) { + fail(e.getMessage()); + } + }); + } + + static void createDataStream(RestClient client, String name) throws IOException { + Request request = new Request("PUT", "_data_stream/" + name); + assertOK(client.performRequest(request)); + } + + @SuppressWarnings("unchecked") + static String getWriteBackingIndex(RestClient client, String name) throws IOException { + Request request = new Request("GET", "_data_stream/" + name); + List dataStreams = (List) entityAsMap(client.performRequest(request)).get("data_streams"); + Map dataStream = (Map) dataStreams.get(0); + List> indices = (List>) dataStream.get("indices"); + return indices.get(0).get("index_name"); + } + + @SuppressWarnings("unchecked") + static Map getSettings(RestClient client, String indexName) throws IOException { + Request request = new Request("GET", "/" + indexName + "/_settings?flat_settings"); + return ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get("settings"); + } + + static void putMapping(RestClient client, String indexName) throws IOException { + Request request = new Request("PUT", "/" + indexName + "/_mapping"); + request.setJsonEntity(""" + { + "properties": { + "numeric_field": { + "type": "integer" + } + } + } + """); + assertOK(client.performRequest(request)); + } + + @SuppressWarnings("unchecked") + static Map getMappingProperties(RestClient client, String indexName) throws IOException { + Request request = new Request("GET", "/" + indexName + "/_mapping"); + Map map = (Map) entityAsMap(client.performRequest(request)).get(indexName); + Map mappings = (Map) map.get("mappings"); + return (Map) 
mappings.get("properties"); + } + + static void indexDoc(RestClient client, String dataStreamName, String doc) throws IOException { + Request request = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + request.setJsonEntity(doc); + assertOK(client.performRequest(request)); + } + + @SuppressWarnings("unchecked") + static List searchDocs(RestClient client, String dataStreamName, String query) throws IOException { + Request request = new Request("GET", "/" + dataStreamName + "/_search"); + request.setJsonEntity(query); + Map hits = (Map) entityAsMap(client.performRequest(request)).get("hits"); + return (List) hits.get("hits"); + } + + @SuppressWarnings("unchecked") + static Object getValueFromPath(Map map, List path) { + Map current = map; + for (int i = 0; i < path.size(); i++) { + Object value = current.get(path.get(i)); + if (i == path.size() - 1) { + return value; + } + if (value == null) { + throw new IllegalStateException("Path " + String.join(".", path) + " was not found in " + map); + } + if (value instanceof Map next) { + current = (Map) next; + } else { + throw new IllegalStateException( + "Failed to reach the end of the path " + + String.join(".", path) + + " last reachable field was " + + path.get(i) + + " in " + + map + ); + } + } + return current; + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restAdminSettings() { + if (super.restAdminSettings().keySet().contains(ThreadContext.PREFIX + ".Authorization")) { + return super.restAdminSettings(); + } else { + String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(super.restAdminSettings()).put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + } + + @Before + public void setup() throws Exception { + client = client(); + AbstractDataStreamIT.waitForIndexTemplate(client, indexTemplateName()); + } + + protected abstract 
String indexTemplateName(); + + @After + public void cleanUp() throws IOException { + adminClient().performRequest(new Request("DELETE", "_data_stream/*")); + } +} diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java index 5fe72c38078e..e43b1e451c31 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java @@ -26,7 +26,7 @@ import static org.elasticsearch.datastreams.LogsDataStreamIT.getValueFromPath; import static org.elasticsearch.datastreams.LogsDataStreamIT.getWriteBackingIndex; import static org.elasticsearch.datastreams.LogsDataStreamIT.indexDoc; import static org.elasticsearch.datastreams.LogsDataStreamIT.searchDocs; -import static org.elasticsearch.datastreams.LogsDataStreamIT.waitForLogs; +import static org.elasticsearch.datastreams.LogsDataStreamIT.waitForIndexTemplate; import static org.hamcrest.Matchers.is; public class EcsLogsDataStreamIT extends DisabledSecurityDataStreamTestCase { @@ -38,7 +38,7 @@ public class EcsLogsDataStreamIT extends DisabledSecurityDataStreamTestCase { @Before public void setup() throws Exception { client = client(); - waitForLogs(client); + waitForIndexTemplate(client, "logs"); { Request request = new Request("PUT", "/_ingest/pipeline/logs@custom"); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java index c2a7a76ab751..9ab32f29f4a7 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -9,20 +9,7 @@ package 
org.elasticsearch.datastreams; import org.elasticsearch.client.Request; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -35,46 +22,7 @@ import static org.hamcrest.Matchers.matchesRegex; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; -public class LogsDataStreamIT extends ESRestTestCase { - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .feature(FeatureFlag.FAILURE_STORE_ENABLED) - .setting("xpack.security.enabled", "false") - .setting("xpack.watcher.enabled", "false") - // Disable apm-data so the index templates it installs do not impact - // tests such as testIgnoreDynamicBeyondLimit. 
- .setting("xpack.apm_data.enabled", "false") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Override - protected Settings restAdminSettings() { - if (super.restAdminSettings().keySet().contains(ThreadContext.PREFIX + ".Authorization")) { - return super.restAdminSettings(); - } else { - String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); - return Settings.builder().put(super.restAdminSettings()).put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - } - - private RestClient client; - - @Before - public void setup() throws Exception { - client = client(); - waitForLogs(client); - } - - @After - public void cleanUp() throws IOException { - adminClient().performRequest(new Request("DELETE", "_data_stream/*")); - } +public class LogsDataStreamIT extends AbstractDataStreamIT { @SuppressWarnings("unchecked") public void testDefaultLogsSettingAndMapping() throws Exception { @@ -791,97 +739,8 @@ public class LogsDataStreamIT extends ESRestTestCase { assertThat(ignored.stream().filter(i -> i.startsWith("field") == false).toList(), empty()); } - static void waitForLogs(RestClient client) throws Exception { - assertBusy(() -> { - try { - Request request = new Request("GET", "_index_template/logs"); - assertOK(client.performRequest(request)); - } catch (ResponseException e) { - fail(e.getMessage()); - } - }); - } - - static void createDataStream(RestClient client, String name) throws IOException { - Request request = new Request("PUT", "_data_stream/" + name); - assertOK(client.performRequest(request)); - } - - @SuppressWarnings("unchecked") - static String getWriteBackingIndex(RestClient client, String name) throws IOException { - Request request = new Request("GET", "_data_stream/" + name); - List dataStreams = (List) entityAsMap(client.performRequest(request)).get("data_streams"); - Map dataStream = (Map) dataStreams.get(0); - List> indices = (List>) 
dataStream.get("indices"); - return indices.get(0).get("index_name"); - } - - @SuppressWarnings("unchecked") - static Map getSettings(RestClient client, String indexName) throws IOException { - Request request = new Request("GET", "/" + indexName + "/_settings?flat_settings"); - return ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get("settings"); - } - - static void putMapping(RestClient client, String indexName) throws IOException { - Request request = new Request("PUT", "/" + indexName + "/_mapping"); - request.setJsonEntity(""" - { - "properties": { - "numeric_field": { - "type": "integer" - } - } - } - """); - assertOK(client.performRequest(request)); - } - - @SuppressWarnings("unchecked") - static Map getMappingProperties(RestClient client, String indexName) throws IOException { - Request request = new Request("GET", "/" + indexName + "/_mapping"); - Map map = (Map) entityAsMap(client.performRequest(request)).get(indexName); - Map mappings = (Map) map.get("mappings"); - return (Map) mappings.get("properties"); - } - - static void indexDoc(RestClient client, String dataStreamName, String doc) throws IOException { - Request request = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); - request.setJsonEntity(doc); - assertOK(client.performRequest(request)); - } - - @SuppressWarnings("unchecked") - static List searchDocs(RestClient client, String dataStreamName, String query) throws IOException { - Request request = new Request("GET", "/" + dataStreamName + "/_search"); - request.setJsonEntity(query); - Map hits = (Map) entityAsMap(client.performRequest(request)).get("hits"); - return (List) hits.get("hits"); - } - - @SuppressWarnings("unchecked") - static Object getValueFromPath(Map map, List path) { - Map current = map; - for (int i = 0; i < path.size(); i++) { - Object value = current.get(path.get(i)); - if (i == path.size() - 1) { - return value; - } - if (value == null) { - throw new IllegalStateException("Path " + 
String.join(".", path) + " was not found in " + map); - } - if (value instanceof Map next) { - current = (Map) next; - } else { - throw new IllegalStateException( - "Failed to reach the end of the path " - + String.join(".", path) - + " last reachable field was " - + path.get(i) - + " in " - + map - ); - } - } - return current; + @Override + protected String indexTemplateName() { + return "logs"; } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MetricsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MetricsDataStreamIT.java new file mode 100644 index 000000000000..6cc300378a31 --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MetricsDataStreamIT.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.client.Request; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class MetricsDataStreamIT extends AbstractDataStreamIT { + + @SuppressWarnings("unchecked") + public void testCustomMapping() throws Exception { + { + Request request = new Request("POST", "/_component_template/metrics@custom"); + request.setJsonEntity(""" + { + "template": { + "settings": { + "index": { + "query": { + "default_field": ["custom-message"] + } + } + }, + "mappings": { + "properties": { + "numeric_field": { + "type": "integer" + }, + "socket": { + "properties": { + "ip": { + "type": "keyword" + } + } + } + } + } + } + } + """); + assertOK(client.performRequest(request)); + } + + String dataStreamName = "metrics-generic-default"; + createDataStream(client, dataStreamName); + String backingIndex = getWriteBackingIndex(client, dataStreamName); + + // Verify that the custom settings.index.query.default_field overrides the default query field - "message" + Map settings = getSettings(client, backingIndex); + assertThat(settings.get("index.query.default_field"), is(List.of("custom-message"))); + + // Verify that the new field from the custom component template is applied + putMapping(client, backingIndex); + Map mappingProperties = getMappingProperties(client, backingIndex); + assertThat(getValueFromPath(mappingProperties, List.of("numeric_field", "type")), equalTo("integer")); + assertThat(getValueFromPath(mappingProperties, List.of("socket", "properties", "ip", "type")), is("keyword")); + + // Insert valid doc and verify successful indexing + { + indexDoc(client, dataStreamName, """ + { + "@timestamp": "2024-06-10", + "test": "doc-with-ip", + "socket": { + "ip": "127.0.0.1" + } + } + """); + List results = searchDocs(client, dataStreamName, """ + { + "query": { + "term": { + "test": { + "value": "doc-with-ip" + } + } + }, 
+ "fields": ["socket.ip"] + } + """); + Map fields = ((Map>) results.get(0)).get("_source"); + assertThat(fields.get("socket"), is(Map.of("ip", "127.0.0.1"))); + } + } + + @Override + protected String indexTemplateName() { + return "metrics"; + } +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json index 464df09ffe2c..776ed88857db 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json @@ -5,8 +5,10 @@ "composed_of": [ "metrics@mappings", "data-streams@mappings", - "metrics@settings" + "metrics@settings", + "metrics@custom" ], + "ignore_missing_component_templates": ["metrics@custom"], "allow_auto_create": true, "_meta": { "description": "default metrics template installed by x-pack", diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 30323a1d7d36..3cd551ca1f3d 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -47,7 +47,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. 
- public static final int REGISTRY_VERSION = 10; + public static final int REGISTRY_VERSION = 11; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( From ec0b573af66961e1990ec81268135a57f7d28aa1 Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Mon, 10 Jun 2024 14:17:25 -0400 Subject: [PATCH 46/58] Add Create or update query rule API call (#109042) --- docs/changelog/109042.yaml | 5 + .../rest-api-spec/api/query_rule.put.json | 42 +++ .../org/elasticsearch/TransportVersions.java | 1 + x-pack/plugin/ent-search/qa/rest/build.gradle | 15 +- .../entsearch/rules/10_query_ruleset_put.yml | 29 +- .../entsearch/rules/20_query_ruleset_list.yml | 54 +++- .../rules/30_query_ruleset_delete.yml | 17 +- .../entsearch/rules/40_rule_query_search.yml | 17 ++ .../entsearch/rules/50_query_rule_put.yml | 266 ++++++++++++++++++ .../xpack/application/EnterpriseSearch.java | 7 +- .../xpack/application/rules/QueryRule.java | 56 +++- .../rules/QueryRulesIndexService.java | 109 ++++++- .../xpack/application/rules/QueryRuleset.java | 6 +- .../rules/action/PutQueryRuleAction.java | 198 +++++++++++++ .../rules/action/PutQueryRulesetAction.java | 9 + .../rules/action/RestPutQueryRuleAction.java | 55 ++++ .../action/TransportPutQueryRuleAction.java | 49 ++++ ...a => EnterpriseSearchModuleTestUtils.java} | 19 +- ...ectorActionRequestBWCSerializingTests.java | 4 +- .../syncjob/ConnectorSyncJobTestUtils.java | 4 +- ...cJobsActionRequestBWCSerializingTests.java | 4 +- .../rules/QueryRuleCriteriaTests.java | 4 +- .../application/rules/QueryRuleTests.java | 29 +- .../rules/QueryRulesIndexServiceTests.java | 22 +- .../application/rules/QueryRulesetTests.java | 4 +- .../rules/RuleQueryBuilderTests.java | 3 +- ...esetActionResponseBWCSerializingTests.java | 11 +- ...esetsActionRequestBWCSerializingTests.java | 4 +- ...setsActionResponseBWCSerializingTests.java | 4 +- 
...yRuleActionRequestBWCSerializingTests.java | 62 ++++ ...eryRuleActionResponseSerializingTests.java | 38 +++ ...lesetActionRequestBWCSerializingTests.java | 13 +- .../action/RestPutQueryRuleActionTests.java | 66 +++++ .../SearchApplicationIndexServiceTests.java | 5 +- .../search/SearchApplicationTests.java | 3 +- ...tionActionResponseBWCSerializingTests.java | 4 +- ...ationActionRequestBWCSerializingTests.java | 4 +- ...tionActionResponseBWCSerializingTests.java | 4 +- ...ationActionRequestBWCSerializingTests.java | 4 +- ...ationSearchRequestBWCSerializingTests.java | 4 +- .../xpack/security/operator/Constants.java | 1 + 41 files changed, 1144 insertions(+), 111 deletions(-) create mode 100644 docs/changelog/109042.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportPutQueryRuleAction.java rename x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/{search/SearchApplicationTestUtils.java => EnterpriseSearchModuleTestUtils.java} (85%) create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionResponseSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleActionTests.java diff --git 
a/docs/changelog/109042.yaml b/docs/changelog/109042.yaml new file mode 100644 index 000000000000..5aa80db991c0 --- /dev/null +++ b/docs/changelog/109042.yaml @@ -0,0 +1,5 @@ +pr: 109042 +summary: Add Create or update query rule API call +area: Application +type: enhancement +issues: [ ] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json new file mode 100644 index 000000000000..895f3654b162 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json @@ -0,0 +1,42 @@ +{ + "query_rule.put": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-rule.html", + "description": "Creates or updates a query rule within a ruleset." + }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_query_rules/{ruleset_id}/{rule_id}", + "methods": [ + "PUT" + ], + "parts": { + "ruleset_id": { + "type": "string", + "description": "The unique identifier of the ruleset this rule should be added to. The ruleset will be created if it does not exist." + }, + "rule_id": { + "type": "string", + "description": "The unique identifier of the rule to be created or updated." 
+ } + } + } + ] + }, + "body": { + "description": "The query rule configuration, including the type of rule, the criteria to match the rule, and the action that should be taken if the rule matches.", + "required": true + } + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index e6b98971ff8c..c2be2da12534 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -187,6 +187,7 @@ public class TransportVersions { public static final TransportVersion RANK_FEATURE_PHASE_ADDED = def(8_678_00_0); public static final TransportVersion RANK_DOC_IN_SHARD_FETCH_REQUEST = def(8_679_00_0); public static final TransportVersion SECURITY_SETTINGS_REQUEST_TIMEOUTS = def(8_680_00_0); + public static final TransportVersion QUERY_RULE_CRUD_API_PUT = def(8_681_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/ent-search/qa/rest/build.gradle b/x-pack/plugin/ent-search/qa/rest/build.gradle index 37f1d8f13c85..c24b0ffd44c6 100644 --- a/x-pack/plugin/ent-search/qa/rest/build.gradle +++ b/x-pack/plugin/ent-search/qa/rest/build.gradle @@ -7,7 +7,20 @@ dependencies { restResources { restApi { - include '_common', 'bulk', 'cluster', 'connector', 'nodes', 'indices', 'index', 'query_ruleset', 'search_application', 'xpack', 'security', 'search', 'ml' + include '_common', + 'bulk', + 'cluster', + 'connector', + 'nodes', + 'indices', + 'index', + 'query_ruleset', + 'query_rule', + 'search_application', + 'xpack', + 'security', + 'search', + 'ml' } } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml index 7868919dd6d1..f3f37e41ec75 100644 --- 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml @@ -1,9 +1,20 @@ - setup: - requires: - cluster_features: ["gte_v8.10.0"] + cluster_features: [ "gte_v8.10.0" ] reason: Introduced in 8.10.0 +--- +teardown: + - do: + query_ruleset.delete: + ruleset_id: test-ruleset + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: test-query-ruleset-recreating + ignore: 404 + --- 'Create Query Ruleset': - do: @@ -16,7 +27,7 @@ setup: criteria: - type: exact metadata: query_string - values: [elastic] + values: [ elastic ] actions: ids: - 'id1' @@ -26,7 +37,7 @@ setup: criteria: - type: exact metadata: query_string - values: [kibana] + values: [ kibana ] actions: docs: - '_index': 'test-index1' @@ -47,7 +58,7 @@ setup: criteria: - type: exact metadata: query_string - values: [elastic] + values: [ elastic ] actions: ids: - 'id1' @@ -57,7 +68,7 @@ setup: criteria: - type: exact metadata: query_string - values: [kibana] + values: [ kibana ] actions: docs: - '_index': 'test-index1' @@ -77,7 +88,7 @@ setup: criteria: type: 'exact' metadata: 'query_string' - values: ['elastic'] + values: [ 'elastic' ] actions: ids: - 'id1' @@ -94,7 +105,7 @@ setup: criteria: type: 'exact' metadata: 'query_string' - values: ['elastic'] + values: [ 'elastic' ] actions: ids: - 'id2' @@ -118,7 +129,7 @@ setup: criteria: type: 'exact' metadata: 'query_string' - values: ['elastic'] + values: [ 'elastic' ] actions: ids: - 'id1' diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml index 0183dc8930d7..b30f1c2418f4 100644 --- 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml @@ -1,6 +1,6 @@ setup: - requires: - cluster_features: ["gte_v8.10.0"] + cluster_features: [ "gte_v8.10.0" ] reason: Introduced in 8.10.0 - do: query_ruleset.put: @@ -12,7 +12,7 @@ setup: criteria: - type: exact metadata: query_string - values: [elastic] + values: [ elastic ] actions: ids: - 'id1' @@ -22,7 +22,7 @@ setup: criteria: - type: exact metadata: query_string - values: [kibana] + values: [ kibana ] actions: ids: - 'id3' @@ -38,7 +38,7 @@ setup: criteria: - type: exact metadata: query_string - values: [elastic] + values: [ elastic ] actions: ids: - 'id1' @@ -48,7 +48,7 @@ setup: criteria: - type: exact metadata: query_string - values: [kibana] + values: [ kibana ] actions: ids: - 'id3' @@ -58,7 +58,7 @@ setup: criteria: - type: exact metadata: query_string - values: [logstash] + values: [ logstash ] actions: ids: - 'id5' @@ -74,7 +74,7 @@ setup: criteria: - type: exact metadata: query_string - values: [elastic] + values: [ elastic ] actions: ids: - 'id1' @@ -84,7 +84,7 @@ setup: criteria: - type: exact metadata: query_string - values: [kibana] + values: [ kibana ] actions: ids: - 'id3' @@ -94,7 +94,7 @@ setup: criteria: - type: exact metadata: query_string - values: [logstash] + values: [ logstash ] actions: ids: - 'id5' @@ -104,11 +104,32 @@ setup: criteria: - type: exact metadata: query_string - values: [beats] + values: [ beats ] actions: ids: - 'id7' - 'id8' +--- +teardown: + - do: + query_ruleset.delete: + ruleset_id: test-query-ruleset-1 + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: test-query-ruleset-2 + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: test-query-ruleset-3 + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: a-test-query-ruleset-with-lots-of-criteria + 
ignore: 404 --- "List Query Rulesets": @@ -263,3 +284,16 @@ setup: prefix: 1 suffix: 1 always: 1 + +--- +'List Query Rulesets - Insufficient privilege': + - skip: + features: headers + + - do: + catch: forbidden + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + query_ruleset.list: { } + + - match: { error.type: 'security_exception' } + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml index cfc847b33f66..81e3e6c8411f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml @@ -1,6 +1,6 @@ setup: - requires: - cluster_features: ["gte_v8.10.0"] + cluster_features: [ "gte_v8.10.0" ] reason: Introduced in 8.10.0 - do: query_ruleset.put: @@ -12,7 +12,7 @@ setup: criteria: - type: exact metadata: query_string - values: [elastic] + values: [ elastic ] actions: ids: - 'id1' @@ -37,3 +37,16 @@ setup: catch: "missing" query_ruleset.delete: ruleset_id: test-nonexistent-query-ruleset + +--- +'Delete Query Ruleset - Insufficient privilege': + - skip: + features: headers + + - do: + catch: forbidden + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + query_ruleset.delete: + ruleset_id: test-query-ruleset-to-delete + + - match: { error.type: 'security_exception' } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml index 5b67f966bba6..bfd4c5e8a831 100644 --- 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml @@ -98,6 +98,23 @@ setup: ids: - 'doc6' +--- +teardown: + - do: + query_ruleset.delete: + ruleset_id: test-ruleset + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: another-test-ruleset + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: combined-ruleset + ignore: 404 + --- "Perform a rule query specifying a ruleset that does not exist": - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml new file mode 100644 index 000000000000..64a933261af9 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml @@ -0,0 +1,266 @@ +setup: + - requires: + cluster_features: [ "gte_v8.15.0" ] + reason: Introduced in 8.15.0 + + +--- +teardown: + - do: + query_ruleset.delete: + ruleset_id: test-ruleset + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: test-query-rule-recreating + ignore: 404 + + - do: + query_ruleset.delete: + ruleset_id: forbidden-query-ruleset + ignore: 404 + +--- +'Create query rule with existing ruleset respecting priority order': + # Start with 2 rules, one that specifies priority and one that does not (should go at the end) + - do: + query_ruleset.put: + ruleset_id: test-ruleset + body: + rules: + - rule_id: query-rule-id1 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ elastic ] + actions: + ids: + - 'id1' + - 'id2' + - rule_id: query-rule-id2 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ kibana ] + actions: + ids: + - 'id3' + - 'id4' + 
priority: 1 + + - match: { result: 'created' } + + - do: + query_ruleset.get: + ruleset_id: test-ruleset + + - match: { ruleset_id: test-ruleset } + - match: + rules: + - rule_id: query-rule-id2 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ kibana ] + actions: + ids: + - 'id3' + - 'id4' + priority: 1 + - rule_id: query-rule-id1 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ elastic ] + actions: + ids: + - 'id1' + - 'id2' + + # Next, add a rule with a priority 2 - this should go in the middle + - do: + query_rule.put: + ruleset_id: test-ruleset + rule_id: query-rule-id3 + body: + type: 'pinned' + criteria: + type: 'exact' + metadata: 'query_string' + values: [ 'logstash' ] + actions: + ids: + - 'id1' + priority: 2 + + - match: { result: 'created' } + + - do: + query_ruleset.get: + ruleset_id: test-ruleset + + - match: { ruleset_id: test-ruleset } + - match: + rules: + - rule_id: query-rule-id2 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ kibana ] + actions: + ids: + - 'id3' + - 'id4' + priority: 1 + - rule_id: query-rule-id3 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ logstash ] + actions: + ids: + - 'id1' + priority: 2 + - rule_id: query-rule-id1 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ elastic ] + actions: + ids: + - 'id1' + - 'id2' + + # Finally, add another single rule with no priority. This should be appended to the ruleset. 
+ - do: + query_rule.put: + ruleset_id: test-ruleset + rule_id: query-rule-id4 + body: + type: 'pinned' + criteria: + type: 'exact' + metadata: 'query_string' + values: [ 'search' ] + actions: + ids: + - 'id2' + + - match: { result: 'created' } + + - do: + query_ruleset.get: + ruleset_id: test-ruleset + + - match: { ruleset_id: test-ruleset } + - match: + rules: + - rule_id: query-rule-id2 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ kibana ] + actions: + ids: + - 'id3' + - 'id4' + priority: 1 + - rule_id: query-rule-id3 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ logstash ] + actions: + ids: + - 'id1' + priority: 2 + - rule_id: query-rule-id1 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ elastic ] + actions: + ids: + - 'id1' + - 'id2' + - rule_id: query-rule-id4 + type: pinned + criteria: + - type: exact + metadata: query_string + values: [ search ] + actions: + ids: + - 'id2' + + +--- +'Create Query Rule - Resource already exists': + - do: + query_rule.put: + ruleset_id: test-query-rule-recreating + rule_id: abc + body: + type: 'pinned' + criteria: + type: 'exact' + metadata: 'query_string' + values: [ 'elastic' ] + actions: + ids: + - 'id1' + priority: 5 + + - match: { result: 'created' } + + - do: + query_rule.put: + ruleset_id: test-query-rule-recreating + rule_id: abc + body: + type: 'pinned' + criteria: + type: 'exact' + metadata: 'query_string' + values: [ 'elastic' ] + actions: + ids: + - 'id2' + priority: 3 + + - match: { result: 'updated' } + +--- +'Create Query Rule - Insufficient privilege': + - skip: + features: headers + + - do: + catch: forbidden + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + query_rule.put: + ruleset_id: forbidden-query-ruleset + rule_id: abc + body: + type: 'pinned' + criteria: + type: 'exact' + metadata: 'query_string' + values: [ 'elastic' ] + actions: + ids: + - 'id1' + - 
'id2' + + - match: { error.type: 'security_exception' } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 871bf7fb122b..9572eb599f2d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -151,14 +151,17 @@ import org.elasticsearch.xpack.application.rules.RuleQueryBuilder; import org.elasticsearch.xpack.application.rules.action.DeleteQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.GetQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.ListQueryRulesetsAction; +import org.elasticsearch.xpack.application.rules.action.PutQueryRuleAction; import org.elasticsearch.xpack.application.rules.action.PutQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.RestDeleteQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.RestGetQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.RestListQueryRulesetsAction; +import org.elasticsearch.xpack.application.rules.action.RestPutQueryRuleAction; import org.elasticsearch.xpack.application.rules.action.RestPutQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.TransportDeleteQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.TransportGetQueryRulesetAction; import org.elasticsearch.xpack.application.rules.action.TransportListQueryRulesetsAction; +import org.elasticsearch.xpack.application.rules.action.TransportPutQueryRuleAction; import org.elasticsearch.xpack.application.rules.action.TransportPutQueryRulesetAction; import org.elasticsearch.xpack.application.search.SearchApplicationIndexService; import 
org.elasticsearch.xpack.application.search.action.DeleteSearchApplicationAction; @@ -251,6 +254,7 @@ public class EnterpriseSearch extends Plugin implements ActionPlugin, SystemInde new ActionHandler<>(GetQueryRulesetAction.INSTANCE, TransportGetQueryRulesetAction.class), new ActionHandler<>(ListQueryRulesetsAction.INSTANCE, TransportListQueryRulesetsAction.class), new ActionHandler<>(PutQueryRulesetAction.INSTANCE, TransportPutQueryRulesetAction.class), + new ActionHandler<>(PutQueryRuleAction.INSTANCE, TransportPutQueryRuleAction.class), usageAction, infoAction @@ -354,7 +358,8 @@ public class EnterpriseSearch extends Plugin implements ActionPlugin, SystemInde new RestDeleteQueryRulesetAction(getLicenseState()), new RestGetQueryRulesetAction(getLicenseState()), new RestListQueryRulesetsAction(getLicenseState()), - new RestPutQueryRulesetAction(getLicenseState()) + new RestPutQueryRulesetAction(getLicenseState()), + new RestPutQueryRuleAction(getLicenseState()) ) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java index b9093a2597d7..33fa74e5178c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.application.rules; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import 
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -53,6 +55,10 @@ public class QueryRule implements Writeable, ToXContentObject { private final QueryRuleType type; private final List criteria; private final Map actions; + private final Integer priority; + + public static final int MIN_PRIORITY = 0; + public static final int MAX_PRIORITY = 1000000; public enum QueryRuleType { PINNED; @@ -79,11 +85,17 @@ public class QueryRule implements Writeable, ToXContentObject { * @param type The {@link QueryRuleType} of this rule * @param criteria The {@link QueryRuleCriteria} required for a query to match this rule * @param actions The actions that should be taken if this rule is matched, dependent on the type of rule + * @param priority If specified, assigns a priority to the rule. Rules with specified priorities are applied before + * rules without specified priorities, in ascending priority order. 
*/ - public QueryRule(String id, QueryRuleType type, List criteria, Map actions) { - if (Strings.isNullOrEmpty(id)) { - throw new IllegalArgumentException("Query rule id cannot be null or blank"); - } + public QueryRule( + @Nullable String id, + QueryRuleType type, + List criteria, + Map actions, + @Nullable Integer priority + ) { + // Interstitial null state allowed during rule creation; validation occurs in CRUD API this.id = id; Objects.requireNonNull(type, "Query rule type cannot be null"); @@ -100,16 +112,27 @@ public class QueryRule implements Writeable, ToXContentObject { throw new IllegalArgumentException("Query rule actions cannot be empty"); } this.actions = actions; + this.priority = priority; validate(); } + public QueryRule(String id, QueryRule other) { + this(id, other.type, other.criteria, other.actions, other.priority); + } + public QueryRule(StreamInput in) throws IOException { this.id = in.readString(); this.type = QueryRuleType.queryRuleType(in.readString()); this.criteria = in.readCollectionAsList(QueryRuleCriteria::new); this.actions = in.readGenericMap(); + if (in.getTransportVersion().onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + this.priority = in.readOptionalVInt(); + } else { + this.priority = null; + } + validate(); } @@ -126,6 +149,10 @@ public class QueryRule implements Writeable, ToXContentObject { } else { throw new IllegalArgumentException("Unsupported QueryRuleType: " + type); } + + if (priority != null && (priority < MIN_PRIORITY || priority > MAX_PRIORITY)) { + throw new IllegalArgumentException("Priority was " + priority + ", must be between " + MIN_PRIORITY + " and " + MAX_PRIORITY); + } } private void validatePinnedAction(Object action) { @@ -146,6 +173,9 @@ public class QueryRule implements Writeable, ToXContentObject { out.writeString(type.toString()); out.writeCollection(criteria); out.writeGenericMap(actions); + if (out.getTransportVersion().onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + 
out.writeOptionalVInt(priority); + } } @SuppressWarnings("unchecked") @@ -157,7 +187,8 @@ public class QueryRule implements Writeable, ToXContentObject { final QueryRuleType type = QueryRuleType.queryRuleType((String) params[1]); final List criteria = (List) params[2]; final Map actions = (Map) params[3]; - return new QueryRule(id, type, criteria, actions); + final Integer priority = (Integer) params[4]; + return new QueryRule(id, type, criteria, actions, priority); } ); @@ -165,12 +196,14 @@ public class QueryRule implements Writeable, ToXContentObject { public static final ParseField TYPE_FIELD = new ParseField("type"); public static final ParseField CRITERIA_FIELD = new ParseField("criteria"); public static final ParseField ACTIONS_FIELD = new ParseField("actions"); + public static final ParseField PRIORITY_FIELD = new ParseField("priority"); static { PARSER.declareStringOrNull(optionalConstructorArg(), ID_FIELD); PARSER.declareString(constructorArg(), TYPE_FIELD); PARSER.declareObjectArray(constructorArg(), (p, c) -> QueryRuleCriteria.fromXContent(p), CRITERIA_FIELD); PARSER.declareObject(constructorArg(), (p, c) -> p.map(), ACTIONS_FIELD); + PARSER.declareInt(optionalConstructorArg(), PRIORITY_FIELD); } /** @@ -213,7 +246,9 @@ public class QueryRule implements Writeable, ToXContentObject { builder.xContentList(CRITERIA_FIELD.getPreferredName(), criteria); builder.field(ACTIONS_FIELD.getPreferredName()); builder.map(actions); - + if (priority != null) { + builder.field(PRIORITY_FIELD.getPreferredName(), priority); + } } builder.endObject(); return builder; @@ -255,6 +290,10 @@ public class QueryRule implements Writeable, ToXContentObject { return actions; } + public Integer priority() { + return priority; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -263,12 +302,13 @@ public class QueryRule implements Writeable, ToXContentObject { return Objects.equals(id, queryRule.id) && type == queryRule.type && Objects.equals(criteria, 
queryRule.criteria) - && Objects.equals(actions, queryRule.actions); + && Objects.equals(actions, queryRule.actions) + && Objects.equals(priority, queryRule.priority); } @Override public int hashCode() { - return Objects.hash(id, type, criteria, actions); + return Objects.hash(id, type, criteria, actions, priority); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 1e98755cc7ac..adcd5da988b8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.VersionId; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -37,17 +38,20 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.application.rules.action.PutQueryRuleAction; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.EnumMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; import static 
org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -60,9 +64,9 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; public class QueryRulesIndexService { private static final Logger logger = LogManager.getLogger(QueryRulesIndexService.class); public static final String QUERY_RULES_ALIAS_NAME = ".query-rules"; - public static final String QUERY_RULES_CONCRETE_INDEX_NAME = ".query-rules-1"; + public static final String QUERY_RULES_INDEX_PREFIX = ".query-rules-"; + public static final String QUERY_RULES_CONCRETE_INDEX_NAME = QUERY_RULES_INDEX_PREFIX + QueryRulesIndexMappingVersion.latest().id; public static final String QUERY_RULES_INDEX_NAME_PATTERN = ".query-rules-*"; - private static final int QUERY_RULES_INDEX_MAPPINGS_VERSION = 1; private final Client clientWithOrigin; private final ClusterSettings clusterSettings; @@ -77,16 +81,21 @@ public class QueryRulesIndexService { * @return The {@link SystemIndexDescriptor} for the {@link QueryRuleset} system index. 
*/ public static SystemIndexDescriptor getSystemIndexDescriptor() { - return SystemIndexDescriptor.builder() - .setIndexPattern(QUERY_RULES_INDEX_NAME_PATTERN) - .setPrimaryIndex(QUERY_RULES_CONCRETE_INDEX_NAME) - .setDescription("Contains query ruleset configuration for query rules") - .setMappings(getIndexMappings()) - .setSettings(getIndexSettings()) - .setAliasName(QUERY_RULES_ALIAS_NAME) - .setVersionMetaKey("version") - .setOrigin(ENT_SEARCH_ORIGIN) - .setThreadPools(ExecutorNames.DEFAULT_SYSTEM_INDEX_THREAD_POOLS) + final Function systemIndexDescriptorBuilder = + mappingVersion -> SystemIndexDescriptor.builder() + .setIndexPattern(QUERY_RULES_INDEX_NAME_PATTERN) + .setPrimaryIndex(QUERY_RULES_CONCRETE_INDEX_NAME) + .setDescription("Contains query ruleset configuration for query rules") + .setMappings(getIndexMappings(mappingVersion)) + .setSettings(getIndexSettings()) + .setAliasName(QUERY_RULES_ALIAS_NAME) + .setIndexFormat(QueryRulesIndexMappingVersion.latest().id) + .setVersionMetaKey("version") + .setOrigin(ENT_SEARCH_ORIGIN) + .setThreadPools(ExecutorNames.DEFAULT_SYSTEM_INDEX_THREAD_POOLS); + + return systemIndexDescriptorBuilder.apply(QueryRulesIndexMappingVersion.latest()) + .setPriorSystemIndexDescriptors(List.of(systemIndexDescriptorBuilder.apply(QueryRulesIndexMappingVersion.INITIAL).build())) .build(); } @@ -96,18 +105,19 @@ public class QueryRulesIndexService { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.SETTING_PRIORITY, 100) + .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), QueryRulesIndexMappingVersion.latest().id) .put("index.refresh_interval", "1s") .build(); } - private static XContentBuilder getIndexMappings() { + private static XContentBuilder getIndexMappings(QueryRulesIndexMappingVersion version) { try { final XContentBuilder builder = jsonBuilder(); builder.startObject(); { builder.startObject("_meta"); builder.field("version", 
Version.CURRENT.toString()); - builder.field(SystemIndexDescriptor.VERSION_META_KEY, QUERY_RULES_INDEX_MAPPINGS_VERSION); + builder.field(SystemIndexDescriptor.VERSION_META_KEY, version.id); builder.endObject(); builder.field("dynamic", "strict"); @@ -151,6 +161,12 @@ public class QueryRulesIndexService { builder.field("type", "object"); builder.field("enabled", false); builder.endObject(); + + if (version.onOrAfter(QueryRulesIndexMappingVersion.ADD_PRIORITY)) { + builder.startObject(QueryRule.PRIORITY_FIELD.getPreferredName()); + builder.field("type", "integer"); + builder.endObject(); + } } builder.endObject(); builder.endObject(); @@ -191,7 +207,8 @@ public class QueryRulesIndexService { (String) rule.get(QueryRule.ID_FIELD.getPreferredName()), QueryRuleType.queryRuleType((String) rule.get(QueryRule.TYPE_FIELD.getPreferredName())), parseCriteria((List>) rule.get(QueryRule.CRITERIA_FIELD.getPreferredName())), - (Map) rule.get(QueryRule.ACTIONS_FIELD.getPreferredName()) + (Map) rule.get(QueryRule.ACTIONS_FIELD.getPreferredName()), + (Integer) rule.get(QueryRule.PRIORITY_FIELD.getPreferredName()) ) ) .collect(Collectors.toList()); @@ -243,7 +260,45 @@ public class QueryRulesIndexService { } catch (Exception e) { listener.onFailure(e); } + } + /** + * Creates or updates a {@link QueryRule} within a {@link QueryRuleset} with the given {@code queryRulesetId}. + * If the {@code queryRulesetId} is not associated with an existing {@link QueryRuleset}, a new {@link QueryRuleset} is created. 
+ * @param queryRulesetId + * @param queryRule + * @param listener + */ + public void putQueryRule(String queryRulesetId, QueryRule queryRule, ActionListener listener) { + getQueryRuleset(queryRulesetId, new ActionListener<>() { + @Override + public void onResponse(QueryRuleset queryRuleset) { + final List rules = new ArrayList<>(queryRuleset.rules()).stream() + .filter(rule -> rule.id().equals(queryRule.id()) == false) + .collect(Collectors.toList()); + rules.add(queryRule); + final boolean created = queryRuleset.rules().stream().noneMatch(rule -> rule.id().equals(queryRule.id())); + + putQueryRuleset(new QueryRuleset(queryRulesetId, rules), listener.delegateFailureAndWrap((delegate, docWriteResponse) -> { + DocWriteResponse.Result result = created ? DocWriteResponse.Result.CREATED : docWriteResponse.getResult(); + delegate.onResponse(new PutQueryRuleAction.Response(result)); + })); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ResourceNotFoundException) { + putQueryRuleset( + new QueryRuleset(queryRulesetId, List.of(queryRule)), + listener.delegateFailureAndWrap((delegate, docWriteResponse) -> { + delegate.onResponse(new PutQueryRuleAction.Response(DocWriteResponse.Result.CREATED)); + }) + ); + return; + } + listener.onFailure(e); + } + }); } private void validateQueryRuleset(QueryRuleset queryRuleset) { @@ -355,4 +410,28 @@ public class QueryRulesIndexService { } public record QueryRulesetResult(List rulesets, long totalResults) {} + + public enum QueryRulesIndexMappingVersion implements VersionId { + INITIAL(1), + ADD_PRIORITY(2),; + + private static final QueryRulesIndexMappingVersion LATEST = Arrays.stream(values()) + .max(Comparator.comparingInt(v -> v.id)) + .orElseThrow(); + + private final int id; + + QueryRulesIndexMappingVersion(int id) { + this.id = id; + } + + @Override + public int id() { + return id; + } + + public static QueryRulesIndexMappingVersion latest() { + return LATEST; + } + } } diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleset.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleset.java index f58d01e7afe7..6ce93113cee0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleset.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleset.java @@ -23,8 +23,10 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Comparator; import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -51,7 +53,9 @@ public class QueryRuleset implements Writeable, ToXContentObject { if (rules.isEmpty()) { throw new IllegalArgumentException("rules cannot be empty"); } - this.rules = rules; + this.rules = rules.stream() + .sorted(Comparator.comparing(QueryRule::priority, Comparator.nullsLast(Comparator.naturalOrder()))) + .collect(Collectors.toList()); } public QueryRuleset(StreamInput in) throws IOException { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleAction.java new file mode 100644 index 000000000000..75ab19ce8dff --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleAction.java @@ -0,0 +1,198 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.rules.QueryRule; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class PutQueryRuleAction { + + public static final String NAME = "cluster:admin/xpack/query_rule/put"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private PutQueryRuleAction() {/* no instances */} + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String queryRulesetId; + private final QueryRule queryRule; + private static final ParseField QUERY_RULESET_ID_FIELD = new ParseField("queryRulesetId"); + private static final ParseField QUERY_RULE_FIELD = new ParseField("queryRule"); + + public Request(StreamInput in) throws IOException { + super(in); + this.queryRulesetId = in.readString(); + this.queryRule = 
new QueryRule(in); + } + + public Request(String queryRulesetId, QueryRule queryRule) { + this.queryRulesetId = queryRulesetId; + this.queryRule = queryRule; + } + + public Request(String rulesetId, String ruleId, BytesReference content, XContentType contentType) { + this.queryRulesetId = rulesetId; + + QueryRule queryRule = QueryRule.fromXContentBytes(content, contentType); + if (queryRule.id() == null) { + this.queryRule = new QueryRule(ruleId, queryRule); + } else if (ruleId.equals(queryRule.id()) == false) { + throw new IllegalArgumentException("rule_id does not match the id in the query rule"); + } else { + this.queryRule = queryRule; + } + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(queryRulesetId)) { + validationException = addValidationError("ruleset_id cannot be null or empty", validationException); + } + + if (Strings.isNullOrEmpty(queryRule.id())) { + validationException = addValidationError("rule_id cannot be null or empty", validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(queryRulesetId); + queryRule.writeTo(out); + } + + public String queryRulesetId() { + return queryRulesetId; + } + + public QueryRule queryRule() { + return queryRule; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(queryRulesetId, request.queryRulesetId) && Objects.equals(queryRule, request.queryRule); + } + + @Override + public int hashCode() { + return Objects.hash(queryRulesetId, queryRule); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(QUERY_RULESET_ID_FIELD.getPreferredName(), 
queryRulesetId); + builder.field(QUERY_RULE_FIELD.getPreferredName(), queryRule); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "put_query_rule_request", + p -> new Request((String) p[0], (QueryRule) p[1]) + ); + + static { + PARSER.declareString(constructorArg(), QUERY_RULESET_ID_FIELD); + PARSER.declareObject(constructorArg(), (p, c) -> QueryRule.fromXContent(p), QUERY_RULE_FIELD); + } + + public static PutQueryRuleAction.Request parse(XContentParser parser, String resourceName) { + return PARSER.apply(parser, resourceName); + } + + public static PutQueryRuleAction.Request fromXContent(String queryRulesetId, XContentParser parser) throws IOException { + return new PutQueryRuleAction.Request(queryRulesetId, QueryRule.fromXContent(parser)); + } + + @Override + public String toString() { + return Strings.toString(this); + } + + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case CREATED -> RestStatus.CREATED; + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, 
that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java index 1a42d4c631a9..842d5d5e0cee 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java @@ -69,6 +69,15 @@ public class PutQueryRulesetAction { List rules = queryRuleset.rules(); if (rules == null || rules.isEmpty()) { validationException = addValidationError("rules cannot be null or empty", validationException); + } else { + for (QueryRule rule : rules) { + if (rule.id() == null) { + validationException = addValidationError( + "rule_id cannot be null or empty. rule: [" + rule + "]", + validationException + ); + } + } } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java new file mode 100644 index 000000000000..7abf156c9a14 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler; +import org.elasticsearch.xpack.application.utils.LicenseUtils; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +@ServerlessScope(Scope.PUBLIC) +public class RestPutQueryRuleAction extends EnterpriseSearchBaseRestHandler { + public RestPutQueryRuleAction(XPackLicenseState licenseState) { + super(licenseState, LicenseUtils.Product.QUERY_RULES); + } + + @Override + public String getName() { + return "query_rule_put_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.QUERY_RULES_API_ENDPOINT + "/{ruleset_id}/{rule_id}")); + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + PutQueryRuleAction.Request request = new PutQueryRuleAction.Request( + restRequest.param("ruleset_id"), + restRequest.param("rule_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + PutQueryRuleAction.INSTANCE, + request, + new RestToXContentListener<>(channel, PutQueryRuleAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportPutQueryRuleAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportPutQueryRuleAction.java new file mode 100644 index 000000000000..69a568ff3b1a --- 
/dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TransportPutQueryRuleAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.rules.QueryRule; +import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; + +public class TransportPutQueryRuleAction extends HandledTransportAction { + protected final QueryRulesIndexService systemIndexService; + + @Inject + public TransportPutQueryRuleAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + PutQueryRuleAction.NAME, + transportService, + actionFilters, + PutQueryRuleAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.systemIndexService = new QueryRulesIndexService(client, clusterService.getClusterSettings()); + } + + @Override + protected void doExecute(Task task, PutQueryRuleAction.Request request, ActionListener listener) { + String queryRulesetId = request.queryRulesetId(); + QueryRule queryRule = request.queryRule(); + systemIndexService.putQueryRule(queryRulesetId, queryRule, ActionListener.wrap(listener::onResponse, listener::onFailure)); + + 
} +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java similarity index 85% rename from x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTestUtils.java rename to x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java index 711051cbaffd..06adb29e3269 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchModuleTestUtils.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.application.search; +package org.elasticsearch.xpack.application; import org.elasticsearch.core.Tuple; import org.elasticsearch.script.Script; @@ -15,6 +15,9 @@ import org.elasticsearch.xpack.application.rules.QueryRule; import org.elasticsearch.xpack.application.rules.QueryRuleCriteria; import org.elasticsearch.xpack.application.rules.QueryRuleCriteriaType; import org.elasticsearch.xpack.application.rules.QueryRuleset; +import org.elasticsearch.xpack.application.search.SearchApplication; +import org.elasticsearch.xpack.application.search.SearchApplicationTemplate; +import org.elasticsearch.xpack.application.search.TemplateParamValidator; import org.elasticsearch.xpack.core.action.util.PageParams; import java.util.ArrayList; @@ -33,12 +36,13 @@ import static org.elasticsearch.test.ESTestCase.randomIntBetween; import static org.elasticsearch.test.ESTestCase.randomList; import static org.elasticsearch.test.ESTestCase.randomLongBetween; import static org.elasticsearch.test.ESTestCase.randomMap; +import static org.elasticsearch.xpack.application.rules.QueryRule.MAX_PRIORITY; +import static 
org.elasticsearch.xpack.application.rules.QueryRule.MIN_PRIORITY; import static org.elasticsearch.xpack.application.rules.QueryRuleCriteriaType.ALWAYS; -// TODO - move this one package up and rename to EnterpriseSearchModuleTestUtils -public final class SearchApplicationTestUtils { +public final class EnterpriseSearchModuleTestUtils { - private SearchApplicationTestUtils() { + private EnterpriseSearchModuleTestUtils() { throw new UnsupportedOperationException("Don't instantiate this class!"); } @@ -93,7 +97,12 @@ public final class SearchApplicationTestUtils { QueryRule.QueryRuleType type = randomFrom(QueryRule.QueryRuleType.values()); List criteria = List.of(randomQueryRuleCriteria()); Map actions = Map.of(randomFrom("ids", "docs"), List.of(randomAlphaOfLengthBetween(2, 10))); - return new QueryRule(id, type, criteria, actions); + Integer priority = randomQueryRulePriority(); + return new QueryRule(id, type, criteria, actions, priority); + } + + public static Integer randomQueryRulePriority() { + return randomBoolean() ? 
randomIntBetween(MIN_PRIORITY, MAX_PRIORITY) : null; } public static QueryRuleset randomQueryRuleset() { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java index 366001b6dd21..c71fbaf6716e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.application.connector.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -25,7 +25,7 @@ public class ListConnectorActionRequestBWCSerializingTests extends AbstractBWCSe @Override protected ListConnectorAction.Request createTestInstance() { - PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + PageParams pageParams = EnterpriseSearchModuleTestUtils.randomPageParams(); return new ListConnectorAction.Request( pageParams, List.of(generateRandomStringArray(10, 10, false)), diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index eb280334510c..dcc6c9ba242d 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; @@ -20,7 +21,6 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.ListConnecto import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import java.io.IOException; import java.time.Instant; @@ -188,7 +188,7 @@ public class ConnectorSyncJobTestUtils { public static ListConnectorSyncJobsAction.Request getRandomListConnectorSyncJobsActionRequest() { return new ListConnectorSyncJobsAction.Request( - SearchApplicationTestUtils.randomPageParams(), + EnterpriseSearchModuleTestUtils.randomPageParams(), randomAlphaOfLength(10), ConnectorTestUtils.getRandomSyncStatus(), Collections.singletonList(ConnectorTestUtils.getRandomSyncJobType()) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java index 790f588e8937..967994ebe57e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java @@ -10,10 +10,10 @@ package org.elasticsearch.xpack.application.connector.syncjob.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobType; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -29,7 +29,7 @@ public class ListConnectorSyncJobsActionRequestBWCSerializingTests extends Abstr @Override protected ListConnectorSyncJobsAction.Request createTestInstance() { - PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + PageParams pageParams = EnterpriseSearchModuleTestUtils.randomPageParams(); String connectorId = randomAlphaOfLength(10); ConnectorSyncStatus syncStatus = ConnectorTestUtils.getRandomSyncStatus(); ConnectorSyncJobType syncJobType = ConnectorTestUtils.getRandomSyncJobType(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteriaTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteriaTests.java index 7b5fa7d053df..881b77442dac 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteriaTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteriaTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.junit.Before; import java.io.IOException; @@ -51,7 +51,7 @@ public class QueryRuleCriteriaTests extends ESTestCase { public final void testRandomSerialization() throws IOException { for (int runs = 0; runs < 10; runs++) { - QueryRuleCriteria testInstance = SearchApplicationTestUtils.randomQueryRuleCriteria(); + QueryRuleCriteria testInstance = EnterpriseSearchModuleTestUtils.randomQueryRuleCriteria(); assertTransportSerialization(testInstance); assertXContent(testInstance, randomBoolean()); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleTests.java index 5576ec71667f..a48d6f45589e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRuleTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import 
org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.junit.Before; import java.io.IOException; @@ -28,6 +28,8 @@ import java.util.Map; import static java.util.Collections.emptyList; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xpack.application.rules.QueryRule.MAX_PRIORITY; +import static org.elasticsearch.xpack.application.rules.QueryRule.MIN_PRIORITY; import static org.elasticsearch.xpack.application.rules.QueryRuleCriteriaType.EXACT; import static org.elasticsearch.xpack.application.rules.QueryRuleCriteriaType.PREFIX; import static org.elasticsearch.xpack.application.rules.QueryRuleCriteriaType.SUFFIX; @@ -46,7 +48,7 @@ public class QueryRuleTests extends ESTestCase { public final void testRandomSerialization() throws IOException { for (int runs = 0; runs < 10; runs++) { - QueryRule testInstance = SearchApplicationTestUtils.randomQueryRule(); + QueryRule testInstance = EnterpriseSearchModuleTestUtils.randomQueryRule(); assertTransportSerialization(testInstance); assertXContent(testInstance, randomBoolean()); } @@ -62,7 +64,8 @@ public class QueryRuleTests extends ESTestCase { ], "actions": { "ids": ["id1", "id2"] - } + }, + "priority": 5 }"""); QueryRule queryRule = QueryRule.fromXContentBytes(new BytesArray(content), XContentType.JSON); @@ -75,20 +78,6 @@ public class QueryRuleTests extends ESTestCase { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } - public void testToXContentMissingQueryRuleId() throws IOException { - String content = XContentHelper.stripWhitespace(""" - { - "type": "pinned", - "criteria": [ - { "type": "exact", "metadata": "query_string", "values": ["foo", "bar"] } - ], - "actions": { - "ids": ["id1", "id2"] - 
} - }"""); - expectThrows(IllegalArgumentException.class, () -> QueryRule.fromXContentBytes(new BytesArray(content), XContentType.JSON)); - } - public void testToXContentEmptyCriteria() throws IOException { String content = XContentHelper.stripWhitespace(""" { @@ -170,7 +159,8 @@ public class QueryRuleTests extends ESTestCase { randomAlphaOfLength(10), QueryRule.QueryRuleType.PINNED, List.of(new QueryRuleCriteria(EXACT, "query", List.of("elastic"))), - Map.of("ids", List.of("id1", "id2")) + Map.of("ids", List.of("id1", "id2")), + randomBoolean() ? randomIntBetween(MIN_PRIORITY, MAX_PRIORITY) : null ); AppliedQueryRules appliedQueryRules = new AppliedQueryRules(); rule.applyRule(appliedQueryRules, Map.of("query", "elastic")); @@ -186,7 +176,8 @@ public class QueryRuleTests extends ESTestCase { randomAlphaOfLength(10), QueryRule.QueryRuleType.PINNED, List.of(new QueryRuleCriteria(PREFIX, "query", List.of("elastic")), new QueryRuleCriteria(SUFFIX, "query", List.of("search"))), - Map.of("ids", List.of("id1", "id2")) + Map.of("ids", List.of("id1", "id2")), + randomBoolean() ? 
randomIntBetween(MIN_PRIORITY, MAX_PRIORITY) : null ); AppliedQueryRules appliedQueryRules = new AppliedQueryRules(); rule.applyRule(appliedQueryRules, Map.of("query", "elastic - you know, for search")); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java index 9ce62ee8d4c1..5eeca8465d39 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.junit.Before; import java.util.ArrayList; @@ -74,7 +75,8 @@ public class QueryRulesIndexServiceTests extends ESSingleNodeTestCase { "my_rule1", QueryRuleType.PINNED, List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))), - Map.of("ids", List.of("id1", "id2")) + Map.of("ids", List.of("id1", "id2")), + EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", Collections.singletonList(myQueryRule1)); DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset); @@ -89,13 +91,15 @@ public class QueryRulesIndexServiceTests extends ESSingleNodeTestCase { "my_rule1", QueryRuleType.PINNED, List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))), - Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id1"), Map.of("_index", "my_index2", "_id", "id2"))) + Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id1"), Map.of("_index", "my_index2", "_id", "id2"))), 
+ EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ); final QueryRule myQueryRule2 = new QueryRule( "my_rule2", QueryRuleType.PINNED, List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("bar"))), - Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id3"), Map.of("_index", "my_index2", "_id", "id4"))) + Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id3"), Map.of("_index", "my_index2", "_id", "id4"))), + EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2)); DocWriteResponse newResp = awaitPutQueryRuleset(myQueryRuleset); @@ -116,7 +120,8 @@ public class QueryRulesIndexServiceTests extends ESSingleNodeTestCase { new QueryRuleCriteria(EXACT, "query_string", List.of("foo" + i)), new QueryRuleCriteria(GTE, "query_string", List.of(i)) ), - Map.of("ids", List.of("id1", "id2")) + Map.of("ids", List.of("id1", "id2")), + EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ), new QueryRule( "my_rule_" + i + "_" + (i + 1), @@ -125,7 +130,8 @@ public class QueryRulesIndexServiceTests extends ESSingleNodeTestCase { new QueryRuleCriteria(FUZZY, "query_string", List.of("bar" + i)), new QueryRuleCriteria(GTE, "user.age", List.of(i)) ), - Map.of("ids", List.of("id3", "id4")) + Map.of("ids", List.of("id3", "id4")), + EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ) ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset_" + i, rules); @@ -175,13 +181,15 @@ public class QueryRulesIndexServiceTests extends ESSingleNodeTestCase { "my_rule1", QueryRuleType.PINNED, List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))), - Map.of("ids", List.of("id1", "id2")) + Map.of("ids", List.of("id1", "id2")), + EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ); final QueryRule myQueryRule2 = new QueryRule( "my_rule2", QueryRuleType.PINNED, List.of(new QueryRuleCriteria(EXACT, "query_string", 
List.of("bar"))), - Map.of("ids", List.of("id3", "id4")) + Map.of("ids", List.of("id3", "id4")), + EnterpriseSearchModuleTestUtils.randomQueryRulePriority() ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2)); DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesetTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesetTests.java index 4799396ef522..185e2429cf3c 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesetTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesetTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.junit.Before; import java.io.IOException; @@ -41,7 +41,7 @@ public class QueryRulesetTests extends ESTestCase { public final void testRandomSerialization() throws IOException { for (int runs = 0; runs < 10; runs++) { - QueryRuleset testInstance = SearchApplicationTestUtils.randomQueryRuleset(); + QueryRuleset testInstance = EnterpriseSearchModuleTestUtils.randomQueryRuleset(); assertTransportSerialization(testInstance); assertXContent(testInstance, randomBoolean()); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilderTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilderTests.java index 6979f00476cb..bedd01540631 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilderTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilderTests.java @@ -161,7 +161,8 @@ public class RuleQueryBuilderTests extends AbstractQueryTestCase { public QueryRuleset queryRuleset; @@ -57,7 +58,13 @@ public class GetQueryRulesetActionResponseBWCSerializingTests extends AbstractBW new QueryRuleCriteria(criteria.criteriaType(), criteria.criteriaMetadata(), criteria.criteriaValues().subList(0, 1)) ); } - rules.add(new QueryRule(rule.id(), rule.type(), newCriteria, rule.actions())); + rules.add(new QueryRule(rule.id(), rule.type(), newCriteria, rule.actions(), null)); + } + return new GetQueryRulesetAction.Response(new QueryRuleset(instance.queryRuleset().id(), rules)); + } else if (version.before(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + List rules = new ArrayList<>(); + for (QueryRule rule : instance.queryRuleset().rules()) { + rules.add(new QueryRule(rule.id(), rule.type(), rule.criteria(), rule.actions(), null)); } return new GetQueryRulesetAction.Response(new QueryRuleset(instance.queryRuleset().id(), rules)); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionRequestBWCSerializingTests.java index 92219f5f317d..dfac7c57e01d 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionRequestBWCSerializingTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.application.rules.action; import org.elasticsearch.TransportVersion; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -26,7 +26,7 @@ public class ListQueryRulesetsActionRequestBWCSerializingTests extends AbstractB @Override protected ListQueryRulesetsAction.Request createTestInstance() { - PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + PageParams pageParams = EnterpriseSearchModuleTestUtils.randomPageParams(); return new ListQueryRulesetsAction.Request(pageParams); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java index 1613e31f9420..5ae0f51cb611 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java @@ -9,10 +9,10 @@ package org.elasticsearch.xpack.application.rules.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.rules.QueryRuleCriteriaType; import org.elasticsearch.xpack.application.rules.QueryRuleset; import org.elasticsearch.xpack.application.rules.QueryRulesetListItem; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import 
org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import java.util.ArrayList; @@ -29,7 +29,7 @@ public class ListQueryRulesetsActionResponseBWCSerializingTests extends Abstract private static ListQueryRulesetsAction.Response randomQueryRulesetListItem() { return new ListQueryRulesetsAction.Response(randomList(10, () -> { - QueryRuleset queryRuleset = SearchApplicationTestUtils.randomQueryRuleset(); + QueryRuleset queryRuleset = EnterpriseSearchModuleTestUtils.randomQueryRuleset(); Map criteriaTypeToCountMap = Map.of( randomFrom(QueryRuleCriteriaType.values()), randomIntBetween(0, 10) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java new file mode 100644 index 000000000000..a66d0c0aa589 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionRequestBWCSerializingTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; +import org.elasticsearch.xpack.application.rules.QueryRule; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase.getAllBWCVersions; + +public class PutQueryRuleActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + + private String queryRuleId; + + @Override + protected Writeable.Reader instanceReader() { + return PutQueryRuleAction.Request::new; + } + + @Override + protected PutQueryRuleAction.Request createTestInstance() { + String queryRulesetId = randomAlphaOfLengthBetween(5, 10); + QueryRule queryRule = EnterpriseSearchModuleTestUtils.randomQueryRule(); + this.queryRuleId = queryRule.id(); + return new PutQueryRuleAction.Request(queryRulesetId, queryRule); + } + + @Override + protected PutQueryRuleAction.Request mutateInstance(PutQueryRuleAction.Request instance) { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PutQueryRuleAction.Request doParseInstance(XContentParser parser) throws IOException { + return PutQueryRuleAction.Request.parse(parser, this.queryRuleId); + } + + @Override + protected PutQueryRuleAction.Request mutateInstanceForVersion(PutQueryRuleAction.Request instance, TransportVersion version) { + return new PutQueryRuleAction.Request(instance.queryRulesetId(), instance.queryRule()); + } + + @Override + protected List bwcVersions() { + return getAllBWCVersions().stream() + .filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_CRUD_API_PUT)) + 
.collect(Collectors.toList()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionResponseSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionResponseSerializingTests.java new file mode 100644 index 000000000000..47be14761684 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRuleActionResponseSerializingTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PutQueryRuleActionResponseSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PutQueryRuleAction.Response::new; + } + + @Override + protected PutQueryRuleAction.Response createTestInstance() { + return new PutQueryRuleAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected PutQueryRuleAction.Response mutateInstance(PutQueryRuleAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PutQueryRuleAction.Response mutateInstanceForVersion(PutQueryRuleAction.Response instance, TransportVersion version) { + return instance; + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java index c6c463b677af..83702b0b0672 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetActionRequestBWCSerializingTests.java @@ -8,12 +8,13 @@ package org.elasticsearch.xpack.application.rules.action; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.rules.QueryRule; import org.elasticsearch.xpack.application.rules.QueryRuleCriteria; import org.elasticsearch.xpack.application.rules.QueryRuleset; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import java.io.IOException; @@ -33,7 +34,7 @@ public class PutQueryRulesetActionRequestBWCSerializingTests extends AbstractBWC @Override protected PutQueryRulesetAction.Request createTestInstance() { - this.queryRulesSet = SearchApplicationTestUtils.randomQueryRuleset(); + this.queryRulesSet = EnterpriseSearchModuleTestUtils.randomQueryRuleset(); return new PutQueryRulesetAction.Request(this.queryRulesSet); } @@ -59,7 +60,13 @@ public class PutQueryRulesetActionRequestBWCSerializingTests extends AbstractBWC new QueryRuleCriteria(criteria.criteriaType(), criteria.criteriaMetadata(), criteria.criteriaValues().subList(0, 1)) ); } - rules.add(new QueryRule(rule.id(), rule.type(), 
newCriteria, rule.actions())); + rules.add(new QueryRule(rule.id(), rule.type(), newCriteria, rule.actions(), null)); + } + return new PutQueryRulesetAction.Request(new QueryRuleset(instance.queryRuleset().id(), rules)); + } else if (version.before(TransportVersions.QUERY_RULE_CRUD_API_PUT)) { + List rules = new ArrayList<>(); + for (QueryRule rule : instance.queryRuleset().rules()) { + rules.add(new QueryRule(rule.id(), rule.type(), rule.criteria(), rule.actions(), null)); } return new PutQueryRulesetAction.Request(new QueryRuleset(instance.queryRuleset().id(), rules)); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleActionTests.java new file mode 100644 index 000000000000..0aff0b804e53 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleActionTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.rules.action; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests; +import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler; +import org.elasticsearch.xpack.application.utils.LicenseUtils; + +import java.util.Map; + +public class RestPutQueryRuleActionTests extends AbstractRestEnterpriseSearchActionTests { + public void testWithNonCompliantLicense() throws Exception { + checkLicenseForRequest( + new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT) + .withParams(Map.of("ruleset_id", "ruleset-id", "rule_id", "rule-id")) + .withContent(new BytesArray(""" + { + "rule_id": "rule-id", + "type": "pinned", + "criteria": [ + { + "type": "exact", + "metadata": "query_string", + "values": ["elastic"] + } + ], + "actions": + { + "ids": [ + "id1", + "id2" + ] + } + } + """), XContentType.JSON) + .build(), + LicenseUtils.Product.QUERY_RULES + ); + } + + public void testInvalidRequestWithNonCompliantLicense() throws Exception { + checkLicenseForRequest( + new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT) + .withParams(Map.of("invalid_param_name", "invalid_value")) + .withContent(new BytesArray("{}"), XContentType.JSON) + .build(), + LicenseUtils.Product.QUERY_RULES + ); + } + + @Override + protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) { + return new RestPutQueryRuleAction(licenseState); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java index 7891f5773d1a..6e9d33b45041 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.junit.Before; import java.util.ArrayList; @@ -131,7 +132,7 @@ public class SearchApplicationIndexServiceTests extends ESSingleNodeTestCase { new String[] { "index_1", "index_2" }, null, System.currentTimeMillis(), - SearchApplicationTestUtils.getRandomSearchApplicationTemplate() + EnterpriseSearchModuleTestUtils.getRandomSearchApplicationTemplate() ); DocWriteResponse resp = awaitPutSearchApplication(searchApp, false); assertThat(resp.status(), equalTo(RestStatus.CREATED)); @@ -146,7 +147,7 @@ public class SearchApplicationIndexServiceTests extends ESSingleNodeTestCase { new String[] { "index_3", "index_4" }, "my_search_app_analytics_collection", System.currentTimeMillis(), - SearchApplicationTestUtils.getRandomSearchApplicationTemplate() + EnterpriseSearchModuleTestUtils.getRandomSearchApplicationTemplate() ); DocWriteResponse newResp = awaitPutSearchApplication(searchApp, false); assertThat(newResp.status(), equalTo(RestStatus.OK)); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java index 60b88476285d..67a5bd680044 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.junit.Before; import java.io.IOException; @@ -46,7 +47,7 @@ public class SearchApplicationTests extends ESTestCase { public final void testRandomSerialization() throws IOException { for (int runs = 0; runs < 10; runs++) { - SearchApplication testInstance = SearchApplicationTestUtils.randomSearchApplication(); + SearchApplication testInstance = EnterpriseSearchModuleTestUtils.randomSearchApplication(); assertTransportSerialization(testInstance); assertXContent(testInstance, randomBoolean()); assertIndexSerialization(testInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationActionResponseBWCSerializingTests.java index bb3e36c95f0a..11c28f062d27 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationActionResponseBWCSerializingTests.java @@ -10,8 +10,8 @@ package org.elasticsearch.xpack.application.search.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; +import 
org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.search.SearchApplication; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import java.io.IOException; @@ -28,7 +28,7 @@ public class GetSearchApplicationActionResponseBWCSerializingTests extends Abstr @Override protected GetSearchApplicationAction.Response createTestInstance() { - SearchApplication searchApp = SearchApplicationTestUtils.randomSearchApplication(); + SearchApplication searchApp = EnterpriseSearchModuleTestUtils.randomSearchApplication(); this.searchApplicationName = searchApp.name(); return new GetSearchApplicationAction.Response(searchApp); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionRequestBWCSerializingTests.java index 62678e073a63..ba7b07441d8b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionRequestBWCSerializingTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.application.search.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -27,7 +27,7 @@ public class 
ListSearchApplicationActionRequestBWCSerializingTests extends Abstr @Override protected ListSearchApplicationAction.Request createTestInstance() { - PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + PageParams pageParams = EnterpriseSearchModuleTestUtils.randomPageParams(); String query = randomFrom(new String[] { null, randomAlphaOfLengthBetween(1, 10) }); return new ListSearchApplicationAction.Request(query, pageParams); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionResponseBWCSerializingTests.java index 38b1b94064b9..2489e14913e7 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationActionResponseBWCSerializingTests.java @@ -9,9 +9,9 @@ package org.elasticsearch.xpack.application.search.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.search.SearchApplication; import org.elasticsearch.xpack.application.search.SearchApplicationListItem; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; public class ListSearchApplicationActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< @@ -24,7 +24,7 @@ public class ListSearchApplicationActionResponseBWCSerializingTests extends Abst private static ListSearchApplicationAction.Response randomSearchApplicationListItem() { return new 
ListSearchApplicationAction.Response(randomList(10, () -> { - SearchApplication app = SearchApplicationTestUtils.randomSearchApplication(); + SearchApplication app = EnterpriseSearchModuleTestUtils.randomSearchApplication(); return new SearchApplicationListItem(app.name(), app.analyticsCollectionName(), app.updatedAtMillis()); }), randomLongBetween(0, 1000)); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationActionRequestBWCSerializingTests.java index 0d79950d2081..88b752c80c26 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationActionRequestBWCSerializingTests.java @@ -10,8 +10,8 @@ package org.elasticsearch.xpack.application.search.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.application.search.SearchApplication; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import java.io.IOException; @@ -28,7 +28,7 @@ public class PutSearchApplicationActionRequestBWCSerializingTests extends Abstra @Override protected PutSearchApplicationAction.Request createTestInstance() { - SearchApplication searchApp = SearchApplicationTestUtils.randomSearchApplication(); + SearchApplication searchApp = EnterpriseSearchModuleTestUtils.randomSearchApplication(); this.searchApplicationName = searchApp.name(); return new 
PutSearchApplicationAction.Request(searchApp, randomBoolean()); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/SearchApplicationSearchRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/SearchApplicationSearchRequestBWCSerializingTests.java index a107d02cc2ab..7c3b504655bf 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/SearchApplicationSearchRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/SearchApplicationSearchRequestBWCSerializingTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.application.search.action; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.application.EnterpriseSearchModuleTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import java.io.IOException; @@ -26,7 +26,7 @@ public class SearchApplicationSearchRequestBWCSerializingTests extends AbstractB protected SearchApplicationSearchRequest createTestInstance() { return new SearchApplicationSearchRequest( randomAlphaOfLengthBetween(1, 10), - SearchApplicationTestUtils.randomSearchApplicationQueryParams() + EnterpriseSearchModuleTestUtils.randomSearchApplicationQueryParams() ); } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 33503bc55879..8885c62dad55 100644 --- 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -232,6 +232,7 @@ public class Constants { "cluster:admin/xpack/ml/upgrade_mode", "cluster:admin/xpack/monitoring/bulk", "cluster:admin/xpack/monitoring/migrate/alerts", + "cluster:admin/xpack/query_rule/put", "cluster:admin/xpack/query_rules/delete", "cluster:admin/xpack/query_rules/get", "cluster:admin/xpack/query_rules/list", From e4c10d82bb5ec9be854f1068377722661e813968 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 10 Jun 2024 12:03:28 -0700 Subject: [PATCH 47/58] Fix no match scorer time series source (#109545) The returned scorer can be null when the weight matches no document. --- ...TimeSeriesSortedSourceOperatorFactory.java | 7 ++- .../TimeSeriesSortedSourceOperatorTests.java | 54 +++++++++++++++++++ 2 files changed, 59 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index 887761fbd5a8..8b52aa84aef2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; @@ -361,7 +362,8 @@ public class 
TimeSeriesSortedSourceOperatorFactory extends LuceneOperator.Factor this.createdThread = Thread.currentThread(); tsids = leaf.reader().getSortedDocValues("_tsid"); timestamps = leaf.reader().getSortedNumericDocValues("@timestamp"); - iterator = weight.scorer(leaf).iterator(); + final Scorer scorer = weight.scorer(leaf); + iterator = scorer != null ? scorer.iterator() : DocIdSetIterator.empty(); } boolean nextDoc() throws IOException { @@ -384,7 +386,8 @@ public class TimeSeriesSortedSourceOperatorFactory extends LuceneOperator.Factor if (executingThread != createdThread) { tsids = leaf.reader().getSortedDocValues("_tsid"); timestamps = leaf.reader().getSortedNumericDocValues("@timestamp"); - iterator = weight.scorer(leaf).iterator(); + final Scorer scorer = weight.scorer(leaf); + iterator = scorer != null ? scorer.iterator() : DocIdSetIterator.empty(); if (docID != -1) { iterator.advance(docID); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 29e78f7abffd..17d302f198bf 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.document.DoubleDocValuesField; import org.apache.lucene.document.FloatDocValuesField; +import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; @@ -18,6 +19,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; import 
org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -59,6 +61,7 @@ import java.util.Map; import java.util.function.Consumer; import java.util.function.Function; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -208,6 +211,57 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { assertThat(offset, equalTo(Math.min(limit, numDocs))); } + public void testMatchNone() throws Exception { + long t0 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + Sort sort = new Sort( + new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false), + new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true) + ); + try ( + var directory = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig().setIndexSort(sort).setMergePolicy(NoMergePolicy.INSTANCE) + ) + ) { + int numDocs = between(1, 100); + long timestamp = t0; + int metrics = randomIntBetween(1, 3); + for (int i = 0; i < numDocs; i++) { + timestamp += between(1, 1000); + for (int j = 0; j < metrics; j++) { + String hostname = String.format(Locale.ROOT, "sensor-%02d", j); + writeTS(writer, timestamp, new Object[] { "sensor", hostname }, new Object[] { "voltage", j + 5 }); + } + } + try (var reader = writer.getReader()) { + var ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0); + Query query = randomFrom(LongField.newRangeQuery("@timestamp", 0, t0), new MatchNoDocsQuery()); + var timeSeriesFactory = TimeSeriesSortedSourceOperatorFactory.create( + Integer.MAX_VALUE, + randomIntBetween(1, 1024), + 1, + TimeValue.ZERO, + List.of(ctx), + unused -> 
query + ); + var driverContext = driverContext(); + List results = new ArrayList<>(); + OperatorTestCase.runDriver( + new Driver( + driverContext, + timeSeriesFactory.get(driverContext), + List.of(), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + assertThat(results, empty()); + } + } + } + @Override protected Operator.OperatorFactory simple() { return createTimeSeriesSourceOperator(directory, r -> this.reader = r, 1, 1, false, TimeValue.ZERO, writer -> { From 0be3c741df3d8e0f09f2a3e652527eb9df4133fb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 10 Jun 2024 13:55:53 -0700 Subject: [PATCH 48/58] Guard file settings readiness on file settings support (#109500) Consistency of file settings is an important invariant. However, when upgrading from Elasticsearch versions before file settings existed, cluster state will not yet have the file settings metadata. If the first node upgraded is not the master node, new nodes will never become ready while they wait for file settings metadata to exist. This commit adds a node feature for file settings to guard waiting on file settings for readiness. Although file settings has existed since 8.4, the feature is not a historical feature because historical features are not applied to cluster state that readiness checks. In this case it is not needed since upgrading from 8.4+ will already contain file settings metadata. 
--- docs/changelog/109500.yaml | 5 ++ server/src/main/java/module-info.java | 3 +- .../readiness/ReadinessService.java | 19 ++++- .../service/FileSettingsFeatures.java | 26 +++++++ ...lasticsearch.features.FeatureSpecification | 1 + .../readiness/ReadinessServiceTests.java | 70 +++++++++++++------ 6 files changed, 99 insertions(+), 25 deletions(-) create mode 100644 docs/changelog/109500.yaml create mode 100644 server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java diff --git a/docs/changelog/109500.yaml b/docs/changelog/109500.yaml new file mode 100644 index 000000000000..cfd6bc770d5d --- /dev/null +++ b/docs/changelog/109500.yaml @@ -0,0 +1,5 @@ +pr: 109500 +summary: Guard file settings readiness on file settings support +area: Infra/Settings +type: bug +issues: [] diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 2f08129b4080..0d6bc6b29b2c 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -430,7 +430,8 @@ module org.elasticsearch.server { org.elasticsearch.indices.IndicesFeatures, org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, org.elasticsearch.index.mapper.MapperFeatures, - org.elasticsearch.search.retriever.RetrieversFeatures; + org.elasticsearch.search.retriever.RetrieversFeatures, + org.elasticsearch.reservedstate.service.FileSettingsFeatures; uses org.elasticsearch.plugins.internal.SettingsExtension; uses RestExtension; diff --git a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java index 4aab146230f7..a50929062d51 100644 --- a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java +++ b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java @@ -21,7 +21,9 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; +import org.elasticsearch.reservedstate.service.FileSettingsFeatures; import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.transport.BindTransportException; @@ -277,7 +279,22 @@ public class ReadinessService extends AbstractLifecycleComponent implements Clus // protected to allow mock service to override protected boolean areFileSettingsApplied(ClusterState clusterState) { ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - return fileSettingsMetadata != null && fileSettingsMetadata.version().equals(ReservedStateMetadata.NO_VERSION) == false; + if (fileSettingsMetadata == null) { + // In order to block readiness on file settings being applied, we need to know that the master node has written an initial + // version, or a marker that file settings don't exist. When upgrading from a version that did not have file settings, the + // current master node may not be the first node upgraded. To be safe, we wait to consider file settings application for + // readiness until the whole cluster supports file settings. Note that this only applies when no reserved state metadata + // exists, so either we are starting up a current cluster (and the feature will be found) or we are upgrading from + // a version before file settings existed (before 8.4). 
+ return supportsFileSettings(clusterState) == false; + } else { + return fileSettingsMetadata.version().equals(ReservedStateMetadata.NO_VERSION) == false; + } + } + + @SuppressForbidden(reason = "need to check file settings support on exact cluster state") + private static boolean supportsFileSettings(ClusterState clusterState) { + return clusterState.clusterFeatures().clusterHasFeature(FileSettingsFeatures.FILE_SETTINGS_SUPPORTED); } private void setReady(boolean ready) { diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java new file mode 100644 index 000000000000..d707680b3e06 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; + +import java.util.Set; + +public class FileSettingsFeatures implements FeatureSpecification { + + // Although file settings were supported starting in 8.4.0, this is really about whether file settings + // are used in readiness. 
+ public static final NodeFeature FILE_SETTINGS_SUPPORTED = new NodeFeature("file_settings"); + + @Override + public Set getFeatures() { + return Set.of(FILE_SETTINGS_SUPPORTED); + } +} diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index a158f91903c7..d8a29a84ddbb 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -15,3 +15,4 @@ org.elasticsearch.indices.IndicesFeatures org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures org.elasticsearch.index.mapper.MapperFeatures org.elasticsearch.search.retriever.RetrieversFeatures +org.elasticsearch.reservedstate.service.FileSettingsFeatures diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index 6bc58cc37a31..62443d6accb4 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; +import org.elasticsearch.reservedstate.service.FileSettingsFeatures; import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; @@ -46,6 +47,7 @@ import java.net.UnknownHostException; import java.nio.channels.ServerSocketChannel; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata.ErrorKind.TRANSIENT; @@ 
-57,6 +59,7 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private ThreadPool threadpool; private Environment env; private FakeHttpTransport httpTransport; + private static final Set nodeFeatures = Set.of(FileSettingsFeatures.FILE_SETTINGS_SUPPORTED.id()); private static Metadata emptyReservedStateMetadata; static { @@ -205,21 +208,8 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient // initially the service isn't ready assertFalse(readinessService.ready()); - ClusterState emptyState = ClusterState.builder(new ClusterName("cluster")) - .nodes( - DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("node2", new TransportAddress(TransportAddress.META_ADDRESS, 9201))) - ) - .build(); - - ClusterState noFileSettingsState = ClusterState.builder(emptyState) - .nodes( - DiscoveryNodes.builder(emptyState.nodes()) - .add(httpTransport.node) - .masterNodeId(httpTransport.node.getId()) - .localNodeId(httpTransport.node.getId()) - ) - .build(); - ClusterChangedEvent event = new ClusterChangedEvent("test", noFileSettingsState, emptyState); + ClusterState noFileSettingsState = noFileSettingsState(); + ClusterChangedEvent event = new ClusterChangedEvent("test", noFileSettingsState, emptyState()); readinessService.clusterChanged(event); // sending a cluster state with active master should not yet bring up the service, file settings still are not applied @@ -306,14 +296,7 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(21L) .errorMetadata(new ReservedStateErrorMetadata(22L, TRANSIENT, List.of("dummy error"))); - ClusterState state = ClusterState.builder(new ClusterName("cluster")) - .nodes( - DiscoveryNodes.builder() - .add(DiscoveryNodeUtils.create("node2", new TransportAddress(TransportAddress.META_ADDRESS, 9201))) - .add(httpTransport.node) - 
.masterNodeId(httpTransport.node.getId()) - .localNodeId(httpTransport.node.getId()) - ) + ClusterState state = ClusterState.builder(noFileSettingsState()) .metadata(new Metadata.Builder().put(fileSettingsState.build())) .build(); @@ -324,4 +307,45 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient readinessService.stop(); readinessService.close(); } + + public void testFileSettingsMixedCluster() throws Exception { + readinessService.start(); + + // initially the service isn't ready because initial cluster state has not been applied yet + assertFalse(readinessService.ready()); + + ClusterState noFileSettingsState = ClusterState.builder(noFileSettingsState()) + // the master node is upgraded to support file settings, but existing node2 is not + .nodeFeatures(Map.of(httpTransport.node.getId(), nodeFeatures)) + .build(); + ClusterChangedEvent event = new ClusterChangedEvent("test", noFileSettingsState, emptyState()); + readinessService.clusterChanged(event); + + // when upgrading from nodes before file settings exist, readiness should return true once a master is elected + assertTrue(readinessService.ready()); + + readinessService.stop(); + readinessService.close(); + } + + private ClusterState emptyState() { + return ClusterState.builder(new ClusterName("cluster")) + .nodes( + DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("node2", new TransportAddress(TransportAddress.META_ADDRESS, 9201))) + ) + .build(); + } + + private ClusterState noFileSettingsState() { + ClusterState emptyState = emptyState(); + return ClusterState.builder(emptyState) + .nodes( + DiscoveryNodes.builder(emptyState.nodes()) + .add(httpTransport.node) + .masterNodeId(httpTransport.node.getId()) + .localNodeId(httpTransport.node.getId()) + ) + .nodeFeatures(Map.of(httpTransport.node.getId(), nodeFeatures, "node2", nodeFeatures)) + .build(); + } } From f75afb00983349bb3019a62370b05dfa8e03da25 Mon Sep 17 00:00:00 2001 From: Ry Biesemeyer Date: Mon, 10 Jun 2024 
15:12:38 -0700 Subject: [PATCH 49/58] Update year in NOTICE.txt (#109548) --- NOTICE.txt | 2 +- .../InternalDistributionArchiveCheckPluginFuncTest.groovy | 4 ++-- .../internal/InternalDistributionArchiveCheckPlugin.java | 2 +- distribution/packages/build.gradle | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/NOTICE.txt b/NOTICE.txt index c44f918942dc..9027c024fd87 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ Elasticsearch -Copyright 2009-2021 Elasticsearch +Copyright 2009-2024 Elasticsearch This product includes software developed by The Apache Software Foundation (http://www.apache.org/). diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy index 587343133b08..860dc4e6f4d9 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy @@ -117,7 +117,7 @@ Copyright 2009-2018 Acme Coorp""" result.task(":darwin-tar:checkNotice").outcome == TaskOutcome.FAILED result.output.contains("> expected line [2] in " + "[./darwin-tar/build/tar-extracted/elasticsearch-${VersionProperties.getElasticsearch()}/NOTICE.txt] " + - "to be [Copyright 2009-2021 Elasticsearch] but was [Copyright 2009-2018 Acme Coorp]") + "to be [Copyright 2009-2024 Elasticsearch] but was [Copyright 2009-2018 Acme Coorp]") } def "fails on unexpected ml notice content"() { @@ -125,7 +125,7 @@ Copyright 2009-2018 Acme Coorp""" elasticLicense() elasticLicense(file("LICENSE.txt")) file("NOTICE.txt").text = """Elasticsearch -Copyright 2009-2021 Elasticsearch""" +Copyright 2009-2024 Elasticsearch""" file("ml/NOTICE.txt").text = "Boost Software 
License - Version 1.0 - August 17th, 2003" file('darwin-tar/build.gradle') << """ diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java index 6fafe513662c..94b1c70f2965 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java @@ -135,7 +135,7 @@ public class InternalDistributionArchiveCheckPlugin implements Plugin { task.doLast(new Action() { @Override public void execute(Task task) { - final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2021 Elasticsearch"); + final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2024 Elasticsearch"); final Path noticePath = checkExtraction.get() .getDestinationDir() .toPath() diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 6c31bc44017c..2dfd24d97cbb 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -495,7 +495,7 @@ subprojects { (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) } doLast { - final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2021 Elasticsearch") + final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2024 Elasticsearch") final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt") assertLinesInFile(noticePath, noticeLines) } From 09fc32090cce4edf2f3b28e90c935002181cbc2a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 11 Jun 2024 10:17:48 +0200 Subject: [PATCH 50/58] Re-define index.mapper.dynamic setting in 8.x (#109341) Currently when upgrading a 7.x cluster to 8.x with 
`index.mapper.dynamic` index setting defined the following happens: - In case of a full cluster restart upgrade, then the index setting gets archived and after the upgrade the cluster is in a green health. - In case of a rolling cluster restart upgrade, then shards of indices with the index setting fail to allocate as nodes start with 8.x version. The result is that the cluster has a red health and the index setting isn't archived. Closing and opening the index should archive the index setting and allocate the shards. The change is about ensuring the same behavior happens when upgrading a cluster from 7.x to 8.x with indices that have the `index.mapper.dynamic` index setting defined. By re-defining the `index.mapper.dynamic `index setting with `IndexSettingDeprecatedInV7AndRemovedInV8` property, the index is allowed to exist in 7.x indices, but can't be defined in new indices after the upgrade. This way we don't have to rely on index archiving and upgrading via full cluster restart or rolling restart will yield the same outcome. Based on the test in #109301. Relates to #109160 and #96075 --- docs/changelog/109341.yaml | 5 ++ .../UpgradeWithOldIndexSettingsIT.java | 82 +++++++++++++++++++ .../UpgradeWithOldIndexSettingsIT.java | 34 ++++++++ .../common/settings/IndexScopedSettings.java | 1 + .../index/mapper/MapperService.java | 14 ++++ 5 files changed, 136 insertions(+) create mode 100644 docs/changelog/109341.yaml create mode 100644 qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java diff --git a/docs/changelog/109341.yaml b/docs/changelog/109341.yaml new file mode 100644 index 000000000000..0c1eaa98a8aa --- /dev/null +++ b/docs/changelog/109341.yaml @@ -0,0 +1,5 @@ +pr: 109341 +summary: Re-define `index.mapper.dynamic` setting in 8.x for a better 7.x to 8.x upgrade if this setting is used. 
+area: Mapping +type: bug +issues: [] diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java new file mode 100644 index 000000000000..95178429317b --- /dev/null +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class UpgradeWithOldIndexSettingsIT extends ParameterizedFullClusterRestartTestCase { + + protected static LocalClusterConfigProvider clusterConfig = c -> {}; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(2) + .setting("xpack.security.enabled", "false") + 
.feature(FeatureFlag.FAILURE_STORE_ENABLED) + .apply(() -> clusterConfig) + .build(); + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + public UpgradeWithOldIndexSettingsIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { + super(upgradeStatus); + } + + public void testMapperDynamicIndexSetting() throws IOException { + assumeTrue( + "Setting deprecated in 6.x, but remained in 7.x and is no longer defined in 8.x", + getOldClusterTestVersion().before("8.0.0") + ); + String indexName = "my-index"; + if (isRunningAgainstOldCluster()) { + createIndex(indexName); + + var request = new Request("PUT", "/my-index/_settings"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(Settings.builder().put("index.mapper.dynamic", true).build())); + request.setOptions( + expectWarnings( + "[index.mapper.dynamic] setting was deprecated in Elasticsearch and will be removed in a future release! " + + "See the breaking changes documentation for the next major version." + ) + ); + assertOK(client().performRequest(request)); + } else { + var indexSettings = getIndexSettings(indexName); + assertThat(XContentMapValues.extractValue(indexName + ".settings.index.mapper.dynamic", indexSettings), equalTo("true")); + ensureGreen(indexName); + // New indices can never define the index.mapper.dynamic setting. 
+ Exception e = expectThrows( + ResponseException.class, + () -> createIndex("my-index2", Settings.builder().put("index.mapper.dynamic", true).build()) + ); + assertThat(e.getMessage(), containsString("unknown setting [index.mapper.dynamic]")); + } + } + +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 8dc3b43abf3e..ba873ef6bbd7 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -22,6 +22,8 @@ import java.io.IOException; import java.util.Map; import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { @@ -102,6 +104,38 @@ public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCas } } + public void testMapperDynamicIndexSetting() throws IOException { + assumeTrue( + "Setting deprecated in 6.x, but remained in 7.x and is no longer defined in 8.x", + getOldClusterTestVersion().before("8.0.0") + ); + String indexName = "my-index"; + if (isOldCluster()) { + createIndex(indexName); + Request request = new Request("PUT", "/" + indexName + "/_settings"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(Settings.builder().put("index.mapper.dynamic", true).build())); + request.setOptions( + expectWarnings( + "[index.mapper.dynamic] setting was deprecated in Elasticsearch and will be removed in a future release! " + + "See the breaking changes documentation for the next major version." 
+ ) + ); + assertOK(client().performRequest(request)); + } else { + if (isUpgradedCluster()) { + var indexSettings = getIndexSettings(indexName); + assertThat(XContentMapValues.extractValue(indexName + ".settings.index.mapper.dynamic", indexSettings), equalTo("true")); + ensureGreen(indexName); + // New indices can never define the index.mapper.dynamic setting. + Exception e = expectThrows( + ResponseException.class, + () -> createIndex("my-index2", Settings.builder().put("index.mapper.dynamic", true).build()) + ); + assertThat(e.getMessage(), containsString("unknown setting [index.mapper.dynamic]")); + } + } + } + private void assertCount(String index, int countAtLeast) throws IOException { Request searchTestIndexRequest = new Request("POST", "/" + index + "/_search"); searchTestIndexRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 452fc14025e2..f3eff9ae8838 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -158,6 +158,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING, MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING, MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING, + MapperService.INDEX_MAPPER_DYNAMIC_SETTING, BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, IndexModule.INDEX_STORE_TYPE_SETTING, IndexModule.INDEX_STORE_PRE_LOAD_SETTING, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index e5dc95ddbc2a..3ac4c0b0e18e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -147,6 +148,19 @@ public class MapperService extends AbstractIndexComponent implements Closeable { Property.Dynamic, Property.IndexScope ); + /** + * Legacy index setting, kept for 7.x BWC compatibility. This setting has no effect in 8.x. Do not use. + * TODO: Remove in 9.0 + */ + @Deprecated + @UpdateForV9 + public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting( + "index.mapper.dynamic", + true, + Property.Dynamic, + Property.IndexScope, + Property.IndexSettingDeprecatedInV7AndRemovedInV8 + ); private final IndexAnalyzers indexAnalyzers; private final MappingParser mappingParser; From 62f7c0e88f3a9d12c72a7cb099328003366615e7 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 11 Jun 2024 09:19:32 +0100 Subject: [PATCH 51/58] Avoid closing when partial snapshotting in `SnapshotStressTestsIT` (#109526) Closing an index while it's being partially-snapshotted is forbidden, but `SnapshotStressTestsIT#testRandomActivities` still sometimes attempts to do so which causes it to fail. This commit changes the behaviour to avoid doing these things to the same index at the same time. 
Closes #109138 --- .../snapshots/SnapshotStressTestsIT.java | 51 +++++++++++++++++-- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java index 3f43da20fec3..b759993be26d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java @@ -81,7 +81,9 @@ import java.util.stream.Stream; import static org.elasticsearch.repositories.blobstore.ChecksumBlobStoreFormat.SNAPSHOT_ONLY_FORMAT_PARAMS; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; @LuceneTestCase.SuppressFileSystems(value = "HandleLimitFS") // we sometimes have >2048 open files @@ -468,17 +470,20 @@ public class SnapshotStressTestsIT extends AbstractSnapshotIntegTestCase { restoreSpecificIndicesTmp = true; continue; } - if (randomBoolean() && localReleasables.add(tryAcquireAllPermits(indices.get(indexName).permits)) != null) { + final var trackedIndex = indices.get(indexName); + if (randomBoolean() && localReleasables.add(tryAcquireAllPermits(trackedIndex.permits)) != null) { indicesToRestoreList.add(indexName); final int snapshotShardCount = snapshotInfo.indexSnapshotDetails().get(indexName).getShardCount(); - final int indexShardCount = indices.get(indexName).shardCount; - if (snapshotShardCount == indexShardCount && randomBoolean()) { + final int indexShardCount = trackedIndex.shardCount; + if (snapshotShardCount == indexShardCount + && randomBoolean() + && localReleasables.add(trackedIndex.tryAcquireClosingPermit()) != null) { 
indicesToCloseList.add(indexName); } else { indicesToDeleteList.add(indexName); - indices.get(indexName).shardCount = snapshotShardCount; + trackedIndex.shardCount = snapshotShardCount; } } else { restoreSpecificIndicesTmp = true; @@ -994,7 +999,9 @@ public class SnapshotStressTestsIT extends AbstractSnapshotIntegTestCase { boolean snapshotSpecificIndicesTmp = randomBoolean(); final List targetIndexNames = new ArrayList<>(indices.size()); for (TrackedIndex trackedIndex : indices.values()) { - if (usually() && releasableAfterStart.add(tryAcquirePermit(trackedIndex.permits)) != null) { + if (usually() + && releasableAfterStart.add(tryAcquirePermit(trackedIndex.permits)) != null + && localReleasables.add(trackedIndex.tryAcquirePartialSnapshottingPermit()) != null) { targetIndexNames.add(trackedIndex.indexName); } else { snapshotSpecificIndicesTmp = true; @@ -1550,6 +1557,40 @@ public class SnapshotStressTestsIT extends AbstractSnapshotIntegTestCase { }); } + /** + * We must not close an index while it's being partially snapshotted; this counter tracks the number of ongoing + * close operations (positive) or partial snapshot operations (negative) in order to avoid them happening concurrently. + *

    + * This is only a problem for partial snapshots because we release the index permit once a partial snapshot has started. With + * non-partial snapshots we retain the index permit until it completes which blocks other operations. + */ + private final AtomicInteger closingOrPartialSnapshottingCount = new AtomicInteger(); + + private static boolean closingPermitAvailable(int value) { + return value >= 0 && value != Integer.MAX_VALUE; + } + + private static boolean partialSnapshottingPermitAvailable(int value) { + return value <= 0 && value != Integer.MIN_VALUE; + } + + Releasable tryAcquireClosingPermit() { + final var previous = closingOrPartialSnapshottingCount.getAndUpdate(c -> closingPermitAvailable(c) ? c + 1 : c); + if (closingPermitAvailable(previous)) { + return () -> assertThat(closingOrPartialSnapshottingCount.getAndDecrement(), greaterThan(0)); + } else { + return null; + } + } + + Releasable tryAcquirePartialSnapshottingPermit() { + final var previous = closingOrPartialSnapshottingCount.getAndUpdate(c -> partialSnapshottingPermitAvailable(c) ? c - 1 : c); + if (partialSnapshottingPermitAvailable(previous)) { + return () -> assertThat(closingOrPartialSnapshottingCount.getAndIncrement(), lessThan(0)); + } else { + return null; + } + } } } From d975997a3a1ed7013ac957159604378b6f994afe Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 11 Jun 2024 10:33:25 +0200 Subject: [PATCH 52/58] Add semantic-text warning about inference endpoints removal (#109561) --- docs/reference/mapping/types/semantic-text.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 7fc1314ff888..454eefd20b07 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -52,6 +52,8 @@ Use the <> to create the endpoint. 
The `inference_id` will not be validated when the mapping is created, but when documents are ingested into the index. When the first document is indexed, the `inference_id` will be used to generate underlying indexing structures for the field. +WARNING: Removing an inference endpoint will cause ingestion of documents and semantic queries to fail on indices that define `semantic_text` fields with that inference endpoint as their `inference_id`. +Please check that inference endpoints are not used in `semantic_text` fields before removal. [discrete] [[auto-text-chunking]] From 0cd8e48cfb8202753ce0e4d165484ff601b6944f Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Tue, 11 Jun 2024 10:44:22 +0200 Subject: [PATCH 53/58] Improve PrevalidateShardPathIT#testCheckShards (#109525) Add additional logging to log why `PrevalidateShardPathResponse` returns that shards is on the node while it's not in the cluster state. See #104807 --- .../cluster/PrevalidateShardPathIT.java | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java index dd701244756c..ea566c90ad76 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -70,6 +71,8 @@ public class PrevalidateShardPathIT extends ESIntegTestCase { } // Check that after relocation the source node doesn't have the shard path String node3 = internalCluster().startDataOnlyNode(); + ensureStableCluster(4); + logger.info("Relocating shards from the node {}", node2); 
updateIndexSettings(Settings.builder().put("index.routing.allocation.exclude._name", node2), indexName); ensureGreen(indexName); assertBusy(() -> { @@ -82,13 +85,20 @@ public class PrevalidateShardPathIT extends ESIntegTestCase { assertTrue("There should be no failures in the response", resp.failures().isEmpty()); Set node2ShardIds = resp2.getNodes().get(0).getShardIds(); if (node2ShardIds.size() > 0) { - for (var node2Shard : clusterService().state() + logger.info( + "Relocation source node {} should have no shards after the relocation, but still got {}", + node2Id, + node2ShardIds + ); + List node2Shards = clusterService().state() .routingTable() .allShards() .filter(s -> s.getIndexName().equals(indexName)) .filter(s -> node2ShardIds.contains(s.shardId())) .filter(s -> s.currentNodeId().equals(node2Id)) - .toList()) { + .toList(); + logger.info("Found {} shards on the relocation source node {} in the cluster state", node2Shards, node2Id); + for (var node2Shard : node2Shards) { var explanation = ClusterAllocationExplanationUtils.getClusterAllocationExplanation( client(), node2Shard.getIndexName(), @@ -109,6 +119,7 @@ public class PrevalidateShardPathIT extends ESIntegTestCase { // If for whatever reason the removal is not triggered (e.g. not enough nodes reported that the shards are active) or it // temporarily failed to clean up the shard folder, we need to trigger another cluster state change for this removal to // finally succeed. 
+ logger.info("Triggering an extra cluster state update"); updateIndexSettings( Settings.builder().put("index.routing.allocation.exclude.name", "non-existent" + randomAlphaOfLength(5)), indexName From 0480c1acba41cb0cd8fb22c384adfff0790a2f85 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 11 Jun 2024 11:24:52 +0200 Subject: [PATCH 54/58] [DOCS] Remove ESQL demo env link from 8.14+ (#109562) --- docs/reference/esql/esql-get-started.asciidoc | 8 ++++---- .../esql/esql-getting-started-sample-data.asciidoc | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index b7928898a3bb..207794c064da 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -15,10 +15,9 @@ This getting started is also available as an https://github.com/elastic/elastics [[esql-getting-started-prerequisites]] === Prerequisites -To follow along with the queries in this guide, you can either set up your own -deployment, or use Elastic's public {esql} demo environment. +To follow along with the queries in this guide, you'll need an {es} deployment with our sample data. -include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc[] +include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-sample-data.asciidoc[tag=own-deployment] [discrete] [[esql-getting-started-running-queries]] @@ -269,7 +268,8 @@ Before you can use `ENRICH`, you first need to <> and <> an <>. 
-include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc[] +include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc[tag=own-deployment] + After creating and executing a policy, you can use it with the `ENRICH` command: diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc index d9b08b7281f7..97f4859e012a 100644 --- a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc +++ b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc @@ -1,6 +1,6 @@ // tag::own-deployment[] -First ingest some sample data. In {kib}, open the main menu and select *Dev +First, you'll need to ingest the sample data. In {kib}, open the main menu and select *Dev Tools*. Run the following two requests: [source,console] From 5f3f7db7b622c7e706cb9c5032f66515479bd547 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 11 Jun 2024 10:40:47 +0100 Subject: [PATCH 55/58] [ML] Fix IndexOutOfBoundsException during inference (#109533) The error was caused by a empty input field list created by the ELSER and Elasticsearch inference services. 
--- docs/changelog/109533.yaml | 5 ++ .../core/ml/inference/TrainedModelConfig.java | 2 - .../ElasticsearchInternalService.java | 5 +- .../services/elser/ElserInternalService.java | 5 +- .../ElasticsearchInternalServiceTests.java | 40 +++++++++++++ .../elser/ElserInternalServiceTests.java | 58 +++++++++++++++++++ .../TransportPutTrainedModelAction.java | 31 ++++++---- .../TransportPutTrainedModelActionTests.java | 22 +++---- 8 files changed, 139 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/109533.yaml diff --git a/docs/changelog/109533.yaml b/docs/changelog/109533.yaml new file mode 100644 index 000000000000..5720410e5f37 --- /dev/null +++ b/docs/changelog/109533.yaml @@ -0,0 +1,5 @@ +pr: 109533 +summary: Fix IndexOutOfBoundsException during inference +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index 80fd28f3ab03..24fc24e43226 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -636,8 +636,6 @@ public class TrainedModelConfig implements ToXContentObject, Writeable { private InferenceConfig inferenceConfig; private TrainedModelLocation location; private ModelPackageConfig modelPackageConfig; - private Long perDeploymentMemoryBytes; - private Long perAllocationMemoryBytes; private String platformArchitecture; private TrainedModelPrefixStrings prefixStrings; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 0052607ce325..df546efd161e 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -418,9 +418,8 @@ public class ElasticsearchInternalService implements InferenceService { return; } else if (model instanceof MultilingualE5SmallModel e5Model) { String modelId = e5Model.getServiceSettings().getModelId(); - var fieldNames = List.of(); - var input = new TrainedModelInput(fieldNames); - var config = TrainedModelConfig.builder().setInput(input).setModelId(modelId).build(); + var input = new TrainedModelInput(List.of("text_field")); // by convention text_field is used + var config = TrainedModelConfig.builder().setInput(input).setModelId(modelId).validate(true).build(); PutTrainedModelAction.Request putRequest = new PutTrainedModelAction.Request(config, false, true); executeAsyncWithOrigin( client, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index 6e7c177861cd..ed0f1cd93c83 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -362,9 +362,8 @@ public class ElserInternalService implements InferenceService { return; } else { String modelId = ((ElserInternalModel) model).getServiceSettings().getModelId(); - var fieldNames = List.of(); - var input = new TrainedModelInput(fieldNames); - var config = TrainedModelConfig.builder().setInput(input).setModelId(modelId).build(); + var input = new TrainedModelInput(List.of("text_field")); // by convention text_field is used + var config = 
TrainedModelConfig.builder().setInput(input).setModelId(modelId).validate(true).build(); PutTrainedModelAction.Request putRequest = new PutTrainedModelAction.Request(config, false, true); executeAsyncWithOrigin( client, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 8f8c73eaab79..e34ce410bbab 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbedd import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; @@ -44,6 +45,7 @@ import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import java.util.ArrayList; @@ -694,6 +696,44 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { assertEquals(chunk, request.isChunked()); } + @SuppressWarnings("unchecked") + public void testPutModel() { + var client = mock(Client.class); + ArgumentCaptor argument = 
ArgumentCaptor.forClass(PutTrainedModelAction.Request.class); + + doAnswer(invocation -> { + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new PutTrainedModelAction.Response(mock(TrainedModelConfig.class))); + return null; + }).when(client).execute(Mockito.same(PutTrainedModelAction.INSTANCE), argument.capture(), any()); + + when(client.threadPool()).thenReturn(threadPool); + + var service = createService(client); + + var model = new MultilingualE5SmallModel( + "my-e5", + TaskType.TEXT_EMBEDDING, + "e5", + new MultilingualE5SmallInternalServiceSettings(1, 1, ".multilingual-e5-small") + ); + + service.putModel(model, new ActionListener<>() { + @Override + public void onResponse(Boolean success) { + assertTrue(success); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + }); + + var putConfig = argument.getValue().getTrainedModelConfig(); + assertEquals("text_field", putConfig.getInput().getFieldNames().get(0)); + } + private ElasticsearchInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElasticsearchInternalService(context); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index 27db8143f0c8..31962e44851c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -27,10 +27,16 @@ import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResul import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import 
org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; +import org.junit.After; +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; import java.util.ArrayList; import java.util.Collections; @@ -38,6 +44,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -53,6 +60,18 @@ import static org.mockito.Mockito.when; public class ElserInternalServiceTests extends ESTestCase { + private static ThreadPool threadPool; + + @Before + public void setUpThreadPool() { + threadPool = new TestThreadPool("test"); + } + + @After + public void shutdownThreadPool() { + TestThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + } + public static Model randomModelConfig(String inferenceEntityId, TaskType taskType) { return switch (taskType) { case SPARSE_EMBEDDING -> new ElserInternalModel( @@ -451,6 +470,45 @@ public class ElserInternalServiceTests extends ESTestCase { } } + @SuppressWarnings("unchecked") + public void testPutModel() { + var client = mock(Client.class); + ArgumentCaptor argument = ArgumentCaptor.forClass(PutTrainedModelAction.Request.class); + + doAnswer(invocation -> { + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new 
PutTrainedModelAction.Response(mock(TrainedModelConfig.class))); + return null; + }).when(client).execute(Mockito.same(PutTrainedModelAction.INSTANCE), argument.capture(), any()); + + when(client.threadPool()).thenReturn(threadPool); + + var service = createService(client); + + var model = new ElserInternalModel( + "my-elser", + TaskType.SPARSE_EMBEDDING, + "elser", + new ElserInternalServiceSettings(1, 1, ".elser_model_2"), + ElserMlNodeTaskSettings.DEFAULT + ); + + service.putModel(model, new ActionListener<>() { + @Override + public void onResponse(Boolean success) { + assertTrue(success); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + }); + + var putConfig = argument.getValue().getTrainedModelConfig(); + assertEquals("text_field", putConfig.getInput().getFieldNames().get(0)); + } + private ElserInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElserInternalService(context); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 87fc956c224c..c4cefc1750c3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -323,12 +323,12 @@ public class TransportPutTrainedModelAction extends TransportMasterNodeAction handlePackageAndTagsListener.onResponse(null), request.ackTimeout() ); } @@ -371,14 +371,26 @@ public class TransportPutTrainedModelAction extends TransportMasterNodeAction sendResponseListener, - ActionListener storeModelListener, + ActionListener isBeingDownloadedListener, + Runnable createModelAction, TimeValue timeout ) { TaskRetriever.getDownloadTaskInfo( @@ -389,12 +401,12 @@ public class 
TransportPutTrainedModelAction extends TransportMasterNodeAction "Timed out waiting for model download to complete", ActionListener.wrap(taskInfo -> { if (taskInfo != null) { - getModelInformation(client, modelId, sendResponseListener); + getModelInformation(client, modelId, isBeingDownloadedListener); } else { // no task exists so proceed with creating the model - storeModelListener.onResponse(null); + createModelAction.run(); } - }, sendResponseListener::onFailure) + }, isBeingDownloadedListener::onFailure) ); } @@ -554,5 +566,4 @@ public class TransportPutTrainedModelAction extends TransportMasterNodeAction(); - TransportPutTrainedModelAction.checkForExistingTask( + TransportPutTrainedModelAction.checkForExistingModelDownloadTask( client, "inferenceEntityId", true, responseListener, - new PlainActionFuture(), + () -> {}, TIMEOUT ); @@ -178,18 +178,18 @@ public class TransportPutTrainedModelActionTests extends ESTestCase { public void testCheckForExistingTaskCallsStoreModelListenerWhenNoTasksExist() { var client = mockClientWithTasksResponse(Collections.emptyList(), threadPool); - var storeListener = new PlainActionFuture(); + var createModelCalled = new AtomicBoolean(); - TransportPutTrainedModelAction.checkForExistingTask( + TransportPutTrainedModelAction.checkForExistingModelDownloadTask( client, "inferenceEntityId", true, new PlainActionFuture<>(), - storeListener, + () -> createModelCalled.set(Boolean.TRUE), TIMEOUT ); - assertThat(storeListener.actionGet(TIMEOUT), nullValue()); + assertTrue(createModelCalled.get()); } public void testCheckForExistingTaskThrowsNoModelFoundError() { @@ -197,12 +197,12 @@ public class TransportPutTrainedModelActionTests extends ESTestCase { prepareGetTrainedModelResponse(client, Collections.emptyList()); var respListener = new PlainActionFuture(); - TransportPutTrainedModelAction.checkForExistingTask( + TransportPutTrainedModelAction.checkForExistingModelDownloadTask( client, "inferenceEntityId", true, respListener, - new 
PlainActionFuture<>(), + () -> {}, TIMEOUT ); @@ -224,12 +224,12 @@ public class TransportPutTrainedModelActionTests extends ESTestCase { prepareGetTrainedModelResponse(client, List.of(trainedModel)); var respListener = new PlainActionFuture(); - TransportPutTrainedModelAction.checkForExistingTask( + TransportPutTrainedModelAction.checkForExistingModelDownloadTask( client, "inferenceEntityId", true, respListener, - new PlainActionFuture<>(), + () -> {}, TIMEOUT ); From 8e9e9bc6c886e35277c5d6e315f45405673afb86 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Tue, 11 Jun 2024 11:53:18 +0200 Subject: [PATCH 56/58] Relaxed resulting docs checks (#109560) --- .../180_update_dense_vector_type.yml | 104 +++++++++--------- 1 file changed, 53 insertions(+), 51 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml index 0780b789e92a..3502a5e64308 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/180_update_dense_vector_type.yml @@ -2,6 +2,9 @@ setup: - requires: cluster_features: "gte_v8.15.0" reason: 'updatable dense vector field types was added in 8.15' + - skip: + reason: "contains is a newly added assertion" + features: contains --- "Test create and update dense vector mapping with per-doc indexing and flush": - do: @@ -110,9 +113,9 @@ setup: - match: { hits.total.value: 10 } - length: {hits.hits: 3} - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "3" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "3" } } - do: indices.put_mapping: @@ -216,9 +219,9 @@ setup: - match: { hits.total.value: 20 } - length: { 
hits.hits: 3 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "11" } - - match: { hits.hits.2._id: "2" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "11" } } + - contains: { hits.hits: { _id: "2" } } - do: indices.put_mapping: @@ -323,10 +326,10 @@ setup: - match: { hits.total.value: 30 } - length: { hits.hits: 4 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "11" } - - match: { hits.hits.2._id: "2" } - - match: { hits.hits.3._id: "21" } + - contains: {hits.hits: {_id: "1"}} + - contains: {hits.hits: {_id: "11"}} + - contains: {hits.hits: {_id: "2"}} + - contains: {hits.hits: {_id: "21"}} - do: indices.put_mapping: @@ -431,12 +434,11 @@ setup: - match: { hits.total.value: 40 } - length: { hits.hits: 5 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "11" } - - match: { hits.hits.2._id: "31" } - - match: { hits.hits.3._id: "2" } - - match: { hits.hits.4._id: "21" } - + - contains: {hits.hits: {_id: "1"}} + - contains: {hits.hits: {_id: "11"}} + - contains: {hits.hits: {_id: "2"}} + - contains: {hits.hits: {_id: "21"}} + - contains: {hits.hits: {_id: "31"}} --- "Test create and update dense vector mapping with bulk indexing": @@ -501,9 +503,9 @@ setup: - match: { hits.total.value: 10 } - length: {hits.hits: 3} - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "3" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "3" } } - do: indices.put_mapping: @@ -561,9 +563,9 @@ setup: - match: { hits.total.value: 20 } - length: { hits.hits: 3 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "11" } } - do: indices.put_mapping: @@ -622,10 +624,10 @@ setup: - match: { hits.total.value: 30 } - 
length: { hits.hits: 4 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "21" } - - match: { hits.hits.3._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "11" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "21" } } - do: indices.put_mapping: @@ -684,11 +686,11 @@ setup: - match: { hits.total.value: 40 } - length: { hits.hits: 5 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "21" } - - match: { hits.hits.3._id: "31" } - - match: { hits.hits.4._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "11" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "21" } } + - contains: { hits.hits: { _id: "31" } } --- "Index, update and merge": @@ -753,9 +755,9 @@ setup: - match: { hits.total.value: 10 } - length: { hits.hits: 3 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "3" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "3" } } - do: indices.put_mapping: @@ -793,9 +795,9 @@ setup: - match: { hits.total.value: 10 } - length: { hits.hits: 3 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "3" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "3" } } - do: bulk: @@ -835,9 +837,9 @@ setup: - match: { hits.total.value: 20 } - length: { hits.hits: 3 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "11" } } - do: indices.put_mapping: @@ -871,9 +873,9 @@ setup: - match: { hits.total.value: 20 } - length: { 
hits.hits: 3 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "11" } } - do: bulk: @@ -913,10 +915,10 @@ setup: - match: { hits.total.value: 30 } - length: { hits.hits: 4 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "21" } - - match: { hits.hits.3._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "11" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "21" } } - do: indices.forcemerge: @@ -936,10 +938,10 @@ setup: - match: { hits.total.value: 30 } - length: { hits.hits: 4 } - - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.2._id: "21" } - - match: { hits.hits.3._id: "11" } + - contains: { hits.hits: { _id: "1" } } + - contains: { hits.hits: { _id: "11" } } + - contains: { hits.hits: { _id: "2" } } + - contains: { hits.hits: { _id: "21" } } --- From c471b01f9e6bf8357c4260fabcd8d94995b1a426 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 11 Jun 2024 11:40:10 +0100 Subject: [PATCH 57/58] Add permission to secure access to certain config files specified by settings (#108895) --- docs/changelog/108895.yaml | 5 + ...=> SecuredConfigFileAccessPermission.java} | 11 +- ...uredConfigFileSettingAccessPermission.java | 26 ++++ .../elasticsearch/bootstrap/PolicyUtil.java | 6 +- .../org/elasticsearch/bootstrap/Security.java | 113 +++++++++++++----- 5 files changed, 124 insertions(+), 37 deletions(-) create mode 100644 docs/changelog/108895.yaml rename server/src/main/java/org/elasticsearch/{SecuredFileAccessPermission.java => SecuredConfigFileAccessPermission.java} (53%) create mode 100644 server/src/main/java/org/elasticsearch/SecuredConfigFileSettingAccessPermission.java diff --git a/docs/changelog/108895.yaml 
b/docs/changelog/108895.yaml new file mode 100644 index 000000000000..15293896b20c --- /dev/null +++ b/docs/changelog/108895.yaml @@ -0,0 +1,5 @@ +pr: 108895 +summary: Add permission to secure access to certain config files specified by settings +area: "Security" +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/SecuredFileAccessPermission.java b/server/src/main/java/org/elasticsearch/SecuredConfigFileAccessPermission.java similarity index 53% rename from server/src/main/java/org/elasticsearch/SecuredFileAccessPermission.java rename to server/src/main/java/org/elasticsearch/SecuredConfigFileAccessPermission.java index 3d24a9bc5ddb..d6372b5ef988 100644 --- a/server/src/main/java/org/elasticsearch/SecuredFileAccessPermission.java +++ b/server/src/main/java/org/elasticsearch/SecuredConfigFileAccessPermission.java @@ -13,13 +13,14 @@ import java.security.BasicPermission; /** * A permission granted to ensure secured access to a file in the config directory. *

    - * By granting this permission, all code that does not have the same permission on the same file - * will be denied all read/write access to that file. - * Note that you also need to wrap any access to the secured files in an {@code AccessController.doPrivileged()} block + * By granting this permission with a file relative to the config directory, + * the file is secured from general access by Elasticsearch and other Elasticsearch plugins. + * All code that does not have a secured permission on the same file will be denied all read/write access to that file. + * Note that you also need to wrap any access to secured files in an {@code AccessController.doPrivileged()} block * as Elasticsearch itself is denied access to files secured by plugins. */ -public class SecuredFileAccessPermission extends BasicPermission { - public SecuredFileAccessPermission(String path) { +public class SecuredConfigFileAccessPermission extends BasicPermission { + public SecuredConfigFileAccessPermission(String path) { super(path, ""); } } diff --git a/server/src/main/java/org/elasticsearch/SecuredConfigFileSettingAccessPermission.java b/server/src/main/java/org/elasticsearch/SecuredConfigFileSettingAccessPermission.java new file mode 100644 index 000000000000..fdea47d449a1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/SecuredConfigFileSettingAccessPermission.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch; + +import java.security.BasicPermission; + +/** + * A permission granted to ensure secured access to a file specified by a setting in the config directory. + *

    + * By granting this permission with a setting key (wildcards are supported), + * the files pointed to by the settings are secured from general access by Elasticsearch and other Elasticsearch plugins. + * All code that does not have a secured permission on the same file will be denied all read/write access to that file. + * Note that you also need to wrap any access to secured files in an {@code AccessController.doPrivileged()} block + * as Elasticsearch itself is denied access to files secured by plugins. + */ +public class SecuredConfigFileSettingAccessPermission extends BasicPermission { + public SecuredConfigFileSettingAccessPermission(String setting) { + super(setting, ""); + } +} diff --git a/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java b/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java index 3279bc5b1bfd..b9574f1a29ae 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java @@ -8,7 +8,8 @@ package org.elasticsearch.bootstrap; -import org.elasticsearch.SecuredFileAccessPermission; +import org.elasticsearch.SecuredConfigFileAccessPermission; +import org.elasticsearch.SecuredConfigFileSettingAccessPermission; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; @@ -169,7 +170,8 @@ public class PolicyUtil { entry(PrivateCredentialPermission.class, ALLOW_ALL_NAMES), entry(SQLPermission.class, List.of("callAbort", "setNetworkTimeout")), entry(ClassPermission.class, ALLOW_ALL_NAMES), - entry(SecuredFileAccessPermission.class, ALLOW_ALL_NAMES) + entry(SecuredConfigFileAccessPermission.class, ALLOW_ALL_NAMES), + entry(SecuredConfigFileSettingAccessPermission.class, ALLOW_ALL_NAMES) ).collect(Collectors.toMap(e -> e.getKey().getCanonicalName(), Map.Entry::getValue)); PermissionCollection pluginPermissionCollection = new Permissions(); 
namedPermissions.forEach(pluginPermissionCollection::add); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index e24e13dfff37..12edf344c72a 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -9,13 +9,16 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.SecuredFileAccessPermission; +import org.elasticsearch.SecuredConfigFileAccessPermission; +import org.elasticsearch.SecuredConfigFileSettingAccessPermission; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.plugins.PluginsUtils; import org.elasticsearch.secure_sm.SecureSM; import org.elasticsearch.transport.TcpTransport; @@ -46,7 +49,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Consumer; -import java.util.stream.Stream; +import java.util.regex.Pattern; import static java.lang.invoke.MethodType.methodType; import static org.elasticsearch.bootstrap.ESPolicy.POLICY_RESOURCE; @@ -104,6 +107,8 @@ import static org.elasticsearch.reservedstate.service.FileSettingsService.SETTIN */ final class Security { + private static Logger logger; // not init'd until configure call below + static { prepopulateSecurityCaller(); } @@ -122,6 +127,8 @@ final class Security { * @param filterBadDefaults true if we should filter out bad java defaults in the system policy. 
*/ static void configure(Environment environment, boolean filterBadDefaults, Path pidFile) throws IOException { + logger = LogManager.getLogger(Security.class); + // enable security policy: union of template and environment-based paths, and possibly plugin permissions Map codebases = PolicyUtil.getCodebaseJarMap(JarHell.parseModulesAndClassPath()); Policy mainPolicy = PolicyUtil.readPolicy(ESPolicy.class.getResource(POLICY_RESOURCE), codebases); @@ -133,7 +140,7 @@ final class Security { pluginPolicies, filterBadDefaults, createRecursiveDataPathPermission(environment), - readSecuredFiles(environment, mainPolicy, codebases.values(), pluginPolicies) + readSecuredConfigFiles(environment, mainPolicy, codebases.values(), pluginPolicies) ) ); @@ -196,57 +203,103 @@ final class Security { return toFilePermissions(policy); } - private static Map> readSecuredFiles( + private static Map> readSecuredConfigFiles( Environment environment, Policy template, Collection mainCodebases, Map pluginPolicies ) throws IOException { - Map> securedFiles = new HashMap<>(); + Map> securedConfigFiles = new HashMap<>(); + Map> securedSettingKeys = new HashMap<>(); for (URL url : mainCodebases) { - PolicyUtil.getPolicyPermissions(url, template, environment.tmpFile()) - .stream() - .flatMap(Security::extractSecuredFileName) - .map(environment.configFile()::resolve) - .forEach(f -> securedFiles.computeIfAbsent(f.toString(), k -> new HashSet<>()).add(url)); + for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpFile())) { + readSecuredConfigFilePermissions(environment, url, p, securedConfigFiles, securedSettingKeys); + } } for (var pp : pluginPolicies.entrySet()) { - PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpFile()) - .stream() - .flatMap(Security::extractSecuredFileName) - .map(environment.configFile()::resolve) - .forEach(f -> securedFiles.computeIfAbsent(f.toString(), k -> new HashSet<>()).add(pp.getKey())); + for (Permission p : 
PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpFile())) { + readSecuredConfigFilePermissions(environment, pp.getKey(), p, securedConfigFiles, securedSettingKeys); + } + } + + // compile a Pattern for each setting key we'll be looking for + // the key could include a * wildcard + List>> settingPatterns = securedSettingKeys.entrySet() + .stream() + .map(e -> Map.entry(Pattern.compile(e.getKey()), e.getValue())) + .toList(); + + for (String setting : environment.settings().keySet()) { + for (Map.Entry> ps : settingPatterns) { + if (ps.getKey().matcher(setting).matches()) { + // add the setting value to the secured files for these codebase URLs + Path file = environment.configFile().resolve(environment.settings().get(setting)); + if (file.startsWith(environment.configFile()) == false) { + throw new IllegalStateException(ps.getValue() + " tried to grant access to file outside config directory " + file); + } + if (logger.isDebugEnabled()) { + ps.getValue() + .forEach( + url -> logger.debug("Jar {} securing access to config file {} through setting {}", url, file, setting) + ); + } + securedConfigFiles.computeIfAbsent(file.toString(), k -> new HashSet<>()).addAll(ps.getValue()); + } + } } // always add some config files as exclusive files that no one can access // there's no reason for anyone to read these once the security manager is initialized // so if something has tried to grant itself access, crash out with an error - addSpeciallySecuredFile(securedFiles, environment.configFile().resolve("elasticsearch.yml").toString()); - addSpeciallySecuredFile(securedFiles, environment.configFile().resolve("jvm.options").toString()); - addSpeciallySecuredFile(securedFiles, environment.configFile().resolve("jvm.options.d/-").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("elasticsearch.yml").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, 
environment.configFile().resolve("jvm.options").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options.d/-").toString()); - return Collections.unmodifiableMap(securedFiles); + return Collections.unmodifiableMap(securedConfigFiles); } - private static void addSpeciallySecuredFile(Map> securedFiles, String path) { + private static void readSecuredConfigFilePermissions( + Environment environment, + URL url, + Permission p, + Map> securedFiles, + Map> securedSettingKeys + ) { + String securedFileName = extractSecuredName(p, SecuredConfigFileAccessPermission.class); + if (securedFileName != null) { + Path securedFile = environment.configFile().resolve(securedFileName); + if (securedFile.startsWith(environment.configFile()) == false) { + throw new IllegalStateException("[" + url + "] tried to grant access to file outside config directory " + securedFile); + } + logger.debug("Jar {} securing access to config file {}", url, securedFile); + securedFiles.computeIfAbsent(securedFile.toString(), k -> new HashSet<>()).add(url); + } + + String securedKey = extractSecuredName(p, SecuredConfigFileSettingAccessPermission.class); + if (securedKey != null) { + securedSettingKeys.computeIfAbsent(securedKey, k -> new HashSet<>()).add(url); + } + } + + private static String extractSecuredName(Permission p, Class permissionType) { + if (permissionType.isInstance(p)) { + return p.getName(); + } else if (p instanceof UnresolvedPermission up && up.getUnresolvedType().equals(permissionType.getCanonicalName())) { + return up.getUnresolvedName(); + } else { + return null; + } + } + + private static void addSpeciallySecuredConfigFile(Map> securedFiles, String path) { Set attemptedToGrant = securedFiles.put(path, Set.of()); if (attemptedToGrant != null) { throw new IllegalStateException(attemptedToGrant + " tried to grant access to special config file " + path); } } - private static Stream extractSecuredFileName(Permission p) { - if (p 
instanceof SecuredFileAccessPermission) { - return Stream.of(p.getName()); - } - if (p instanceof UnresolvedPermission up && up.getUnresolvedType().equals(SecuredFileAccessPermission.class.getCanonicalName())) { - return Stream.of(up.getUnresolvedName()); - } - return Stream.empty(); - } - /** Adds access to classpath jars/classes for jar hell scan, etc */ @SuppressForbidden(reason = "accesses fully qualified URLs to configure security") static void addClasspathPermissions(Permissions policy) throws IOException { From 9647f6bd12f08b947203abcceefaeb15dc7d12cf Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 11 Jun 2024 12:52:20 +0200 Subject: [PATCH 58/58] Don't set number of shards to 0 for system indices that use auto-expand replicas (#108994) Many of our system indices that rely on auto_expand_replicas get created providing a manual number of replicas. Such number will be immediately overridden by the auto expand replicas functionality according to the number of data nodes available. While this causes no harm, it seems misleading and unnecessary, a potential misuse that we can avoid for indices that we create ourselves. Ideally we'd even prevent this from happening by rejecting such index creation requests, but that would be a breaking change that we'd prefer not to make at this time. 
--- .../java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java | 1 - .../java/org/elasticsearch/system/indices/SystemIndicesQA.java | 2 -- .../elasticsearch/synonyms/SynonymsManagementAPIService.java | 1 - .../elasticsearch/xpack/core/async/AsyncTaskIndexService.java | 1 - .../xpack/application/rules/QueryRulesIndexService.java | 1 - .../application/search/SearchApplicationIndexService.java | 1 - .../xpack/security/support/SecuritySystemIndices.java | 3 --- .../src/main/java/org/elasticsearch/xpack/watcher/Watcher.java | 1 - 8 files changed, 11 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index e5756652a984..9d0f9848d97b 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -188,7 +188,6 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, SystemInd .setSettings( Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .build() ) diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java index 9fc256e79873..1d69ae5c1ee4 100644 --- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java +++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java @@ -72,7 +72,6 @@ public class SystemIndicesQA extends Plugin implements SystemIndexPlugin, Action .setSettings( Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .build() 
) @@ -95,7 +94,6 @@ public class SystemIndicesQA extends Plugin implements SystemIndexPlugin, Action .setSettings( Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .build() ) diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index 9409aef96d8b..2cd35fd6889b 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -483,7 +483,6 @@ public class SynonymsManagementAPIService { static Settings settings() { return Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all") .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), SYNONYMS_INDEX_FORMAT) .build(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java index 2d92ded3b545..e44af60a45e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java @@ -99,7 +99,6 @@ public final class AsyncTaskIndexService> { return Settings.builder() .put("index.codec", "best_compression") .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .build(); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index adcd5da988b8..86192d8070cc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -102,7 +102,6 @@ public class QueryRulesIndexService { private static Settings getIndexSettings() { return Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.SETTING_PRIORITY, 100) .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), QueryRulesIndexMappingVersion.latest().id) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 0ccef9acba08..9e8a8f750b76 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -139,7 +139,6 @@ public class SearchApplicationIndexService { private static Settings getIndexSettings() { return Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.SETTING_PRIORITY, 100) .put("index.refresh_interval", "1s") diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 75937bf3e2c1..4c5ce703f48a 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -152,7 +152,6 @@ public class SecuritySystemIndices { private static Settings getMainIndexSettings() { return Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.SETTING_PRIORITY, 1000) .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), INTERNAL_MAIN_INDEX_FORMAT) @@ -667,7 +666,6 @@ public class SecuritySystemIndices { private static Settings getTokenIndexSettings() { return Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.SETTING_PRIORITY, 1000) .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), INTERNAL_TOKENS_INDEX_FORMAT) @@ -868,7 +866,6 @@ public class SecuritySystemIndices { private static Settings getProfileIndexSettings(Settings settings) { final Settings.Builder settingsBuilder = Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.SETTING_PRIORITY, 1000) .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), INTERNAL_PROFILE_INDEX_FORMAT) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index f8f910c38c08..2d71aef08ea1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -874,7 +874,6 @@ public class Watcher extends Plugin implements SystemIndexPlugin, ScriptPlugin, 
private static Settings getWatchesIndexSettings() { return Settings.builder() .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0) .put("index.auto_expand_replicas", "0-1") .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), 6) .put(IndexMetadata.SETTING_PRIORITY, 800)