diff --git a/.gitignore b/.gitignore index d1af97cbaea3..8b2da4dc0832 100644 --- a/.gitignore +++ b/.gitignore @@ -69,3 +69,6 @@ testfixtures_shared/ # Generated checkstyle_ide.xml x-pack/plugin/esql/src/main/generated-src/generated/ + +# JEnv +.java-version diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java index e62c26c7fbc0..3ab85ba69dc8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java @@ -31,11 +31,15 @@ import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.bundling.Jar; import org.gradle.api.tasks.javadoc.Javadoc; import org.gradle.external.javadoc.CoreJavadocOptions; +import org.gradle.jvm.toolchain.JavaLanguageVersion; +import org.gradle.jvm.toolchain.JavaToolchainService; import org.gradle.language.base.plugins.LifecycleBasePlugin; import java.io.File; import java.util.Map; +import javax.inject.Inject; + import static org.elasticsearch.gradle.internal.conventions.util.Util.toStringable; import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; @@ -44,6 +48,14 @@ import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams * common configuration for production code. */ public class ElasticsearchJavaPlugin implements Plugin { + + private final JavaToolchainService javaToolchains; + + @Inject + ElasticsearchJavaPlugin(JavaToolchainService javaToolchains) { + this.javaToolchains = javaToolchains; + } + @Override public void apply(Project project) { project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); @@ -55,7 +67,7 @@ public class ElasticsearchJavaPlugin implements Plugin { // configureConfigurations(project); configureJars(project, buildParams.get()); configureJarManifest(project, buildParams.get()); - configureJavadoc(project); + configureJavadoc(project, buildParams.get()); testCompileOnlyDeps(project); } @@ -128,7 +140,7 @@ public class ElasticsearchJavaPlugin implements Plugin { project.getPluginManager().apply("nebula.info-jar"); } - private static void configureJavadoc(Project project) { + private void configureJavadoc(Project project, BuildParameterExtension buildParams) { project.getTasks().withType(Javadoc.class).configureEach(javadoc -> { /* * Generate docs using html5 to suppress a warning from `javadoc` @@ -136,6 +148,10 @@ public class ElasticsearchJavaPlugin implements Plugin { */ CoreJavadocOptions javadocOptions = (CoreJavadocOptions) javadoc.getOptions(); javadocOptions.addBooleanOption("html5", true); + + javadoc.getJavadocTool().set(javaToolchains.javadocToolFor(spec -> { + spec.getLanguageVersion().set(JavaLanguageVersion.of(buildParams.getMinimumRuntimeVersion().getMajorVersion())); + })); }); TaskProvider javadoc = project.getTasks().withType(Javadoc.class).named("javadoc"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index da26cb66122a..0e8dbb7fce26 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -17,12 +17,12 @@ import org.gradle.api.InvalidUserDataException; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.ProjectLayout; import org.gradle.api.model.ObjectFactory; import org.gradle.api.plugins.JvmToolchainsPlugin; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; -import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.TaskProvider; import org.gradle.jvm.toolchain.JavaToolchainService; @@ -54,11 +54,17 @@ public class InternalDistributionBwcSetupPlugin implements Plugin { private final ObjectFactory objectFactory; private ProviderFactory providerFactory; private JavaToolchainService toolChainService; + private FileSystemOperations fileSystemOperations; @Inject - public InternalDistributionBwcSetupPlugin(ObjectFactory objectFactory, ProviderFactory providerFactory) { + public InternalDistributionBwcSetupPlugin( + ObjectFactory objectFactory, + ProviderFactory providerFactory, + FileSystemOperations fileSystemOperations + ) { this.objectFactory = objectFactory; this.providerFactory = providerFactory; + this.fileSystemOperations = fileSystemOperations; } @Override @@ -76,7 +82,8 @@ public class InternalDistributionBwcSetupPlugin implements Plugin { providerFactory, objectFactory, toolChainService, - isCi + isCi, + fileSystemOperations ); }); } @@ -88,7 +95,8 @@ public class InternalDistributionBwcSetupPlugin implements Plugin { ProviderFactory providerFactory, ObjectFactory objectFactory, JavaToolchainService toolChainService, - Boolean isCi + Boolean isCi, + FileSystemOperations fileSystemOperations ) { ProjectLayout layout = project.getLayout(); Provider versionInfoProvider = providerFactory.provider(() -> versionInfo); @@ -120,11 +128,18 @@ public class InternalDistributionBwcSetupPlugin implements Plugin { List distributionProjects = resolveArchiveProjects(checkoutDir.get(), bwcVersion.get()); // Setup gradle user home directory - project.getTasks().register("setupGradleUserHome", Copy.class, copy -> { - copy.into(project.getGradle().getGradleUserHomeDir().getAbsolutePath() + "-" + project.getName()); - copy.from(project.getGradle().getGradleUserHomeDir().getAbsolutePath(), copySpec -> { - copySpec.include("gradle.properties"); - copySpec.include("init.d/*"); + // We don't use a normal `Copy` task here as snapshotting the entire gradle user home is very expensive. 
This task is cheap, so + // up-to-date checking doesn't buy us much + project.getTasks().register("setupGradleUserHome", task -> { + task.doLast(t -> { + fileSystemOperations.copy(copy -> { + String gradleUserHome = project.getGradle().getGradleUserHomeDir().getAbsolutePath(); + copy.into(gradleUserHome + "-" + project.getName()); + copy.from(gradleUserHome, copySpec -> { + copySpec.include("gradle.properties"); + copySpec.include("init.d/*"); + }); + }); }); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 7c488e6e73fe..5402e0a04fe8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -86,14 +86,14 @@ public class MrjarPlugin implements Plugin { configurePreviewFeatures(project, javaExtension.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME), 21); for (int javaVersion : mainVersions) { String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; - SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); + SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion, true); configureSourceSetInJar(project, mainSourceSet, javaVersion); addJar(project, mainSourceSet, javaVersion); mainSourceSets.add(mainSourceSetName); testSourceSets.add(mainSourceSetName); String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; - SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); + SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion, false); testSourceSets.add(testSourceSetName); createTestTask(project, buildParams, testSourceSet, javaVersion, mainSourceSets); } @@ -121,7 +121,8 @@ public class MrjarPlugin implements Plugin { JavaPluginExtension javaExtension, String sourceSetName, List parentSourceSets, - int javaVersion + int javaVersion, + boolean isMainSourceSet ) { SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourceSetName); for (String parentSourceSetName : parentSourceSets) { @@ -135,6 +136,13 @@ public class MrjarPlugin implements Plugin { CompileOptions compileOptions = compileTask.getOptions(); compileOptions.getRelease().set(javaVersion); }); + if (isMainSourceSet) { + project.getTasks().create(sourceSet.getJavadocTaskName(), Javadoc.class, javadocTask -> { + javadocTask.getJavadocTool().set(javaToolchains.javadocToolFor(spec -> { + spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)); + })); + }); + } configurePreviewFeatures(project, sourceSet, javaVersion); // Since we configure MRJAR sourcesets to allow preview apis, class signatures for those diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index f55d90933ed6..94fc6f2cb902 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -9,7 +9,7 @@ ## should create one or more files in the jvm.options.d ## directory containing your adjustments. ## -## See https://www.elastic.co/guide/en/elasticsearch/reference/@project.minor.version@/jvm-options.html +## See https://www.elastic.co/guide/en/elasticsearch/reference/@project.minor.version@/advanced-configuration.html#set-jvm-options ## for more information. 
## ################################################################ diff --git a/docs/build.gradle b/docs/build.gradle index dec0de8ffa84..93b727732728 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -130,8 +130,9 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { setting 'xpack.security.enabled', 'true' setting 'xpack.security.authc.api_key.enabled', 'true' setting 'xpack.security.authc.token.enabled', 'true' - // disable the ILM history for doc tests to avoid potential lingering tasks that'd cause test flakiness + // disable the ILM and SLM history for doc tests to avoid potential lingering tasks that'd cause test flakiness setting 'indices.lifecycle.history_index_enabled', 'false' + setting 'slm.history_index_enabled', 'false' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.authc.realms.file.file.order', '0' setting 'xpack.security.authc.realms.native.native.order', '1' diff --git a/docs/changelog/116423.yaml b/docs/changelog/116423.yaml deleted file mode 100644 index d6d10eab410e..000000000000 --- a/docs/changelog/116423.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116423 -summary: Support mTLS for the Elastic Inference Service integration inside the inference API -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/117778.yaml b/docs/changelog/117778.yaml new file mode 100644 index 000000000000..880d4f831e53 --- /dev/null +++ b/docs/changelog/117778.yaml @@ -0,0 +1,5 @@ +pr: 117778 +summary: "[Connector APIs] Enforce index prefix for managed connectors" +area: Extract&Transform +type: feature +issues: [] diff --git a/docs/changelog/118266.yaml b/docs/changelog/118266.yaml new file mode 100644 index 000000000000..1b14b12b973c --- /dev/null +++ b/docs/changelog/118266.yaml @@ -0,0 +1,5 @@ +pr: 118266 +summary: Prevent data nodes from sending stack traces to coordinator when `error_trace=false` +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/118366.yaml b/docs/changelog/118366.yaml new file mode 100644 index 000000000000..cfeab1937738 --- /dev/null +++ b/docs/changelog/118366.yaml @@ -0,0 +1,22 @@ +pr: 118366 +summary: |- + Configuring a bind DN in an LDAP or Active Directory (AD) realm without a corresponding bind password + will prevent the node from starting +area: Authentication +type: breaking +issues: [] +breaking: + title: |- + Configuring a bind DN in an LDAP or Active Directory (AD) realm without + a corresponding bind password will prevent the node from starting + area: Cluster and node setting + details: |- + For LDAP or AD authentication realms, setting a bind DN (via the + `xpack.security.authc.realms.ldap.*.bind_dn` or `xpack.security.authc.realms.active_directory.*.bind_dn` + realm settings) without a bind password is a misconfiguration that may prevent successful authentication + to the node. Nodes will fail to start if a bind DN is specified without a password. + impact: |- + If you have a bind DN configured for an LDAP or AD authentication + realm, set a bind password for {ref}/ldap-realm.html#ldap-realm-configuration[LDAP] + or {ref}/active-directory-realm.html#ad-realm-configuration[Active Directory]. + Configuring a bind DN without a password prevents the misconfigured node from starting.
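To make the impact of this breaking change concrete, a minimal sketch of an affected realm configuration and its fix follows (the realm name `ldap1` and host are hypothetical; `secure_bind_password` is the secure setting described in the realm docs linked above):

[source,yaml]
----
# elasticsearch.yml -- bind_dn is set but no bind password is configured;
# with this change the node fails to start rather than failing authentication later
xpack.security.authc.realms.ldap.ldap1:
  order: 0
  url: "ldaps://ldap.example.com:636"
  bind_dn: "cn=admin,dc=example,dc=com"
----

[source,sh]
----
# Fix: store the realm's bind password as a secure setting in the keystore
bin/elasticsearch-keystore add xpack.security.authc.realms.ldap.ldap1.secure_bind_password
----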
diff --git a/docs/changelog/118585.yaml b/docs/changelog/118585.yaml new file mode 100644 index 000000000000..4caa5efabbd3 --- /dev/null +++ b/docs/changelog/118585.yaml @@ -0,0 +1,7 @@ +pr: 118585 +summary: Add a generic `rescorer` retriever based on the search request's rescore + functionality +area: Ranking +type: feature +issues: + - 118327 diff --git a/docs/changelog/118757.yaml b/docs/changelog/118757.yaml new file mode 100644 index 000000000000..956e220f21ae --- /dev/null +++ b/docs/changelog/118757.yaml @@ -0,0 +1,5 @@ +pr: 118757 +summary: Improve handling of nested fields in index reader wrappers +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/118816.yaml b/docs/changelog/118816.yaml new file mode 100644 index 000000000000..f1c1eac90dbc --- /dev/null +++ b/docs/changelog/118816.yaml @@ -0,0 +1,6 @@ +pr: 118816 +summary: Support flattened field with downsampling +area: Downsampling +type: bug +issues: + - 116319 diff --git a/docs/changelog/116944.yaml b/docs/changelog/118825.yaml similarity index 84% rename from docs/changelog/116944.yaml rename to docs/changelog/118825.yaml index e7833e49cf96..23170ec4705d 100644 --- a/docs/changelog/116944.yaml +++ b/docs/changelog/118825.yaml @@ -1,4 +1,4 @@ -pr: 116944 +pr: 118825 summary: "Remove support for type, fields, `copy_to` and boost in metadata field definition" area: Mapping type: breaking @@ -6,6 +6,6 @@ issues: [] breaking: title: "Remove support for type, fields, copy_to and boost in metadata field definition" area: Mapping - details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition + details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition starting with version 9. impact: Users providing type, fields, copy_to or boost as part of metadata field definition should remove them from their mappings. notable: false diff --git a/docs/changelog/118858.yaml b/docs/changelog/118858.yaml new file mode 100644 index 000000000000..a2161df1c84c --- /dev/null +++ b/docs/changelog/118858.yaml @@ -0,0 +1,5 @@ +pr: 118858 +summary: Lookup join on multiple join fields not yet supported +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/118890.yaml b/docs/changelog/118890.yaml new file mode 100644 index 000000000000..d3fc17157f13 --- /dev/null +++ b/docs/changelog/118890.yaml @@ -0,0 +1,5 @@ +pr: 118890 +summary: Add action to create index from a source index +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/119007.yaml b/docs/changelog/119007.yaml new file mode 100644 index 000000000000..458101b68d45 --- /dev/null +++ b/docs/changelog/119007.yaml @@ -0,0 +1,6 @@ +pr: 119007 +summary: Block-writes cannot be added after read-only +area: Data streams +type: bug +issues: + - 119002 diff --git a/docs/reference/connector/docs/connectors-content-extraction.asciidoc b/docs/reference/connector/docs/connectors-content-extraction.asciidoc index a87d38c9bf53..744fe1d87cb4 100644 --- a/docs/reference/connector/docs/connectors-content-extraction.asciidoc +++ b/docs/reference/connector/docs/connectors-content-extraction.asciidoc @@ -8,7 +8,7 @@ The logic for content extraction is defined in {connectors-python}/connectors/ut While intended primarily for PDF and Microsoft Office formats, you can use any of the <>. Enterprise Search uses an {ref}/ingest.html[Elasticsearch ingest pipeline^] to power the web crawler's binary content extraction. 
-The default pipeline, `ent-search-generic-ingestion`, is automatically created when Enterprise Search first starts. +The default pipeline, `search-default-ingestion`, is automatically created when Enterprise Search first starts. You can {ref}/ingest.html#create-manage-ingest-pipelines[view^] this pipeline in Kibana. Customizing your pipeline usage is also an option. diff --git a/docs/reference/connector/docs/connectors-filter-extract-transform.asciidoc b/docs/reference/connector/docs/connectors-filter-extract-transform.asciidoc index 278478c908bf..62a99928bfb4 100644 --- a/docs/reference/connector/docs/connectors-filter-extract-transform.asciidoc +++ b/docs/reference/connector/docs/connectors-filter-extract-transform.asciidoc @@ -13,7 +13,7 @@ The following diagram provides an overview of how content extraction, sync rules [.screenshot] image::images/pipelines-extraction-sync-rules.png[Architecture diagram of data pipeline with content extraction, sync rules, and ingest pipelines] -By default, only the connector specific logic (2) and the default `ent-search-generic-ingestion` pipeline (6) extract and transform your data, as configured in your deployment. +By default, only the connector specific logic (2) and the default `search-default-ingestion` pipeline (6) extract and transform your data, as configured in your deployment. The following tools are available for more advanced use cases: @@ -50,4 +50,4 @@ Use ingest pipelines for data enrichment, normalization, and more. Elastic connectors use a default ingest pipeline, which you can copy and customize to meet your needs. -Refer to {ref}/ingest-pipeline-search.html[ingest pipelines in Search] in the {es} documentation. \ No newline at end of file +Refer to {ref}/ingest-pipeline-search.html[ingest pipelines in Search] in the {es} documentation. diff --git a/docs/reference/esql/functions/examples/bucket.asciidoc b/docs/reference/esql/functions/examples/bucket.asciidoc index 4afea3066033..264efc191748 100644 --- a/docs/reference/esql/functions/examples/bucket.asciidoc +++ b/docs/reference/esql/functions/examples/bucket.asciidoc @@ -116,4 +116,18 @@ include::{esql-specs}/bucket.csv-spec[tag=reuseGroupingFunctionWithExpression] |=== include::{esql-specs}/bucket.csv-spec[tag=reuseGroupingFunctionWithExpression-result] |=== +Sometimes you need to change the start value of each bucket by a given duration (similar to date histogram +aggregation's <> parameter). To do so, you will need to +take into account how the language handles expressions within the `STATS` command: if these contain functions or +arithmetic operators, a virtual `EVAL` is inserted before and/or after the `STATS` command. Consequently, a double +compensation is needed to adjust the bucketed date value before the aggregation and then again after. 
For instance, +inserting a negative offset of `1 hour` to buckets of `1 year` looks like this: +[source.merge.styled,esql] +---- +include::{esql-specs}/bucket.csv-spec[tag=bucketWithOffset] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/bucket.csv-spec[tag=bucketWithOffset-result] +|=== diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index 18802f5ff8fe..3d96de05c840 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -1598,7 +1598,8 @@ "FROM employees\n| WHERE hire_date >= \"1985-01-01T00:00:00Z\" AND hire_date < \"1986-01-01T00:00:00Z\"\n| STATS c = COUNT(1) BY b = BUCKET(salary, 5000.)\n| SORT b", "FROM sample_data \n| WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW()\n| STATS COUNT(*) BY bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW())", "FROM employees\n| WHERE hire_date >= \"1985-01-01T00:00:00Z\" AND hire_date < \"1986-01-01T00:00:00Z\"\n| STATS AVG(salary) BY bucket = BUCKET(hire_date, 20, \"1985-01-01T00:00:00Z\", \"1986-01-01T00:00:00Z\")\n| SORT bucket", - "FROM employees\n| STATS s1 = b1 + 1, s2 = BUCKET(salary / 1000 + 999, 50.) + 2 BY b1 = BUCKET(salary / 100 + 99, 50.), b2 = BUCKET(salary / 1000 + 999, 50.)\n| SORT b1, b2\n| KEEP s1, b1, s2, b2" + "FROM employees\n| STATS s1 = b1 + 1, s2 = BUCKET(salary / 1000 + 999, 50.) + 2 BY b1 = BUCKET(salary / 100 + 99, 50.), b2 = BUCKET(salary / 1000 + 999, 50.)\n| SORT b1, b2\n| KEEP s1, b1, s2, b2", + "FROM employees \n| STATS dates = VALUES(birth_date) BY b = BUCKET(birth_date + 1 HOUR, 1 YEAR) - 1 HOUR\n| EVAL d_count = MV_COUNT(dates)\n| SORT d_count\n| LIMIT 3" ], "preview" : false, "snapshot_only" : false diff --git a/docs/reference/ingest/search-inference-processing.asciidoc b/docs/reference/ingest/search-inference-processing.asciidoc index 006cc9629447..73642b3bb344 100644 --- a/docs/reference/ingest/search-inference-processing.asciidoc +++ b/docs/reference/ingest/search-inference-processing.asciidoc @@ -88,7 +88,7 @@ The `monitor_ml` <> is req To create the index-specific ML inference pipeline, go to *Search -> Content -> Indices -> -> Pipelines* in the Kibana UI. -If you only see the `ent-search-generic-ingestion` pipeline, you will need to click *Copy and customize* to create index-specific pipelines. +If you only see the `search-default-ingestion` pipeline, you will need to click *Copy and customize* to create index-specific pipelines. This will create the `{index_name}@ml-inference` pipeline. Once your index-specific ML inference pipeline is ready, you can add inference processors that use your ML trained models. diff --git a/docs/reference/ingest/search-ingest-pipelines.asciidoc b/docs/reference/ingest/search-ingest-pipelines.asciidoc index e414dacaab96..272c6ba2884b 100644 --- a/docs/reference/ingest/search-ingest-pipelines.asciidoc +++ b/docs/reference/ingest/search-ingest-pipelines.asciidoc @@ -40,7 +40,7 @@ Considerations such as error handling, conditional execution, sequencing, versio To this end, when you create indices for search use cases, (including {enterprise-search-ref}/crawler.html[Elastic web crawler], <>. , and API indices), each index already has a pipeline set up with several processors that optimize your content for search. -This pipeline is called `ent-search-generic-ingestion`. +This pipeline is called `search-default-ingestion`. 
While it is a "managed" pipeline (meaning it should not be tampered with), you can view its details via the Kibana UI or the Elasticsearch API. You can also <>. @@ -56,14 +56,14 @@ This will not effect existing indices. Each index also provides the capability to easily create index-specific ingest pipelines with customizable processing. If you need that extra flexibility, you can create a custom pipeline by going to your pipeline settings and choosing to "copy and customize". -This will replace the index's use of `ent-search-generic-ingestion` with 3 newly generated pipelines: +This will replace the index's use of `search-default-ingestion` with 3 newly generated pipelines: 1. `` 2. `@custom` 3. `@ml-inference` -Like `ent-search-generic-ingestion`, the first of these is "managed", but the other two can and should be modified to fit your needs. -You can view these pipelines using the platform tools (Kibana UI, Elasticsearch API), and can also +Like `search-default-ingestion`, the first of these is "managed", but the other two can and should be modified to fit your needs. +You can view these pipelines using the platform tools (Kibana UI, Elasticsearch API), and can also <>. [discrete#ingest-pipeline-search-pipeline-settings] @@ -123,7 +123,7 @@ If the pipeline is not specified, the underscore-prefixed fields will actually b === Details [discrete#ingest-pipeline-search-details-generic-reference] -==== `ent-search-generic-ingestion` Reference +==== `search-default-ingestion` Reference You can access this pipeline with the <> or via Kibana's < Ingest Pipelines>> UI. @@ -149,7 +149,7 @@ If you want to make customizations, we recommend you utilize index-specific pipe [discrete#ingest-pipeline-search-details-generic-reference-params] ===== Control flow parameters -The `ent-search-generic-ingestion` pipeline does not always run all processors. +The `search-default-ingestion` pipeline does not always run all processors. It utilizes a feature of ingest pipelines to <> based on the contents of each individual document. * `_extract_binary_content` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `attachment`, `set_body`, and `remove_replacement_chars` processors. @@ -167,8 +167,8 @@ See <>. ==== Index-specific ingest pipelines In the Kibana UI for your index, by clicking on the Pipelines tab, then *Settings > Copy and customize*, you can quickly generate 3 pipelines which are specific to your index. -These 3 pipelines replace `ent-search-generic-ingestion` for the index. -There is nothing lost in this action, as the `` pipeline is a superset of functionality over the `ent-search-generic-ingestion` pipeline. +These 3 pipelines replace `search-default-ingestion` for the index. +There is nothing lost in this action, as the `` pipeline is a superset of functionality over the `search-default-ingestion` pipeline. [IMPORTANT] ==== @@ -179,7 +179,7 @@ Refer to the Elastic subscriptions pages for https://www.elastic.co/subscription [discrete#ingest-pipeline-search-details-specific-reference] ===== `` Reference -This pipeline looks and behaves a lot like the <>, but with <>. +This pipeline looks and behaves a lot like the <>, but with <>. 
[WARNING] ========================= @@ -197,7 +197,7 @@ If you want to make customizations, we recommend you utilize <>, the index-specific pipeline also defines: +In addition to the processors inherited from the <>, the index-specific pipeline also defines: * `index_ml_inference_pipeline` - this uses the <> processor to run the `@ml-inference` pipeline. This processor will only be run if the source document includes a `_run_ml_inference` field with the value `true`. @@ -206,7 +206,7 @@ In addition to the processors inherited from the <` pipeline does not always run all processors. +Like the `search-default-ingestion` pipeline, the `` pipeline does not always run all processors. In addition to the `_extract_binary_content` and `_reduce_whitespace` control flow parameters, the `` pipeline also supports: * `_run_ml_inference` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `index_ml_inference_pipeline` processor. @@ -220,7 +220,7 @@ See <>. ===== `@ml-inference` Reference This pipeline is empty to start (no processors), but can be added to via the Kibana UI either through the Pipelines tab of your index, or from the *Stack Management > Ingest Pipelines* page. -Unlike the `ent-search-generic-ingestion` pipeline and the `` pipeline, this pipeline is NOT "managed". +Unlike the `search-default-ingestion` pipeline and the `` pipeline, this pipeline is NOT "managed". It's possible to add one or more ML inference pipelines to an index in the *Content* UI. This pipeline will serve as a container for all of the ML inference pipelines configured for the index. @@ -241,7 +241,7 @@ The `monitor_ml` Elasticsearch cluster permission is required in order to manage This pipeline is empty to start (no processors), but can be added to via the Kibana UI either through the Pipelines tab of your index, or from the *Stack Management > Ingest Pipelines* page. -Unlike the `ent-search-generic-ingestion` pipeline and the `` pipeline, this pipeline is NOT "managed". +Unlike the `search-default-ingestion` pipeline and the `` pipeline, this pipeline is NOT "managed". You are encouraged to make additions and edits to this pipeline, provided its name remains the same. This provides a convenient hook from which to add custom processing and transformations for your data. @@ -272,9 +272,12 @@ extraction. These changes should be re-applied to each index's `@custom` pipeline in order to ensure a consistent data processing experience. In 8.5+, the <> is required *in addition* to the configurations mentioned in the {enterprise-search-ref}/crawler-managing.html#crawler-managing-binary-content[Elastic web crawler Guide]. -* `ent-search-generic-ingestion` - Since 8.5, Native Connectors, Connector Clients, and new (>8.4) Elastic web crawler indices will all make use of this pipeline by default. +* `ent-search-generic-ingestion` - Since 8.5, Native Connectors, Connector Clients, and new (>8.4) Elastic web crawler indices all made use of this pipeline by default. + This pipeline evolved into the `search-default-ingestion` pipeline. + +* `search-default-ingestion` - Since 9.0, Connectors have made use of this pipeline by default. You can <> above. - As this pipeline is "managed", any modifications that were made to `app_search_crawler` and/or `ent_search_crawler` should NOT be made to `ent-search-generic-ingestion`. 
+ As this pipeline is "managed", any modifications that were made to `app_search_crawler` and/or `ent_search_crawler` should NOT be made to `search-default-ingestion`. Instead, if such customizations are desired, you should utilize <>, placing all modifications in the `@custom` pipeline(s). ============= diff --git a/docs/reference/ingest/search-nlp-tutorial.asciidoc b/docs/reference/ingest/search-nlp-tutorial.asciidoc index afdceeeb8bac..b23a15c96b1a 100644 --- a/docs/reference/ingest/search-nlp-tutorial.asciidoc +++ b/docs/reference/ingest/search-nlp-tutorial.asciidoc @@ -164,8 +164,8 @@ Now it's time to create an inference pipeline. 1. From the overview page for your `search-photo-comments` index in "Search", click the *Pipelines* tab. By default, Elasticsearch does not create any index-specific ingest pipelines. -2. Because we want to customize these pipelines, we need to *Copy and customize* the `ent-search-generic-ingestion` ingest pipeline. -Find this option above the settings for the `ent-search-generic-ingestion` ingest pipeline. +2. Because we want to customize these pipelines, we need to *Copy and customize* the `search-default-ingestion` ingest pipeline. +Find this option above the settings for the `search-default-ingestion` ingest pipeline. This will create two new index-specific ingest pipelines. Next, we'll add an inference pipeline. diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index f20e9148bf5e..c7df40ff5e07 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -22,6 +22,9 @@ A <> that replaces the functionality of a traditi `knn`:: A <> that replaces the functionality of a <>. +`rescorer`:: +A <> that replaces the functionality of the <>. + `rrf`:: A <> that produces top documents from <>. @@ -371,6 +374,122 @@ GET movies/_search ---- // TEST[skip:uses ELSER] +[[rescorer-retriever]] +==== Rescorer Retriever + +The `rescorer` retriever re-scores only the results produced by its child retriever. +For the `standard` and `knn` retrievers, the `window_size` parameter specifies the number of documents examined per shard. + +For compound retrievers like `rrf`, the `window_size` parameter defines the total number of documents examined globally. + +When using the `rescorer`, an error is returned if the following conditions are not met: + +* The minimum configured rescore's `window_size` is: +** Greater than or equal to the `size` of the parent retriever for nested `rescorer` setups. +** Greater than or equal to the `size` of the search request when used as the primary retriever in the tree. + +* And the maximum rescore's `window_size` is: +** Smaller than or equal to the `size` or `rank_window_size` of the child retriever. + +[discrete] +[[rescorer-retriever-parameters]] +===== Parameters + +`rescore`:: +(Required. <>) ++ +Defines the <> applied sequentially to the top documents returned by the child retriever. + +`retriever`:: +(Required. <>) ++ +Specifies the child retriever responsible for generating the initial set of top documents to be re-ranked. + +`filter`:: +(Optional. <>) ++ +Applies a <> to the retriever, ensuring that all documents match the filter criteria without affecting their scores. + +[discrete] +[[rescorer-retriever-example]] +==== Example + +The `rescorer` retriever can be placed at any level within the retriever tree. 
+The following example demonstrates a `rescorer` applied to the results produced by an `rrf` retriever: + +[source,console] +---- +GET movies/_search +{ + "size": 10, <1> + "retriever": { + "rescorer": { <2> + "rescore": { + "query": { <3> + "window_size": 50, <4> + "rescore_query": { + "script_score": { + "script": { + "source": "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0", + "params": { + "queryVector": [-0.5, 90.0, -10, 14.8, -156.0] + } + } + } + } + } + }, + "retriever": { <5> + "rrf": { + "rank_window_size": 100, <6> + "retrievers": [ + { + "standard": { + "query": { + "sparse_vector": { + "field": "plot_embedding", + "inference_id": "my-elser-model", + "query": "films that explore psychological depths" + } + } + } + }, + { + "standard": { + "query": { + "multi_match": { + "query": "crime", + "fields": [ + "plot", + "title" + ] + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [10, 22, 77], + "k": 10, + "num_candidates": 10 + } + } + ] + } + } + } + } +} +---- +// TEST[skip:uses ELSER] +<1> Specifies the number of top documents to return in the final response. +<2> A `rescorer` retriever applied as the final step. +<3> The definition of the `query` rescorer. +<4> Defines the number of documents to rescore from the child retriever. +<5> Specifies the child retriever definition. +<6> Defines the number of documents returned by the `rrf` retriever, which limits the available documents to rescore. + [[text-similarity-reranker-retriever]] ==== Text Similarity Re-ranker Retriever @@ -777,4 +896,4 @@ When a retriever is specified as part of a search, the following elements are no * <> * <> * <> -* <> +* <> use a <> instead diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 86a0e567f6ee..f3576db0c786 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -25,6 +25,21 @@ TIP: This setup doesn't run multiple {es} nodes or {kib} by default. To create a multi-node cluster with {kib}, use Docker Compose instead. See <>. +[[docker-wolfi-hardened-image]] +===== Hardened Docker images + +You can also use the hardened https://wolfi.dev/[Wolfi] image for additional security. +Using Wolfi images requires Docker version 20.10.10 or higher. + +To use the Wolfi image, append `-wolfi` to the image tag in the Docker command. + +For example: + +[source,sh,subs="attributes"] +---- +docker pull {docker-wolfi-image} +---- + ===== Start a single-node cluster . Install Docker. Visit https://docs.docker.com/get-docker/[Get Docker] to @@ -55,12 +70,6 @@ docker pull {docker-image} // REVIEWED[DEC.10.24] -- -Alternatevely, you can use the Wolfi based image. Using Wolfi based images requires Docker version 20.10.10 or superior. [source,sh,subs="attributes"] ---- docker pull {docker-wolfi-image} ---- - . Optional: Install https://docs.sigstore.dev/cosign/system_config/installation/[Cosign] for your environment. Then use Cosign to verify the {es} image's signature.
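As a sketch of that final verification step (assuming the Cosign public key Elastic publishes at `artifacts.elastic.co`; substitute the image and tag you pulled):

[source,sh,subs="attributes"]
----
# Download Elastic's Cosign public key and verify the pulled image's signature
wget https://artifacts.elastic.co/cosign.pub
cosign verify --key cosign.pub {docker-image}
----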
diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index a6b8a31fc389..25f4e97bd12e 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -11,6 +11,7 @@ package org.elasticsearch.entitlement.bridge; import java.net.URL; import java.net.URLStreamHandlerFactory; +import java.util.List; public interface EntitlementChecker { @@ -29,4 +30,10 @@ public interface EntitlementChecker { void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent); void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory); + + // Process creation + void check$$start(Class callerClass, ProcessBuilder that, ProcessBuilder.Redirect[] redirects); + + void check$java_lang_ProcessBuilder$startPipeline(Class callerClass, List builders); + } diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java index 1ac4a7506eac..3cc4b97e9bfe 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java @@ -29,43 +29,47 @@ import java.util.Set; import java.util.stream.Collectors; import static java.util.Map.entry; +import static org.elasticsearch.entitlement.qa.common.RestEntitlementsCheckAction.CheckAction.deniedToPlugins; +import static org.elasticsearch.entitlement.qa.common.RestEntitlementsCheckAction.CheckAction.forPlugins; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestEntitlementsCheckAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckAction.class); private final String prefix; - private record CheckAction(Runnable action, boolean isServerOnly) { - - static CheckAction serverOnly(Runnable action) { + record CheckAction(Runnable action, boolean isAlwaysDeniedToPlugins) { + /** + * These cannot be granted to plugins, so our test plugins cannot test the "allowed" case. + * Used both for always-denied entitlements as well as those granted only to the server itself. 
+ */ + static CheckAction deniedToPlugins(Runnable action) { return new CheckAction(action, true); } - static CheckAction serverAndPlugin(Runnable action) { + static CheckAction forPlugins(Runnable action) { return new CheckAction(action, false); } } private static final Map checkActions = Map.ofEntries( - entry("runtime_exit", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeExit)), - entry("runtime_halt", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeHalt)), - entry("create_classloader", CheckAction.serverAndPlugin(RestEntitlementsCheckAction::createClassLoader)) + entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), + entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), + entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), + // entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), + entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)) ); @SuppressForbidden(reason = "Specifically testing Runtime.exit") private static void runtimeExit() { - logger.info("Calling Runtime.exit;"); Runtime.getRuntime().exit(123); } @SuppressForbidden(reason = "Specifically testing Runtime.halt") private static void runtimeHalt() { - logger.info("Calling Runtime.halt;"); Runtime.getRuntime().halt(123); } private static void createClassLoader() { - logger.info("Calling new URLClassLoader"); try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) { logger.info("Created URLClassLoader [{}]", classLoader.getName()); } catch (IOException e) { @@ -73,6 +77,18 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { } } + private static void processBuilder_start() { + // TODO: processBuilder().start(); + } + + private static void processBuilder_startPipeline() { + try { + ProcessBuilder.startPipeline(List.of()); + } catch (IOException e) { + throw new IllegalStateException(e); + } + } + public RestEntitlementsCheckAction(String prefix) { this.prefix = prefix; } @@ -80,7 +96,7 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { public static Set getServerAndPluginsCheckActions() { return checkActions.entrySet() .stream() - .filter(kv -> kv.getValue().isServerOnly() == false) + .filter(kv -> kv.getValue().isAlwaysDeniedToPlugins() == false) .map(Map.Entry::getKey) .collect(Collectors.toSet()); } @@ -112,6 +128,7 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { } return channel -> { + logger.info("Calling check action [{}]", actionName); checkAction.action().run(); channel.sendResponse(new RestResponse(RestStatus.OK, Strings.format("Succesfully executed action [%s]", actionName))); }; diff --git a/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementAllowedNonModularPlugin.java b/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementAllowedNonModularPlugin.java index d65981c30f0b..82146e6a8775 100644 --- a/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementAllowedNonModularPlugin.java +++ b/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementAllowedNonModularPlugin.java @@ -27,7 +27,6 @@ import java.util.function.Predicate; 
import java.util.function.Supplier; public class EntitlementAllowedNonModularPlugin extends Plugin implements ActionPlugin { - @Override public List getRestHandlers( final Settings settings, diff --git a/libs/entitlement/qa/entitlement-allowed/src/main/java/org/elasticsearch/entitlement/qa/EntitlementAllowedPlugin.java b/libs/entitlement/qa/entitlement-allowed/src/main/java/org/elasticsearch/entitlement/qa/EntitlementAllowedPlugin.java index d81e23e311be..8649daf272e7 100644 --- a/libs/entitlement/qa/entitlement-allowed/src/main/java/org/elasticsearch/entitlement/qa/EntitlementAllowedPlugin.java +++ b/libs/entitlement/qa/entitlement-allowed/src/main/java/org/elasticsearch/entitlement/qa/EntitlementAllowedPlugin.java @@ -27,7 +27,6 @@ import java.util.function.Predicate; import java.util.function.Supplier; public class EntitlementAllowedPlugin extends Plugin implements ActionPlugin { - @Override public List getRestHandlers( final Settings settings, diff --git a/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementDeniedNonModularPlugin.java b/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementDeniedNonModularPlugin.java index 0f908d84260f..7ca89c735a60 100644 --- a/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementDeniedNonModularPlugin.java +++ b/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/java/org/elasticsearch/entitlement/qa/nonmodular/EntitlementDeniedNonModularPlugin.java @@ -27,7 +27,6 @@ import java.util.function.Predicate; import java.util.function.Supplier; public class EntitlementDeniedNonModularPlugin extends Plugin implements ActionPlugin { - @Override public List getRestHandlers( final Settings settings, diff --git a/libs/entitlement/qa/entitlement-denied/src/main/java/org/elasticsearch/entitlement/qa/EntitlementDeniedPlugin.java b/libs/entitlement/qa/entitlement-denied/src/main/java/org/elasticsearch/entitlement/qa/EntitlementDeniedPlugin.java index 0ed27e2e576e..2a2fd35d47cf 100644 --- a/libs/entitlement/qa/entitlement-denied/src/main/java/org/elasticsearch/entitlement/qa/EntitlementDeniedPlugin.java +++ b/libs/entitlement/qa/entitlement-denied/src/main/java/org/elasticsearch/entitlement/qa/EntitlementDeniedPlugin.java @@ -27,7 +27,6 @@ import java.util.function.Predicate; import java.util.function.Supplier; public class EntitlementDeniedPlugin extends Plugin implements ActionPlugin { - @Override public List getRestHandlers( final Settings settings, diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 9118f67cdc14..8e4cddc4d63e 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -53,6 +53,7 @@ import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNN public class EntitlementInitialization { private static final String POLICY_FILE_NAME = "entitlement-policy.yaml"; + private static final Module ENTITLEMENTS_MODULE = PolicyManager.class.getModule(); private static ElasticsearchEntitlementChecker manager; @@ -92,7 +93,7 @@ public class EntitlementInitialization { "server", List.of(new 
Scope("org.elasticsearch.server", List.of(new ExitVMEntitlement(), new CreateClassLoaderEntitlement()))) ); - return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); + return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver(), ENTITLEMENTS_MODULE); } private static Map createPluginPolicies(Collection pluginData) throws IOException { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index a5ca0543ad15..75365fbb74d6 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -14,6 +14,7 @@ import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import java.net.URL; import java.net.URLStreamHandlerFactory; +import java.util.List; /** * Implementation of the {@link EntitlementChecker} interface, providing additional @@ -67,4 +68,14 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { ) { policyManager.checkCreateClassLoader(callerClass); } + + @Override + public void check$$start(Class callerClass, ProcessBuilder processBuilder, ProcessBuilder.Redirect[] redirects) { + policyManager.checkStartProcess(callerClass); + } + + @Override + public void check$java_lang_ProcessBuilder$startPipeline(Class callerClass, List builders) { + policyManager.checkStartProcess(callerClass); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 8d3efe4eb98e..e06f7768eb8b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -15,6 +15,7 @@ import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import java.lang.StackWalker.StackFrame; import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; import java.util.ArrayList; @@ -29,6 +30,10 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; +import static java.util.Objects.requireNonNull; +import static java.util.function.Predicate.not; + public class PolicyManager { private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); @@ -63,6 +68,11 @@ public class PolicyManager { private static final Set systemModules = findSystemModules(); + /** + * Frames originating from this module are ignored in the permission logic. 
+ */ + private final Module entitlementsModule; + private static Set findSystemModules() { var systemModulesDescriptors = ModuleFinder.ofSystem() .findAll() @@ -77,19 +87,44 @@ public class PolicyManager { .collect(Collectors.toUnmodifiableSet()); } - public PolicyManager(Policy defaultPolicy, Map pluginPolicies, Function, String> pluginResolver) { - this.serverEntitlements = buildScopeEntitlementsMap(Objects.requireNonNull(defaultPolicy)); - this.pluginsEntitlements = Objects.requireNonNull(pluginPolicies) - .entrySet() + public PolicyManager( + Policy defaultPolicy, + Map pluginPolicies, + Function, String> pluginResolver, + Module entitlementsModule + ) { + this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(defaultPolicy)); + this.pluginsEntitlements = requireNonNull(pluginPolicies).entrySet() .stream() .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue()))); this.pluginResolver = pluginResolver; + this.entitlementsModule = entitlementsModule; } private static Map> buildScopeEntitlementsMap(Policy policy) { return policy.scopes.stream().collect(Collectors.toUnmodifiableMap(scope -> scope.name, scope -> scope.entitlements)); } + public void checkStartProcess(Class callerClass) { + neverEntitled(callerClass, "start process"); + } + + private void neverEntitled(Class callerClass, String operationDescription) { + var requestingModule = requestingModule(callerClass); + if (isTriviallyAllowed(requestingModule)) { + return; + } + + throw new NotEntitledException( + Strings.format( + "Not entitled: caller [%s], module [%s], operation [%s]", + callerClass, + requestingModule.getName(), + operationDescription + ) + ); + } + public void checkExitVM(Class callerClass) { checkEntitlementPresent(callerClass, ExitVMEntitlement.class); } @@ -185,7 +220,16 @@ public class PolicyManager { return requestingModule.isNamed() && requestingModule.getLayer() == ModuleLayer.boot(); } - private static Module requestingModule(Class callerClass) { + /** + * Walks the stack to determine which module's entitlements should be checked. + * + * @param callerClass when non-null will be used if its module is suitable; + * this is a fast-path check that can avoid the stack walk + * in cases where the caller class is available. + * @return the requesting module, or {@code null} if the entire call stack + * comes from modules that are trusted. + */ + Module requestingModule(Class callerClass) { if (callerClass != null) { Module callerModule = callerClass.getModule(); if (systemModules.contains(callerModule) == false) { @@ -193,21 +237,34 @@ public class PolicyManager { return callerModule; } } - int framesToSkip = 1 // getCallingClass (this method) - + 1 // the checkXxx method - + 1 // the runtime config method - + 1 // the instrumented method - ; - Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) - .walk( - s -> s.skip(framesToSkip) - .map(f -> f.getDeclaringClass().getModule()) - .filter(m -> systemModules.contains(m) == false) - .findFirst() - ); + Optional module = StackWalker.getInstance(RETAIN_CLASS_REFERENCE) + .walk(frames -> findRequestingModule(frames.map(StackFrame::getDeclaringClass))); return module.orElse(null); } + /** + * Given a stream of classes corresponding to the frames from a {@link StackWalker}, + * returns the module whose entitlements should be checked. 
+ * + * @throws NullPointerException if any class in the stream is {@code null} or does not belong to a module + */ + Optional<Module> findRequestingModule(Stream<Class<?>> classes) { + return classes.map(Objects::requireNonNull) + .map(PolicyManager::moduleOf) + .filter(m -> m != entitlementsModule) // Ignore the entitlements library itself + .filter(not(systemModules::contains)) // Skip trusted JDK modules + .findFirst(); + } + + private static Module moduleOf(Class<?> c) { + var result = c.getModule(); + if (result == null) { + throw new NullPointerException("Entitlements system does not support non-modular class [" + c.getName() + "]"); + } else { + return result; + } + } + private static boolean isTriviallyAllowed(Module requestingModule) { if (requestingModule == null) { logger.debug("Entitlement trivially allowed: entire call stack is in composed of classes in system modules"); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 45bdf2e45782..0789fcc8dc77 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; import static java.util.Map.entry; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; @@ -37,11 +38,14 @@ import static org.hamcrest.Matchers.sameInstance; @ESTestCase.WithoutSecurityManager public class PolicyManagerTests extends ESTestCase { + private static final Module NO_ENTITLEMENTS_MODULE = null; + public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { var policyManager = new PolicyManager( createEmptyTestServerPolicy(), Map.of("plugin1", createPluginPolicy("plugin.module")), - c -> "plugin1" + c -> "plugin1", + NO_ENTITLEMENTS_MODULE ); // Any class from the current module (unnamed) will do @@ -62,7 +66,7 @@ public class PolicyManagerTests extends ESTestCase { } public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { - var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1"); + var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); @@ -82,7 +86,7 @@ public class PolicyManagerTests extends ESTestCase { } public void testGetEntitlementsFailureIsCached() { - var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1"); + var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); @@ -103,7 +107,8 @@ var policyManager = new PolicyManager( createEmptyTestServerPolicy(), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), - c -> "plugin2" + c -> "plugin2", + NO_ENTITLEMENTS_MODULE ); // Any class from the current module (unnamed) will do @@ -115,7 +120,7 @@ } public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotFoundException { - var policyManager = new
PolicyManager(createTestServerPolicy("example"), Map.of(), c -> null); + var policyManager = new PolicyManager(createTestServerPolicy("example"), Map.of(), c -> null, NO_ENTITLEMENTS_MODULE); // Tests do not run modular, so we cannot use a server class. // But we know that in production code the server module and its classes are in the boot layer. @@ -138,7 +143,7 @@ public class PolicyManagerTests extends ESTestCase { } public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException { - var policyManager = new PolicyManager(createTestServerPolicy("jdk.httpserver"), Map.of(), c -> null); + var policyManager = new PolicyManager(createTestServerPolicy("jdk.httpserver"), Map.of(), c -> null, NO_ENTITLEMENTS_MODULE); // Tests do not run modular, so we cannot use a server class. // But we know that in production code the server module and its classes are in the boot layer. @@ -155,12 +160,13 @@ public class PolicyManagerTests extends ESTestCase { public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOException, ClassNotFoundException { final Path home = createTempDir(); - Path jar = creteMockPluginJar(home); + Path jar = createMockPluginJar(home); var policyManager = new PolicyManager( createEmptyTestServerPolicy(), Map.of("mock-plugin", createPluginPolicy("org.example.plugin")), - c -> "mock-plugin" + c -> "mock-plugin", + NO_ENTITLEMENTS_MODULE ); var layer = createLayerForJar(jar, "org.example.plugin"); @@ -179,7 +185,8 @@ public class PolicyManagerTests extends ESTestCase { var policyManager = new PolicyManager( createEmptyTestServerPolicy(), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), - c -> "plugin2" + c -> "plugin2", + NO_ENTITLEMENTS_MODULE ); // Any class from the current module (unnamed) will do @@ -197,6 +204,73 @@ public class PolicyManagerTests extends ESTestCase { assertThat(entitlementsAgain, sameInstance(cachedResult)); } + public void testRequestingModuleFastPath() throws IOException, ClassNotFoundException { + var callerClass = makeClassInItsOwnModule(); + assertEquals(callerClass.getModule(), policyManagerWithEntitlementsModule(NO_ENTITLEMENTS_MODULE).requestingModule(callerClass)); + } + + public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoundException { + var requestingClass = makeClassInItsOwnModule(); + var runtimeClass = makeClassInItsOwnModule(); // A class in the entitlements library itself + var ignorableClass = makeClassInItsOwnModule(); + var systemClass = Object.class; + + var policyManager = policyManagerWithEntitlementsModule(runtimeClass.getModule()); + + var requestingModule = requestingClass.getModule(); + + assertEquals( + "Skip one system frame", + requestingModule, + policyManager.findRequestingModule(Stream.of(systemClass, requestingClass, ignorableClass)).orElse(null) + ); + assertEquals( + "Skip multiple system frames", + requestingModule, + policyManager.findRequestingModule(Stream.of(systemClass, systemClass, systemClass, requestingClass, ignorableClass)) + .orElse(null) + ); + assertEquals( + "Skip system frame between runtime frames", + requestingModule, + policyManager.findRequestingModule(Stream.of(runtimeClass, systemClass, runtimeClass, requestingClass, ignorableClass)) + .orElse(null) + ); + assertEquals( + "Skip runtime frame between system frames", + requestingModule, + policyManager.findRequestingModule(Stream.of(systemClass, runtimeClass, systemClass, requestingClass, ignorableClass)) + .orElse(null) + ); + assertEquals( + "No system 
frames", + requestingModule, + policyManager.findRequestingModule(Stream.of(requestingClass, ignorableClass)).orElse(null) + ); + assertEquals( + "Skip runtime frames up to the first system frame", + requestingModule, + policyManager.findRequestingModule(Stream.of(runtimeClass, runtimeClass, systemClass, requestingClass, ignorableClass)) + .orElse(null) + ); + assertThrows( + "Non-modular caller frames are not supported", + NullPointerException.class, + () -> policyManager.findRequestingModule(Stream.of(systemClass, null)) + ); + } + + private static Class makeClassInItsOwnModule() throws IOException, ClassNotFoundException { + final Path home = createTempDir(); + Path jar = createMockPluginJar(home); + var layer = createLayerForJar(jar, "org.example.plugin"); + return layer.findLoader("org.example.plugin").loadClass("q.B"); + } + + private static PolicyManager policyManagerWithEntitlementsModule(Module entitlementsModule) { + return new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "test", entitlementsModule); + } + private static Policy createEmptyTestServerPolicy() { return new Policy("server", List.of()); } @@ -219,7 +293,7 @@ public class PolicyManagerTests extends ESTestCase { ); } - private static Path creteMockPluginJar(Path home) throws IOException { + private static Path createMockPluginJar(Path home) throws IOException { Path jar = home.resolve("mock-plugin.jar"); Map sources = Map.ofEntries( diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java index 5249077bdfdb..7adf6a09e9a1 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java @@ -96,6 +96,8 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase { } logger.info("Executing search"); + // we have to explicitly set error_trace=true for the later exception check for `TimeSeriesIndexSearcher` + client().threadPool().getThreadContext().putHeader("error_trace", "true"); TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg"); ActionFuture searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .addAggregation( diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index a97154fd4d1f..c980aaba7144 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -101,7 +101,12 @@ import org.apache.lucene.analysis.tr.ApostropheFilter; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersions; import 
org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; @@ -134,6 +139,8 @@ import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings; public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, ScriptPlugin { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisPlugin.class); + private final SetOnce<ScriptService> scriptServiceHolder = new SetOnce<>(); private final SetOnce<SynonymsManagementAPIService> synonymsManagementServiceHolder = new SetOnce<>(); @@ -224,6 +231,28 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new)); filters.put("dutch_stem", DutchStemTokenFilterFactory::new); filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new); + filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { + return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings) { + @Override + public TokenStream create(TokenStream tokenStream) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { + throw new IllegalArgumentException( + "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + + "Please change the filter name to [edge_ngram] instead." + ); + } else { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "edgeNGram_deprecation", + "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [edge_ngram] instead." + ); + } + return super.create(tokenStream); + } + + }; + }); filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new)); filters.put("fingerprint", FingerprintTokenFilterFactory::new); filters.put("flatten_graph", FlattenGraphTokenFilterFactory::new); @@ -243,6 +272,28 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri filters.put("min_hash", MinHashTokenFilterFactory::new); filters.put("multiplexer", MultiplexerTokenFilterFactory::new); filters.put("ngram", NGramTokenFilterFactory::new); + filters.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { + return new NGramTokenFilterFactory(indexSettings, environment, name, settings) { + @Override + public TokenStream create(TokenStream tokenStream) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { + throw new IllegalArgumentException( + "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + + "Please change the filter name to [ngram] instead." + ); + } else { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "nGram_deprecation", + "The [nGram] token filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [ngram] instead."
+ ); + } + return super.create(tokenStream); + } + + }; + }); filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new)); filters.put("persian_normalization", PersianNormalizationFilterFactory::new); @@ -294,7 +345,39 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri tokenizers.put("simple_pattern", SimplePatternTokenizerFactory::new); tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new); tokenizers.put("thai", ThaiTokenizerFactory::new); + tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { + throw new IllegalArgumentException( + "The [nGram] tokenizer name was deprecated in 7.6. " + + "Please use the tokenizer name [ngram] for indices created in versions 8 or higher instead." + ); + } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "nGram_tokenizer_deprecation", + "The [nGram] tokenizer name is deprecated and will be removed in a future version. " + + "Please change the tokenizer name to [ngram] instead." + ); + } + return new NGramTokenizerFactory(indexSettings, environment, name, settings); + }); tokenizers.put("ngram", NGramTokenizerFactory::new); + tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { + throw new IllegalArgumentException( + "The [edgeNGram] tokenizer name was deprecated in 7.6. " + + "Please use the tokenizer name [edge_ngram] for indices created in versions 8 or higher instead." + ); + } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "edgeNGram_tokenizer_deprecation", + "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. " + + "Please change the tokenizer name to [edge_ngram] instead."
+ ); + } + return new EdgeNGramTokenizerFactory(indexSettings, environment, name, settings); + }); tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new); tokenizers.put("char_group", CharGroupTokenizerFactory::new); tokenizers.put("classic", ClassicTokenizerFactory::new); @@ -505,17 +588,53 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new)); - tokenizers.add( - PreConfiguredTokenizer.indexVersion( - "edge_ngram", - (version) -> new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE) - ) - ); + tokenizers.add(PreConfiguredTokenizer.indexVersion("edge_ngram", (version) -> { + if (version.onOrAfter(IndexVersions.V_7_3_0)) { + return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); + } + return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); + })); tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1))); tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new)); // TODO deprecate and remove in API // This is already broken with normalization, so backwards compat isn't necessary? tokenizers.add(PreConfiguredTokenizer.singleton("lowercase", XLowerCaseTokenizer::new)); + + tokenizers.add(PreConfiguredTokenizer.indexVersion("nGram", (version) -> { + if (version.onOrAfter(IndexVersions.V_8_0_0)) { + throw new IllegalArgumentException( + "The [nGram] tokenizer name was deprecated in 7.6. " + + "Please use the tokenizer name [ngram] for indices created in versions 8 or higher instead." + ); + } else if (version.onOrAfter(IndexVersions.V_7_6_0)) { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "nGram_tokenizer_deprecation", + "The [nGram] tokenizer name is deprecated and will be removed in a future version. " + + "Please change the tokenizer name to [ngram] instead." + ); + } + return new NGramTokenizer(); + })); + tokenizers.add(PreConfiguredTokenizer.indexVersion("edgeNGram", (version) -> { + if (version.onOrAfter(IndexVersions.V_8_0_0)) { + throw new IllegalArgumentException( + "The [edgeNGram] tokenizer name was deprecated in 7.6. " + + "Please use the tokenizer name [edge_ngram] for indices created in versions 8 or higher instead." + ); + } else if (version.onOrAfter(IndexVersions.V_7_6_0)) { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "edgeNGram_tokenizer_deprecation", + "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. " + + "Please change the tokenizer name to [edge_ngram] instead."
+ ); + } + if (version.onOrAfter(IndexVersions.V_7_3_0)) { + return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); + } + return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); + })); tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new)); return tokenizers; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java new file mode 100644 index 000000000000..9972d58b2dcc --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -0,0 +1,292 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Tokenizer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; +import java.util.Map; + +public class CommonAnalysisPluginTests extends ESTestCase { + + /** + * Check that the deprecated "nGram" filter throws an exception for indices created on or after 8.0.0 and + * logs a warning for earlier indices when the filter is used as a custom filter + */ + public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException { + final Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) + ) + .put("index.analysis.analyzer.custom_analyzer.type", "custom") + .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") + .put("index.analysis.filter.my_ngram.type", "nGram") + .build(); + + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin) + ); + assertEquals( + "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices.
" + + "Please change the filter name to [ngram] instead.", + ex.getMessage() + ); + } + + final Settings settingsPre7 = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) + ) + .put("index.analysis.analyzer.custom_analyzer.type", "custom") + .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") + .put("index.analysis.filter.my_ngram.type", "nGram") + .build(); + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin); + assertWarnings( + "The [nGram] token filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [ngram] instead." + ); + } + } + + /** + * Check that the deprecated "edgeNGram" filter throws exception for indices created since 7.0.0 and + * logs a warning for earlier indices when the filter is used as a custom filter + */ + public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOException { + final Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) + ) + .put("index.analysis.analyzer.custom_analyzer.type", "custom") + .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") + .put("index.analysis.filter.my_ngram.type", "edgeNGram") + .build(); + + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin) + ); + assertEquals( + "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + + "Please change the filter name to [edge_ngram] instead.", + ex.getMessage() + ); + } + + final Settings settingsPre7 = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) + ) + .put("index.analysis.analyzer.custom_analyzer.type", "custom") + .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") + .put("index.analysis.filter.my_ngram.type", "edgeNGram") + .build(); + + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin); + assertWarnings( + "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [edge_ngram] instead." 
+ ); + } + } + + /** + * Check that we log a deprecation warning for the "nGram" and "edgeNGram" tokenizer names starting with 7.6 and + * disallow their use for indices created on or after 8.0 + */ + public void testNGramTokenizerDeprecation() throws IOException { + // tests for prebuilt tokenizer + doTestPrebuiltTokenizerDeprecation( + "nGram", + "ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), + false + ); + doTestPrebuiltTokenizerDeprecation( + "edgeNGram", + "edge_ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), + false + ); + doTestPrebuiltTokenizerDeprecation( + "nGram", + "ngram", + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) + ), + true + ); + doTestPrebuiltTokenizerDeprecation( + "edgeNGram", + "edge_ngram", + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) + ), + true + ); + expectThrows( + IllegalArgumentException.class, + () -> doTestPrebuiltTokenizerDeprecation( + "nGram", + "ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), + true + ) + ); + expectThrows( + IllegalArgumentException.class, + () -> doTestPrebuiltTokenizerDeprecation( + "edgeNGram", + "edge_ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), + true + ) + ); + + // same batch of tests for custom tokenizer definition in the settings + doTestCustomTokenizerDeprecation( + "nGram", + "ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), + false + ); + doTestCustomTokenizerDeprecation( + "edgeNGram", + "edge_ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), + false + ); + doTestCustomTokenizerDeprecation( + "nGram", + "ngram", + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) + ), + true + ); + doTestCustomTokenizerDeprecation( + "edgeNGram", + "edge_ngram", + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) + ), + true + ); + expectThrows( + IllegalArgumentException.class, + () -> doTestCustomTokenizerDeprecation( + "nGram", + "ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), + true + ) + ); + expectThrows( + IllegalArgumentException.class, + () -> doTestCustomTokenizerDeprecation( + "edgeNGram", + "edge_ngram", + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), + true + ) + ); + } + + public void doTestPrebuiltTokenizerDeprecation(String deprecatedName, String replacement, IndexVersion version, boolean expectWarning) + throws IOException { + final Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(IndexMetadata.SETTING_VERSION_CREATED, version) + .build(); + + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + Map<String, TokenizerFactory> tokenizers = createTestAnalysis( + IndexSettingsModule.newIndexSettings("index",
settings), + settings, + commonAnalysisPlugin + ).tokenizer; + TokenizerFactory tokenizerFactory = tokenizers.get(deprecatedName); + + Tokenizer tokenizer = tokenizerFactory.create(); + assertNotNull(tokenizer); + if (expectWarning) { + assertWarnings( + "The [" + + deprecatedName + + "] tokenizer name is deprecated and will be removed in a future version. " + + "Please change the tokenizer name to [" + + replacement + + "] instead." + ); + } + } + } + + public void doTestCustomTokenizerDeprecation(String deprecatedName, String replacement, IndexVersion version, boolean expectWarning) + throws IOException { + final Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(IndexMetadata.SETTING_VERSION_CREATED, version) + .put("index.analysis.analyzer.custom_analyzer.type", "custom") + .put("index.analysis.analyzer.custom_analyzer.tokenizer", "my_tokenizer") + .put("index.analysis.tokenizer.my_tokenizer.type", deprecatedName) + .build(); + + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin); + + if (expectWarning) { + assertWarnings( + "The [" + + deprecatedName + + "] tokenizer name is deprecated and will be removed in a future version. " + + "Please change the tokenizer name to [" + + replacement + + "] instead." + ); + } + } + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java index 11d1653439e5..c998e927e25a 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -34,7 +34,7 @@ import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTok public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase { - private static IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) throws IOException { + private IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) throws IOException { Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, version) @@ -54,6 +54,7 @@ public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase { assertNotNull(analyzer); assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); } + } public void testCustomTokenChars() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java index 8c365a1362f8..35c01b5b9296 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java @@ -161,7 +161,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { for (int i = 0; i < iters; i++) { final Index index = new Index("test", "_na_"); final String name = "ngr"; - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); Builder 
builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); boolean reverse = random().nextBoolean(); if (reverse) { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java index 7b962538c2a1..153c3e954928 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java @@ -56,7 +56,7 @@ public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase { public void testPersianAnalyzerPreLucene10() throws IOException { IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( random(), - IndexVersionUtils.getFirstVersion(), + IndexVersionUtils.getLowestReadCompatibleVersion(), IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) ); Settings settings = ESTestCase.indexSettings(1, 1) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java index 1af44bc71f35..29e27e62e316 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java @@ -57,7 +57,7 @@ public class RomanianAnalyzerTests extends ESTokenStreamTestCase { public void testRomanianAnalyzerPreLucene10() throws IOException { IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( random(), - IndexVersionUtils.getFirstVersion(), + IndexVersionUtils.getLowestReadCompatibleVersion(), IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) ); Settings settings = ESTestCase.indexSettings(1, 1) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java index bb06c221873b..4e774d92e3d6 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -39,7 +39,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testEnglishFilterFactory() throws IOException { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put("index.analysis.filter.my_english.type", "stemmer") .put("index.analysis.filter.my_english.language", "english") @@ -66,7 +66,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put("index.analysis.filter.my_porter2.type", "stemmer") .put("index.analysis.filter.my_porter2.language", "porter2") @@ -90,7 +90,7 @@ public class StemmerTokenFilterFactoryTests extends 
ESTokenStreamTestCase { } public void testMultipleLanguagesThrowsException() throws IOException { - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put("index.analysis.filter.my_english.type", "stemmer") .putList("index.analysis.filter.my_english.language", "english", "light_english") @@ -142,7 +142,7 @@ } public void testKpDeprecation() throws IOException { - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put("index.analysis.filter.my_kp.type", "stemmer") .put("index.analysis.filter.my_kp.language", "kp") @@ -155,7 +155,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { } public void testLovinsDeprecation() throws IOException { - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put("index.analysis.filter.my_lovins.type", "stemmer") .put("index.analysis.filter.my_lovins.language", "lovins") diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java index 2e7f0de027de..37bff97a07ae 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java @@ -9,8 +9,10 @@ package org.elasticsearch.painless.spi; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.painless.spi.annotation.WhitelistAnnotationParser; +import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.lang.reflect.Constructor; @@ -140,7 +142,7 @@ * } * } */ - public static Whitelist loadFromResourceFiles(Class<?> resource, Map<String, WhitelistAnnotationParser> parsers, String... filepaths) { + public static Whitelist loadFromResourceFiles(Class<?> owner, Map<String, WhitelistAnnotationParser> parsers, String...
filepaths) { List<WhitelistClass> whitelistClasses = new ArrayList<>(); List<WhitelistMethod> whitelistStatics = new ArrayList<>(); List<WhitelistClassBinding> whitelistClassBindings = new ArrayList<>(); @@ -153,7 +155,7 @@ try ( LineNumberReader reader = new LineNumberReader( - new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8) + new InputStreamReader(getResourceAsStream(owner, filepath), StandardCharsets.UTF_8) ) ) { @@ -483,16 +485,40 @@ if (javaClassName != null) { throw new IllegalArgumentException("invalid definition: expected closing bracket"); } + } catch (ResourceNotFoundException e) { + throw e; // rethrow } catch (Exception exception) { throw new RuntimeException("error in [" + filepath + "] at line [" + number + "]", exception); } } - ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>) resource::getClassLoader); + ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>) owner::getClassLoader); return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList()); } + private static InputStream getResourceAsStream(Class<?> owner, String name) { + InputStream stream = owner.getResourceAsStream(name); + if (stream == null) { + String msg = "Whitelist file [" + + owner.getPackageName().replace(".", "/") + + "/" + + name + + "] not found from owning class [" + + owner.getName() + + "]."; + if (owner.getModule().isNamed()) { + msg += " Check that the file exists and the package [" + + owner.getPackageName() + + "] is opened " + + "to module " + + WhitelistLoader.class.getModule().getName(); + } + throw new ResourceNotFoundException(msg); + } + return stream; + } + private static List<Object> parseWhitelistAnnotations(Map<String, WhitelistAnnotationParser> parsers, String line) { List<Object> annotations; diff --git a/modules/lang-painless/spi/src/test/java/org/elasticsearch/painless/WhitelistLoaderTests.java b/modules/lang-painless/spi/src/test/java/org/elasticsearch/painless/WhitelistLoaderTests.java index e62d0b438b09..b46bc118e091 100644 --- a/modules/lang-painless/spi/src/test/java/org/elasticsearch/painless/WhitelistLoaderTests.java +++ b/modules/lang-painless/spi/src/test/java/org/elasticsearch/painless/WhitelistLoaderTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.painless; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.painless.spi.WhitelistClass; import org.elasticsearch.painless.spi.WhitelistLoader; @@ -17,10 +18,18 @@ import org.elasticsearch.painless.spi.annotation.DeprecatedAnnotation; import org.elasticsearch.painless.spi.annotation.NoImportAnnotation; import org.elasticsearch.painless.spi.annotation.WhitelistAnnotationParser; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.compiler.InMemoryJavaCompiler; +import org.elasticsearch.test.jar.JarUtils; +import java.lang.ModuleLayer.Controller; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + public class WhitelistLoaderTests extends ESTestCase { public void testUnknownAnnotations() { @@ -96,4 +105,52 @@ public class WhitelistLoaderTests extends ESTestCase { assertEquals(3, count); } + + public void testMissingWhitelistResource() { + var e = expectThrows(ResourceNotFoundException.class, () -> WhitelistLoader.loadFromResourceFiles(Whitelist.class, "missing.txt")); + assertThat( +
e.getMessage(), + equalTo( + "Whitelist file [org/elasticsearch/painless/spi/missing.txt] not found" + + " from owning class [org.elasticsearch.painless.spi.Whitelist]." + ) + ); + } + + public void testMissingWhitelistResourceInModule() throws Exception { + Map<String, CharSequence> sources = new HashMap<>(); + sources.put("module-info", "module m {}"); + sources.put("p.TestOwner", "package p; public class TestOwner { }"); + var classToBytes = InMemoryJavaCompiler.compile(sources); + + Path dir = createTempDir(getTestName()); + Path jar = dir.resolve("m.jar"); + Map<String, byte[]> jarEntries = new HashMap<>(); + jarEntries.put("module-info.class", classToBytes.get("module-info")); + jarEntries.put("p/TestOwner.class", classToBytes.get("p.TestOwner")); + jarEntries.put("p/resource.txt", "# test resource".getBytes(StandardCharsets.UTF_8)); + JarUtils.createJarWithEntries(jar, jarEntries); + + try (var loader = JarUtils.loadJar(jar)) { + Controller controller = JarUtils.loadModule(jar, loader.classloader(), "m"); + Module module = controller.layer().findModule("m").orElseThrow(); + + Class<?> ownerClass = module.getClassLoader().loadClass("p.TestOwner"); + + // first check we get a nice error message when accessing the resource + var e = expectThrows(ResourceNotFoundException.class, () -> WhitelistLoader.loadFromResourceFiles(ownerClass, "resource.txt")); + assertThat( + e.getMessage(), + equalTo( + "Whitelist file [p/resource.txt] not found from owning class [p.TestOwner]." + + " Check that the file exists and the package [p] is opened to module null" + ) + ); + + // now check we can actually read it once the package is opened to us + controller.addOpens(module, "p", WhitelistLoader.class.getModule()); + var whitelist = WhitelistLoader.loadFromResourceFiles(ownerClass, "resource.txt"); + assertThat(whitelist, notNullValue()); + } + } } diff --git a/muted-tests.yml b/muted-tests.yml index 2677b3e75d86..35a9b3168579 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -238,8 +238,6 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/120_data_streams_stats/Multiple data stream} issue: https://github.com/elastic/elasticsearch/issues/118217 -- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/118224 - class: org.elasticsearch.packaging.test.ArchiveTests method: test60StartAndStop issue: https://github.com/elastic/elasticsearch/issues/118216 @@ -290,6 +288,17 @@ tests: - class: org.elasticsearch.cluster.service.MasterServiceTests method: testThreadContext issue: https://github.com/elastic/elasticsearch/issues/118914 +- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT + method: test {yaml=indices.create/20_synthetic_source/create index with use_synthetic_source} + issue: https://github.com/elastic/elasticsearch/issues/118955 +- class: org.elasticsearch.repositories.blobstore.testkit.analyze.SecureHdfsRepositoryAnalysisRestIT + issue: https://github.com/elastic/elasticsearch/issues/118970 +- class: org.elasticsearch.xpack.security.authc.AuthenticationServiceTests + method: testInvalidToken + issue: https://github.com/elastic/elasticsearch/issues/119019 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} + issue: https://github.com/elastic/elasticsearch/issues/116777 # Examples: # diff --git
a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java new file mode 100644 index 000000000000..6f9ab8ccdfde --- /dev/null +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http; + +import org.apache.http.entity.ContentType; +import org.apache.http.nio.entity.NByteArrayEntity; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Request; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.transport.TransportMessageListener; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; + +public class SearchErrorTraceIT extends HttpSmokeTestCase { + private AtomicBoolean hasStackTrace; + + @Before + private void setupMessageListener() { + internalCluster().getDataNodeInstances(TransportService.class).forEach(ts -> { + ts.addMessageListener(new TransportMessageListener() { + @Override + public void onResponseSent(long requestId, String action, Exception error) { + TransportMessageListener.super.onResponseSent(requestId, action, error); + if (action.startsWith("indices:data/read/search")) { + Optional<Throwable> throwable = ExceptionsHelper.unwrapCausesAndSuppressed( + error, + t -> t.getStackTrace().length > 0 + ); + hasStackTrace.set(throwable.isPresent()); + } + } + }); + }); + } + + private void setupIndexWithDocs() { + createIndex("test1", "test2"); + indexRandom( + true, + prepareIndex("test1").setId("1").setSource("field", "foo"), + prepareIndex("test2").setId("10").setSource("field", 5) + ); + refresh(); + } + + public void testSearchFailingQueryErrorTraceDefault() throws IOException { + hasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + getRestClient().performRequest(searchRequest); + assertFalse(hasStackTrace.get()); + } + + public void testSearchFailingQueryErrorTraceTrue() throws IOException { + hasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("error_trace", "true"); + getRestClient().performRequest(searchRequest); + assertTrue(hasStackTrace.get()); + } + + public void
testSearchFailingQueryErrorTraceFalse() throws IOException { + hasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("error_trace", "false"); + getRestClient().performRequest(searchRequest); + assertFalse(hasStackTrace.get()); + } + + public void testMultiSearchFailingQueryErrorTraceDefault() throws IOException { + hasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + XContentType contentType = XContentType.JSON; + MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add( + new SearchRequest("test*").source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo").field("field"))) + ); + Request searchRequest = new Request("POST", "/_msearch"); + byte[] requestBody = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, contentType.xContent()); + searchRequest.setEntity( + new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null)) + ); + getRestClient().performRequest(searchRequest); + assertFalse(hasStackTrace.get()); + } + + public void testMultiSearchFailingQueryErrorTraceTrue() throws IOException { + hasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + XContentType contentType = XContentType.JSON; + MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add( + new SearchRequest("test*").source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo").field("field"))) + ); + Request searchRequest = new Request("POST", "/_msearch"); + byte[] requestBody = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, contentType.xContent()); + searchRequest.setEntity( + new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null)) + ); + searchRequest.addParameter("error_trace", "true"); + getRestClient().performRequest(searchRequest); + assertTrue(hasStackTrace.get()); + } + + public void testMultiSearchFailingQueryErrorTraceFalse() throws IOException { + hasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + XContentType contentType = XContentType.JSON; + MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add( + new SearchRequest("test*").source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo").field("field"))) + ); + Request searchRequest = new Request("POST", "/_msearch"); + byte[] requestBody = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, contentType.xContent()); + searchRequest.setEntity( + new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null)) + ); + searchRequest.addParameter("error_trace", "false"); + getRestClient().performRequest(searchRequest); + + assertFalse(hasStackTrace.get()); + } +} diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index bdee32e596c4..f23b5460f7d5 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -70,4 +70,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("search.vectors/41_knn_search_bbq_hnsw/Test knn search", "Scoring has changed in latest versions") task.skipTest("search.vectors/42_knn_search_bbq_flat/Test knn search", "Scoring has changed in latest versions") task.skipTest("synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set", "Can't work until 
auto-expand replicas is 0-1 for synonyms index") + task.skipTest("search/90_search_after/_shard_doc sort", "restriction has been lifted in latest versions") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index af3d88fb3573..edb684168278 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -2012,3 +2012,43 @@ synthetic_source with copy_to pointing inside dynamic object: hits.hits.2.fields: c.copy.keyword: [ "hello", "zap" ] +--- +create index with use_synthetic_source: + - requires: + cluster_features: ["mapper.synthetic_recovery_source"] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test + body: + settings: + index: + recovery: + use_synthetic_source: true + mapping: + source: + mode: synthetic + + - do: + indices.get_settings: {} + - match: { test.settings.index.mapping.source.mode: synthetic} + - is_true: test.settings.index.recovery.use_synthetic_source + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "field": "aaaa" }' + - '{ "create": { } }' + - '{ "field": "bbbb" }' + + - do: + indices.disk_usage: + index: test + run_expensive_tasks: true + flush: false + - gt: { test.store_size_in_bytes: 0 } + - is_false: test.fields._recovery_source diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/30_rescorer_retriever.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/30_rescorer_retriever.yml new file mode 100644 index 000000000000..2c16de61c6b1 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/30_rescorer_retriever.yml @@ -0,0 +1,225 @@ +setup: + - requires: + cluster_features: [ "search.retriever.rescorer.enabled" ] + reason: "Support for rescorer retriever" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + available: + type: boolean + features: + type: rank_features + + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {"_id": 1 }}' + - '{"features": { "first_stage": 1, "second_stage": 10}, "available": true, "group": 1}' + - '{"index": {"_id": 2 }}' + - '{"features": { "first_stage": 2, "second_stage": 9}, "available": false, "group": 1}' + - '{"index": {"_id": 3 }}' + - '{"features": { "first_stage": 3, "second_stage": 8}, "available": false, "group": 3}' + - '{"index": {"_id": 4 }}' + - '{"features": { "first_stage": 4, "second_stage": 7}, "available": true, "group": 1}' + - '{"index": {"_id": 5 }}' + - '{"features": { "first_stage": 5, "second_stage": 6}, "available": true, "group": 3}' + - '{"index": {"_id": 6 }}' + - '{"features": { "first_stage": 6, "second_stage": 5}, "available": false, "group": 2}' + - '{"index": {"_id": 7 }}' + - '{"features": { "first_stage": 7, "second_stage": 4}, "available": true, "group": 3}' + - '{"index": {"_id": 8 }}' + - '{"features": { "first_stage": 8, "second_stage": 3}, "available": true, "group": 1}' + - '{"index": {"_id": 9 }}' + - '{"features": { "first_stage": 9, "second_stage": 2}, "available": true, "group": 2}' + - '{"index": {"_id": 10 }}' + - '{"features": { "first_stage": 10, "second_stage": 1}, "available": false, 
"group": 1}' + +--- +"Rescorer retriever basic": + - do: + search: + index: test + body: + retriever: + rescorer: + rescore: + window_size: 10 + query: + rescore_query: + rank_feature: + field: "features.second_stage" + linear: { } + query_weight: 0 + retriever: + standard: + query: + rank_feature: + field: "features.first_stage" + linear: { } + size: 2 + + - match: { hits.total.value: 10 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 10.0 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 9.0 } + + - do: + search: + index: test + body: + retriever: + rescorer: + rescore: + window_size: 3 + query: + rescore_query: + rank_feature: + field: "features.second_stage" + linear: {} + query_weight: 0 + retriever: + standard: + query: + rank_feature: + field: "features.first_stage" + linear: {} + size: 2 + + - match: {hits.total.value: 10} + - match: {hits.hits.0._id: "8"} + - match: { hits.hits.0._score: 3.0 } + - match: {hits.hits.1._id: "9"} + - match: { hits.hits.1._score: 2.0 } + +--- +"Rescorer retriever with pre-filters": + - do: + search: + index: test + body: + retriever: + rescorer: + filter: + match: + available: true + rescore: + window_size: 10 + query: + rescore_query: + rank_feature: + field: "features.second_stage" + linear: { } + query_weight: 0 + retriever: + standard: + query: + rank_feature: + field: "features.first_stage" + linear: { } + size: 2 + + - match: { hits.total.value: 6 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 10.0 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1._score: 7.0 } + + - do: + search: + index: test + body: + retriever: + rescorer: + rescore: + window_size: 4 + query: + rescore_query: + rank_feature: + field: "features.second_stage" + linear: { } + query_weight: 0 + retriever: + standard: + filter: + match: + available: true + query: + rank_feature: + field: "features.first_stage" + linear: { } + size: 2 + + - match: { hits.total.value: 6 } + - match: { hits.hits.0._id: "5" } + - match: { hits.hits.0._score: 6.0 } + - match: { hits.hits.1._id: "7" } + - match: { hits.hits.1._score: 4.0 } + +--- +"Rescorer retriever and collapsing": + - do: + search: + index: test + body: + retriever: + rescorer: + rescore: + window_size: 10 + query: + rescore_query: + rank_feature: + field: "features.second_stage" + linear: { } + query_weight: 0 + retriever: + standard: + query: + rank_feature: + field: "features.first_stage" + linear: { } + collapse: + field: group + size: 3 + + - match: { hits.total.value: 10 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 10.0 } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.1._score: 8.0 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2._score: 5.0 } + +--- +"Rescorer retriever and invalid window size": + - do: + catch: "/\\[rescorer\\] requires \\[window_size: 5\\] be greater than or equal to \\[size: 10\\]/" + search: + index: test + body: + retriever: + rescorer: + rescore: + window_size: 5 + query: + rescore_query: + rank_feature: + field: "features.second_stage" + linear: { } + query_weight: 0 + retriever: + standard: + query: + rank_feature: + field: "features.first_stage" + linear: { } + size: 10 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml index 1fefc8bffffa..d3b2b5a41271 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml @@ -218,31 +218,6 @@ - match: {hits.hits.0._source.timestamp: "2019-10-21 00:30:04.828740" } - match: {hits.hits.0.sort: [1571617804828740000] } - ---- -"_shard_doc sort": - - requires: - cluster_features: ["gte_v7.12.0"] - reason: _shard_doc sort was added in 7.12 - - - do: - indices.create: - index: test - - do: - index: - index: test - id: "1" - body: { id: 1, foo: bar, age: 18 } - - - do: - catch: /\[_shard_doc\] sort field cannot be used without \[point in time\]/ - search: - index: test - body: - size: 1 - sort: ["_shard_doc"] - search_after: [ 0L ] - --- "Format sort values": - requires: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index 9bcaf6d75939..ad5ac675359b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -236,7 +236,7 @@ public class ShrinkIndexIT extends ESIntegTestCase { public void testCreateShrinkIndex() { internalCluster().ensureAtLeastNumDataNodes(2); - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomWriteVersion(); prepareCreate("source").setSettings( Settings.builder().put(indexSettings()).put("number_of_shards", randomIntBetween(2, 7)).put("index.version.created", version) ).get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index bc5d65821617..69971a99e3cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -74,6 +74,7 @@ import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassi import static org.elasticsearch.test.XContentTestUtils.convertToMap; import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder; import static org.elasticsearch.test.index.IndexVersionUtils.randomVersion; +import static org.elasticsearch.test.index.IndexVersionUtils.randomWriteVersion; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -234,7 +235,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { private DiscoveryNode randomNode(String nodeId) { Version nodeVersion = VersionUtils.randomVersion(random()); - IndexVersion indexVersion = randomVersion(random()); + IndexVersion indexVersion = randomVersion(); return DiscoveryNodeUtils.builder(nodeId) .roles(emptySet()) .version(nodeVersion, IndexVersion.fromId(indexVersion.id() - 1_000_000), indexVersion) @@ -578,7 +579,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { IndexMetadata.Builder builder = IndexMetadata.builder(name); Settings.Builder settingsBuilder = Settings.builder(); setRandomIndexSettings(random(), settingsBuilder); - settingsBuilder.put(randomSettings(Settings.EMPTY)).put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion(random())); + settingsBuilder.put(randomSettings(Settings.EMPTY)).put(IndexMetadata.SETTING_VERSION_CREATED, 
randomWriteVersion()); builder.settings(settingsBuilder); builder.numberOfShards(randomIntBetween(1, 10)).numberOfReplicas(randomInt(10)); builder.eventIngestedRange(IndexLongFieldRange.UNKNOWN, TransportVersion.current()); @@ -790,7 +791,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { ImmutableOpenMap.of(), null, SnapshotInfoTestUtils.randomUserMetadata(), - randomVersion(random()) + randomVersion() ) ); case 1 -> new RestoreInProgress.Builder().add( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 48aef0d34804..891b0319f880 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -47,7 +47,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { PreBuiltAnalyzers preBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt]; String name = preBuiltAnalyzer.name().toLowerCase(Locale.ROOT); - IndexVersion randomVersion = IndexVersionUtils.randomVersion(random()); + IndexVersion randomVersion = IndexVersionUtils.randomWriteVersion(); if (loadedAnalyzers.containsKey(preBuiltAnalyzer) == false) { loadedAnalyzers.put(preBuiltAnalyzer, new ArrayList<>()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index a54599fc59cc..663aff5578a9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -2050,7 +2050,7 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase { IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersionBetween( random(), - IndexVersionUtils.getFirstVersion(), + IndexVersionUtils.getLowestWriteCompatibleVersion(), IndexVersionUtils.getPreviousVersion(IndexVersions.MERGE_ON_RECOVERY_VERSION) ) ) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index a7efb2fe0e68..fbdcfe26d28e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.rescore.QueryRescoreMode; import org.elasticsearch.search.rescore.QueryRescorerBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.ParseField; @@ -840,6 +841,20 @@ public class QueryRescorerIT extends ESIntegTestCase { } } ); + + assertResponse( + prepareSearch().addSort(SortBuilders.scoreSort()) + .addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME)) + .setTrackScores(true) + .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50), + response -> { + 
assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(response.getHits().getHits().length, equalTo(5)); + for (SearchHit hit : response.getHits().getHits()) { + assertThat(hit.getScore(), equalTo(101f)); + } + } + ); } record GroupDoc(String id, String group, float firstPassScore, float secondPassScore, boolean shouldFilter) {} @@ -879,6 +894,10 @@ public class QueryRescorerIT extends ESIntegTestCase { .setQuery(fieldValueScoreQuery("firstPassScore")) .addRescorer(new QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore"))) .setCollapse(new CollapseBuilder("group")); + if (randomBoolean()) { + request.addSort(SortBuilders.scoreSort()); + request.addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME)); + } assertResponse(request, resp -> { assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getHits().length, equalTo(3)); @@ -958,6 +977,10 @@ public class QueryRescorerIT extends ESIntegTestCase { .addRescorer(new QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore")).setQueryWeight(0f).windowSize(numGroups)) .setCollapse(new CollapseBuilder("group")) .setSize(Math.min(numGroups, 10)); + if (randomBoolean()) { + request.addSort(SortBuilders.scoreSort()); + request.addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME)); + } long expectedNumHits = numHits; assertResponse(request, resp -> { assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 934e3094aafb..f0d8dbc88fff 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -141,6 +141,7 @@ public class TransportVersions { public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0); public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_00_0); public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0); + public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0); /* * WARNING: DO NOT MERGE INTO MAIN! 
diff --git a/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java b/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java index bdff3b16a11a..7de89849e178 100644 --- a/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java +++ b/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java @@ -150,10 +150,26 @@ public class ResolvedIndices { RemoteClusterService remoteClusterService, long startTimeInMillis ) { - final Map remoteClusterIndices = remoteClusterService.groupIndices( + return resolveWithIndexNamesAndOptions( + request.indices(), request.indicesOptions(), - request.indices() + clusterState, + indexNameExpressionResolver, + remoteClusterService, + startTimeInMillis ); + } + + public static ResolvedIndices resolveWithIndexNamesAndOptions( + String[] indexNames, + IndicesOptions indicesOptions, + ClusterState clusterState, + IndexNameExpressionResolver indexNameExpressionResolver, + RemoteClusterService remoteClusterService, + long startTimeInMillis + ) { + final Map remoteClusterIndices = remoteClusterService.groupIndices(indicesOptions, indexNames); + final OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); Index[] concreteLocalIndices = localIndices == null diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index cfc2e1bcdaf2..2041754bc2bc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -456,7 +456,8 @@ public class SearchTransportService { (request, channel, task) -> searchService.executeQueryPhase( request, (SearchShardTask) task, - new ChannelActionListener<>(channel) + new ChannelActionListener<>(channel), + channel.getVersion() ) ); TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, true, QuerySearchResult::new); @@ -468,7 +469,8 @@ public class SearchTransportService { (request, channel, task) -> searchService.executeQueryPhase( request, (SearchShardTask) task, - new ChannelActionListener<>(channel) + new ChannelActionListener<>(channel), + channel.getVersion() ) ); TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, true, ScrollQuerySearchResult::new); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index faa0cd0875c2..bf5ff9e7c514 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -24,6 +24,8 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpTransportSettings; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.telemetry.tracing.TraceContext; @@ -530,6 +532,17 @@ public final class ThreadContext implements Writeable, TraceContext { return value; } + /** + * Returns the header for the given key or defaultValue if not present + */ + public String getHeaderOrDefault(String key, String defaultValue) { + String value = getHeader(key); + if (value == null) { + return defaultValue; + } + return 
value; + } + /** * Returns all of the request headers from the thread's context.
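// Editor's sketch: how the accessor just added is meant to be used. The
// SearchService hunk later in this diff resolves the error_trace header this
// way, treating an absent header as "false". (threadPool is assumed in scope.)
ThreadContext threadContext = threadPool.getThreadContext();
boolean errorTrace = Boolean.parseBoolean(threadContext.getHeaderOrDefault("error_trace", "false"));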
* Be advised, headers might contain credentials. @@ -589,6 +602,14 @@ public final class ThreadContext implements Writeable, TraceContext { threadLocal.set(threadLocal.get().putHeaders(header)); } + public void setErrorTraceTransportHeader(RestRequest r) { + // set whether data nodes should send back stack trace based on the `error_trace` query parameter + if (r.paramAsBoolean("error_trace", RestController.ERROR_TRACE_DEFAULT)) { + // We only set it if error_trace is true (defaults to false) to avoid sending useless bytes + putHeader("error_trace", "true"); + } + } + /** * Puts a transient header object into this context */ diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index b15828c5594a..9273888b9ec9 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -685,29 +685,11 @@ public final class IndexSettings { ); } } - - // Verify that all nodes can handle this setting - var version = (IndexVersion) settings.get(SETTING_INDEX_VERSION_CREATED); - if (version.before(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY) - && version.between( - IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT, - IndexVersions.UPGRADE_TO_LUCENE_10_0_0 - ) == false) { - throw new IllegalArgumentException( - String.format( - Locale.ROOT, - "The setting [%s] is unavailable on this cluster because some nodes are running older " - + "versions that do not support it. Please upgrade all nodes to the latest version " - + "and try again.", - RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey() - ) - ); - } } @Override public Iterator> settings() { - List> res = List.of(INDEX_MAPPER_SOURCE_MODE_SETTING, SETTING_INDEX_VERSION_CREATED, MODE); + List> res = List.of(INDEX_MAPPER_SOURCE_MODE_SETTING, MODE); return res.iterator(); } }, @@ -1050,6 +1032,24 @@ public final class IndexSettings { indexMappingSourceMode = scopedSettings.get(INDEX_MAPPER_SOURCE_MODE_SETTING); recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings); recoverySourceSyntheticEnabled = scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING); + if (recoverySourceSyntheticEnabled) { + // Verify that all nodes can handle this setting + if (version.before(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY) + && version.between( + IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT, + IndexVersions.UPGRADE_TO_LUCENE_10_0_0 + ) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "The setting [%s] is unavailable on this cluster because some nodes are running older " + + "versions that do not support it. 
Please upgrade all nodes to the latest version " + + "and try again.", + RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey() + ) + ); + } + } scopedSettings.addSettingsUpdateConsumer( MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, diff --git a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java index 2811c7493a27..6c044ab99989 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java @@ -14,6 +14,8 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -53,6 +55,8 @@ import java.util.function.Supplier; **/ public final class IndexSortConfig { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(IndexSortConfig.class); + /** * The list of field names */ @@ -134,10 +138,14 @@ public final class IndexSortConfig { // visible for tests final FieldSortSpec[] sortSpecs; + private final IndexVersion indexCreatedVersion; + private final String indexName; private final IndexMode indexMode; public IndexSortConfig(IndexSettings indexSettings) { final Settings settings = indexSettings.getSettings(); + this.indexCreatedVersion = indexSettings.getIndexVersionCreated(); + this.indexName = indexSettings.getIndex().getName(); this.indexMode = indexSettings.getMode(); if (this.indexMode == IndexMode.TIME_SERIES) { @@ -230,7 +238,22 @@ public final class IndexSortConfig { throw new IllegalArgumentException(err); } if (Objects.equals(ft.name(), sortSpec.field) == false) { - throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field"); + if (this.indexCreatedVersion.onOrAfter(IndexVersions.V_7_13_0)) { + throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field"); + } else { + DEPRECATION_LOGGER.warn( + DeprecationCategory.MAPPINGS, + "index-sort-aliases", + "Index sort for index [" + + indexName + + "] defined on field [" + + sortSpec.field + + "] which resolves to field [" + + ft.name() + + "]. " + + "You will not be able to define an index sort over aliased fields in new indexes" + ); + } } boolean reverse = sortSpec.order == null ? 
false : (sortSpec.order == SortOrder.DESC); MultiValueMode mode = sortSpec.mode; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index fd321f625619..8af10524813c 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -12,7 +12,6 @@ package org.elasticsearch.index; import org.apache.lucene.util.Version; import org.elasticsearch.ReleaseVersions; import org.elasticsearch.core.Assertions; -import org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; import java.text.ParseException; @@ -25,6 +24,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.function.IntFunction; +import java.util.stream.Collectors; @SuppressWarnings("deprecation") public class IndexVersions { @@ -58,7 +58,6 @@ public class IndexVersions { } } - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); @@ -244,10 +243,12 @@ public class IndexVersions { return Collections.unmodifiableNavigableMap(builder); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - // We can simplify this once we've removed all references to index versions earlier than MINIMUM_COMPATIBLE + static Collection getAllWriteVersions() { + return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)).collect(Collectors.toSet()); + } + static Collection getAllVersions() { - return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(MINIMUM_COMPATIBLE)).toList(); + return VERSION_IDS.values(); } static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(IndexVersions.class, LATEST_DEFINED.id()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 276d3e151361..5dbaf0e0f40a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -77,7 +77,8 @@ public class MapperFeatures implements FeatureSpecification { DocumentParser.FIX_PARSING_SUBOBJECTS_FALSE_DYNAMIC_FALSE, CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, META_FETCH_FIELDS_ERROR_CODE_CHANGED, - SPARSE_VECTOR_STORE_SUPPORT + SPARSE_VECTOR_STORE_SUPPORT, + SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 31aa787c3f75..033742b3b57f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -10,13 +10,17 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Iterator; import java.util.Map; +import 
java.util.Set; import java.util.function.Function; /** @@ -132,6 +136,8 @@ public abstract class MetadataFieldMapper extends FieldMapper { return build(); } + private static final Set UNSUPPORTED_PARAMETERS_8_6_0 = Set.of("type", "fields", "copy_to", "boost"); + public final void parseMetadataField(String name, MappingParserContext parserContext, Map fieldNode) { final Parameter[] params = getParameters(); Map> paramsMap = Maps.newHashMapWithExpectedSize(params.length); @@ -144,6 +150,22 @@ public abstract class MetadataFieldMapper extends FieldMapper { final Object propNode = entry.getValue(); Parameter parameter = paramsMap.get(propName); if (parameter == null) { + IndexVersion indexVersionCreated = parserContext.indexVersionCreated(); + if (indexVersionCreated.before(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + && UNSUPPORTED_PARAMETERS_8_6_0.contains(propName)) { + if (indexVersionCreated.onOrAfter(IndexVersions.V_8_6_0)) { + // silently ignore type, and a few other parameters: sadly we've been doing this for a long time + deprecationLogger.warn( + DeprecationCategory.API, + propName, + "Parameter [{}] has no effect on metadata field [{}] and will be removed in future", + propName, + name + ); + } + iterator.remove(); + continue; + } throw new MapperParsingException("unknown parameter [" + propName + "] on metadata field [" + name + "]"); } parameter.parse(name, parserContext, propNode); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 85f4217811a8..5f1ba6f0ab2a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -56,6 +56,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { "mapper.source.remove_synthetic_source_only_validation" ); public static final NodeFeature SOURCE_MODE_FROM_INDEX_SETTING = new NodeFeature("mapper.source.mode_from_index_setting"); + public static final NodeFeature SYNTHETIC_RECOVERY_SOURCE = new NodeFeature("mapper.synthetic_recovery_source"); public static final String NAME = "_source"; public static final String RECOVERY_SOURCE_NAME = "_recovery_source"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 93a2157b2338..de2632165b0c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.DynamicFieldType; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -670,7 +671,7 @@ public final class FlattenedFieldMapper extends FieldMapper { private final boolean isDimension; private final int ignoreAbove; - public RootFlattenedFieldType( + RootFlattenedFieldType( String name, boolean indexed, boolean hasDocValues, @@ -682,7 +683,7 @@ public final class FlattenedFieldMapper extends FieldMapper { this(name, indexed, hasDocValues, 
meta, splitQueriesOnWhitespace, eagerGlobalOrdinals, Collections.emptyList(), ignoreAbove); } - public RootFlattenedFieldType( + RootFlattenedFieldType( String name, boolean indexed, boolean hasDocValues, @@ -806,6 +807,10 @@ public final class FlattenedFieldMapper extends FieldMapper { return new KeyedFlattenedFieldType(name(), childPath, this); } + public MappedFieldType getKeyedFieldType() { + return new KeywordFieldMapper.KeywordFieldType(name() + KEYED_FIELD_SUFFIX); + } + @Override public boolean isDimension() { return isDimension; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java index 950fef95772f..53f68fb6edee 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java @@ -55,7 +55,7 @@ import java.util.Objects; * }` * */ -class FlattenedFieldSyntheticWriterHelper { +public class FlattenedFieldSyntheticWriterHelper { private record Prefix(List<String> prefix) { @@ -225,17 +225,17 @@ class FlattenedFieldSyntheticWriterHelper { } } - interface SortedKeyedValues { + public interface SortedKeyedValues { BytesRef next() throws IOException; } private final SortedKeyedValues sortedKeyedValues; - FlattenedFieldSyntheticWriterHelper(final SortedKeyedValues sortedKeyedValues) { + public FlattenedFieldSyntheticWriterHelper(final SortedKeyedValues sortedKeyedValues) { this.sortedKeyedValues = sortedKeyedValues; } - void write(final XContentBuilder b) throws IOException { + public void write(final XContentBuilder b) throws IOException { KeyValue curr = new KeyValue(sortedKeyedValues.next()); KeyValue prev = KeyValue.EMPTY; final List<Object> values = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 92c7edb54b4e..9292f8d7bec3 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -85,7 +85,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -555,9 +555,10 @@ class NodeConstruction { return settingsModule; } - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) + @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_FOUNDATIONS) private static void addBwcSearchWorkerSettings(List<Setting<?>> additionalSettings) { - // TODO remove the below settings, they are unused and only here to enable BwC for deployments that still use them + // The search worker thread pool was removed in Elasticsearch 8.16.0. These settings are deprecated and have no effect.
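// Editor's sketch of the BwC pattern this comment describes: the setting
// stays registered so old elasticsearch.yml files still parse, but nothing
// reads its value, and configuring it emits a deprecation warning (names
// taken from the registration that follows).
Setting<Integer> queueSize = Setting.intSetting(
    "thread_pool.search_worker.queue_size",
    0,                                  // default; the value is never consumed
    Setting.Property.NodeScope,
    Setting.Property.DeprecatedWarning  // warn when a config still sets it
);
additionalSettings.add(queueSize);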
+ // They are here only to enable BwC for deployments that still use them additionalSettings.add( Setting.intSetting("thread_pool.search_worker.queue_size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) ); diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 4564a37dacf4..509086b98231 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -269,5 +269,4 @@ public abstract class BaseRestHandler implements RestHandler { protected Set responseParams(RestApiVersion restApiVersion) { return responseParams(); } - } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 49fe794bbe61..49801499ea99 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -93,6 +93,7 @@ public class RestController implements HttpServerTransport.Dispatcher { public static final String STATUS_CODE_KEY = "es_rest_status_code"; public static final String HANDLER_NAME_KEY = "es_rest_handler_name"; public static final String REQUEST_METHOD_KEY = "es_rest_request_method"; + public static final boolean ERROR_TRACE_DEFAULT = false; static { try (InputStream stream = RestController.class.getResourceAsStream("/config/favicon.ico")) { @@ -638,7 +639,7 @@ public class RestController implements HttpServerTransport.Dispatcher { private static void validateErrorTrace(RestRequest request, RestChannel channel) { // error_trace cannot be used when we disable detailed errors // we consume the error_trace parameter first to ensure that it is always consumed - if (request.paramAsBoolean("error_trace", false) && channel.detailedErrorsEnabled() == false) { + if (request.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && channel.detailedErrorsEnabled() == false) { throw new IllegalArgumentException("error traces in responses are disabled."); } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java index d04397405566..0c359e0a4a05 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -37,6 +37,7 @@ import java.util.Set; import static java.util.Collections.singletonMap; import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE; import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER; +import static org.elasticsearch.rest.RestController.ERROR_TRACE_DEFAULT; public final class RestResponse implements Releasable { @@ -143,7 +144,7 @@ public final class RestResponse implements Releasable { // switched in the xcontent rendering parameters. 
// For authorization problems (RestStatus.UNAUTHORIZED) we don't want to do this since this could // leak information to the caller who is unauthorized to make this call - if (params.paramAsBoolean("error_trace", false) && status != RestStatus.UNAUTHORIZED) { + if (params.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && status != RestStatus.UNAUTHORIZED) { params = new ToXContent.DelegatingMapParams(singletonMap(REST_EXCEPTION_SKIP_STACK_TRACE, "false"), params); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 24fab92ced39..87b1a6b9c2fa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -72,6 +72,9 @@ public class RestMultiSearchAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + if (client.threadPool() != null && client.threadPool().getThreadContext() != null) { + client.threadPool().getThreadContext().setErrorTraceTransportHeader(request); + } final MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature); return channel -> { final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index a9c2ff7576b0..99c11bb60b8f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -95,7 +95,9 @@ public class RestSearchAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - + if (client.threadPool() != null && client.threadPool().getThreadContext() != null) { + client.threadPool().getThreadContext().setErrorTraceTransportHeader(request); + } SearchRequest searchRequest = new SearchRequest(); // access the BwC param, but just drop it // this might be set by old clients diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index b87d097413b6..47d3ed337af7 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -73,6 +73,7 @@ import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rescore.RescoreContext; +import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -377,7 +378,7 @@ final class DefaultSearchContext extends SearchContext { ); } if (rescore != null) { - if (sort != null) { + if (RescorePhase.validateSort(sort) == false) { throw new IllegalArgumentException("Cannot use [sort] option in conjunction with [rescore]."); } int maxWindow = 
indexService.getIndexSettings().getMaxRescoreWindow(); diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index beac39c2de30..553511346b18 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -23,4 +23,11 @@ public final class SearchFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); } + + public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled"); + + @Override + public Set getTestFeatures() { + return Set.of(RETRIEVER_RESCORER_ENABLED); + } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index d282ba425b12..3294e1ba03f6 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -231,6 +231,7 @@ import org.elasticsearch.search.rank.feature.RankFeatureShardResult; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.retriever.KnnRetrieverBuilder; +import org.elasticsearch.search.retriever.RescorerRetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; import org.elasticsearch.search.retriever.StandardRetrieverBuilder; @@ -1080,6 +1081,7 @@ public class SearchModule { private void registerRetrieverParsers(List plugins) { registerRetriever(new RetrieverSpec<>(StandardRetrieverBuilder.NAME, StandardRetrieverBuilder::fromXContent)); registerRetriever(new RetrieverSpec<>(KnnRetrieverBuilder.NAME, KnnRetrieverBuilder::fromXContent)); + registerRetriever(new RetrieverSpec<>(RescorerRetrieverBuilder.NAME, RescorerRetrieverBuilder::fromXContent)); registerFromPlugin(plugins, SearchPlugin::getRetrievers, this::registerRetriever); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index ee9ad3c559cf..561b5ff67312 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -17,6 +17,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ResolvedIndices; @@ -152,6 +154,7 @@ import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; +import static org.elasticsearch.TransportVersions.ERROR_TRACE_IN_TRANSPORT_HEADER; import static org.elasticsearch.core.TimeValue.timeValueHours; import static org.elasticsearch.core.TimeValue.timeValueMillis; import static org.elasticsearch.core.TimeValue.timeValueMinutes; @@ -272,6 +275,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv public static final int DEFAULT_SIZE = 10; public static final int DEFAULT_FROM = 0; + private static final 
StackTraceElement[] EMPTY_STACK_TRACE_ARRAY = new StackTraceElement[0]; private final ThreadPool threadPool; @@ -506,7 +510,41 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv keepAliveReaper.cancel(); } + /** + * Wraps the listener to avoid sending stack traces back to the coordinating + * node if the `error_trace` header is set to {@code false}. Upon reading we + * default to {@code true} to maintain the same behavior as before the change, + * because older nodes cannot specify whether they need stack traces. + * + * @param <T> the type of the response + * @param listener the action listener to be wrapped + * @param version channel version of the request + * @param threadPool the thread pool whose context holds the {@code error_trace} header + * @return the wrapped action listener + */ + static <T> ActionListener<T> maybeWrapListenerForStackTrace( + ActionListener<T> listener, + TransportVersion version, + ThreadPool threadPool + ) { + boolean header = true; + if (version.onOrAfter(ERROR_TRACE_IN_TRANSPORT_HEADER) && threadPool.getThreadContext() != null) { + header = Boolean.parseBoolean(threadPool.getThreadContext().getHeaderOrDefault("error_trace", "false")); + } + if (header == false) { + return listener.delegateResponse((l, e) -> { + ExceptionsHelper.unwrapCausesAndSuppressed(e, err -> { + err.setStackTrace(EMPTY_STACK_TRACE_ARRAY); + return false; + }); + l.onFailure(e); + }); + } + return listener; + } + public void executeDfsPhase(ShardSearchRequest request, SearchShardTask task, ActionListener<DfsSearchResult> listener) { + listener = maybeWrapListenerForStackTrace(listener, request.getChannelVersion(), threadPool); final IndexShard shard = getShard(request); rewriteAndFetchShardRequest(shard, request, listener.delegateFailure((l, rewritten) -> { // fork the execution in the search thread pool @@ -544,10 +582,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } public void executeQueryPhase(ShardSearchRequest request, SearchShardTask task, ActionListener<SearchPhaseResult> listener) { + ActionListener<SearchPhaseResult> finalListener = maybeWrapListenerForStackTrace(listener, request.getChannelVersion(), threadPool); assert request.canReturnNullResponseIfMatchNoDocs() == false || request.numberOfShards() > 1 : "empty responses require more than one shard"; final IndexShard shard = getShard(request); - rewriteAndFetchShardRequest(shard, request, listener.delegateFailure((l, orig) -> { + rewriteAndFetchShardRequest(shard, request, finalListener.delegateFailure((l, orig) -> { // check if we can shortcut the query phase entirely.
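// Editor's sketch: the producer half of the error_trace handshake, condensed
// from the RestSearchAction and ThreadContext hunks above; the wrapping
// method just defined is its consumer. (restRequest and threadPool are
// assumed to be in scope.)
if (restRequest.paramAsBoolean("error_trace", RestController.ERROR_TRACE_DEFAULT)) {
    threadPool.getThreadContext().putHeader("error_trace", "true"); // only ever stamped as "true"
}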
if (orig.canReturnNullResponseIfMatchNoDocs()) { assert orig.scroll() == null; @@ -561,7 +600,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv ); CanMatchShardResponse canMatchResp = canMatch(canMatchContext, false); if (canMatchResp.canMatch() == false) { - listener.onResponse(QuerySearchResult.nullInstance()); + finalListener.onResponse(QuerySearchResult.nullInstance()); return; } } @@ -736,6 +775,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } public void executeRankFeaturePhase(RankFeatureShardRequest request, SearchShardTask task, ActionListener listener) { + listener = maybeWrapListenerForStackTrace(listener, request.getShardSearchRequest().getChannelVersion(), threadPool); final ReaderContext readerContext = findReaderContext(request.contextId(), request); final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.getShardSearchRequest()); final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest)); @@ -779,8 +819,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv public void executeQueryPhase( InternalScrollSearchRequest request, SearchShardTask task, - ActionListener listener + ActionListener listener, + TransportVersion version ) { + listener = maybeWrapListenerForStackTrace(listener, version, threadPool); final LegacyReaderContext readerContext = (LegacyReaderContext) findReaderContext(request.contextId(), request); final Releasable markAsUsed; try { @@ -816,7 +858,13 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv * It is the responsibility of the caller to ensure that the ref count is correctly decremented * when the object is no longer needed. 
*/ - public void executeQueryPhase(QuerySearchRequest request, SearchShardTask task, ActionListener listener) { + public void executeQueryPhase( + QuerySearchRequest request, + SearchShardTask task, + ActionListener listener, + TransportVersion version + ) { + listener = maybeWrapListenerForStackTrace(listener, version, threadPool); final ReaderContext readerContext = findReaderContext(request.contextId(), request.shardSearchRequest()); final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.shardSearchRequest()); final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest)); diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 3554a6dc08b9..8c21abe4180e 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -48,9 +48,7 @@ import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.slice.SliceBuilder; -import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; -import org.elasticsearch.search.sort.ShardDocSortField; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -2341,18 +2339,6 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R validationException = rescorer.validate(this, validationException); } } - - if (pointInTimeBuilder() == null && sorts() != null) { - for (var sortBuilder : sorts()) { - if (sortBuilder instanceof FieldSortBuilder fieldSortBuilder - && ShardDocSortField.NAME.equals(fieldSortBuilder.getFieldName())) { - validationException = addValidationError( - "[" + FieldSortBuilder.SHARD_DOC_FIELD_NAME + "] sort field cannot be used without [point in time]", - validationException - ); - } - } - } return validationException; } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java index cbc04dd460ff..3d793a164f40 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java @@ -58,6 +58,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.InternalProfileCollector; import org.elasticsearch.search.rescore.RescoreContext; +import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.sort.SortAndFormats; import java.io.IOException; @@ -238,7 +239,7 @@ abstract class QueryPhaseCollectorManager implements CollectorManager { + if (t instanceof FieldDoc fieldDoc) { + fieldDoc.score = (float) fieldDoc.fields[0]; + } + }); TopFieldGroups topGroups = null; + TopFieldDocs topFields = null; if (topDocs instanceof TopFieldGroups topFieldGroups) { - assert context.collapse() != null; + assert context.collapse() != null && validateSortFields(topFieldGroups.fields); topGroups = topFieldGroups; + } else if (topDocs instanceof TopFieldDocs topFieldDocs) { + 
assert validateSortFields(topFieldDocs.fields); + topFields = topFieldDocs; } try { Runnable cancellationCheck = getCancellationChecks(context); @@ -56,17 +72,18 @@ public class RescorePhase { topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx); // It is the responsibility of the rescorer to sort the resulted top docs, // here we only assert that this condition is met. - assert context.sort() == null && topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore"; + assert topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore"; ctx.setCancellationChecker(null); } + /** + * Since rescorers are building top docs with score only, we must reconstruct the {@link TopFieldGroups} + * or {@link TopFieldDocs} using their original version before rescoring. + */ if (topGroups != null) { assert context.collapse() != null; - /** - * Since rescorers don't preserve collapsing, we must reconstruct the group and field - * values from the originalTopGroups to create a new {@link TopFieldGroups} from the - * rescored top documents. - */ - topDocs = rewriteTopGroups(topGroups, topDocs); + topDocs = rewriteTopFieldGroups(topGroups, topDocs); + } else if (topFields != null) { + topDocs = rewriteTopFieldDocs(topFields, topDocs); } context.queryResult() .topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), context.queryResult().sortValueFormats()); @@ -81,29 +98,84 @@ public class RescorePhase { } } - private static TopFieldGroups rewriteTopGroups(TopFieldGroups originalTopGroups, TopDocs rescoredTopDocs) { - assert originalTopGroups.fields.length == 1 && SortField.FIELD_SCORE.equals(originalTopGroups.fields[0]) - : "rescore must always sort by score descending"; + /** + * Returns whether the provided {@link SortAndFormats} can be used to rescore + * top documents. + */ + public static boolean validateSort(SortAndFormats sortAndFormats) { + if (sortAndFormats == null) { + return true; + } + return validateSortFields(sortAndFormats.sort.getSort()); + } + + private static boolean validateSortFields(SortField[] fields) { + if (fields[0].equals(SortField.FIELD_SCORE) == false) { + return false; + } + if (fields.length == 1) { + return true; + } + + // The ShardDocSortField can be used as a tiebreaker because it maintains + // the natural document ID order within the shard. 
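// Editor's sketch: the only sort shape the check below accepts alongside a
// rescorer, mirroring the QueryRescorerIT additions earlier in this diff
// (test-style helpers such as prepareSearch/matchAllQuery assumed).
prepareSearch("test").addSort(SortBuilders.scoreSort())
    .addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME)) // ascending tiebreaker
    .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50);
// Any other leading sort field, or a reversed tiebreaker, still fails with
// "Cannot use [sort] option in conjunction with [rescore]."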
+ if (fields[1] instanceof ShardDocSortField == false || fields[1].getReverse()) { + return false; + } + return true; + } + + private static TopFieldDocs rewriteTopFieldDocs(TopFieldDocs originalTopFieldDocs, TopDocs rescoredTopDocs) { + Map docIdToFieldDoc = Maps.newMapWithExpectedSize(originalTopFieldDocs.scoreDocs.length); + for (int i = 0; i < originalTopFieldDocs.scoreDocs.length; i++) { + docIdToFieldDoc.put(originalTopFieldDocs.scoreDocs[i].doc, (FieldDoc) originalTopFieldDocs.scoreDocs[i]); + } + var newScoreDocs = new FieldDoc[rescoredTopDocs.scoreDocs.length]; + int pos = 0; + for (var doc : rescoredTopDocs.scoreDocs) { + newScoreDocs[pos] = docIdToFieldDoc.get(doc.doc); + newScoreDocs[pos].score = doc.score; + newScoreDocs[pos].fields[0] = newScoreDocs[pos].score; + pos++; + } + return new TopFieldDocs(originalTopFieldDocs.totalHits, newScoreDocs, originalTopFieldDocs.fields); + } + + private static TopFieldGroups rewriteTopFieldGroups(TopFieldGroups originalTopGroups, TopDocs rescoredTopDocs) { + var newFieldDocs = rewriteFieldDocs((FieldDoc[]) originalTopGroups.scoreDocs, rescoredTopDocs.scoreDocs); + Map docIdToGroupValue = Maps.newMapWithExpectedSize(originalTopGroups.scoreDocs.length); for (int i = 0; i < originalTopGroups.scoreDocs.length; i++) { docIdToGroupValue.put(originalTopGroups.scoreDocs[i].doc, originalTopGroups.groupValues[i]); } - var newScoreDocs = new FieldDoc[rescoredTopDocs.scoreDocs.length]; var newGroupValues = new Object[originalTopGroups.groupValues.length]; int pos = 0; for (var doc : rescoredTopDocs.scoreDocs) { - newScoreDocs[pos] = new FieldDoc(doc.doc, doc.score, new Object[] { doc.score }); newGroupValues[pos++] = docIdToGroupValue.get(doc.doc); } return new TopFieldGroups( originalTopGroups.field, originalTopGroups.totalHits, - newScoreDocs, + newFieldDocs, originalTopGroups.fields, newGroupValues ); } + private static FieldDoc[] rewriteFieldDocs(FieldDoc[] originalTopDocs, ScoreDoc[] rescoredTopDocs) { + Map docIdToFieldDoc = Maps.newMapWithExpectedSize(rescoredTopDocs.length); + Arrays.stream(originalTopDocs).forEach(d -> docIdToFieldDoc.put(d.doc, d)); + var newDocs = new FieldDoc[rescoredTopDocs.length]; + int pos = 0; + for (var doc : rescoredTopDocs) { + newDocs[pos] = docIdToFieldDoc.get(doc.doc); + newDocs[pos].score = doc.score; + newDocs[pos].fields[0] = doc.score; + pos++; + } + return newDocs; + } + /** * Returns true if the provided docs are sorted by score. */ diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java index f62496151538..38a319321207 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java @@ -39,7 +39,7 @@ public abstract class RescorerBuilder> protected Integer windowSize; - private static final ParseField WINDOW_SIZE_FIELD = new ParseField("window_size"); + public static final ParseField WINDOW_SIZE_FIELD = new ParseField("window_size"); /** * Construct an empty RescoreBuilder. 
diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 2ab6395db73b..298340e5c579 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -32,10 +32,12 @@ import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.ShardDocSortField; import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -49,6 +51,8 @@ public abstract class CompoundRetrieverBuilder rankWindowSize) { validationException = addValidationError( - "[" - + this.getName() - + "] requires [rank_window_size: " - + rankWindowSize - + "]" - + " be greater than or equal to [size: " - + source.size() - + "]", + String.format( + Locale.ROOT, + "[%s] requires [%s: %d] be greater than or equal to [size: %d]", + getName(), + getRankWindowSizeField().getPreferredName(), + rankWindowSize, + source.size() + ), validationException ); } @@ -231,6 +243,21 @@ public abstract class CompoundRetrieverBuilder compoundChild) { + if (rankWindowSize > compoundChild.rankWindowSize) { + String errorMessage = String.format( + Locale.ROOT, + "[%s] requires [%s: %d] to be smaller than or equal to its sub retriever's %s [%s: %d]", + this.getName(), + getRankWindowSizeField().getPreferredName(), + rankWindowSize, + compoundChild.getName(), + compoundChild.getRankWindowSizeField(), + compoundChild.rankWindowSize + ); + validationException = addValidationError(errorMessage, validationException); + } + } } return validationException; } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java new file mode 100644 index 000000000000..09688b5b9b00 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.search.retriever; + +import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.rescore.RescorerBuilder; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.search.builder.SearchSourceBuilder.RESCORE_FIELD; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * A {@link CompoundRetrieverBuilder} that re-scores only the results produced by its child retriever. + */ +public final class RescorerRetrieverBuilder extends CompoundRetrieverBuilder { + + public static final String NAME = "rescorer"; + public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + args -> new RescorerRetrieverBuilder((RetrieverBuilder) args[0], (List>) args[1]) + ); + + static { + PARSER.declareNamedObject(constructorArg(), (parser, context, n) -> { + RetrieverBuilder innerRetriever = parser.namedObject(RetrieverBuilder.class, n, context); + context.trackRetrieverUsage(innerRetriever.getName()); + return innerRetriever; + }, RETRIEVER_FIELD); + PARSER.declareField(constructorArg(), (parser, context) -> { + if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + List> rescorers = new ArrayList<>(); + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + rescorers.add(RescorerBuilder.parseFromXContent(parser, name -> context.trackRescorerUsage(name))); + } + return rescorers; + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + return List.of(RescorerBuilder.parseFromXContent(parser, name -> context.trackRescorerUsage(name))); + } else { + throw new IllegalArgumentException( + "Unknown format for [rescorer.rescore], expects an object or an array of objects, got: " + parser.currentToken() + ); + } + }, RESCORE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY); + RetrieverBuilder.declareBaseParserFields(NAME, PARSER); + } + + public static RescorerRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { + try { + return PARSER.apply(parser, context); + } catch (Exception e) { + throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e); + } + } + + private final List> rescorers; + + public RescorerRetrieverBuilder(RetrieverBuilder retriever, List> rescorers) { + super(List.of(new RetrieverSource(retriever, null)), extractMinWindowSize(rescorers)); + if (rescorers.isEmpty()) { + throw new IllegalArgumentException("Missing rescore definition"); + } + this.rescorers = rescorers; + } + + private RescorerRetrieverBuilder(RetrieverSource retriever, List> rescorers) { + super(List.of(retriever), extractMinWindowSize(rescorers)); + this.rescorers = rescorers; + } + + /** + * The minimum window size is used as the {@link CompoundRetrieverBuilder#rankWindowSize}, + * the final number of top documents to return in this retriever. 
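// Editor's sketch: constructing the new retriever (someQuery and
// childRetriever are placeholders; the constructor and the min-window rule
// are taken from this file). One rescorer with window_size 50 makes the
// compound's rankWindowSize 50: the smallest rescore window caps the final top-N.
RescorerBuilder<?> rescorer = new QueryRescorerBuilder(someQuery).windowSize(50);
RescorerRetrieverBuilder retriever = new RescorerRetrieverBuilder(childRetriever, List.of(rescorer));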
+ */ + private static int extractMinWindowSize(List> rescorers) { + int windowSize = Integer.MAX_VALUE; + for (var rescore : rescorers) { + windowSize = Math.min(rescore.windowSize() == null ? RescorerBuilder.DEFAULT_WINDOW_SIZE : rescore.windowSize(), windowSize); + } + return windowSize; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public ParseField getRankWindowSizeField() { + return RescorerBuilder.WINDOW_SIZE_FIELD; + } + + @Override + protected SearchSourceBuilder finalizeSourceBuilder(SearchSourceBuilder source) { + /** + * The re-scorer is passed downstream because this query operates only on + * the top documents retrieved by the child retriever. + * + * - If the sub-retriever is a {@link CompoundRetrieverBuilder}, only the top + * documents are re-scored since they are already determined at this stage. + * - For other retrievers that do not require a rewrite, the re-scorer's window + * size is applied per shard. As a result, more documents are re-scored + * compared to the final top documents produced by these retrievers in isolation. + */ + for (var rescorer : rescorers) { + source.addRescorer(rescorer); + } + return source; + } + + @Override + public void doToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(RETRIEVER_FIELD.getPreferredName(), innerRetrievers.getFirst().retriever()); + builder.startArray(RESCORE_FIELD.getPreferredName()); + for (RescorerBuilder rescorer : rescorers) { + rescorer.toXContent(builder, params); + } + builder.endArray(); + } + + @Override + protected RescorerRetrieverBuilder clone(List newChildRetrievers, List newPreFilterQueryBuilders) { + var newInstance = new RescorerRetrieverBuilder(newChildRetrievers.get(0), rescorers); + newInstance.preFilterQueryBuilders = newPreFilterQueryBuilders; + return newInstance; + } + + @Override + protected RankDoc[] combineInnerRetrieverResults(List rankResults) { + assert rankResults.size() == 1; + ScoreDoc[] scoreDocs = rankResults.getFirst(); + RankDoc[] rankDocs = new RankDoc[scoreDocs.length]; + for (int i = 0; i < scoreDocs.length; i++) { + ScoreDoc scoreDoc = scoreDocs[i]; + rankDocs[i] = new RankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex); + rankDocs[i].rank = i + 1; + } + return rankDocs; + } + + @Override + public boolean doEquals(Object o) { + RescorerRetrieverBuilder that = (RescorerRetrieverBuilder) o; + return super.doEquals(o) && Objects.equals(rescorers, that.rescorers); + } + + @Override + public int doHashCode() { + return Objects.hash(super.doHashCode(), rescorers); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java index d52c354cad69..b9bfdfdf3402 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java @@ -63,7 +63,7 @@ public abstract class RetrieverBuilder implements Rewriteable, AbstractObjectParser parser ) { parser.declareObjectArray( - (r, v) -> r.preFilterQueryBuilders = v, + (r, v) -> r.preFilterQueryBuilders = new ArrayList<>(v), (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p, c::trackQueryUsage), PRE_FILTER_FIELD ); diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 040a7a6205f9..462fab651c21 100644 --- 
a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -88,6 +88,7 @@ grant codeBase "${codebase.elasticsearch}" { // this is the test-framework, but the jar is horribly named grant codeBase "${codebase.framework}" { permission java.lang.RuntimePermission "setSecurityManager"; + permission java.lang.RuntimePermission "createClassLoader"; }; grant codeBase "${codebase.elasticsearch-rest-client}" { @@ -129,4 +130,5 @@ grant { permission java.nio.file.LinkPermission "symbolic"; // needed for keystore tests permission java.lang.RuntimePermission "accessUserInformation"; + permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java index 476ade857658..8752e68112bf 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java @@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.equalTo; public class HumanReadableIndexSettingsTests extends ESTestCase { public void testHumanReadableSettings() { - IndexVersion versionCreated = randomVersion(random()); + IndexVersion versionCreated = randomVersion(); long created = System.currentTimeMillis(); Settings testSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, versionCreated) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 28551068c754..b42eebbf1339 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -387,11 +387,7 @@ public class MetadataCreateIndexServiceTests extends ESTestCase { } public void testPrepareResizeIndexSettings() { - final List versions = Stream.of(IndexVersionUtils.randomVersion(random()), IndexVersionUtils.randomVersion(random())) - .sorted() - .toList(); - final IndexVersion version = versions.get(0); - final IndexVersion upgraded = versions.get(1); + final IndexVersion version = IndexVersionUtils.randomWriteVersion(); final Settings.Builder indexSettingsBuilder = Settings.builder() .put("index.version.created", version) .put("index.similarity.default.type", "BM25") diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java index de31abd974c4..9a60dd0fa2a0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java @@ -103,7 +103,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase { Map.of(), null, SnapshotInfoTestUtils.randomUserMetadata(), - IndexVersionUtils.randomVersion(random()) + IndexVersionUtils.randomVersion() ) ); final Index index = new Index(indexName, randomUUID()); @@ -162,7 +162,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase { String alias = randomAlphaOfLength(5); IndexMetadata idxMetadata = IndexMetadata.builder(index) - 
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random()))) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion())) .putAlias(AliasMetadata.builder(alias).writeIndex(true).build()) .numberOfShards(1) .numberOfReplicas(1) @@ -403,7 +403,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase { IndexMetadata.builder(index.getName()) .settings( indexSettings( - IndexVersionUtils.randomVersion(random()), + IndexVersionUtils.randomVersion(), index.getUUID(), randomIntBetween(1, 3), randomIntBetween(0, 2) @@ -438,7 +438,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase { private ClusterState clusterState(Index index) { final IndexMetadata indexMetadata = IndexMetadata.builder(index.getName()) - .settings(indexSettings(IndexVersionUtils.randomVersion(random()), index.getUUID(), 1, 1)) + .settings(indexSettings(IndexVersionUtils.randomVersion(), index.getUUID(), 1, 1)) .build(); final ProjectId projectId = randomProjectIdOrDefault(); final Metadata.Builder metadataBuilder = Metadata.builder().put(ProjectMetadata.builder(projectId).put(indexMetadata, false)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index 5959587b9c82..d621cf7eb672 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -864,7 +864,7 @@ public class MetadataIndexAliasesServiceTests extends ESTestCase { private ClusterState createIndex(ClusterState state, String index) { IndexMetadata indexMetadata = IndexMetadata.builder(index) - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random()))) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomWriteVersion())) .numberOfShards(1) .numberOfReplicas(1) .build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java index b489d1b7ac1e..dd5eede7a35a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java @@ -431,7 +431,7 @@ public class MetadataIndexStateServiceTests extends ESTestCase { shardsBuilder, null, SnapshotInfoTestUtils.randomUserMetadata(), - IndexVersionUtils.randomVersion(random()) + IndexVersionUtils.randomVersion() ); return ClusterState.builder(newState).putCustom(SnapshotsInProgress.TYPE, SnapshotsInProgress.EMPTY.withAddedEntry(entry)).build(); } diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index 22308e15f484..eccdd1c6ffea 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -43,7 +43,7 @@ public class NodeMetadataTests extends ESTestCase { } private IndexVersion randomIndexVersion() { - return rarely() ? IndexVersion.fromId(randomInt()) : IndexVersionUtils.randomVersion(random()); + return rarely() ? 
IndexVersion.fromId(randomInt()) : IndexVersionUtils.randomVersion(); } public void testEqualsHashcodeSerialization() { diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 441ad8a5a225..7221d69b74d4 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -160,6 +160,20 @@ public class IndexSortSettingsTests extends ESTestCase { assertEquals("Cannot use alias [field] as an index sort field", e.getMessage()); } + public void testSortingAgainstAliasesPre713() { + IndexSettings indexSettings = indexSettings( + Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_12_0).put("index.sort.field", "field").build() + ); + MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased"); + Sort sort = buildIndexSort(indexSettings, Map.of("field", aliased)); + assertThat(sort.getSort(), arrayWithSize(1)); + assertThat(sort.getSort()[0].getField(), equalTo("aliased")); + assertWarnings( + "Index sort for index [test] defined on field [field] which resolves to field [aliased]. " + + "You will not be able to define an index sort over aliased fields in new indexes" + ); + } + public void testTimeSeriesMode() { IndexSettings indexSettings = indexSettings( Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java index 2a425c9256c3..8575b87c3679 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java @@ -11,7 +11,6 @@ package org.elasticsearch.index; import org.apache.lucene.util.Version; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; import org.hamcrest.Matchers; @@ -151,9 +150,7 @@ public class IndexVersionTests extends ESTestCase { } } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - @AwaitsFix(bugUrl = "believe this fails because index version has not yet been bumped to 9.0") - public void testMinimumCompatibleVersion() { + public void testGetMinimumCompatibleIndexVersion() { assertThat(IndexVersion.getMinimumCompatibleIndexVersion(7170099), equalTo(IndexVersion.fromId(6000099))); assertThat(IndexVersion.getMinimumCompatibleIndexVersion(8000099), equalTo(IndexVersion.fromId(7000099))); assertThat(IndexVersion.getMinimumCompatibleIndexVersion(10000000), equalTo(IndexVersion.fromId(9000000))); @@ -193,8 +190,6 @@ public class IndexVersionTests extends ESTestCase { } } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - @AwaitsFix(bugUrl = "can be unmuted once lucene is bumped to version 10") public void testLuceneVersionOnUnknownVersions() { // between two known versions, should use the lucene version of the previous version IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(); @@ -207,7 +202,7 @@ public class IndexVersionTests extends ESTestCase { // too old version, major should be the oldest supported lucene version minus 1 IndexVersion oldVersion = IndexVersion.fromId(5020199); - assertThat(oldVersion.luceneVersion().major, equalTo(IndexVersionUtils.getFirstVersion().luceneVersion().major - 1)); + assertThat(oldVersion.luceneVersion().major, 
equalTo(IndexVersionUtils.getLowestReadCompatibleVersion().luceneVersion().major - 1)); // future version, should be the same version as today IndexVersion futureVersion = IndexVersion.fromId(currentVersion.id() + 100); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 04170030c117..db780f064098 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -106,7 +106,7 @@ public class AnalysisRegistryTests extends ESTestCase { } public void testDefaultAnalyzers() throws IOException { - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, version) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) @@ -120,7 +120,7 @@ public class AnalysisRegistryTests extends ESTestCase { } public void testOverrideDefaultAnalyzer() throws IOException { - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); IndexAnalyzers indexAnalyzers = AnalysisRegistry.build( IndexCreationContext.CREATE_INDEX, @@ -137,7 +137,7 @@ public class AnalysisRegistryTests extends ESTestCase { } public void testOverrideDefaultAnalyzerWithoutAnalysisModeAll() throws IOException { - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("index", settings); TokenFilterFactory tokenFilter = new AbstractTokenFilterFactory("my_filter") { @@ -216,7 +216,7 @@ public class AnalysisRegistryTests extends ESTestCase { } public void testOverrideDefaultSearchAnalyzer() { - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); IndexAnalyzers indexAnalyzers = AnalysisRegistry.build( IndexCreationContext.CREATE_INDEX, @@ -319,8 +319,8 @@ public class AnalysisRegistryTests extends ESTestCase { } } - public void testNoTypeOrTokenizerErrorMessage() throws IOException { - IndexVersion version = IndexVersionUtils.randomVersion(random()); + public void testNoTypeOrTokenizerErrorMessage() { + IndexVersion version = IndexVersionUtils.randomVersion(); Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, version) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 0a7bd495f2f2..f5b86f422915 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -63,7 +63,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { PreBuiltAnalyzers.STANDARD.getAnalyzer(IndexVersion.current()) ); // same index 
version should be cached - IndexVersion v = IndexVersionUtils.randomVersion(random()); + IndexVersion v = IndexVersionUtils.randomVersion(); assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(v), PreBuiltAnalyzers.STANDARD.getAnalyzer(v)); assertNotSame( PreBuiltAnalyzers.STANDARD.getAnalyzer(IndexVersion.current()), @@ -71,7 +71,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { ); // Same Lucene version should be cached: - IndexVersion v1 = IndexVersionUtils.randomVersion(random()); + IndexVersion v1 = IndexVersionUtils.randomVersion(); IndexVersion v2 = new IndexVersion(v1.id() - 1, v1.luceneVersion()); assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(v1), PreBuiltAnalyzers.STOP.getAnalyzer(v2)); } @@ -81,7 +81,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { PreBuiltAnalyzers randomPreBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt]; String analyzerName = randomPreBuiltAnalyzer.name().toLowerCase(Locale.ROOT); - IndexVersion randomVersion = IndexVersionUtils.randomVersion(random()); + IndexVersion randomVersion = IndexVersionUtils.randomWriteVersion(); Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion).build(); NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider( diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilterTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilterTests.java index 40b37452990c..a1a91ef2373f 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilterTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilterTests.java @@ -41,7 +41,7 @@ public class PreConfiguredTokenFilterTests extends ESTestCase { IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY); - IndexVersion version1 = IndexVersionUtils.randomVersion(random()); + IndexVersion version1 = IndexVersionUtils.randomVersion(); Settings settings1 = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version1).build(); TokenFilterFactory tff_v1_1 = pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings1); TokenFilterFactory tff_v1_2 = pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings1); @@ -66,7 +66,7 @@ public class PreConfiguredTokenFilterTests extends ESTestCase { } ); - IndexVersion version1 = IndexVersionUtils.randomVersion(random()); + IndexVersion version1 = IndexVersionUtils.randomVersion(); IndexSettings indexSettings1 = IndexSettingsModule.newIndexSettings( "test", Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version1).build() @@ -133,7 +133,7 @@ public class PreConfiguredTokenFilterTests extends ESTestCase { ); assertSame(tff_v1_1, tff_v1_2); - IndexVersion version2 = IndexVersionUtils.getPreviousMajorVersion(IndexVersionUtils.getFirstVersion()); + IndexVersion version2 = IndexVersionUtils.getPreviousMajorVersion(IndexVersionUtils.getLowestReadCompatibleVersion()); IndexSettings indexSettings2 = IndexSettingsModule.newIndexSettings( "test", Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version2).build() diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 3e3be6a315af..d07c775da7e2 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -6645,7 +6645,7 @@ public class InternalEngineTests extends EngineTestCase { for (IndexVersion createdVersion : List.of( IndexVersion.current(), lowestCompatiblePreviousVersion, - IndexVersionUtils.getFirstVersion() + IndexVersionUtils.getLowestWriteCompatibleVersion() )) { Settings settings = Settings.builder().put(indexSettings()).put(IndexMetadata.SETTING_VERSION_CREATED, createdVersion).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index 74d6e83aff26..b8600842effe 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -44,7 +44,7 @@ import java.util.Set; import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class RecoverySourcePruneMergePolicyTests extends ESTestCase { @@ -191,7 +191,7 @@ public class RecoverySourcePruneMergePolicyTests extends ESTestCase { } assertEquals(i, extra_source.docID()); if (syntheticRecoverySource) { - assertThat(extra_source.longValue(), greaterThan(10L)); + assertThat(extra_source.longValue(), greaterThanOrEqualTo(10L)); } else { assertThat(extra_source.longValue(), equalTo(1L)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 378920d0e6db..b7693513a434 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -465,60 +465,6 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { ) ); } - { - Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.toString()) - .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) - .build(); - IllegalArgumentException exc = expectThrows( - IllegalArgumentException.class, - () -> createMapperService( - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT), - settings, - () -> false, - topMapping(b -> {}) - ) - ); - assertThat( - exc.getMessage(), - containsString( - String.format( - Locale.ROOT, - "The setting [%s] is unavailable on this cluster", - IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey() - ) - ) - ); - } - { - Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.toString()) - .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) - .build(); - IllegalArgumentException exc = expectThrows( - IllegalArgumentException.class, - () -> createMapperService( - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.UPGRADE_TO_LUCENE_10_0_0, - IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER - ), - settings, - () -> false, - topMapping(b -> {}) - ) - ); - assertThat( - exc.getMessage(), - containsString( - String.format( - Locale.ROOT, - "The setting 
[%s] is unavailable on this cluster", - IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey() - ) - ) - ); - } } public void testRecoverySourceWithSyntheticSource() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 342d61b78def..3f574a29469c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -1514,19 +1514,19 @@ public class DenseVectorFieldMapperTests extends MapperTestCase { ); assertEquals( VectorSimilarityFunction.EUCLIDEAN, - VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.BYTE) + VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(), ElementType.BYTE) ); assertEquals( VectorSimilarityFunction.EUCLIDEAN, - VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.FLOAT) + VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(), ElementType.FLOAT) ); assertEquals( VectorSimilarityFunction.DOT_PRODUCT, - VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.BYTE) + VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(), ElementType.BYTE) ); assertEquals( VectorSimilarityFunction.DOT_PRODUCT, - VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.FLOAT) + VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(), ElementType.FLOAT) ); } diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index cf6941b84b79..1bcd84aadd6c 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -233,7 +233,7 @@ public class AnalysisModuleTests extends ESTestCase { new StablePluginsRegistry() ).getAnalysisRegistry(); - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); IndexAnalyzers analyzers = getIndexAnalyzers( registry, Settings.builder() @@ -302,7 +302,7 @@ public class AnalysisModuleTests extends ESTestCase { new StablePluginsRegistry() ).getAnalysisRegistry(); - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); IndexAnalyzers analyzers = getIndexAnalyzers( registry, Settings.builder() @@ -389,7 +389,7 @@ public class AnalysisModuleTests extends ESTestCase { new StablePluginsRegistry() ).getAnalysisRegistry(); - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); IndexAnalyzers analyzers = getIndexAnalyzers( registry, Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/IncorrectSetupStablePluginsTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/IncorrectSetupStablePluginsTests.java index ca9184bca75d..181d3ec44f2b 100644 --- 
a/server/src/test/java/org/elasticsearch/indices/analysis/IncorrectSetupStablePluginsTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/IncorrectSetupStablePluginsTests.java @@ -63,7 +63,7 @@ public class IncorrectSetupStablePluginsTests extends ESTestCase { Settings.builder() .put("index.analysis.analyzer.char_filter_test.tokenizer", "standard") .put("index.analysis.analyzer.char_filter_test.char_filter", "incorrectlyAnnotatedSettings") - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()) .build(), Map.of( "incorrectlyAnnotatedSettings", @@ -90,7 +90,7 @@ public class IncorrectSetupStablePluginsTests extends ESTestCase { Settings.builder() .put("index.analysis.analyzer.char_filter_test.tokenizer", "standard") .put("index.analysis.analyzer.char_filter_test.char_filter", "noInjectCharFilter") - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()) .build(), Map.of("noInjectCharFilter", new PluginInfo("noInjectCharFilter", NoInjectCharFilter.class.getName(), classLoader)) ) @@ -112,7 +112,7 @@ public class IncorrectSetupStablePluginsTests extends ESTestCase { Settings.builder() .put("index.analysis.analyzer.char_filter_test.tokenizer", "standard") .put("index.analysis.analyzer.char_filter_test.char_filter", "multipleConstructors") - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()) .build(), Map.of("multipleConstructors", new PluginInfo("multipleConstructors", MultipleConstructors.class.getName(), classLoader)) ) diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsNoSettingsTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsNoSettingsTests.java index 7cbda0e7086c..6eac3847efa4 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsNoSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsNoSettingsTests.java @@ -61,7 +61,7 @@ public class StableAnalysisPluginsNoSettingsTests extends ESTestCase { } public void testStablePlugins() throws IOException { - IndexVersion version = IndexVersionUtils.randomVersion(random()); + IndexVersion version = IndexVersionUtils.randomVersion(); IndexAnalyzers analyzers = getIndexAnalyzers( Settings.builder() .put("index.analysis.analyzer.char_filter_test.tokenizer", "standard") diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsWithSettingsTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsWithSettingsTests.java index acde315b140a..82f49888e911 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsWithSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsWithSettingsTests.java @@ -72,7 +72,7 @@ public class StableAnalysisPluginsWithSettingsTests extends ESTestCase { .put("index.analysis.analyzer.char_filter_with_defaults_test.tokenizer", "standard") .put("index.analysis.analyzer.char_filter_with_defaults_test.char_filter", "stableCharFilterFactory") - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, 
IndexVersionUtils.randomVersion()) .build() ); assertTokenStreamContents(analyzers.get("char_filter_test").tokenStream("", "t#st"), new String[] { "t3st" }); @@ -88,7 +88,7 @@ public class StableAnalysisPluginsWithSettingsTests extends ESTestCase { .put("index.analysis.analyzer.token_filter_test.tokenizer", "standard") .put("index.analysis.analyzer.token_filter_test.filter", "my_token_filter") - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()) .build() ); assertTokenStreamContents( @@ -109,7 +109,7 @@ public class StableAnalysisPluginsWithSettingsTests extends ESTestCase { .putList("index.analysis.tokenizer.my_tokenizer.tokenizer_list_of_chars", "_", " ") .put("index.analysis.analyzer.tokenizer_test.tokenizer", "my_tokenizer") - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()) .build() ); assertTokenStreamContents(analyzers.get("tokenizer_test").tokenStream("", "x_y z"), new String[] { "x", "y", "z" }); @@ -124,7 +124,7 @@ public class StableAnalysisPluginsWithSettingsTests extends ESTestCase { .put("index.analysis.analyzer.analyzer_provider_test.old_char", "#") .put("index.analysis.analyzer.analyzer_provider_test.new_number", 3) .put("index.analysis.analyzer.analyzer_provider_test.analyzerUseTokenListOfChars", true) - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()) .build() ); assertTokenStreamContents(analyzers.get("analyzer_provider_test").tokenStream("", "1x_y_#z"), new String[] { "y", "3z" }); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index d039c265c98a..d9b2936dc30c 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -721,7 +721,7 @@ public class RecoverySourceHandlerTests extends MapperServiceTestCase { final IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test") .settings( - indexSettings(IndexVersionUtils.randomVersion(random()), between(1, 5), between(0, 5)).put( + indexSettings(IndexVersionUtils.randomVersion(), between(1, 5), between(0, 5)).put( IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID(random()) ) diff --git a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 33801dfb9841..ace8499d8ffd 100644 --- a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -243,7 +243,7 @@ public class NodeInfoStreamingTests extends ESTestCase { return new NodeInfo( randomAlphaOfLengthBetween(6, 32), new CompatibilityVersions(TransportVersionUtils.randomVersion(random()), Map.of()), - IndexVersionUtils.randomVersion(random()), + IndexVersionUtils.randomVersion(), componentVersions, build, node, diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java index 
074a8a817a9a..3eab25562516 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java @@ -967,7 +967,7 @@ public class PersistentTasksClusterServiceTests extends ESTestCase { } // Just add a random index - that shouldn't change anything IndexMetadata indexMetadata = IndexMetadata.builder(randomAlphaOfLength(10)) - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random()))) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion())) .numberOfShards(1) .numberOfReplicas(1) .build(); @@ -1045,7 +1045,7 @@ public class PersistentTasksClusterServiceTests extends ESTestCase { private void changeRoutingTable(Metadata.Builder metadata, RoutingTable.Builder routingTable) { IndexMetadata indexMetadata = IndexMetadata.builder(randomAlphaOfLength(10)) - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random()))) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion())) .numberOfShards(1) .numberOfReplicas(1) .build(); diff --git a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java index b85ad31288c8..bd810cea216f 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java @@ -51,6 +51,7 @@ import java.util.Map; import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE; import static org.elasticsearch.ElasticsearchExceptionTests.assertDeepEquals; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.rest.RestController.ERROR_TRACE_DEFAULT; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -180,7 +181,7 @@ public class RestResponseTests extends ESTestCase { } else { assertThat(response.status(), is(RestStatus.BAD_REQUEST)); } - boolean traceExists = request.paramAsBoolean("error_trace", false) && channel.detailedErrorsEnabled(); + boolean traceExists = request.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && channel.detailedErrorsEnabled(); if (traceExists) { assertThat(response.content().utf8ToString(), containsString(ElasticsearchException.STACK_TRACE)); } else { diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index a474c1dc38c5..d3a3792f605d 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.BaseDirectoryWrapper; @@ -245,7 +246,10 @@ public class DefaultSearchContextTests extends MapperServiceTestCase { // resultWindow not greater than maxResultWindow and both rescore and sort are 
not null context1.from(0); DocValueFormat docValueFormat = mock(DocValueFormat.class); - SortAndFormats sortAndFormats = new SortAndFormats(new Sort(), new DocValueFormat[] { docValueFormat }); + SortAndFormats sortAndFormats = new SortAndFormats( + new Sort(new SortField[] { SortField.FIELD_DOC }), + new DocValueFormat[] { docValueFormat } + ); context1.sort(sortAndFormats); RescoreContext rescoreContext = mock(RescoreContext.class); diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java index 02593e41f5d8..0fc1694d3992 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java @@ -2684,7 +2684,8 @@ public class SearchServiceSingleNodeTests extends ESSingleNodeTestCase { service.executeQueryPhase( new QuerySearchRequest(null, context.id(), request, new AggregatedDfs(Map.of(), Map.of(), 10)), new SearchShardTask(42L, "", "", "", null, emptyMap()), - plainActionFuture + plainActionFuture, + TransportVersion.current() ); plainActionFuture.actionGet(); diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 31bcab31ca8a..d041121b8a96 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -13,6 +13,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -53,9 +55,14 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.IOException; import java.util.Collections; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiFunction; import java.util.function.Predicate; +import static org.elasticsearch.search.SearchService.maybeWrapListenerForStackTrace; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.not; + public class SearchServiceTests extends IndexShardTestCase { public void testCanMatchMatchAll() throws IOException { @@ -117,6 +124,33 @@ public class SearchServiceTests extends IndexShardTestCase { doTestCanMatch(searchRequest, sortField, true, null, false); }
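Editor's note: the test added below pins down observable behaviour only: once maybeWrapListenerForStackTrace has wrapped a listener, failures reach onFailure with an empty stack trace. A hedged sketch of that contract, using a plain Consumer in place of ActionListener (the real wrapper also depends on the transport version and thread pool, which this sketch ignores):

import java.util.function.Consumer;

class StackTraceStrippingSketch {
    // Stand-in for the wrapping: clear the frames, then delegate.
    static Consumer<Exception> wrap(Consumer<Exception> delegate) {
        return e -> {
            e.setStackTrace(new StackTraceElement[0]); // drop frames before delegating
            delegate.accept(e);
        };
    }

    public static void main(String[] args) {
        Consumer<Exception> listener = e -> System.out.println("frames: " + e.getStackTrace().length);
        Exception e = new Exception();
        e.fillInStackTrace();
        listener.accept(e);       // prints a non-zero frame count
        wrap(listener).accept(e); // prints 0, like the wrapped branch of the test
    }
}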
+ public void testMaybeWrapListenerForStackTrace() { + // Tests that the same listener keeps its stack trace when it is not wrapped, and has it cleared when it is wrapped. + AtomicBoolean isWrapped = new AtomicBoolean(false); + ActionListener<SearchPhaseResult> listener = new ActionListener<>() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + // noop - we only care about failure scenarios + } + + @Override + public void onFailure(Exception e) { + if (isWrapped.get()) { + assertThat(e.getStackTrace().length, is(0)); + } else { + assertThat(e.getStackTrace().length, is(not(0))); + } + } + }; + Exception e = new Exception(); + e.fillInStackTrace(); + assertThat(e.getStackTrace().length, is(not(0))); + listener.onFailure(e); + listener = maybeWrapListenerForStackTrace(listener, TransportVersion.current(), threadPool); + isWrapped.set(true); + listener.onFailure(e); + } + private void doTestCanMatch( SearchRequest searchRequest, SortField sortField, diff --git a/server/src/test/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilderParsingTests.java b/server/src/test/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilderParsingTests.java new file mode 100644 index 000000000000..fa83246d90cb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilderParsingTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.retriever; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.rescore.QueryRescorerBuilderTests; +import org.elasticsearch.search.rescore.RescorerBuilder; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.usage.SearchUsage; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParser; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static java.util.Collections.emptyList; + +public class RescorerRetrieverBuilderParsingTests extends AbstractXContentTestCase<RescorerRetrieverBuilder> { + private static List<NamedXContentRegistry.Entry> xContentRegistryEntries; + + @BeforeClass + public static void init() { + xContentRegistryEntries = new SearchModule(Settings.EMPTY, emptyList()).getNamedXContents(); + } + + @AfterClass + public static void afterClass() throws Exception { + xContentRegistryEntries = null; + } + + @Override + protected RescorerRetrieverBuilder createTestInstance() { + int num = randomIntBetween(1, 3); + List<RescorerBuilder<?>> rescorers = new ArrayList<>(); + for (int i = 0; i < num; i++) { + rescorers.add(QueryRescorerBuilderTests.randomRescoreBuilder()); + } + return new RescorerRetrieverBuilder(TestRetrieverBuilder.createRandomTestRetrieverBuilder(), rescorers); + } + + @Override + protected RescorerRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { + return (RescorerRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( parser, + new RetrieverParserContext(new SearchUsage(), n -> true) + ); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + List<NamedXContentRegistry.Entry> entries = new 
ArrayList<>(xContentRegistryEntries); + entries.add( + new NamedXContentRegistry.Entry( + RetrieverBuilder.class, + TestRetrieverBuilder.TEST_SPEC.getName(), + (p, c) -> TestRetrieverBuilder.TEST_SPEC.getParser().fromXContent(p, (RetrieverParserContext) c), + TestRetrieverBuilder.TEST_SPEC.getName().getForRestApiVersion() + ) + ); + return new NamedXContentRegistry(entries); + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java index a92d55f6d419..91ab253b2e1f 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java @@ -133,7 +133,7 @@ public class SnapshotsInProgressSerializationTests extends SimpleDiffableWireSer shards, null, SnapshotInfoTestUtils.randomUserMetadata(), - IndexVersionUtils.randomVersion(random()) + IndexVersionUtils.randomVersion() ); } diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 9dc0263f49ae..8296bc14fd66 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -10,12 +10,10 @@ apply plugin: 'elasticsearch.java' apply plugin: 'com.gradleup.shadow' + import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar configurations { -// all { -// transitive = true -// } hdfs2 hdfs3 consumable("shadowedHdfs2") @@ -27,20 +25,76 @@ dependencies { transitive false } compileOnly "junit:junit:${versions.junit}" - hdfs2 "org.apache.hadoop:hadoop-minicluster:2.8.5" - hdfs3 "org.apache.hadoop:hadoop-minicluster:3.3.1" + def commonExcludes = [ + [group: "org.apache.commons", module: "commons-compress"], + [group: "org.apache.hadoop", module: "hadoop-mapreduce-client-app"], + [group: "org.apache.hadoop", module: "hadoop-mapreduce-client-core"], + [group: "org.apache.hadoop", module: "hadoop-mapreduce-client-hs"], + [group: "org.apache.hadoop", module: "hadoop-mapreduce-client-jobclient"], + [group: "org.apache.hadoop", module: "hadoop-yarn-server-tests"], + [group: "org.apache.httpcomponents", module: "httpclient"], + [group: "org.apache.zookeeper", module: "zookeeper"], + [group: "org.apache.curator", module: "curator-recipes"], + [group: "org.apache.curator", module: "curator-client"], + [group: "org.apache.curator", module: "curator-framework"], + [group: "org.apache.avro", module: "avro"], + [group: "log4j", module: "log4j"], + [group: "io.netty", module: "netty-all"], + [group: "io.netty", module: "netty"], + [group: "com.squareup.okhttp", module: "okhttp"], + [group: "com.google.guava", module: "guava"], + [group: "com.google.code.gson", module: "gson"], + [group: "javax.servlet.jsp", module: "jsp-api"], + [group: "org.fusesource.leveldbjni", module: "leveldbjni-all"], + [group: "commons-cli", module: "commons-cli"], + [group: "org.mortbay.jetty", module: "servlet-api"], + [group: "commons-logging", module: "commons-logging"], + [group: "org.slf4j", module: "slf4j-log4j12"], + [group: "commons-codec", module: "commons-codec"], + [group: "com.sun.jersey", module: "jersey-core"], + [group: "com.sun.jersey", module: "jersey-json"], + [group: "com.google.code.findbugs", module: "jsr305"], + [group: "com.sun.jersey", module: "jersey-json"], + [group: "com.nimbusds", module: "nimbus-jose-jwt"], + [group: "com.jcraft", module: "jsch"], + [group: "org.slf4j", module: "slf4j-api"], + ] + + 
hdfs2("org.apache.hadoop:hadoop-minicluster:2.8.5") { + commonExcludes.each { exclude it } + exclude group: "org.apache.commons", module: "commons-math3" + exclude group: "xmlenc", module: "xmlenc" + exclude group: "net.java.dev.jets3t", module: "jets3t" + exclude group: "org.apache.directory.server", module: "apacheds-i18n" + exclude group: "xerces", module: "xercesImpl" + } + + hdfs3("org.apache.hadoop:hadoop-minicluster:3.3.1") { + commonExcludes.each { exclude it } + exclude group: "dnsjava", module: "dnsjava" + exclude group: "com.google.inject.extensions", module: "guice-servlet" + exclude group: "com.google.inject", module: "guice" + exclude group: "com.microsoft.sqlserver", module: "mssql-jdbc" + exclude group: "com.sun.jersey.contribs", module: "jersey-guice" + exclude group: "com.zaxxer", module: "HikariCP-java7" + exclude group: "com.sun.jersey", module: "jersey-server" + exclude group: "org.bouncycastle", module: "bcpkix-jdk15on" + exclude group: "org.bouncycastle", module: "bcprov-jdk15on" + exclude group: "org.ehcache", module: "ehcache" + exclude group: "org.apache.geronimo.specs", module: "geronimo-jcache_1.0_spec" + exclude group: "org.xerial.snappy", module: "snappy-java" + } } tasks.named("shadowJar").configure { archiveClassifier.set("hdfs3") // fix issues with signed jars - relocate("org.apache.hadoop", "fixture.hdfs3.org.apache.hadoop") { exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" exclude "org.apache.hadoop.ipc.StandbyException" } - configurations << project.configurations.hdfs3 + configurations.add(project.configurations.hdfs3) } def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) { @@ -50,26 +104,15 @@ def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) { } archiveClassifier.set("hdfs2") from sourceSets.main.output - configurations << project.configurations.hdfs2 + configurations.add(project.configurations.hdfs2) } tasks.withType(ShadowJar).configureEach { dependencies { -// exclude(dependency('commons-io:commons-io:2.8.0')) exclude(dependency("com.carrotsearch.randomizedtesting:randomizedtesting-runner:.*")) exclude(dependency("junit:junit:.*")) - exclude(dependency("org.slf4j:slf4j-api:.*")) - exclude(dependency("com.google.guava:guava:.*")) - exclude(dependency("org.apache.commons:commons-compress:.*")) - exclude(dependency("commons-logging:commons-logging:.*")) - exclude(dependency("commons-codec:commons-codec:.*")) - exclude(dependency("org.apache.httpcomponents:httpclient:.*")) exclude(dependency("org.apache.httpcomponents:httpcore:.*")) exclude(dependency("org.apache.logging.log4j:log4j-1.2-api:.*")) - exclude(dependency("log4j:log4j:.*")) - exclude(dependency("io.netty:.*:.*")) - exclude(dependency("com.nimbusds:nimbus-jose-jwt:.*")) - exclude(dependency("commons-cli:commons-cli:1.2")) exclude(dependency("net.java.dev.jna:jna:.*")) exclude(dependency("org.objenesis:objenesis:.*")) exclude(dependency('com.fasterxml.jackson.core:.*:.*')) diff --git a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java index 47d239540814..4f559a5f3eae 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java @@ -16,7 +16,12 @@ import java.util.List; */ public class KnownIndexVersions { /** - * A sorted list of all known transport versions + * A sorted list of all known index versions */ public static final List ALL_VERSIONS = 
List.copyOf(IndexVersions.getAllVersions()); + + /** + * A sorted list of all known index versions that can be written to + */ + public static final List<IndexVersion> ALL_WRITE_VERSIONS = List.copyOf(IndexVersions.getAllWriteVersions()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index 449ecc099412..580eb6eacb27 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -12,6 +12,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -142,4 +143,88 @@ public abstract class MetadataMapperTestCase extends MapperServiceTestCase { ); assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage()); } + + public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { + assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); + IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0); + // we randomly also pick read-only versions to test that we can still parse the parameters for them + IndexVersion version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getLowestReadCompatibleVersion(), + previousVersion + ); + assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); + MapperService mapperService = createMapperService(version, mapping(b -> {})); + // these parameters were previously silently ignored, they will still be ignored in existing indices + String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; + for (String param : unsupportedParameters) { + String mappingAsString = "{\n" + + " \"_doc\" : {\n" + + " \"" + + fieldName() + + "\" : {\n" + + " \"" + + param + + "\" : \"any\"\n" + + " }\n" + + " }\n" + + "}"; + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); + } + }
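Editor's note: the three testTypeAndFriends... tests on either side of this note encode a three-phase lifecycle for the "type", "fields", "copy_to" and "boost" parameters on metadata fields: silently ignored before 8.6.0, a deprecation warning from 8.6.0 up to the Lucene 10 upgrade, and a mapping parse error afterwards. A compact sketch of that gate follows; the version ids and names are illustrative stand-ins, not the PR's code.

class MetadataParamGateSketch {
    enum Behavior { IGNORE, WARN, REJECT }

    static Behavior behaviorFor(int indexVersionId) {
        final int V_8_6_0 = 8_060_099;           // assumed id, for illustration only
        final int LUCENE_10_UPGRADE = 9_000_000; // assumed id of the 9.x cutover
        if (indexVersionId < V_8_6_0) return Behavior.IGNORE;         // pre-8.6.0 indices
        if (indexVersionId < LUCENE_10_UPGRADE) return Behavior.WARN; // 8.6.0 up to 9.0.0
        return Behavior.REJECT; // new indices reject the parameter outright
    }

    public static void main(String[] args) {
        System.out.println(behaviorFor(8_050_099)); // IGNORE
        System.out.println(behaviorFor(8_060_099)); // WARN
        System.out.println(behaviorFor(9_000_000)); // REJECT
    }
}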
+ + public void testTypeAndFriendsAreDeprecatedFrom_8_6_0_TO_9_0_0() throws IOException { + assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); + IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_6_0, previousVersion); + assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); + MapperService mapperService = createMapperService(version, mapping(b -> {})); + // these parameters were previously silently ignored, they are now deprecated in new indices + String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; + for (String param : unsupportedParameters) { + String mappingAsString = "{\n" + + " \"_doc\" : {\n" + + " \"" + + fieldName() + + "\" : {\n" + + " \"" + + param + + "\" : \"any\"\n" + + " }\n" + + " }\n" + + "}"; + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); + assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future"); + } + } + + public void testTypeAndFriendsThrow_After_9_0_0() throws IOException { + assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); + IndexVersion version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); + MapperService mapperService = createMapperService(version, mapping(b -> {})); + // these parameters were deprecated, they now should throw an error in new indices + String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; + for (String param : unsupportedParameters) { + String mappingAsString = "{\n" + + " \"_doc\" : {\n" + + " \"" + + fieldName() + + "\" : {\n" + + " \"" + + param + + "\" : \"any\"\n" + + " }\n" + + " }\n" + + "}"; + expectThrows( + MapperParsingException.class, + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)) + ); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index 20cb66affdde..d239c6453a7f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -241,6 +241,10 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { return serviceHolder.idxSettings; } + protected static MapperService mapperService() { + return serviceHolder.mapperService; + } + protected static String expectedFieldName(String builderFieldName) { return ALIAS_TO_CONCRETE_FIELD_NAME.getOrDefault(builderFieldName, builderFieldName); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java index f83e7e17f9aa..592cffac3355 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java @@ -24,32 +24,38 @@ import java.util.stream.Collectors; public class IndexVersionUtils { private static final List<IndexVersion> ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; + private static final List<IndexVersion> ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; /** Returns all released versions */ public static List<IndexVersion> allReleasedVersions() { return ALL_VERSIONS; } - /** Returns the oldest known {@link IndexVersion} */ - public static IndexVersion getFirstVersion() { + /** Returns the oldest known {@link IndexVersion}. This version can only be read from and not written to */ + public static IndexVersion getLowestReadCompatibleVersion() { return ALL_VERSIONS.get(0); } + /** Returns the oldest known {@link IndexVersion} that can be written to */ + public static IndexVersion getLowestWriteCompatibleVersion() { + return ALL_WRITE_VERSIONS.get(0); + } +
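Editor's note: the getLowestReadCompatibleVersion/getLowestWriteCompatibleVersion split above separates versions that can only be read from versions an index may still be created with. A small usage sketch of why the random pickers need both pools (plain integers stand in for IndexVersion objects; in the real utility the lists come from KnownIndexVersions):

import java.util.List;
import java.util.Random;

class VersionPoolsSketch {
    static final List<Integer> ALL = List.of(7_000_099, 8_000_099, 9_000_000); // all readable, sorted
    static final List<Integer> WRITABLE = ALL.subList(1, ALL.size());          // writable suffix only

    static int randomVersion(Random r)      { return ALL.get(r.nextInt(ALL.size())); }
    static int randomWriteVersion(Random r) { return WRITABLE.get(r.nextInt(WRITABLE.size())); }

    public static void main(String[] args) {
        Random r = new Random(0);
        System.out.println("any known version (parsing tests): " + randomVersion(r));
        System.out.println("safe for index-creation tests: " + randomWriteVersion(r));
    }
}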
+ /** Returns a random {@link IndexVersion} from all available versions. */ public static IndexVersion randomVersion() { return ESTestCase.randomFrom(ALL_VERSIONS); } + + /** Returns a random {@link IndexVersion} from all versions that can be written to. */ + public static IndexVersion randomWriteVersion() { + return ESTestCase.randomFrom(ALL_WRITE_VERSIONS); + } + /** Returns a random {@link IndexVersion} from all available versions without the ignore set */ public static IndexVersion randomVersion(Set<IndexVersion> ignore) { return ESTestCase.randomFrom(ALL_VERSIONS.stream().filter(v -> ignore.contains(v) == false).collect(Collectors.toList())); } - /** Returns a random {@link IndexVersion} from all available versions. */ - public static IndexVersion randomVersion(Random random) { - return ALL_VERSIONS.get(random.nextInt(ALL_VERSIONS.size())); - } - /** Returns a random {@link IndexVersion} between minVersion and maxVersion (inclusive). */ public static IndexVersion randomVersionBetween(Random random, @Nullable IndexVersion minVersion, @Nullable IndexVersion maxVersion) { if (minVersion != null && maxVersion != null && maxVersion.before(minVersion)) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/jar/JarUtils.java b/test/framework/src/main/java/org/elasticsearch/test/jar/JarUtils.java index e5bdd66e949f..0da392cb7fb0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/jar/JarUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/jar/JarUtils.java @@ -9,13 +9,24 @@ package org.elasticsearch.test.jar; +import org.elasticsearch.test.PrivilegedOperations.ClosableURLClassLoader; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.List; import java.util.Map; +import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.jar.Manifest; @@ -85,6 +96,28 @@ public final class JarUtils { createJarWithEntries(jarfile, map); }
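Editor's note: the loadJar helper added just below wraps the standard URLClassLoader pattern in the test framework's ClosableURLClassLoader. The sketch here shows the same pattern with plain JDK API only; the jar path and class name are hypothetical placeholders.

import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;

class LoadJarSketch {
    public static void main(String[] args) throws Exception {
        Path jar = Path.of("example.jar"); // hypothetical jar on disk
        URL[] urls = new URL[] { jar.toUri().toURL() };
        // Same parent-loader choice as the helper: the defining class's own loader.
        try (URLClassLoader loader = URLClassLoader.newInstance(urls, LoadJarSketch.class.getClassLoader())) {
            Class<?> c = loader.loadClass("p.ExampleClass"); // hypothetical class inside the jar
            System.out.println(c.getName());
        }
    }
}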
+ + /** + * Creates a class loader for the given jar file. + * @param path Path to the jar file to load + * @return A URLClassLoader that will load classes from the jar. It should be closed when no longer needed. + */ + public static ClosableURLClassLoader loadJar(Path path) { + try { + URL[] urls = new URL[] { path.toUri().toURL() }; + return new ClosableURLClassLoader(URLClassLoader.newInstance(urls, JarUtils.class.getClassLoader())); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + } + + public static ModuleLayer.Controller loadModule(Path path, ClassLoader loader, String name) { + var finder = ModuleFinder.of(path.getParent()); + var cf = Configuration.resolveAndBind(finder, List.of(ModuleLayer.boot().configuration()), ModuleFinder.of(), Set.of(name)); + return AccessController.doPrivileged( + (PrivilegedAction<ModuleLayer.Controller>) () -> ModuleLayer.defineModulesWithOneLoader(cf, List.of(ModuleLayer.boot()), loader) + ); + } + @FunctionalInterface interface UncheckedIOFunction<T, R> { R apply(T t) throws IOException; } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java new file mode 100644 index 000000000000..39a6fa1e4b34 --- /dev/null +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.search; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportMessageListener; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; + +public class AsyncSearchErrorTraceIT extends ESIntegTestCase { + + @Override + protected boolean addMockHttpTransport() { + return false; // enable http + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(AsyncSearch.class); + } + + private AtomicBoolean transportMessageHasStackTrace; + + @Before + private void setupMessageListener() { + internalCluster().getDataNodeInstances(TransportService.class).forEach(ts -> { + ts.addMessageListener(new TransportMessageListener() { + @Override + public void onResponseSent(long requestId, String action, Exception error) { + TransportMessageListener.super.onResponseSent(requestId, action, error); + if (action.startsWith("indices:data/read/search")) { + Optional<Throwable> throwable = ExceptionsHelper.unwrapCausesAndSuppressed( error, t -> t.getStackTrace().length > 0 ); + transportMessageHasStackTrace.set(throwable.isPresent()); + } + } + }); + }); + } + + private void setupIndexWithDocs() { + createIndex("test1", "test2"); + indexRandom( true, prepareIndex("test1").setId("1").setSource("field", "foo"), prepareIndex("test2").setId("10").setSource("field", 5) ); + refresh(); + }
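Editor's note: the five tests that follow all probe a single rule, observed through the TransportMessageListener registered in setupMessageListener above: stack frames cross the transport layer only when the submit request carries error_trace=true, and the parameter on the later GET has no effect on what the data node already sent. A one-method decision sketch of that expectation (illustrative, not the server's code):

class ErrorTraceMatrixSketch {
    // null models an absent error_trace parameter; the default behaves like false.
    static boolean stackTraceSentOverTransport(Boolean errorTraceOnSubmit) {
        return Boolean.TRUE.equals(errorTraceOnSubmit);
    }

    public static void main(String[] args) {
        System.out.println(stackTraceSentOverTransport(null));  // false -> the assertFalse cases below
        System.out.println(stackTraceSentOverTransport(true));  // true  -> the assertTrue cases below
        System.out.println(stackTraceSentOverTransport(false)); // false -> the assertFalse cases below
    }
}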
transportMessageHasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_async_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("keep_on_completion", "true"); + searchRequest.addParameter("wait_for_completion_timeout", "0ms"); + Map responseEntity = performRequestAndGetResponseEntityAfterDelay(searchRequest, TimeValue.ZERO); + String asyncExecutionId = (String) responseEntity.get("id"); + Request request = new Request("GET", "/_async_search/" + asyncExecutionId); + while (responseEntity.get("is_running") instanceof Boolean isRunning && isRunning) { + responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); + } + // check that the stack trace was not sent from the data node to the coordinating node + assertFalse(transportMessageHasStackTrace.get()); + } + + public void testAsyncSearchFailingQueryErrorTraceTrue() throws IOException, InterruptedException { + transportMessageHasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_async_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("error_trace", "true"); + searchRequest.addParameter("keep_on_completion", "true"); + searchRequest.addParameter("wait_for_completion_timeout", "0ms"); + Map responseEntity = performRequestAndGetResponseEntityAfterDelay(searchRequest, TimeValue.ZERO); + String asyncExecutionId = (String) responseEntity.get("id"); + Request request = new Request("GET", "/_async_search/" + asyncExecutionId); + request.addParameter("error_trace", "true"); + while (responseEntity.get("is_running") instanceof Boolean isRunning && isRunning) { + responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); + } + // check that the stack trace was sent from the data node to the coordinating node + assertTrue(transportMessageHasStackTrace.get()); + } + + public void testAsyncSearchFailingQueryErrorTraceFalse() throws IOException, InterruptedException { + transportMessageHasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_async_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("error_trace", "false"); + searchRequest.addParameter("keep_on_completion", "true"); + searchRequest.addParameter("wait_for_completion_timeout", "0ms"); + Map responseEntity = performRequestAndGetResponseEntityAfterDelay(searchRequest, TimeValue.ZERO); + String asyncExecutionId = (String) responseEntity.get("id"); + Request request = new Request("GET", "/_async_search/" + asyncExecutionId); + request.addParameter("error_trace", "false"); + while (responseEntity.get("is_running") instanceof Boolean isRunning && isRunning) { + responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); + } + // check that the stack trace was not sent from the data node to the coordinating node + assertFalse(transportMessageHasStackTrace.get()); + } + + public void testAsyncSearchFailingQueryErrorTraceFalseOnSubmitAndTrueOnGet() throws IOException, InterruptedException { + transportMessageHasStackTrace = new 
AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_async_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("error_trace", "false"); + searchRequest.addParameter("keep_on_completion", "true"); + searchRequest.addParameter("wait_for_completion_timeout", "0ms"); + Map responseEntity = performRequestAndGetResponseEntityAfterDelay(searchRequest, TimeValue.ZERO); + String asyncExecutionId = (String) responseEntity.get("id"); + Request request = new Request("GET", "/_async_search/" + asyncExecutionId); + request.addParameter("error_trace", "true"); + while (responseEntity.get("is_running") instanceof Boolean isRunning && isRunning) { + responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); + } + // check that the stack trace was not sent from the data node to the coordinating node + assertFalse(transportMessageHasStackTrace.get()); + } + + public void testAsyncSearchFailingQueryErrorTraceTrueOnSubmitAndFalseOnGet() throws IOException, InterruptedException { + transportMessageHasStackTrace = new AtomicBoolean(); + setupIndexWithDocs(); + + Request searchRequest = new Request("POST", "/_async_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "simple_query_string" : { + "query": "foo", + "fields": ["field"] + } + } + } + """); + searchRequest.addParameter("error_trace", "true"); + searchRequest.addParameter("keep_on_completion", "true"); + searchRequest.addParameter("wait_for_completion_timeout", "0ms"); + Map responseEntity = performRequestAndGetResponseEntityAfterDelay(searchRequest, TimeValue.ZERO); + String asyncExecutionId = (String) responseEntity.get("id"); + Request request = new Request("GET", "/_async_search/" + asyncExecutionId); + request.addParameter("error_trace", "false"); + while (responseEntity.get("is_running") instanceof Boolean isRunning && isRunning) { + responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); + } + // check that the stack trace was sent from the data node to the coordinating node + assertTrue(transportMessageHasStackTrace.get()); + } + + private Map performRequestAndGetResponseEntityAfterDelay(Request r, TimeValue sleep) throws IOException, + InterruptedException { + Thread.sleep(sleep.millis()); + Response response = getRestClient().performRequest(r); + XContentType entityContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); + return XContentHelper.convertToMap(entityContentType.xContent(), response.getEntity().getContent(), false); + } +} diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java index bd09d8f7740a..952febd46c34 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java @@ -55,6 +55,9 @@ public final class RestSubmitAsyncSearchAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + if (client.threadPool() != null && client.threadPool().getThreadContext() != null) { + 
client.threadPool().getThreadContext().setErrorTraceTransportHeader(request); + } SubmitAsyncSearchRequest submit = new SubmitAsyncSearchRequest(); IntConsumer setSize = size -> submit.getSearchRequest().source().size(size); // for simplicity, we share parsing with ordinary search. That means a couple of unsupported parameters, like scroll diff --git a/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java b/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java index e5dfea7b772f..4c6190447509 100644 --- a/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java +++ b/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java @@ -26,6 +26,9 @@ import org.junit.Before; import org.junit.ClassRule; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Locale; import java.util.Map; @@ -33,11 +36,14 @@ import java.util.Map; public class ShardChangesRestIT extends ESRestTestCase { private static final String CCR_SHARD_CHANGES_ENDPOINT = "/%s/ccr/shard_changes"; private static final String BULK_INDEX_ENDPOINT = "/%s/_bulk"; + private static final String DATA_STREAM_ENDPOINT = "/_data_stream/%s"; + private static final String INDEX_TEMPLATE_ENDPOINT = "/_index_template/%s"; private static final String[] SHARD_RESPONSE_FIELDS = new String[] { "took_in_millis", "operations", "shard_id", + "index_abstraction", "index", "settings_version", "max_seq_no_of_updates_or_deletes", @@ -46,6 +52,11 @@ public class ShardChangesRestIT extends ESRestTestCase { "aliases_version", "max_seq_no", "global_checkpoint" }; + + private static final String BULK_INDEX_TEMPLATE = """ + { "index": { "op_type": "create" } } + { "@timestamp": "%s", "name": "%s" } + """;; private static final String[] NAMES = { "skywalker", "leia", "obi-wan", "yoda", "chewbacca", "r2-d2", "c-3po", "darth-vader" }; @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() @@ -99,13 +110,86 @@ public class ShardChangesRestIT extends ESRestTestCase { createIndex(indexName, settings, mappings); assertTrue(indexExists(indexName)); - assertOK(client().performRequest(bulkRequest(indexName, randomIntBetween(10, 20)))); + assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); final Response response = client().performRequest(shardChangesRequest); assertOK(response); assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false) + XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), + indexName + ); + } + + public void testDataStreamShardChangesDefaultParams() throws IOException { + final String templateName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); + assertOK(createIndexTemplate(templateName, """ + { + "index_patterns": [ "test-*-*" ], + "data_stream": {}, + "priority": 100, + "template": { + "mappings": { + "properties": { + "@timestamp": { + "type": "date" + }, + "name": { + "type": "keyword" + } + } + } + } + }""")); + + final String dataStreamName = "test-" + + randomAlphanumericOfLength(5).toLowerCase(Locale.ROOT) + + "-" + + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + assertOK(createDataStream(dataStreamName)); 
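
One detail worth calling out in the new `BULK_INDEX_TEMPLATE` (defined earlier in this test): data streams reject plain `index` actions unless the op type is `create`, and their mappings require an `@timestamp` field on every document, which is why the shared template carries both. A small illustration of what one formatted entry looks like; the timestamp value here is arbitrary:

```java
// Inside ShardChangesRestIT: expanding one BULK_INDEX_TEMPLATE entry.
String entry = String.format(
    Locale.ROOT,
    BULK_INDEX_TEMPLATE,
    Instant.ofEpochMilli(1000L).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME),
    "skywalker"
);
// entry now holds the two NDJSON lines:
// { "index": { "op_type": "create" } }
// { "@timestamp": "1970-01-01T00:00:01Z", "name": "skywalker" }
```
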
+ + assertOK(bulkIndex(dataStreamName, randomIntBetween(10, 20))); + + final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(dataStreamName)); + final Response response = client().performRequest(shardChangesRequest); + assertOK(response); + assertShardChangesResponse( + XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), + dataStreamName + ); + } + + public void testIndexAliasShardChangesDefaultParams() throws IOException { + final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + final String aliasName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); + final Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "1s") + .build(); + final String mappings = """ + { + "properties": { + "name": { + "type": "keyword" + } + } + } + """; + createIndex(indexName, settings, mappings); + assertTrue(indexExists(indexName)); + + final Request putAliasRequest = new Request("PUT", "/" + indexName + "/_alias/" + aliasName); + assertOK(client().performRequest(putAliasRequest)); + + assertOK(bulkIndex(aliasName, randomIntBetween(10, 20))); + + final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(aliasName)); + final Response response = client().performRequest(shardChangesRequest); + assertOK(response); + assertShardChangesResponse( + XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), + aliasName ); } @@ -121,7 +205,7 @@ public class ShardChangesRestIT extends ESRestTestCase { ); assertTrue(indexExists(indexName)); - assertOK(client().performRequest(bulkRequest(indexName, randomIntBetween(100, 200)))); + assertOK(bulkIndex(indexName, randomIntBetween(100, 200))); final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); shardChangesRequest.addParameter("from_seq_no", "0"); @@ -132,7 +216,8 @@ public class ShardChangesRestIT extends ESRestTestCase { final Response response = client().performRequest(shardChangesRequest); assertOK(response); assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false) + XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), + indexName ); } @@ -148,7 +233,7 @@ public class ShardChangesRestIT extends ESRestTestCase { ); assertTrue(indexExists(indexName)); - assertOK(client().performRequest(bulkRequest(indexName, randomIntBetween(100, 200)))); + assertOK(bulkIndex(indexName, randomIntBetween(100, 200))); final Request firstRequest = new Request("GET", shardChangesEndpoint(indexName)); firstRequest.addParameter("from_seq_no", "0"); @@ -159,7 +244,8 @@ public class ShardChangesRestIT extends ESRestTestCase { final Response firstResponse = client().performRequest(firstRequest); assertOK(firstResponse); assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(firstResponse.getEntity()), false) + XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(firstResponse.getEntity()), false), + indexName ); final Request secondRequest = new Request("GET", shardChangesEndpoint(indexName)); @@ -171,7 +257,8 @@ public class ShardChangesRestIT extends ESRestTestCase { final Response secondResponse = 
client().performRequest(secondRequest); assertOK(secondResponse); assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(secondResponse.getEntity()), false) + XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(secondResponse.getEntity()), false), + indexName ); } @@ -231,17 +318,36 @@ public class ShardChangesRestIT extends ESRestTestCase { assertResponseException(ex, RestStatus.BAD_REQUEST, "Failed to process shard changes for index [" + indexName + "]"); } - private static Request bulkRequest(final String indexName, int numberOfDocuments) { + private static Response bulkIndex(final String indexName, int numberOfDocuments) throws IOException { final StringBuilder sb = new StringBuilder(); + long timestamp = System.currentTimeMillis(); for (int i = 0; i < numberOfDocuments; i++) { - sb.append(String.format(Locale.ROOT, "{ \"index\": { \"_id\": \"%d\" } }\n{ \"name\": \"%s\" }\n", i + 1, randomFrom(NAMES))); + sb.append( + String.format( + Locale.ROOT, + BULK_INDEX_TEMPLATE, + Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME), + randomFrom(NAMES) + ) + ); + timestamp += 1000; // 1 second } final Request request = new Request("POST", bulkEndpoint(indexName)); request.setJsonEntity(sb.toString()); request.addParameter("refresh", "true"); - return request; + return client().performRequest(request); + } + + private Response createDataStream(final String dataStreamName) throws IOException { + return client().performRequest(new Request("PUT", dataStreamEndpoint(dataStreamName))); + } + + private static Response createIndexTemplate(final String templateName, final String mappings) throws IOException { + final Request request = new Request("PUT", indexTemplateEndpoint(templateName)); + request.setJsonEntity(mappings); + return client().performRequest(request); } private static String shardChangesEndpoint(final String indexName) { @@ -252,16 +358,28 @@ public class ShardChangesRestIT extends ESRestTestCase { return String.format(Locale.ROOT, BULK_INDEX_ENDPOINT, indexName); } + private static String dataStreamEndpoint(final String dataStreamName) { + return String.format(Locale.ROOT, DATA_STREAM_ENDPOINT, dataStreamName); + } + + private static String indexTemplateEndpoint(final String templateName) { + return String.format(Locale.ROOT, INDEX_TEMPLATE_ENDPOINT, templateName); + } + private void assertResponseException(final ResponseException ex, final RestStatus restStatus, final String error) { assertEquals(restStatus.getStatus(), ex.getResponse().getStatusLine().getStatusCode()); assertThat(ex.getMessage(), Matchers.containsString(error)); } - private void assertShardChangesResponse(final Map shardChangesResponseBody) { + private void assertShardChangesResponse(final Map shardChangesResponseBody, final String indexAbstractionName) { for (final String fieldName : SHARD_RESPONSE_FIELDS) { final Object fieldValue = shardChangesResponseBody.get(fieldName); assertNotNull("Field " + fieldName + " is missing or has a null value.", fieldValue); + if ("index_abstraction".equals(fieldName)) { + assertEquals(indexAbstractionName, fieldValue); + } + if ("operations".equals(fieldName)) { if (fieldValue instanceof List operationsList) { assertFalse("Field 'operations' is empty.", operationsList.isEmpty()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java index 84171ebce162..4a1d26d05a98 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java @@ -10,6 +10,8 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -32,6 +34,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Comparator; import java.util.List; +import java.util.Locale; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -42,10 +45,14 @@ import java.util.function.Supplier; import static org.elasticsearch.rest.RestRequest.Method.GET; /** - * A REST handler that retrieves shard changes in a specific index whose name is provided as a parameter. - * It handles GET requests to the "/{index}/ccr/shard_changes" endpoint retrieving shard-level changes, - * such as translog operations, mapping version, settings version, aliases version, the global checkpoint, - * maximum sequence number and maximum sequence number of updates or deletes. + * A REST handler that retrieves shard changes in a specific index, data stream or alias whose name is + * provided as a parameter. It handles GET requests to the "/{index}/ccr/shard_changes" endpoint retrieving + * shard-level changes, such as Translog operations, mapping version, settings version, aliases version, + * the global checkpoint, maximum sequence number and maximum sequence number of updates or deletes. + *
+ * <p>
+ * In the case of a data stream, the first backing index is considered the target for retrieving shard changes. + * In the case of an alias, the first index that the alias points to is considered the target for retrieving + * shard changes. *
 * <p>
* Note: This handler is only available for snapshot builds. */ @@ -84,32 +91,36 @@ public class RestShardChangesAction extends BaseRestHandler { */ @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { - final var indexName = restRequest.param(INDEX_PARAM_NAME); + final var indexAbstractionName = restRequest.param(INDEX_PARAM_NAME); final var fromSeqNo = restRequest.paramAsLong(FROM_SEQ_NO_PARAM_NAME, DEFAULT_FROM_SEQ_NO); final var maxBatchSize = restRequest.paramAsSize(MAX_BATCH_SIZE_PARAM_NAME, DEFAULT_MAX_BATCH_SIZE); final var pollTimeout = restRequest.paramAsTime(POLL_TIMEOUT_PARAM_NAME, DEFAULT_POLL_TIMEOUT); final var maxOperationsCount = restRequest.paramAsInt(MAX_OPERATIONS_COUNT_PARAM_NAME, DEFAULT_MAX_OPERATIONS_COUNT); - final CompletableFuture indexUUIDCompletableFuture = asyncGetIndexUUID( + // NOTE: we first retrieve the concrete index name in case we are dealing with an alias or data stream. + // Then we use the concrete index name to retrieve the index UUID and shard stats. + final CompletableFuture indexNameCompletableFuture = asyncGetIndexName( client, - indexName, + indexAbstractionName, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME) ); - final CompletableFuture shardStatsCompletableFuture = asyncShardStats( - client, - indexName, - client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME) + final CompletableFuture indexUUIDCompletableFuture = indexNameCompletableFuture.thenCompose( + concreteIndexName -> asyncGetIndexUUID(client, concreteIndexName, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)) + ); + final CompletableFuture shardStatsCompletableFuture = indexNameCompletableFuture.thenCompose( + concreteIndexName -> asyncShardStats(client, concreteIndexName, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)) ); return channel -> CompletableFuture.allOf(indexUUIDCompletableFuture, shardStatsCompletableFuture).thenRun(() -> { try { + final String concreteIndexName = indexNameCompletableFuture.get(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); final String indexUUID = indexUUIDCompletableFuture.get(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); final ShardStats shardStats = shardStatsCompletableFuture.get(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); final ShardId shardId = shardStats.getShardRouting().shardId(); final String expectedHistoryUUID = shardStats.getCommitStats().getUserData().get(Engine.HISTORY_UUID_KEY); final ShardChangesAction.Request shardChangesRequest = shardChangesRequest( - indexName, + concreteIndexName, indexUUID, shardId, expectedHistoryUUID, @@ -121,7 +132,12 @@ public class RestShardChangesAction extends BaseRestHandler { client.execute(ShardChangesAction.INSTANCE, shardChangesRequest, new RestActionListener<>(channel) { @Override protected void processResponse(final ShardChangesAction.Response response) { - channel.sendResponse(new RestResponse(RestStatus.OK, shardChangesResponseToXContent(response, indexName, shardId))); + channel.sendResponse( + new RestResponse( + RestStatus.OK, + shardChangesResponseToXContent(response, indexAbstractionName, concreteIndexName, shardId) + ) + ); } }); @@ -132,7 +148,12 @@ public class RestShardChangesAction extends BaseRestHandler { throw new IllegalStateException("Timeout while waiting for shard stats or index UUID", te); } }).exceptionally(ex -> { - channel.sendResponse(new RestResponse(RestStatus.BAD_REQUEST, "Failed to process shard changes for index [" + indexName + "]")); + channel.sendResponse( + new RestResponse( 
+ RestStatus.BAD_REQUEST, + "Failed to process shard changes for index [" + indexAbstractionName + "] " + ex.getMessage() + ) + ); return null; }); } @@ -175,17 +196,20 @@ public class RestShardChangesAction extends BaseRestHandler { * Converts the response to XContent JSOn format. * * @param response The ShardChangesAction response. - * @param indexName The name of the index. + * @param indexAbstractionName The name of the index abstraction. + * @param concreteIndexName The name of the index. * @param shardId The ShardId. */ private static XContentBuilder shardChangesResponseToXContent( final ShardChangesAction.Response response, - final String indexName, + final String indexAbstractionName, + final String concreteIndexName, final ShardId shardId ) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); - builder.field("index", indexName); + builder.field("index_abstraction", indexAbstractionName); + builder.field("index", concreteIndexName); builder.field("shard_id", shardId); builder.field("mapping_version", response.getMappingVersion()); builder.field("settings_version", response.getSettingsVersion()); @@ -249,26 +273,60 @@ public class RestShardChangesAction extends BaseRestHandler { }, executorService); } + /** + * Asynchronously retrieves the index name for a given index, alias or data stream. + * If the name represents a data stream, the name of the first backing index is returned. + * If the name represents an alias, the name of the first index that the alias points to is returned. + * + * @param client The NodeClient for executing the asynchronous request. + * @param indexAbstractionName The name of the index, alias or data stream. + * @return A CompletableFuture that completes with the retrieved index name. + */ + private static CompletableFuture asyncGetIndexName( + final NodeClient client, + final String indexAbstractionName, + final ExecutorService executorService + ) { + return supplyAsyncTask(() -> { + final ClusterState clusterState = client.admin() + .cluster() + .prepareState(new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)) + .get(GET_INDEX_UUID_TIMEOUT) + .getState(); + final IndexAbstraction indexAbstraction = clusterState.metadata().getIndicesLookup().get(indexAbstractionName); + if (indexAbstraction == null) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Invalid index or data stream name [%s]", indexAbstractionName) + ); + } + if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM + || indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) { + return indexAbstraction.getIndices().getFirst().getName(); + } + return indexAbstractionName; + }, executorService, "Error while retrieving index name for index or data stream [" + indexAbstractionName + "]"); + } + /** * Asynchronously retrieves the shard stats for a given index using an executor service. * * @param client The NodeClient for executing the asynchronous request. - * @param indexName The name of the index for which to retrieve shard statistics. + * @param concreteIndexName The name of the index for which to retrieve shard statistics. * @param executorService The executorService service for executing the asynchronous task. * @return A CompletableFuture that completes with the retrieved ShardStats. * @throws ElasticsearchException If an error occurs while retrieving shard statistics. 
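
Because the future-composition change is easy to lose in the surrounding hunks, here is a condensed restatement of the new flow in `prepareRequest` (names abbreviated, timeouts and error handling omitted):

```java
// Resolve the abstraction (index, alias, or data stream) to a concrete index
// name once; both downstream lookups compose on that single future instead of
// being invoked directly with the raw abstraction name as before.
CompletableFuture<String> concreteName = asyncGetIndexName(client, indexAbstractionName, executor);
CompletableFuture<String> indexUUID = concreteName.thenCompose(n -> asyncGetIndexUUID(client, n, executor));
CompletableFuture<ShardStats> shardStats = concreteName.thenCompose(n -> asyncShardStats(client, n, executor));
CompletableFuture.allOf(indexUUID, shardStats)
    .thenRun(() -> { /* build and execute the ShardChangesAction.Request */ });
```
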
*/ private static CompletableFuture asyncShardStats( final NodeClient client, - final String indexName, + final String concreteIndexName, final ExecutorService executorService ) { return supplyAsyncTask( - () -> Arrays.stream(client.admin().indices().prepareStats(indexName).clear().get(SHARD_STATS_TIMEOUT).getShards()) + () -> Arrays.stream(client.admin().indices().prepareStats(concreteIndexName).clear().get(SHARD_STATS_TIMEOUT).getShards()) .max(Comparator.comparingLong(shardStats -> shardStats.getCommitStats().getGeneration())) - .orElseThrow(() -> new ElasticsearchException("Unable to retrieve shard stats for index: " + indexName)), + .orElseThrow(() -> new ElasticsearchException("Unable to retrieve shard stats for index: " + concreteIndexName)), executorService, - "Error while retrieving shard stats for index [" + indexName + "]" + "Error while retrieving shard stats for index [" + concreteIndexName + "]" ); } @@ -276,25 +334,25 @@ public class RestShardChangesAction extends BaseRestHandler { * Asynchronously retrieves the index UUID for a given index using an executor service. * * @param client The NodeClient for executing the asynchronous request. - * @param indexName The name of the index for which to retrieve the index UUID. + * @param concreteIndexName The name of the index for which to retrieve the index UUID. * @param executorService The executorService service for executing the asynchronous task. * @return A CompletableFuture that completes with the retrieved index UUID. * @throws ElasticsearchException If an error occurs while retrieving the index UUID. */ private static CompletableFuture asyncGetIndexUUID( final NodeClient client, - final String indexName, + final String concreteIndexName, final ExecutorService executorService ) { return supplyAsyncTask( () -> client.admin() .indices() .prepareGetIndex() - .setIndices(indexName) + .setIndices(concreteIndexName) .get(GET_INDEX_UUID_TIMEOUT) - .getSetting(indexName, IndexMetadata.SETTING_INDEX_UUID), + .getSetting(concreteIndexName, IndexMetadata.SETTING_INDEX_UUID), executorService, - "Error while retrieving index UUID for index [" + indexName + "]" + "Error while retrieving index UUID for index [" + concreteIndexName + "]" ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java index 24f0a5243620..92bb03788849 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java @@ -159,13 +159,15 @@ public final class DocumentPermissions implements CacheKey { if (queryBuilder != null) { failIfQueryUsesClient(queryBuilder, context); Query roleQuery = context.toQuery(queryBuilder).query(); - filter.add(roleQuery, SHOULD); - if (context.nestedLookup() != NestedLookup.EMPTY) { + if (context.nestedLookup() == NestedLookup.EMPTY) { + filter.add(roleQuery, SHOULD); + } else { if (NestedHelper.mightMatchNestedDocs(roleQuery, context)) { roleQuery = new BooleanQuery.Builder().add(roleQuery, FILTER) .add(Queries.newNonNestedFilter(context.indexVersionCreated()), FILTER) .build(); } + filter.add(roleQuery, SHOULD); // If access is allowed on root doc then also access is allowed on all nested docs of that root document: BitSetProducer rootDocs = 
context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); ToChildBlockJoinQuery includeNestedDocs = new ToChildBlockJoinQuery(roleQuery, rootDocs); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index d0d5e463f965..9704335776f1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -596,8 +596,6 @@ public class SSLService { sslSettingsMap.put(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX, settings.getByPrefix(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX)); sslSettingsMap.put(XPackSettings.TRANSPORT_SSL_PREFIX, settings.getByPrefix(XPackSettings.TRANSPORT_SSL_PREFIX)); sslSettingsMap.putAll(getTransportProfileSSLSettings(settings)); - // Mount Elastic Inference Service (part of the Inference plugin) configuration - sslSettingsMap.put("xpack.inference.elastic.http.ssl", settings.getByPrefix("xpack.inference.elastic.http.ssl.")); // Only build remote cluster server SSL if the port is enabled if (REMOTE_CLUSTER_SERVER_ENABLED.get(settings)) { sslSettingsMap.put(XPackSettings.REMOTE_CLUSTER_SERVER_SSL_PREFIX, getRemoteClusterServerSslSettings(settings)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index d50f7bb27a5d..1f2c89c473a6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -623,7 +623,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin } @SuppressWarnings("unchecked") - protected List filterPlugins(Class type) { + private List filterPlugins(Class type) { return plugins.stream().filter(x -> type.isAssignableFrom(x.getClass())).map(p -> ((T) p)).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 4751f66cf548..89b42228d891 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -23,9 +23,12 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TotalHitCountCollectorManager; import org.apache.lucene.store.Directory; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; @@ -33,9 +36,11 @@ import org.elasticsearch.index.mapper.FieldMapper; import 
org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperMetrics; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MockFieldMapper; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.TermsQueryBuilder; @@ -45,6 +50,9 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.AbstractBuilderTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; @@ -52,6 +60,8 @@ import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContext import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; +import java.io.IOException; +import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -340,6 +350,176 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT directory.close(); } + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("f1") + .field("type", "keyword") + .endObject() + .startObject("nested1") + .field("type", "nested") + .startObject("properties") + .startObject("field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(builder)), + MapperService.MergeReason.MAPPING_UPDATE + ); + } + + public void testDLSWithNestedDocs() throws Exception { + Directory directory = newDirectory(); + try ( + IndexWriter iw = new IndexWriter( + directory, + new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE) + ) + ) { + var parser = mapperService().documentParser(); + String doc = """ + { + "f1": "value", + "nested1": [ + { + "field": "0" + }, + { + "field": "1" + }, + {} + ] + } + """; + var parsedDoc = parser.parseDocument( + new SourceToParse("0", new BytesArray(doc), XContentType.JSON), + mapperService().mappingLookup() + ); + iw.addDocuments(parsedDoc.docs()); + + doc = """ + { + "nested1": [ + { + "field": "12" + }, + { + "field": "13" + }, + {} + ] + } + """; + parsedDoc = parser.parseDocument( + new SourceToParse("1", new BytesArray(doc), XContentType.JSON), + mapperService().mappingLookup() + ); + iw.addDocuments(parsedDoc.docs()); + + doc = """ + { + "f1": "value", + "nested1": [ + { + "field": "12" + }, + {} + ] + } + """; + parsedDoc = parser.parseDocument( + new SourceToParse("2", new BytesArray(doc), XContentType.JSON), + mapperService().mappingLookup() + ); + iw.addDocuments(parsedDoc.docs()); + + doc = """ + { + "nested1": 
[ + { + "field": "12" + }, + {} + ] + } + """; + parsedDoc = parser.parseDocument( + new SourceToParse("3", new BytesArray(doc), XContentType.JSON), + mapperService().mappingLookup() + ); + iw.addDocuments(parsedDoc.docs()); + + iw.commit(); + } + + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap( + DirectoryReader.open(directory), + new ShardId(indexSettings().getIndex(), 0) + ); + SearchExecutionContext context = createSearchExecutionContext(new IndexSearcher(directoryReader)); + + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); + final Authentication authentication = AuthenticationTestHelper.builder().build(); + new AuthenticationContextSerializer().writeToContext(authentication, threadContext); + + Set queries = new HashSet<>(); + queries.add(new BytesArray("{\"bool\": { \"must_not\": { \"exists\": { \"field\": \"f1\" } } } }")); + IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl( + FieldPermissions.DEFAULT, + DocumentPermissions.filteredBy(queries) + ); + + DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); + + final MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(true); + ScriptService scriptService = mock(ScriptService.class); + SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper( + s -> context, + bitsetCache, + securityContext, + licenseState, + scriptService + ) { + + @Override + protected IndicesAccessControl getIndicesAccessControl() { + IndicesAccessControl indicesAccessControl = new IndicesAccessControl( + true, + singletonMap(indexSettings().getIndex().getName(), indexAccessControl) + ); + return indicesAccessControl; + } + }; + + DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader); + IndexSearcher indexSearcher = new ContextIndexSearcher( + wrappedDirectoryReader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), + true + ); + + ScoreDoc[] hits = indexSearcher.search(new MatchAllDocsQuery(), 1000).scoreDocs; + assertThat(Arrays.stream(hits).map(h -> h.doc).collect(Collectors.toSet()), containsInAnyOrder(4, 5, 6, 7, 11, 12, 13)); + + hits = indexSearcher.search(Queries.newNonNestedFilter(context.indexVersionCreated()), 1000).scoreDocs; + assertThat(Arrays.stream(hits).map(h -> h.doc).collect(Collectors.toSet()), containsInAnyOrder(7, 13)); + + bitsetCache.close(); + directoryReader.close(); + directory.close(); + } + private static MappingLookup createMappingLookup(List concreteFields) { List mappers = concreteFields.stream().map(MockFieldMapper::new).collect(Collectors.toList()); return MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index bfac286bc3c3..9663e41a647a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -614,8 +614,7 @@ public class SSLServiceTests extends ESTestCase { "xpack.security.authc.realms.ldap.realm1.ssl", 
"xpack.security.authc.realms.saml.realm2.ssl", "xpack.monitoring.exporters.mon1.ssl", - "xpack.monitoring.exporters.mon2.ssl", - "xpack.inference.elastic.http.ssl" }; + "xpack.monitoring.exporters.mon2.ssl" }; assumeTrue("Not enough cipher suites are available to support this test", getCipherSuites.length >= contextNames.length); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json index 651e1c84da73..5afa557e1405 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json @@ -7,7 +7,7 @@ "dynamic": "false", "_meta": { "pipeline": { - "default_name": "ent-search-generic-ingestion", + "default_name": "search-default-ingestion", "default_extract_binary_content": true, "default_run_ml_inference": true, "default_reduce_whitespace": true diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/generic_ingestion_pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/generic_ingestion_pipeline.json deleted file mode 100644 index e2a2cbd46011..000000000000 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/generic_ingestion_pipeline.json +++ /dev/null @@ -1,130 +0,0 @@ -{ - "version": ${xpack.application.connector.template.version}, - "description": "Generic Enterprise Search ingest pipeline", - "_meta": { - "managed_by": "Enterprise Search", - "managed": true - }, - "processors": [ - { - "attachment": { - "description": "Extract text from binary attachments", - "field": "_attachment", - "target_field": "_extracted_attachment", - "ignore_missing": true, - "indexed_chars_field": "_attachment_indexed_chars", - "if": "ctx?._extract_binary_content == true", - "on_failure": [ - { - "append": { - "description": "Record error information", - "field": "_ingestion_errors", - "value": "Processor 'attachment' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" - } - } - ], - "remove_binary": false - } - }, - { - "set": { - "tag": "set_body", - "description": "Set any extracted text on the 'body' field", - "field": "body", - "copy_from": "_extracted_attachment.content", - "ignore_empty_value": true, - "if": "ctx?._extract_binary_content == true", - "on_failure": [ - { - "append": { - "description": "Record error information", - "field": "_ingestion_errors", - "value": "Processor 'set' with tag 'set_body' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" - } - } - ] - } - }, - { - "gsub": { - "tag": "remove_replacement_chars", - "description": "Remove unicode 'replacement' characters", - "field": "body", - "pattern": "�", - "replacement": "", - "ignore_missing": true, - "if": "ctx?._extract_binary_content == true", - "on_failure": [ - { - "append": { - "description": "Record error information", - "field": "_ingestion_errors", - "value": "Processor 'gsub' with tag 'remove_replacement_chars' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" - } - } - ] - } - }, - { - "gsub": { - "tag": "remove_extra_whitespace", - "description": "Squish whitespace", - "field": "body", - "pattern": "\\s+", - "replacement": " ", - "ignore_missing": 
true, - "if": "ctx?._reduce_whitespace == true", - "on_failure": [ - { - "append": { - "description": "Record error information", - "field": "_ingestion_errors", - "value": "Processor 'gsub' with tag 'remove_extra_whitespace' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" - } - } - ] - } - }, - { - "trim" : { - "description": "Trim leading and trailing whitespace", - "field": "body", - "ignore_missing": true, - "if": "ctx?._reduce_whitespace == true", - "on_failure": [ - { - "append": { - "description": "Record error information", - "field": "_ingestion_errors", - "value": "Processor 'trim' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" - } - } - ] - } - }, - { - "remove": { - "tag": "remove_meta_fields", - "description": "Remove meta fields", - "field": [ - "_attachment", - "_attachment_indexed_chars", - "_extracted_attachment", - "_extract_binary_content", - "_reduce_whitespace", - "_run_ml_inference" - ], - "ignore_missing": true, - "on_failure": [ - { - "append": { - "description": "Record error information", - "field": "_ingestion_errors", - "value": "Processor 'remove' with tag 'remove_meta_fields' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" - } - } - ] - } - } - ] -} diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/70_flattened_field_type.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/70_flattened_field_type.yml new file mode 100644 index 000000000000..0f586ec0ed66 --- /dev/null +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/70_flattened_field_type.yml @@ -0,0 +1,307 @@ +--- +"A flattened label field": + - do: + indices.create: + index: source_index + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [ metricset, k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + subobjects: false + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + agent: + type: flattened + value: + type: long + time_series_metric: gauge + + - do: + bulk: + refresh: true + index: source_index + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.4" }, "value": 10 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.5" }, "value": 20 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.6" }, "value": 12 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.7" }, "value": 15 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", 
"uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.7" }, "value": 9 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.8" }, "value": 16 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.9" }, "value": 25 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.10" }, "value": 17 }}' + + - do: + indices.put_settings: + index: source_index + body: + index.blocks.write: true + + - do: + indices.downsample: + index: source_index + target_index: target_index + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: target_index + body: + sort: [ "_tsid", "@timestamp" ] + + - length: { hits.hits: 4 } + - match: { hits.hits.0._source._doc_count: 2 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } + - match: { hits.hits.0._source.k8s\.agent: { "id": "second", "version": "2.1.8" } } + + - match: { hits.hits.1._source._doc_count: 2 } + - match: { hits.hits.1._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.1._source.@timestamp: 2021-04-28T19:00:00.000Z } + - match: { hits.hits.1._source.k8s\.agent: { "id": "second", "version": "2.1.10" } } + + - match: { hits.hits.2._source._doc_count: 2 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.@timestamp: 2021-04-28T18:00:00.000Z } + - match: { hits.hits.2._source.k8s\.agent: { "id": "first", "version": "2.0.5" } } + + - match: { hits.hits.3._source._doc_count: 2 } + - match: { hits.hits.3._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.3._source.@timestamp: 2021-04-28T20:00:00.000Z } + - match: { hits.hits.3._source.k8s\.agent: { "id": "first", "version": "2.0.7" } } + +--- +"A flattened label field with no doc values": + - do: + indices.create: + index: source_index + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [ metricset, k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + subobjects: false + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + agent: + type: flattened + doc_values: false + value: + type: long + time_series_metric: gauge + + - do: + bulk: + refresh: true + index: source_index + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.4" }, "value": 10 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.5" }, "value": 20 }}' + - '{"index": {}}' + - 
'{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.6" }, "value": 12 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.7" }, "value": 15 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.7" }, "value": 9 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.8" }, "value": 16 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.9" }, "value": 25 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.10" }, "value": 17 }}' + + - do: + indices.put_settings: + index: source_index + body: + index.blocks.write: true + + - do: + indices.downsample: + index: source_index + target_index: target_index + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: target_index + body: + sort: [ "_tsid", "@timestamp" ] + + - length: { hits.hits: 4 } + - match: { hits.hits.0._source._doc_count: 2 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } + - is_false: hits.hits.0._source.k8s\.agent + + - match: { hits.hits.1._source._doc_count: 2 } + - match: { hits.hits.1._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.1._source.@timestamp: 2021-04-28T19:00:00.000Z } + - is_false: hits.hits.1._source.k8s\.agent + + - match: { hits.hits.2._source._doc_count: 2 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.@timestamp: 2021-04-28T18:00:00.000Z } + - is_false: hits.hits.2._source.k8s\.agent + + - match: { hits.hits.3._source._doc_count: 2 } + - match: { hits.hits.3._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.3._source.@timestamp: 2021-04-28T20:00:00.000Z } + - is_false: hits.hits.3._source.k8s\.agent + +--- +"A flattened label field with mixed content": + - do: + indices.create: + index: source_index + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [ metricset, k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + subobjects: false + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + agent: + type: flattened + null_value: my_null_value + value: + type: long + time_series_metric: gauge + + - do: + bulk: + refresh: true + index: source_index + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": 
{"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.4", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11 }, "value": 10 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.5", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 20 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.6", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 12 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}, "agent": { "id": "first", "version": "2.0.7", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 15 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.7", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 9 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.8", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 16 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.9", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 25 }}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}, "agent": { "id": "second", "version": "2.1.10", "versions": ["1", "2", "3"], "optional_version": null, "dotted.version": "1.1", "numeric_version": 11}, "value": 17 }}' + + - do: + indices.put_settings: + index: source_index + body: + index.blocks.write: true + + - do: + indices.downsample: + index: source_index + target_index: target_index + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: target_index + body: + sort: [ "_tsid", "@timestamp" ] + + - length: { hits.hits: 4 } + - match: { hits.hits.0._source._doc_count: 2 } + - match: { hits.hits.0._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } + - match: { hits.hits.0._source.k8s\.agent: { "id": "second", "version": "2.1.8", "versions": ["1", "2", "3"], "dotted": {"version": "1.1"}, "numeric_version": "11", optional_version: "my_null_value" } } + + - match: { hits.hits.1._source._doc_count: 2 } + - match: { hits.hits.1._source.k8s\.pod\.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } + - match: { hits.hits.1._source.@timestamp: 2021-04-28T19:00:00.000Z } + - 
match: { hits.hits.1._source.k8s\.agent: { "id": "second", "version": "2.1.10", "versions": ["1", "2", "3"], "dotted": {"version": "1.1"}, "numeric_version": "11", optional_version: "my_null_value" } } + + - match: { hits.hits.2._source._doc_count: 2 } + - match: { hits.hits.2._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.2._source.@timestamp: 2021-04-28T18:00:00.000Z } + - match: { hits.hits.2._source.k8s\.agent: { "id": "first", "version": "2.0.5", "versions": ["1", "2", "3"], "dotted": {"version": "1.1"}, "numeric_version": "11", optional_version: "my_null_value" } } + + - match: { hits.hits.3._source._doc_count: 2 } + - match: { hits.hits.3._source.k8s\.pod\.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 } + - match: { hits.hits.3._source.@timestamp: 2021-04-28T20:00:00.000Z } + - match: { hits.hits.3._source.k8s\.agent: { "id": "first", "version": "2.0.7", "versions": ["1", "2", "3"], "dotted": {"version": "1.1"}, "numeric_version": "11", optional_version: "my_null_value" } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java index 74375bbe2793..3657e4989ccb 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java @@ -12,6 +12,7 @@ import org.elasticsearch.index.fielddata.FormattedDocValues; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; @@ -65,6 +66,8 @@ class FieldValueFetcher { // If field is not a metric, we downsample it as a label if ("histogram".equals(fieldType.typeName())) { return new LabelFieldProducer.HistogramLastLabelFieldProducer(name()); + } else if ("flattened".equals(fieldType.typeName())) { + return new LabelFieldProducer.FlattenedLastValueFieldProducer(name()); } return new LabelFieldProducer.LabelLastValueFieldProducer(name()); } @@ -90,7 +93,13 @@ class FieldValueFetcher { } } else { if (context.fieldExistsInIndex(field)) { - final IndexFieldData<?> fieldData = context.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); + final IndexFieldData<?> fieldData; + if (fieldType instanceof FlattenedFieldMapper.RootFlattenedFieldType flattenedFieldType) { + var keyedFieldType = flattenedFieldType.getKeyedFieldType(); + fieldData = context.getForField(keyedFieldType, MappedFieldType.FielddataOperation.SEARCH); + } else { + fieldData = context.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); + } final String fieldName = context.isMultiField(field) ?
fieldType.name().substring(0, fieldType.name().lastIndexOf('.')) : fieldType.name(); diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java index 05b4852d0dfd..b211c5bfb0d1 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.downsample; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.FormattedDocValues; import org.elasticsearch.index.fielddata.HistogramValue; +import org.elasticsearch.index.mapper.flattened.FlattenedFieldSyntheticWriterHelper; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; @@ -141,14 +143,14 @@ abstract class LabelFieldProducer extends AbstractDownsampleFieldProducer { } } - static class AggregateMetricFieldProducer extends LabelLastValueFieldProducer { + static final class AggregateMetricFieldProducer extends LabelLastValueFieldProducer { AggregateMetricFieldProducer(String name, Metric metric) { super(name, new LastValueLabel(metric.name())); } } - public static class HistogramLastLabelFieldProducer extends LabelLastValueFieldProducer { + static final class HistogramLastLabelFieldProducer extends LabelLastValueFieldProducer { HistogramLastLabelFieldProducer(String name) { super(name); } @@ -167,4 +169,40 @@ abstract class LabelFieldProducer extends AbstractDownsampleFieldProducer { } } } + + static final class FlattenedLastValueFieldProducer extends LabelLastValueFieldProducer { + + FlattenedLastValueFieldProducer(String name) { + super(name); + } + + @Override + public void write(XContentBuilder builder) throws IOException { + if (isEmpty() == false) { + builder.startObject(name()); + + var value = label.get(); + List<BytesRef> list; + if (value instanceof Object[] values) { + list = new ArrayList<>(values.length); + for (Object v : values) { + list.add(new BytesRef(v.toString())); + } + } else { + list = List.of(new BytesRef(value.toString())); + } + + var iterator = list.iterator(); + var helper = new FlattenedFieldSyntheticWriterHelper(() -> { + if (iterator.hasNext()) { + return iterator.next(); + } else { + return null; + } + }); + helper.write(builder); + builder.endObject(); + } + } + } } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/LabelFieldProducerTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/LabelFieldProducerTests.java index 469e00f7af9a..844eb1b8e27d 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/LabelFieldProducerTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/LabelFieldProducerTests.java @@ -7,10 +7,18 @@ package org.elasticsearch.xpack.downsample; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.fielddata.FormattedDocValues; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.util.Iterator; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; public class
LabelFieldProducerTests extends AggregatorTestCase { @@ -93,4 +101,50 @@ public class LabelFieldProducerTests extends AggregatorTestCase { assertTrue(producer.isEmpty()); assertNull(producer.label().get()); } + + public void testFlattenedLastValueFieldProducer() throws IOException { + var producer = new LabelFieldProducer.FlattenedLastValueFieldProducer("dummy"); + assertTrue(producer.isEmpty()); + assertEquals("dummy", producer.name()); + assertEquals("last_value", producer.label().name()); + + var bytes = List.of("a\0value_a", "b\0value_b", "c\0value_c", "d\0value_d"); + var docValues = new FormattedDocValues() { + + Iterator<String> iterator = bytes.iterator(); + + @Override + public boolean advanceExact(int docId) { + return true; + } + + @Override + public int docValueCount() { + return bytes.size(); + } + + @Override + public Object nextValue() { + return iterator.next(); + } + }; + + producer.collect(docValues, 1); + assertFalse(producer.isEmpty()); + assertEquals("a\0value_a", (((Object[]) producer.label().get())[0]).toString()); + assertEquals("b\0value_b", (((Object[]) producer.label().get())[1]).toString()); + assertEquals("c\0value_c", (((Object[]) producer.label().get())[2]).toString()); + assertEquals("d\0value_d", (((Object[]) producer.label().get())[3]).toString()); + + var builder = new XContentBuilder(XContentType.JSON.xContent(), new ByteArrayOutputStream()); + builder.startObject(); + producer.write(builder); + builder.endObject(); + var content = Strings.toString(builder); + assertThat(content, equalTo("{\"dummy\":{\"a\":\"value_a\",\"b\":\"value_b\",\"c\":\"value_c\",\"d\":\"value_d\"}}")); + + producer.reset(); + assertTrue(producer.isEmpty()); + assertNull(producer.label().get()); + } } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml index 094d9cbf4308..4240467ea4ff 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml @@ -152,6 +152,19 @@ setup: service_type: super-connector +--- +'Create Connector - Invalid Managed Connector Index Prefix': + - do: + catch: "bad_request" + connector.put: + connector_id: test-connector-test-managed + body: + index_name: wrong-prefix-index + name: my-connector + language: pl + is_native: true + service_type: super-connector + --- 'Create Connector - Id returned as part of response': - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml index f804dc02a9e0..b63bf595af5f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml @@ -151,3 +151,18 @@ setup: - match: { index_name: content-search-2-test } +--- +"Update Managed Connector Index Name - Bad Prefix": + - do: + connector.put: + connector_id: test-connector-2 + body: + is_native: true + service_type: super-connector + + - do:
+ catch: "bad_request" + connector.update_index_name: + connector_id: test-connector-2 + body: + index_name: wrong-prefix-search-2-test diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml index f8cd24d17531..6811c3340ce4 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml @@ -73,3 +73,43 @@ setup: field_1: test field_2: something +--- +"Update Connector Native - changing connector to Elastic-managed wrong index name": + + - do: + connector.put: + connector_id: test-connector-1 + body: + is_native: false + index_name: super-connector + + - do: + catch: "bad_request" + connector.update_native: + connector_id: test-connector-1 + body: + is_native: true + +--- +"Update Connector Native - changing connector to Elastic-managed correct index name": + + - do: + connector.put: + connector_id: test-connector-1 + body: + is_native: false + index_name: content-super-connector + + - do: + connector.update_native: + connector_id: test-connector-1 + body: + is_native: true + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector-1 + + - match: { is_native: true } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml index 634f99cd53fd..4acca493c42c 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml @@ -103,6 +103,18 @@ setup: service_type: super-connector +--- +'Create Connector - Invalid Managed Connector Index Prefix': + - do: + catch: "bad_request" + connector.post: + body: + index_name: wrong-prefix-index + name: my-connector + language: pl + is_native: true + service_type: super-connector + --- 'Create Connector - Index name used by another connector': - do: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 5e1fde0dfb94..d5d2159d8f37 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -76,6 +76,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.application.connector.ConnectorFiltering.fromXContentBytesConnectorFiltering; import static org.elasticsearch.xpack.application.connector.ConnectorFiltering.sortFilteringRulesByOrder; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.MANAGED_CONNECTOR_INDEX_PREFIX; /** * A service that manages persistent {@link Connector} configurations. 
@@ -807,8 +808,8 @@ public class ConnectorIndexService { } /** - * Updates the is_native property of a {@link Connector}. It always sets the {@link ConnectorStatus} to - * CONFIGURED. + * Updates the is_native property of a {@link Connector}. It sets the {@link ConnectorStatus} to + * CONFIGURED when the connector is in the CONNECTED state, to indicate that the connector needs to reconnect. * * @param request The request for updating the connector's is_native property. * @param listener The listener for handling responses, including successful updates or errors. @@ -816,29 +817,62 @@ public class ConnectorIndexService { public void updateConnectorNative(UpdateConnectorNativeAction.Request request, ActionListener<UpdateResponse> listener) { try { String connectorId = request.getConnectorId(); + boolean isNative = request.isNative(); - final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( - new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connectorId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source( - Map.of( - Connector.IS_NATIVE_FIELD.getPreferredName(), - request.isNative(), - Connector.STATUS_FIELD.getPreferredName(), - ConnectorStatus.CONFIGURED.toString() + getConnector(connectorId, listener.delegateFailure((l, connector) -> { + + String indexName = getConnectorIndexNameFromSearchResult(connector); + + boolean doesNotHaveContentPrefix = indexName != null && isValidManagedConnectorIndexName(indexName) == false; + // Ensure the attached content index is prefixed correctly + if (isNative && doesNotHaveContentPrefix) { + l.onFailure( + new ElasticsearchStatusException( + "The index name [" + + indexName + + "] attached to the connector [" + + connectorId + + "] must start with the required prefix: [" + + MANAGED_CONNECTOR_INDEX_PREFIX + + "] to be Elastic-managed.
Please update the attached index first to comply with this requirement.", + RestStatus.BAD_REQUEST ) - ) - - ); - client.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { - if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + ); return; } - l.onResponse(updateResponse); - })); + ConnectorStatus status = getConnectorStatusFromSearchResult(connector); + + // If the connector was already connected, change its status to CONFIGURED since it needs to re-connect + boolean isConnected = status == ConnectorStatus.CONNECTED; + boolean isValidTransitionToConfigured = ConnectorStateMachine.isValidTransition(status, ConnectorStatus.CONFIGURED); + if (isConnected && isValidTransitionToConfigured) { + status = ConnectorStatus.CONFIGURED; + } + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .source( + Map.of( + Connector.IS_NATIVE_FIELD.getPreferredName(), + isNative, + Connector.STATUS_FIELD.getPreferredName(), + status.toString() + ) + ) + ); + client.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (ll, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + return; + } + ll.onResponse(updateResponse); + })); + })); } catch (Exception e) { listener.onFailure(e); } @@ -896,22 +930,45 @@ public class ConnectorIndexService { return; } - final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( - new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connectorId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(new HashMap<>() { - { - put(Connector.INDEX_NAME_FIELD.getPreferredName(), request.getIndexName()); - } - }) - ); - client.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (ll, updateResponse) -> { - if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + getConnector(connectorId, l.delegateFailure((ll, connector) -> { + + boolean isNativeConnector = Boolean.TRUE.equals(getConnectorIsNativeFlagFromSearchResult(connector)); + boolean doesNotHaveContentPrefix = indexName != null && isValidManagedConnectorIndexName(indexName) == false; + + if (isNativeConnector && doesNotHaveContentPrefix) { + ll.onFailure( + new ElasticsearchStatusException( + "Index attached to an Elastic-managed connector must start with the prefix: [" + + MANAGED_CONNECTOR_INDEX_PREFIX + + "].
The index name in the payload [" + + indexName + + "] doesn't comply with this requirement.", + RestStatus.BAD_REQUEST + ) + ); return; } - ll.onResponse(updateResponse); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(new HashMap<>() { + { + put(Connector.INDEX_NAME_FIELD.getPreferredName(), request.getIndexName()); + } + }) + ); + client.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (lll, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + lll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + return; + } + lll.onResponse(updateResponse); + }) + ); })); })); @@ -1064,6 +1121,18 @@ public class ConnectorIndexService { return ConnectorStatus.connectorStatus((String) searchResult.getResultMap().get(Connector.STATUS_FIELD.getPreferredName())); } + private Boolean getConnectorIsNativeFlagFromSearchResult(ConnectorSearchResult searchResult) { + return (Boolean) searchResult.getResultMap().get(Connector.IS_NATIVE_FIELD.getPreferredName()); + } + + private String getConnectorIndexNameFromSearchResult(ConnectorSearchResult searchResult) { + return (String) searchResult.getResultMap().get(Connector.INDEX_NAME_FIELD.getPreferredName()); + } + + private boolean isValidManagedConnectorIndexName(String indexName) { + return indexName.startsWith(MANAGED_CONNECTOR_INDEX_PREFIX); + } + @SuppressWarnings("unchecked") private Map getConnectorConfigurationFromSearchResult(ConnectorSearchResult searchResult) { return (Map) searchResult.getResultMap().get(Connector.CONFIGURATION_FIELD.getPreferredName()); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index 9b8cc7cfdbe4..fd35acc89db5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -45,11 +45,9 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { public static final String ACCESS_CONTROL_INDEX_NAME_PATTERN = ".search-acl-filter-*"; public static final String ACCESS_CONTROL_TEMPLATE_NAME = "search-acl-filter"; + public static final String MANAGED_CONNECTOR_INDEX_PREFIX = "content-"; + // Pipeline constants - - public static final String ENT_SEARCH_GENERIC_PIPELINE_NAME = "ent-search-generic-ingestion"; - public static final String ENT_SEARCH_GENERIC_PIPELINE_FILE = "generic_ingestion_pipeline"; - public static final String SEARCH_DEFAULT_PIPELINE_NAME = "search-default-ingestion"; public static final String SEARCH_DEFAULT_PIPELINE_FILE = "search_default_pipeline"; @@ -109,12 +107,6 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { @Override protected List getIngestPipelines() { return List.of( - new JsonIngestPipelineConfig( - ENT_SEARCH_GENERIC_PIPELINE_NAME, - ROOT_RESOURCE_PATH + ENT_SEARCH_GENERIC_PIPELINE_FILE + ".json", - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE - ), new JsonIngestPipelineConfig( SEARCH_DEFAULT_PIPELINE_NAME, ROOT_RESOURCE_PATH + SEARCH_DEFAULT_PIPELINE_FILE + 
".json", diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorActionRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorActionRequest.java index 1799121505da..66f347bc4dbb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorActionRequest.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorActionRequest.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.MANAGED_CONNECTOR_INDEX_PREFIX; /** * Abstract base class for action requests targeting the connectors index. Implements {@link org.elasticsearch.action.IndicesRequest} @@ -52,6 +53,32 @@ public abstract class ConnectorActionRequest extends ActionRequest implements In return validationException; } + /** + * Validates that the given index name starts with the required prefix for Elastic-managed connectors. + * If the index name does not start with the required prefix, the validation exception is updated with an error message. + * + * @param indexName The index name to validate. If null, no validation is performed. + * @param validationException The exception to accumulate validation errors. + * @return The updated or original {@code validationException} with any new validation errors added, + * if the index name does not start with the required prefix. + */ + public ActionRequestValidationException validateManagedConnectorIndexPrefix( + String indexName, + ActionRequestValidationException validationException + ) { + if (indexName != null && indexName.startsWith(MANAGED_CONNECTOR_INDEX_PREFIX) == false) { + return addValidationError( + "Index [" + + indexName + + "] is invalid. 
Index attached to an Elastic-managed connector must start with the prefix: [" + + MANAGED_CONNECTOR_INDEX_PREFIX + + "]", + validationException + ); + } + return validationException; + } + @Override public String[] indices() { return new String[] { ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN }; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java index fad349cd3187..b1c38637298c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java @@ -127,6 +127,10 @@ public class PostConnectorAction { validationException = validateIndexName(indexName, validationException); + if (Boolean.TRUE.equals(isNative)) { + validationException = validateManagedConnectorIndexPrefix(indexName, validationException); + } + return validationException; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java index 687a801ab8fd..f3e8ed6b6e76 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PutConnectorAction.java @@ -147,6 +147,10 @@ public class PutConnectorAction { validationException = validateIndexName(indexName, validationException); + if (Boolean.TRUE.equals(isNative)) { + validationException = validateManagedConnectorIndexPrefix(indexName, validationException); + } + return validationException; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java index 5b27cc7a3e05..3a53ed977318 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java @@ -50,7 +50,6 @@ public final class QueryRuleRetrieverBuilder extends CompoundRetrieverBuilder PARSER = new ConstructingObjectParser<>( diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 12abca3a7859..28d4fe0956d0 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -56,6 +56,7 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.MANAGED_CONNECTOR_INDEX_PREFIX; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomConnectorFeatures; import 
static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomCronExpression; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.randomConnectorFeatureEnabled; @@ -648,8 +649,8 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { assertThat(initialScheduling.getIncremental(), equalTo(indexedConnector.getScheduling().getIncremental())); } - public void testUpdateConnectorIndexName() throws Exception { - Connector connector = ConnectorTestUtils.getRandomConnector(); + public void testUpdateConnectorIndexName_ForSelfManagedConnector() throws Exception { + Connector connector = ConnectorTestUtils.getRandomSelfManagedConnector(); String connectorId = randomUUID(); ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); @@ -669,8 +670,8 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { assertThat(newIndexName, equalTo(indexedConnector.getIndexName())); } - public void testUpdateConnectorIndexName_WithTheSameIndexName() throws Exception { - Connector connector = ConnectorTestUtils.getRandomConnector(); + public void testUpdateConnectorIndexName_ForSelfManagedConnector_WithTheSameIndexName() throws Exception { + Connector connector = ConnectorTestUtils.getRandomSelfManagedConnector(); String connectorId = randomUUID(); ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); @@ -685,6 +686,42 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { assertThat(updateResponse.getResult(), equalTo(DocWriteResponse.Result.NOOP)); } + public void testUpdateConnectorIndexName_ForManagedConnector_WithIllegalIndexName() throws Exception { + Connector connector = ConnectorTestUtils.getRandomElasticManagedConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorIndexNameAction.Request updateIndexNameRequest = new UpdateConnectorIndexNameAction.Request( + connectorId, + "wrong-prefix-" + randomAlphaOfLengthBetween(3, 10) + ); + + expectThrows(ElasticsearchStatusException.class, () -> awaitUpdateConnectorIndexName(updateIndexNameRequest)); + } + + public void testUpdateConnectorIndexName_ForManagedConnector_WithPrefixedIndexName() throws Exception { + Connector connector = ConnectorTestUtils.getRandomElasticManagedConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + String newIndexName = MANAGED_CONNECTOR_INDEX_PREFIX + randomAlphaOfLengthBetween(3, 10); + + UpdateConnectorIndexNameAction.Request updateIndexNameRequest = new UpdateConnectorIndexNameAction.Request( + connectorId, + newIndexName + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorIndexName(updateIndexNameRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(newIndexName, equalTo(indexedConnector.getIndexName())); + } + public void testUpdateConnectorServiceType() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); @@ -756,7 +793,7 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { } public void testUpdateConnectorNative() throws 
Exception { - Connector connector = ConnectorTestUtils.getRandomConnector(); + Connector connector = ConnectorTestUtils.getRandomConnectorWithDetachedIndex(); String connectorId = randomUUID(); ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); @@ -773,6 +810,39 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { assertThat(isNative, equalTo(indexedConnector.isNative())); } + public void testUpdateConnectorNativeTrue_WhenIllegalIndexPrefix() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnectorWithAttachedIndex("wrong-prefix-" + randomAlphaOfLength(10)); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + boolean isNative = true; + + UpdateConnectorNativeAction.Request updateNativeRequest = new UpdateConnectorNativeAction.Request(connectorId, isNative); + + expectThrows(ElasticsearchStatusException.class, () -> awaitUpdateConnectorNative(updateNativeRequest)); + } + + public void testUpdateConnectorNativeTrue_WithCorrectIndexPrefix() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnectorWithAttachedIndex( + MANAGED_CONNECTOR_INDEX_PREFIX + randomAlphaOfLength(10) + ); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + boolean isNative = true; + + UpdateConnectorNativeAction.Request updateNativeRequest = new UpdateConnectorNativeAction.Request(connectorId, isNative); + DocWriteResponse updateResponse = awaitUpdateConnectorNative(updateNativeRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(isNative, equalTo(indexedConnector.isNative())); + } + public void testUpdateConnectorStatus() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java index f4a92e51e8c6..c3d4bf8b72ff 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java @@ -50,7 +50,7 @@ public class ConnectorIngestPipelineTests extends ESTestCase { String content = XContentHelper.stripWhitespace(""" { "extract_binary_content": true, - "name": "ent-search-generic-ingestion", + "name": "search-default-ingestion", "reduce_whitespace": true, "run_ml_inference": false } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java index a4c7015afafc..068b99626af9 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -132,10 +132,7 @@ public class ConnectorTemplateRegistryTests extends ESTestCase { ClusterChangedEvent event = createClusterChangedEvent( Collections.emptyMap(), Collections.emptyMap(), - Collections.singletonMap( - ConnectorTemplateRegistry.ENT_SEARCH_GENERIC_PIPELINE_NAME, - ConnectorTemplateRegistry.REGISTRY_VERSION - ), + Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), Collections.emptyMap(), nodes ); @@ -169,10 +166,7 @@ public class ConnectorTemplateRegistryTests extends ESTestCase { ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION - 1 ), - Collections.singletonMap( - ConnectorTemplateRegistry.ENT_SEARCH_GENERIC_PIPELINE_NAME, - ConnectorTemplateRegistry.REGISTRY_VERSION - ), + Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), Collections.emptyMap(), nodes ); @@ -189,10 +183,7 @@ public class ConnectorTemplateRegistryTests extends ESTestCase { ClusterChangedEvent event = createClusterChangedEvent( Collections.emptyMap(), Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", null), - Collections.singletonMap( - ConnectorTemplateRegistry.ENT_SEARCH_GENERIC_PIPELINE_NAME, - ConnectorTemplateRegistry.REGISTRY_VERSION - ), + Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), Collections.emptyMap(), nodes ); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index f052ef79d82f..c563bc0a14ee 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -343,6 +343,18 @@ public final class ConnectorTestUtils { return getRandomConnectorBuilder().setIndexName(null).build(); } + public static Connector getRandomConnectorWithAttachedIndex(String indexName) { + return getRandomConnectorBuilder().setIndexName(indexName).build(); + } + + public static Connector getRandomSelfManagedConnector() { + return getRandomConnectorBuilder().setIsNative(false).build(); + } + + public static Connector getRandomElasticManagedConnector() { + return getRandomConnectorBuilder().setIsNative(true).build(); + } + public static Connector getRandomConnectorWithServiceTypeNotDefined() { return getRandomConnectorBuilder().setServiceType(null).build(); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index 734c6eaf8696..bcb647d978ab 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -225,7 +225,7 @@ public class ConnectorTests extends ESTestCase { "name":"test-name", "pipeline":{ "extract_binary_content":true, - "name":"ent-search-generic-ingestion", + 
"name":"search-default-ingestion", "reduce_whitespace":true, "run_ml_inference":false }, @@ -286,7 +286,7 @@ public class ConnectorTests extends ESTestCase { "name": null, "pipeline":{ "extract_binary_content":true, - "name":"ent-search-generic-ingestion", + "name":"search-default-ingestion", "reduce_whitespace":true, "run_ml_inference":false }, @@ -350,7 +350,7 @@ public class ConnectorTests extends ESTestCase { "name": null, "pipeline":{ "extract_binary_content":true, - "name":"ent-search-generic-ingestion", + "name":"search-default-ingestion", "reduce_whitespace":true, "run_ml_inference":false }, diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java index 0f0e83f2b9c5..e482bf3f6bb7 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionTests.java @@ -20,7 +20,7 @@ public class PostConnectorActionTests extends ESTestCase { PostConnectorAction.Request request = new PostConnectorAction.Request( randomAlphaOfLength(10), randomAlphaOfLength(10), - randomBoolean(), + false, randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10) @@ -30,6 +30,24 @@ public class PostConnectorActionTests extends ESTestCase { assertThat(exception, nullValue()); } + public void testValidate_WrongIndexNamePresentForManagedConnector_ExpectValidationError() { + PostConnectorAction.Request requestWithIllegalIndexName = new PostConnectorAction.Request( + randomAlphaOfLength(10), + "wrong-prefix-" + randomAlphaOfLength(10), + true, + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithIllegalIndexName.validate(); + + assertThat(exception, notNullValue()); + assertThat( + exception.getMessage(), + containsString("Index attached to an Elastic-managed connector must start with the prefix: [content-]") + ); + } + public void testValidate_WhenMalformedIndexName_ExpectValidationError() { PostConnectorAction.Request requestWithMissingConnectorId = new PostConnectorAction.Request( randomAlphaOfLength(10), diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java index 873e102e4093..10ab04941356 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PutConnectorActionTests.java @@ -21,7 +21,7 @@ public class PutConnectorActionTests extends ESTestCase { randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), - randomBoolean(), + false, randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10) @@ -31,6 +31,25 @@ public class PutConnectorActionTests extends ESTestCase { assertThat(exception, nullValue()); } + public void testValidate_WrongIndexNamePresentForManagedConnector_ExpectValidationError() { + PutConnectorAction.Request requestWithIllegalIndexName = new PutConnectorAction.Request( + randomAlphaOfLength(10), + 
randomAlphaOfLength(10), + "wrong-prefix-" + randomAlphaOfLength(10), + true, + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + ActionRequestValidationException exception = requestWithIllegalIndexName.validate(); + + assertThat(exception, notNullValue()); + assertThat( + exception.getMessage(), + containsString("Index attached to an Elastic-managed connector must start with the prefix: [content-]") + ); + } + public void testValidate_WhenMalformedIndexName_ExpectValidationError() { PutConnectorAction.Request requestWithMissingConnectorId = new PutConnectorAction.Request( randomAlphaOfLength(10), diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index 81b05ce25e17..ed3338c715bd 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -77,7 +77,7 @@ public class ConnectorSyncJobTests extends ESTestCase { "language": "english", "pipeline": { "extract_binary_content": true, - "name": "ent-search-generic-ingestion", + "name": "search-default-ingestion", "reduce_whitespace": true, "run_ml_inference": false }, @@ -160,7 +160,7 @@ public class ConnectorSyncJobTests extends ESTestCase { "language": "english", "pipeline": { "extract_binary_content": true, - "name": "ent-search-generic-ingestion", + "name": "search-default-ingestion", "reduce_whitespace": true, "run_ml_inference": false }, @@ -218,7 +218,7 @@ public class ConnectorSyncJobTests extends ESTestCase { "language": "english", "pipeline": { "extract_binary_content": true, - "name": "ent-search-generic-ingestion", + "name": "search-default-ingestion", "reduce_whitespace": true, "run_ml_inference": false }, @@ -275,7 +275,7 @@ public class ConnectorSyncJobTests extends ESTestCase { "language": "english", "pipeline": { "extract_binary_content": true, - "name": "ent-search-generic-ingestion", + "name": "search-default-ingestion", "reduce_whitespace": true, "run_ml_inference": false }, diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 004beaafb400..d4b087277df5 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V7; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V8; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @@ -96,7 +96,7 @@ public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @Override protected boolean supportsIndexModeLookup() throws IOException { - return 
hasCapabilities(List.of(JOIN_LOOKUP_V7.capabilityName())); + return hasCapabilities(List.of(JOIN_LOOKUP_V8.capabilityName())); } @Override diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index c75a920e1697..d7c57e23b714 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_SOURCE_INDI import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V7; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V8; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,7 +124,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V7.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V8.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { @@ -283,8 +283,8 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { @Override protected boolean supportsIndexModeLookup() throws IOException { - // CCS does not yet support JOIN_LOOKUP_V7 and clusters falsely report they have this capability - // return hasCapabilities(List.of(JOIN_LOOKUP_V7.capabilityName())); + // CCS does not yet support JOIN_LOOKUP_V8 and clusters falsely report they have this capability + // return hasCapabilities(List.of(JOIN_LOOKUP_V8.capabilityName())); return false; } } diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/SemanticMatchIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/SemanticMatchIT.java new file mode 100644 index 000000000000..0ce84330b0b0 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/SemanticMatchIT.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.multi_node; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.xpack.esql.qa.rest.SemanticMatchTestCase; +import org.junit.ClassRule; + +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class SemanticMatchIT extends SemanticMatchTestCase { + @ClassRule + public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/SemanticMatchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/SemanticMatchIT.java new file mode 100644 index 000000000000..8edc2dbcf35a --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/SemanticMatchIT.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.xpack.esql.qa.rest.SemanticMatchTestCase; +import org.junit.ClassRule; + +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class SemanticMatchIT extends SemanticMatchTestCase { + @ClassRule + public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index 40027249670f..355c403ce2a8 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -221,7 +221,7 @@ public abstract class RequestIndexFilteringTestCase extends ESRestTestCase { assertThat(e.getMessage(), containsString("index_not_found_exception")); assertThat(e.getMessage(), containsString("no such index [foo]")); - if (EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()) { + if (EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()) { e = expectThrows( ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM test1 | LOOKUP JOIN foo ON id1")) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/SemanticMatchTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/SemanticMatchTestCase.java new file mode 100644 index 000000000000..aafa57e764ae --- /dev/null +++ 
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/SemanticMatchTestCase.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.rest; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.Map; + +import static org.hamcrest.core.StringContains.containsString; + +public abstract class SemanticMatchTestCase extends ESRestTestCase { + public void testWithMultipleInferenceIds() throws IOException { + String query = """ + from test-semantic1,test-semantic2 + | where match(semantic_text_field, "something") + """; + ResponseException re = expectThrows(ResponseException.class, () -> runEsqlQuery(query)); + + assertThat(re.getMessage(), containsString("Field [semantic_text_field] has multiple inference IDs associated with it")); + + assertEquals(400, re.getResponse().getStatusLine().getStatusCode()); + } + + public void testWithInferenceNotConfigured() { + String query = """ + from test-semantic3 + | where match(semantic_text_field, "something") + """; + ResponseException re = expectThrows(ResponseException.class, () -> runEsqlQuery(query)); + + assertThat(re.getMessage(), containsString("Inference endpoint not found")); + assertEquals(404, re.getResponse().getStatusLine().getStatusCode()); + } + + @Before + public void setUpIndices() throws IOException { + assumeTrue("semantic text capability not available", EsqlCapabilities.Cap.SEMANTIC_TEXT_TYPE.isEnabled()); + + var settings = Settings.builder().build(); + + String mapping1 = """ + "properties": { + "semantic_text_field": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + } + } + """; + createIndex(adminClient(), "test-semantic1", settings, mapping1); + + String mapping2 = """ + "properties": { + "semantic_text_field": { + "type": "semantic_text", + "inference_id": "test_dense_inference" + } + } + """; + createIndex(adminClient(), "test-semantic2", settings, mapping2); + + String mapping3 = """ + "properties": { + "semantic_text_field": { + "type": "semantic_text", + "inference_id": "inexistent" + } + } + """; + createIndex(adminClient(), "test-semantic3", settings, mapping3); + } + + @Before + public void setUpTextEmbeddingInferenceEndpoint() throws IOException { + assumeTrue("semantic text capability not available", EsqlCapabilities.Cap.SEMANTIC_TEXT_TYPE.isEnabled()); + Request request = new Request("PUT", "_inference/text_embedding/test_dense_inference"); + request.setJsonEntity(""" + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + """); + adminClient().performRequest(request); + } + + @After + public void wipeData() throws IOException { + assumeTrue("semantic text capability not available", EsqlCapabilities.Cap.SEMANTIC_TEXT_TYPE.isEnabled()); + adminClient().performRequest(new Request("DELETE", "*")); + + try { + adminClient().performRequest(new Request("DELETE", "_inference/test_dense_inference")); + } catch (ResponseException e) { + 
// 404 here means the endpoint was not created + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } + } + + private Map runEsqlQuery(String query) throws IOException { + RestEsqlTestCase.RequestObjectBuilder builder = RestEsqlTestCase.requestObjectBuilder().query(query); + return RestEsqlTestCase.runEsqlSync(builder); + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 7adafa908ce4..f0bdf089f69d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -63,7 +63,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CART import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; public final class CsvTestUtils { - private static final int MAX_WIDTH = 20; + private static final int MAX_WIDTH = 80; private static final CsvPreference CSV_SPEC_PREFERENCES = new CsvPreference.Builder('"', '|', "\r\n").build(); private static final String NULL_VALUE = "null"; private static final char ESCAPE_CHAR = '\\'; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 18ce9d7e3e05..66fd7d3ee5eb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -70,6 +70,7 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.Configuration; +import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.versionfield.Version; @@ -351,6 +352,8 @@ public final class EsqlTestUtils { public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); + public static final QueryBuilderResolver MOCK_QUERY_BUILDER_RESOLVER = new MockQueryBuilderResolver(); + private EsqlTestUtils() {} public static Configuration configuration(QueryPragmas pragmas, String query) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java new file mode 100644 index 000000000000..7af3a89108fc --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; +import org.elasticsearch.xpack.esql.session.Result; + +import java.util.function.BiConsumer; + +public class MockQueryBuilderResolver extends QueryBuilderResolver { + public MockQueryBuilderResolver() { + super(null, null, null, null); + } + + @Override + public void resolveQueryBuilders( + LogicalPlan plan, + ActionListener<Result> listener, + BiConsumer<LogicalPlan, ActionListener<Result>> callback + ) { + callback.accept(plan, listener); + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index b29c489910f6..8cfde2bb9bde 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -145,6 +145,24 @@ AVG(salary):double | bucket:date // end::bucket_in_agg-result[] ; +bucketWithOffset#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucketWithOffset[] +FROM employees +| STATS dates = MV_SORT(VALUES(birth_date)) BY b = BUCKET(birth_date + 1 HOUR, 1 YEAR) - 1 HOUR +| EVAL d_count = MV_COUNT(dates) +| SORT d_count, b +| LIMIT 3 +// end::bucketWithOffset[] +; + +// tag::bucketWithOffset-result[] +dates:date |b:date |d_count:integer +1965-01-03T00:00:00.000Z |1964-12-31T23:00:00.000Z|1 +[1955-01-21T00:00:00.000Z, 1955-08-20T00:00:00.000Z, 1955-08-28T00:00:00.000Z, 1955-10-04T00:00:00.000Z]|1954-12-31T23:00:00.000Z|4 +[1957-04-04T00:00:00.000Z, 1957-05-23T00:00:00.000Z, 1957-05-25T00:00:00.000Z, 1957-12-03T00:00:00.000Z]|1956-12-31T23:00:00.000Z|4 +// end::bucketWithOffset-result[] +; + docsBucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] //tag::docsBucketMonth[] FROM employees diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index e75c68f4a379..7d4f89ed920a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -8,7 +8,7 @@ ############################################### basicOnTheDataNode -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | EVAL language_code = languages @@ -25,7 +25,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -36,7 +36,7 @@ language_code:integer | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -53,7 +53,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; subsequentEvalOnTheDataNode -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | EVAL language_code = languages @@ -71,7 +71,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -89,7 +89,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; sortEvalBeforeLookup -required_capability: join_lookup_v7
+required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -106,7 +106,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; nonUniqueLeftKeyOnTheDataNode -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | WHERE emp_no <= 10030 @@ -130,7 +130,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; nonUniqueRightKeyOnTheDataNode -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | EVAL language_code = emp_no % 10 @@ -150,7 +150,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:k ; nonUniqueRightKeyOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -170,7 +170,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:k ; nonUniqueRightKeyFromRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW language_code = 2 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code @@ -183,7 +183,7 @@ language_code:integer | language_name:keyword | country:keyword ; repeatedIndexOnFrom -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 required_capability: join_lookup_repeated_index_from FROM languages_lookup @@ -203,7 +203,7 @@ language_code:integer | language_name:keyword ############################################### filterOnLeftSide -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | EVAL language_code = languages @@ -220,7 +220,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSide -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -236,7 +236,7 @@ FROM sample_data ; filterOnRightSideAfterStats -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -249,7 +249,7 @@ count:long | type:keyword ; filterOnJoinKey -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | EVAL language_code = languages @@ -264,7 +264,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSide -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | WHERE emp_no < 10006 @@ -281,7 +281,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSideOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -297,7 +297,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -313,7 +313,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSideOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | SORT emp_no @@ -330,7 +330,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnTheDataNodeThenFilterOnTheCoordinator -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | EVAL language_code = languages @@ -351,7 +351,7 @@ emp_no:integer | language_code:integer | language_name:keyword ########################################################################### nullJoinKeyOnTheDataNode -required_capability: 
join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | WHERE emp_no < 10004 @@ -368,7 +368,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyOnTheDataNode -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM employees | WHERE 10003 < emp_no AND emp_no < 10008 @@ -386,7 +386,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyFromRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW language_code = [4, 5, 6, 7] | LOOKUP JOIN languages_lookup_non_unique_key ON language_code @@ -399,7 +399,7 @@ language_code:integer | language_name:keyword | country:keyword ; mvJoinKeyFromRowExpanded -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW language_code = [4, 5, 6, 7, 8] | MV_EXPAND language_code @@ -421,7 +421,7 @@ language_code:integer | language_name:keyword | country:keyword ############################################### lookupIPFromRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -432,7 +432,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromKeepRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", right = "right" | KEEP left, client_ip, right @@ -444,7 +444,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowing -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -455,7 +455,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -468,7 +468,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeepReordered -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -481,7 +481,7 @@ right | Development | 172.21.0.5 ; lookupIPFromIndex -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -500,7 +500,7 @@ ignoreOrder:true ; lookupIPFromIndexKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -520,7 +520,7 @@ ignoreOrder:true ; lookupIPFromIndexKeepKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -542,7 +542,7 @@ timestamp:date | client_ip:keyword | event_duration:long | msg:keyword ; lookupIPFromIndexStats -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -558,7 +558,7 @@ count:long | env:keyword ; lookupIPFromIndexStatsKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -575,7 +575,7 @@ count:long | env:keyword ; statsAndLookupIPFromIndex -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -596,7 +596,7 @@ 
count:long | client_ip:keyword | env:keyword ############################################### lookupMessageFromRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -607,7 +607,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromKeepRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | KEEP left, message, right @@ -619,7 +619,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowing -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -630,7 +630,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowingKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -642,7 +642,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromIndex -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -660,7 +660,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -679,7 +679,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -699,7 +699,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepReordered -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -718,7 +718,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; lookupMessageFromIndexStats -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -733,7 +733,7 @@ count:long | type:keyword ; lookupMessageFromIndexStatsKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -749,7 +749,7 @@ count:long | type:keyword ; statsAndLookupMessageFromIndex -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | STATS count = count(message) BY message @@ -767,7 +767,7 @@ count:long | type:keyword | message:keyword ; lookupMessageFromIndexTwice -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -789,7 +789,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -816,7 +816,7 @@ ignoreOrder:true ############################################### lookupIPAndMessageFromRow -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -828,7 +828,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 
| right | Devel ; lookupIPAndMessageFromRowKeepBefore -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | KEEP left, client_ip, message, right @@ -841,7 +841,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBetween -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -854,7 +854,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepAfter -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -867,7 +867,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowing -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", type = "type", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -879,7 +879,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -893,7 +893,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -908,7 +908,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeepKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -924,7 +924,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepReordered -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -938,7 +938,7 @@ right | Development | Success | 172.21.0.5 ; lookupIPAndMessageFromIndex -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -958,7 +958,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -979,7 +979,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexStats -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -997,7 +997,7 @@ count:long | env:keyword | type:keyword ; lookupIPAndMessageFromIndexStatsKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1016,7 +1016,7 @@ 
count:long | env:keyword | type:keyword ; statsAndLookupIPAndMessageFromIndex -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1035,7 +1035,7 @@ count:long | client_ip:keyword | message:keyword | env:keyword | type:keyw ; lookupIPAndMessageFromIndexChainedEvalKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1057,7 +1057,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexChainedRenameKeep -required_capability: join_lookup_v7 +required_capability: join_lookup_v8 FROM sample_data | EVAL client_ip = client_ip::keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec index 5ea169e1b110..6c9a6fed3853 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec @@ -597,3 +597,74 @@ from employees,employees_incompatible emp_no_bool:boolean ; + +testMatchWithSemanticText +required_capability: match_function +required_capability: semantic_text_type + +from semantic_text +| where match(semantic_text_field, "something") +| keep semantic_text_field +| sort semantic_text_field asc +; + +semantic_text_field:semantic_text +all we have to decide is what to do with the time that is given to us +be excellent to each other +live long and prosper +; + +testMatchWithSemanticTextAndKeyword +required_capability: match_function +required_capability: semantic_text_type + +from semantic_text +| where match(semantic_text_field, "something") AND match(host, "host1") +| keep semantic_text_field, host +; + +semantic_text_field:semantic_text | host:keyword +live long and prosper | host1 +; + +testMatchWithSemanticTextMultiValueField +required_capability: match_function +required_capability: semantic_text_type + +from semantic_text metadata _id +| where match(st_multi_value, "something") AND match(host, "host1") +| keep _id, st_multi_value +; + +_id: keyword | st_multi_value:semantic_text +1 | ["Hello there!", "This is a random value", "for testing purposes"] +; + +testMatchWithSemanticTextWithEvalsAndOtherFunctionsAndStats +required_capability: match_function +required_capability: semantic_text_type + +from semantic_text +| where qstr("description:some*") +| eval size = mv_count(st_multi_value) +| where match(semantic_text_field, "something") AND size > 1 AND match(host, "host1") +| STATS result = count(*) +; + +result:long +1 +; + +testMatchWithSemanticTextAndKql +required_capability: match_function +required_capability: semantic_text_type +required_capability: kql_function + +from semantic_text +| where kql("host:host1") AND match(semantic_text_field, "something") +| KEEP host, semantic_text_field +; + +host:keyword | semantic_text_field:semantic_text +"host1" | live long and prosper +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec index 7906f8b69162..721443a70fe2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec @@ -608,3 +608,73 @@ from employees,employees_incompatible emp_no_bool:boolean ; +testMatchWithSemanticText +required_capability: match_operator_colon 
+required_capability: semantic_text_type + +from semantic_text +| where semantic_text_field:"something" +| keep semantic_text_field +| sort semantic_text_field asc +; + +semantic_text_field:semantic_text +all we have to decide is what to do with the time that is given to us +be excellent to each other +live long and prosper +; + +testMatchWithSemanticTextAndKeyword +required_capability: match_operator_colon +required_capability: semantic_text_type + +from semantic_text +| where semantic_text_field:"something" AND host:"host1" +| keep semantic_text_field, host +; + +semantic_text_field:semantic_text | host:keyword +live long and prosper | host1 +; + +testMatchWithSemanticTextMultiValueField +required_capability: match_operator_colon +required_capability: semantic_text_type + +from semantic_text metadata _id +| where st_multi_value:"something" AND match(host, "host1") +| keep _id, st_multi_value +; + +_id: keyword | st_multi_value:semantic_text +1 | ["Hello there!", "This is a random value", "for testing purposes"] +; + +testMatchWithSemanticTextWithEvalsAndOtherFunctionsAndStats +required_capability: match_operator_colon +required_capability: semantic_text_type + +from semantic_text +| where qstr("description:some*") +| eval size = mv_count(st_multi_value) +| where semantic_text_field:"something" AND size > 1 AND match(host, "host1") +| STATS result = count(*) +; + +result:long +1 +; + +testMatchWithSemanticTextAndKql +required_capability: match_operator_colon +required_capability: semantic_text_type +required_capability: kql_function + +from semantic_text +| where kql("host:host1") AND semantic_text_field:"something" +| KEEP host, semantic_text_field +; + +host:keyword | semantic_text_field:semantic_text +"host1" | live long and prosper +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec index 72632c62603a..9d3526982f9e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec @@ -317,3 +317,43 @@ book_no:keyword | title:text 2924 | A Gentle Creature and Other Stories: White Nights, A Gentle Creature, and The Dream of a Ridiculous Man (The World's Classics) | foobar 5948 | That We Are Gentle Creatures | foobar ; + + +semanticTextMatch +required_capability: metadata_score +required_capability: semantic_text_type +required_capability: match_function + +from semantic_text metadata _id, _score +| where match(semantic_text_field, "something") +| sort _score desc +| keep _id +; + +_id:keyword +2 +3 +1 +; + +semanticTextMatchWithAllTheTextFunctions + +required_capability: metadata_score +required_capability: semantic_text_type +required_capability: match_function +required_capability: kql_function +required_capability: qstr_function + +from semantic_text metadata _id, _score +| where match(semantic_text_field, "something") + AND match(description, "some") + AND kql("description:some*") + AND NOT qstr("host:host1") +| sort _score desc +| keep _id +; + +_id:keyword +2 +3 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java index 8de65847c3f8..8054b260f006 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java @@ -10,26 +10,15 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.engine.SegmentsStats; -import org.elasticsearch.index.mapper.OnScriptError; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.Before; import java.io.IOException; import java.util.Collection; -import java.util.Map; -import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; @@ -40,8 +29,6 @@ import java.util.concurrent.TimeUnit; */ public abstract class AbstractPausableIntegTestCase extends AbstractEsqlIntegTestCase { - private static final Logger LOGGER = LogManager.getLogger(AbstractPausableIntegTestCase.class); - protected static final Semaphore scriptPermits = new Semaphore(0); protected int pageSize = -1; @@ -108,53 +95,10 @@ public abstract class AbstractPausableIntegTestCase extends AbstractEsqlIntegTes } } - public static class PausableFieldPlugin extends Plugin implements ScriptPlugin { - + public static class PausableFieldPlugin extends AbstractPauseFieldPlugin { @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new ScriptEngine() { - @Override - public String getType() { - return "pause"; - } - - @Override - @SuppressWarnings("unchecked") - public FactoryType compile( - String name, - String code, - ScriptContext context, - Map params - ) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - try { - assertTrue(scriptPermits.tryAcquire(1, TimeUnit.MINUTES)); - } catch (Exception e) { - throw new AssertionError(e); - } - LOGGER.debug("--> emitting value"); - emit(1); - } - }; - } - }; - } - - @Override - public Set> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; + protected boolean onWait() throws InterruptedException { + return scriptPermits.tryAcquire(1, TimeUnit.MINUTES); } } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java new file mode 100644 index 000000000000..5554f7e571df --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; + +import static org.junit.Assert.assertTrue; + +/** + * A plugin that provides a script language "pause" that can be used to simulate slow-running queries. + * See also {@link AbstractPausableIntegTestCase}. + */ +public abstract class AbstractPauseFieldPlugin extends Plugin implements ScriptPlugin { + + // Called when the engine enters the execute() method. + protected void onStartExecute() {} + + // Called when the engine needs to wait for further execution to be allowed. + protected abstract boolean onWait() throws InterruptedException; + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "pause"; + } + + @Override + @SuppressWarnings("unchecked") + public <FactoryType> FactoryType compile( + String name, + String code, + ScriptContext<FactoryType> context, + Map<String, String> params + ) { + if (context == LongFieldScript.CONTEXT) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map<String, Object> params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + onStartExecute(); + try { + assertTrue(onWait()); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + emit(1); + } + }; + } + }; + } + throw new IllegalStateException("unsupported type " + context); + } + + @Override + public Set<ScriptContext<?>> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java index a2bba19db50f..3926ea4c27a3 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java @@ -19,14 +19,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.test.XContentTestUtils; import
org.elasticsearch.transport.RemoteClusterAware; @@ -44,7 +38,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -80,7 +73,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase { plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action plugins.add(InternalExchangePlugin.class); - plugins.add(PauseFieldPlugin.class); + plugins.add(SimplePauseFieldPlugin.class); return plugins; } @@ -99,64 +92,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase { @Before public void resetPlugin() { - PauseFieldPlugin.allowEmitting = new CountDownLatch(1); - PauseFieldPlugin.startEmitting = new CountDownLatch(1); - } - - public static class PauseFieldPlugin extends Plugin implements ScriptPlugin { - public static CountDownLatch startEmitting = new CountDownLatch(1); - public static CountDownLatch allowEmitting = new CountDownLatch(1); - - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new ScriptEngine() { - @Override - - public String getType() { - return "pause"; - } - - @Override - @SuppressWarnings("unchecked") - public FactoryType compile( - String name, - String code, - ScriptContext context, - Map params - ) { - if (context == LongFieldScript.CONTEXT) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - startEmitting.countDown(); - try { - assertTrue(allowEmitting.await(30, TimeUnit.SECONDS)); - } catch (InterruptedException e) { - throw new AssertionError(e); - } - emit(1); - } - }; - } - }; - } - throw new IllegalStateException("unsupported type " + context); - } - - @Override - public Set> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; - } + SimplePauseFieldPlugin.resetPlugin(); } /** @@ -184,7 +120,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase { } // wait until we know that the query against 'remote-b:blocking' has started - PauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS); + SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS); // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it) assertBusy(() -> { @@ -234,7 +170,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase { } // allow remoteB query to proceed - PauseFieldPlugin.allowEmitting.countDown(); + SimplePauseFieldPlugin.allowEmitting.countDown(); // wait until both remoteB and local queries have finished assertBusy(() -> { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 17f5f8148665..cfe6fdeccb19 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -15,18 +15,11 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.transport.TransportService; @@ -38,9 +31,6 @@ import org.junit.Before; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; @@ -63,7 +53,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(InternalExchangePlugin.class); - plugins.add(PauseFieldPlugin.class); + plugins.add(SimplePauseFieldPlugin.class); return plugins; } @@ -82,63 +72,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { @Before public void resetPlugin() { - PauseFieldPlugin.allowEmitting = new CountDownLatch(1); - PauseFieldPlugin.startEmitting = new CountDownLatch(1); - } - - public static class PauseFieldPlugin extends Plugin implements ScriptPlugin { - public static CountDownLatch startEmitting = new CountDownLatch(1); - public static CountDownLatch allowEmitting = new CountDownLatch(1); - - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new ScriptEngine() { - @Override - public String getType() { - return "pause"; - } - - @Override - @SuppressWarnings("unchecked") - public FactoryType compile( - String name, - String code, - ScriptContext context, - Map params - ) { - if (context == LongFieldScript.CONTEXT) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - startEmitting.countDown(); - try { - assertTrue(allowEmitting.await(30, TimeUnit.SECONDS)); - } catch (InterruptedException e) { - throw new AssertionError(e); - } - emit(1); - } - }; - } - }; - } - throw new IllegalStateException("unsupported type " + context); - } - - @Override - public Set> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; - } + SimplePauseFieldPlugin.resetPlugin(); } private void createRemoteIndex(int numDocs) throws Exception { @@ -169,7 +103,7 @@ public class 
CrossClustersCancellationIT extends AbstractMultiClustersTestCase { request.pragmas(randomPragmas()); PlainActionFuture<EsqlQueryResponse> requestFuture = new PlainActionFuture<>(); client().execute(EsqlQueryAction.INSTANCE, request, requestFuture); - assertTrue(PauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); + assertTrue(SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); List<TaskInfo> rootTasks = new ArrayList<>(); assertBusy(() -> { List<TaskInfo> tasks = client().admin().cluster().prepareListTasks().setActions(EsqlQueryAction.NAME).get().getTasks(); @@ -192,7 +126,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { } }); } finally { - PauseFieldPlugin.allowEmitting.countDown(); + SimplePauseFieldPlugin.allowEmitting.countDown(); } Exception error = expectThrows(Exception.class, requestFuture::actionGet); assertThat(error.getMessage(), containsString("proxy timeout")); @@ -223,7 +157,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { assertThat(tasks, hasSize(moreClusters + 1)); }); } finally { - PauseFieldPlugin.allowEmitting.countDown(); + SimplePauseFieldPlugin.allowEmitting.countDown(); } try (EsqlQueryResponse resp = future.actionGet(30, TimeUnit.SECONDS)) { // TODO: This produces incorrect results because data on the remote cluster is processed multiple times. @@ -244,7 +178,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); request.pragmas(randomPragmas()); ActionFuture<EsqlQueryResponse> requestFuture = client().execute(EsqlQueryAction.INSTANCE, request); - assertTrue(PauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); + assertTrue(SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); try { assertBusy(() -> { List<TaskInfo> clusterTasks = client(REMOTE_CLUSTER).admin() @@ -270,7 +204,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { \\_ExchangeSinkOperator""")); }); } finally { - PauseFieldPlugin.allowEmitting.countDown(); + SimplePauseFieldPlugin.allowEmitting.countDown(); } requestFuture.actionGet(30, TimeUnit.SECONDS).close(); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java new file mode 100644 index 000000000000..3ba73dd9a402 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +/** + * A plugin that provides a script language "pause" that can be used to simulate slow-running queries. + * This implementation signals when execution reaches execute() via startEmitting, and lets the test allow + * execution to proceed via allowEmitting.
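+ * <p>
+ * A typical test, mirroring the call sites updated in this change (the request/future names are
+ * illustrative, not part of the plugin API):
+ * <pre>{@code
+ * SimplePauseFieldPlugin.resetPlugin();                                         // e.g. in a @Before method
+ * ActionFuture<EsqlQueryResponse> future = client().execute(EsqlQueryAction.INSTANCE, request);
+ * assertTrue(SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); // query is now paused
+ * // ... inspect tasks, cancel, etc. while the query is blocked ...
+ * SimplePauseFieldPlugin.allowEmitting.countDown();                             // unblock the query
+ * }</pre>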
+ */ +public class SimplePauseFieldPlugin extends AbstractPauseFieldPlugin { + public static CountDownLatch startEmitting = new CountDownLatch(1); + public static CountDownLatch allowEmitting = new CountDownLatch(1); + + public static void resetPlugin() { + allowEmitting = new CountDownLatch(1); + startEmitting = new CountDownLatch(1); + } + + @Override + public void onStartExecute() { + startEmitting.countDown(); + } + + @Override + public boolean onWait() throws InterruptedException { + return allowEmitting.await(30, TimeUnit.SECONDS); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java index d58637ab52c8..0e84ac7588ad 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java @@ -10,13 +10,17 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.kql.KqlPlugin; import org.junit.Before; import org.junit.BeforeClass; +import java.util.Collection; import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -141,4 +145,9 @@ public class KqlFunctionIT extends AbstractEsqlIntegTestCase { .get(); ensureYellow(indexName); } + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), KqlPlugin.class); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index bfc2675d5979..a6e0f1d89c36 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -555,12 +555,12 @@ public class EsqlCapabilities { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V7(Build.current().isSnapshot()), + JOIN_LOOKUP_V8(Build.current().isSnapshot()), /** * LOOKUP JOIN with the same index as the FROM */ - JOIN_LOOKUP_REPEATED_INDEX_FROM(JOIN_LOOKUP_V7.isEnabled()), + JOIN_LOOKUP_REPEATED_INDEX_FROM(JOIN_LOOKUP_V8.isEnabled()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index e15731ca7903..3d1bfdfd0ef4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -235,6 +235,37 @@ public class Analyzer extends ParameterizedRuleExecutor listener ) { final PlanningMetrics planningMetrics = new PlanningMetrics(); @@ -73,7 +75,8 @@ public class PlanExecutor { mapper, 
verifier, planningMetrics, - indicesExpressionGrouper + indicesExpressionGrouper, + queryBuilderResolver ); QueryMetric clientId = QueryMetric.fromString("rest"); metrics.total(clientId); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 0b2268fe1b02..e695a94198da 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -51,6 +51,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.IP; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; @@ -70,6 +71,7 @@ public class Match extends FullTextFunction implements Validatable { public static final Set<DataType> FIELD_DATA_TYPES = Set.of( KEYWORD, TEXT, + SEMANTIC_TEXT, BOOLEAN, DATETIME, DATE_NANOS, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 347d542f5212..12932ba8d6e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -163,6 +163,17 @@ public class Bucket extends GroupingFunction implements Validatable, TwoOptionalArguments grouping part, or that it is invoked with the exact same expression:""", file = "bucket", tag = "reuseGroupingFunctionWithExpression" + ), + @Example( + description = """ + Sometimes you need to shift the start value of each bucket by a given duration (similar to the date histogram + aggregation's `offset` parameter). To do so, take into account how the language handles expressions within the + `STATS` command: if these contain functions or arithmetic operators, a virtual `EVAL` is inserted before and/or + after the `STATS` command. Consequently, the offset must be compensated for twice: once to adjust the bucketed + date value before the aggregation, and once more to shift it back afterwards. For instance, applying a negative + offset of `1 hour` to buckets of `1 year` looks like this:""", + file = "bucket", + tag = "bucketWithOffset" ) } ) public Bucket( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 24398afa1801..49d77bc36fb2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -564,6 +564,11 @@ public class LogicalPlanBuilder extends ExpressionBuilder { } } + var matchFieldsCount = joinFields.size(); + if (matchFieldsCount > 1) { + throw new ParsingException(source, "JOIN ON clause only supports one field at the moment, found [{}]", matchFieldsCount); + } + return p -> new LookupJoin(source, p, right, joinFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 76bfb95d0792..50d5819688e4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -42,6 +43,7 @@ import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; +import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; import java.io.IOException; @@ -68,6 +70,7 @@ public class TransportEsqlQueryAction extends HandledTransportAction<EsqlQueryRequest, EsqlQueryResponse> private final AsyncTaskManagementService<EsqlQueryRequest, EsqlQueryResponse, EsqlQueryTask> asyncTaskManagementService; private final RemoteClusterService remoteClusterService; + private final QueryBuilderResolver queryBuilderResolver; @Inject @SuppressWarnings("this-escape") @@ -82,7 +85,8 @@ public class TransportEsqlQueryAction extends HandledTransportAction toResponse(task, request, configuration, result)) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index c0290fa2b1d7..bd3b3bdb3483 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -113,6 +113,7 @@ public class EsqlSession { private final PhysicalPlanOptimizer physicalPlanOptimizer; private final PlanningMetrics planningMetrics; private final IndicesExpressionGrouper indicesExpressionGrouper; + private final QueryBuilderResolver queryBuilderResolver; public EsqlSession( String sessionId, @@ -125,7 +126,8 @@ public class EsqlSession { Mapper mapper, Verifier verifier,
PlanningMetrics planningMetrics, - IndicesExpressionGrouper indicesExpressionGrouper + IndicesExpressionGrouper indicesExpressionGrouper, + QueryBuilderResolver queryBuilderResolver ) { this.sessionId = sessionId; this.configuration = configuration; @@ -139,6 +141,7 @@ public class EsqlSession { this.physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); this.planningMetrics = planningMetrics; this.indicesExpressionGrouper = indicesExpressionGrouper; + this.queryBuilderResolver = queryBuilderResolver; } public String sessionId() { @@ -158,7 +161,16 @@ public class EsqlSession { new EsqlSessionCCSUtils.CssPartialErrorsActionListener(executionInfo, listener) { @Override public void onResponse(LogicalPlan analyzedPlan) { - executeOptimizedPlan(request, executionInfo, planRunner, optimizedPlan(analyzedPlan), listener); + try { + var optimizedPlan = optimizedPlan(analyzedPlan); + queryBuilderResolver.resolveQueryBuilders( + optimizedPlan, + listener, + (newPlan, next) -> executeOptimizedPlan(request, executionInfo, planRunner, newPlan, next) + ); + } catch (Exception e) { + listener.onFailure(e); + } } } ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java new file mode 100644 index 000000000000..b6424c5f7fa5 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.session; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ResolvedIndices; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.Rewriteable; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.core.util.Holder; +import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; + +/** + * Some {@link FullTextFunction} implementations such as {@link org.elasticsearch.xpack.esql.expression.function.fulltext.Match} + * will be translated to a {@link QueryBuilder} that requires a rewrite phase on the coordinator. + * {@link QueryBuilderResolver#resolveQueryBuilders(LogicalPlan, ActionListener, BiConsumer)} will rewrite the plan by replacing + * {@link FullTextFunction} expressions with new ones that hold rewritten {@link QueryBuilder}s.
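+ * <p>
+ * A minimal usage sketch, following how {@code EsqlSession} wires this up in this change (the variable
+ * names are illustrative):
+ * <pre>{@code
+ * queryBuilderResolver.resolveQueryBuilders(
+ *     optimizedPlan,
+ *     listener,
+ *     (newPlan, next) -> executeOptimizedPlan(request, executionInfo, planRunner, newPlan, next)
+ * );
+ * }</pre>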
+ */ +public class QueryBuilderResolver { + private final SearchService searchService; + private final ClusterService clusterService; + private final TransportService transportService; + private final IndexNameExpressionResolver indexNameExpressionResolver; + + public QueryBuilderResolver( + SearchService searchService, + ClusterService clusterService, + TransportService transportService, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + this.searchService = searchService; + this.clusterService = clusterService; + this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; + } + + public void resolveQueryBuilders( + LogicalPlan plan, + ActionListener<Result> listener, + BiConsumer<LogicalPlan, ActionListener<Result>> callback + ) { + // TODO: remove once SEMANTIC_TEXT_TYPE is enabled outside of snapshots + if (false == EsqlCapabilities.Cap.SEMANTIC_TEXT_TYPE.isEnabled()) { + callback.accept(plan, listener); + return; + } + + if (plan.optimized() == false) { + listener.onFailure(new IllegalStateException("Expected optimized plan before query builder rewrite.")); + return; + } + + Set<FullTextFunction> unresolved = fullTextFunctions(plan); + Set<String> indexNames = indexNames(plan); + + if (indexNames == null || indexNames.isEmpty() || unresolved.isEmpty()) { + callback.accept(plan, listener); + return; + } + QueryRewriteContext ctx = queryRewriteContext(indexNames); + FullTextFunctionsRewritable rewritable = new FullTextFunctionsRewritable(unresolved); + Rewriteable.rewriteAndFetch(rewritable, ctx, new ActionListener<FullTextFunctionsRewritable>() { + @Override + public void onResponse(FullTextFunctionsRewritable fullTextFunctionsRewritable) { + try { + LogicalPlan newPlan = planWithResolvedQueryBuilders(plan, fullTextFunctionsRewritable.results()); + callback.accept(newPlan, listener); + } catch (Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private Set<FullTextFunction> fullTextFunctions(LogicalPlan plan) { + Set<FullTextFunction> functions = new HashSet<>(); + plan.forEachExpressionDown(FullTextFunction.class, func -> functions.add(func)); + return functions; + } + + public Set<String> indexNames(LogicalPlan plan) { + Holder<Set<String>> indexNames = new Holder<>(); + + plan.forEachDown(EsRelation.class, esRelation -> { indexNames.set(esRelation.index().concreteIndices()); }); + + return indexNames.get(); + } + + public LogicalPlan planWithResolvedQueryBuilders(LogicalPlan plan, Map<FullTextFunction, QueryBuilder> newQueryBuilders) { + LogicalPlan newPlan = plan.transformExpressionsDown(FullTextFunction.class, m -> { + if (newQueryBuilders.keySet().contains(m)) { + return m.replaceQueryBuilder(newQueryBuilders.get(m)); + } + return m; + }); + // The given plan was already analyzed and optimized, so we set the resulting plan to optimized as well.
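+ // transformExpressionsDown returns a new plan instance and the optimized flag is not carried over,
+ // so the flag is re-applied below before the plan is handed back to the caller.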
+ + private QueryRewriteContext queryRewriteContext(Set<String> indexNames) { + ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndexNamesAndOptions( + indexNames.toArray(String[]::new), + IndexResolver.FIELD_CAPS_INDICES_OPTIONS, + clusterService.state(), + indexNameExpressionResolver, + transportService.getRemoteClusterService(), + System.currentTimeMillis() + ); + + return searchService.getRewriteContext(() -> System.currentTimeMillis(), resolvedIndices, null); + } + + private class FullTextFunctionsRewritable implements Rewriteable<FullTextFunctionsRewritable> { + + private final Map<FullTextFunction, QueryBuilder> queryBuilderMap; + + FullTextFunctionsRewritable(Map<FullTextFunction, QueryBuilder> queryBuilderMap) { + this.queryBuilderMap = queryBuilderMap; + } + + FullTextFunctionsRewritable(Set<FullTextFunction> functions) { + this.queryBuilderMap = new HashMap<>(); + + for (FullTextFunction func : functions) { + queryBuilderMap.put(func, func.asQuery(PlannerUtils.TRANSLATOR_HANDLER).asBuilder()); + } + } + + @Override + public FullTextFunctionsRewritable rewrite(QueryRewriteContext ctx) throws IOException { + Map<FullTextFunction, QueryBuilder> results = new HashMap<>(); + + boolean hasChanged = false; + for (FullTextFunction func : queryBuilderMap.keySet()) { + var initial = queryBuilderMap.get(func); + var rewritten = Rewriteable.rewrite(initial, ctx, false); + + if (rewritten.equals(initial) == false) { + hasChanged = true; + } + + results.put(func, rewritten); + } + + return hasChanged ? new FullTextFunctionsRewritable(results) : this; + } + + public Map<FullTextFunction, QueryBuilder> results() { + return queryBuilderMap; + } + } +}
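Callers that do not need coordinator-side rewrites can satisfy the new EsqlSession constructor argument with a pass-through resolver, which is what the test changes below do via EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER (its definition is not part of this patch). A plausible shape for such a stub, assuming the mock simply short-circuits to the callback:

    // Hypothetical no-op stub: hands the plan straight to the callback, skipping all rewrites.
    QueryBuilderResolver noopResolver = new QueryBuilderResolver(null, null, null, null) {
        @Override
        public void resolveQueryBuilders(
            LogicalPlan plan,
            ActionListener<Result> listener,
            BiConsumer<LogicalPlan, ActionListener<Result>> callback
        ) {
            callback.accept(plan, listener); // no QueryRewriteContext is ever built
        }
    };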
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 717ac7b5a62a..e627f99322f0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -263,7 +263,7 @@ public class CsvTests extends ESTestCase { ); assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V7.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V8.capabilityName()) ); assumeFalse( "can't use TERM function in csv tests", @@ -322,13 +322,14 @@ } protected void assertResults(ExpectedResults expected, ActualResults actual, boolean ignoreOrder, Logger logger) { - CsvAssert.assertResults(expected, actual, ignoreOrder, logger); /* - * Comment the assertion above and enable the next two lines to see the results returned by ES without any assertions being done. + * Enable the next two lines to see the results returned by ES. * This is useful when creating a new test or trying to figure out what the actual results are. */ // CsvTestUtils.logMetaData(actual.columnNames(), actual.columnTypes(), LOGGER); // CsvTestUtils.logData(actual.values(), LOGGER); + + CsvAssert.assertResults(expected, actual, ignoreOrder, logger); } private static IndexResolution loadIndexResolution(String mappingName, String indexName, Map<String, String> typeMapping) { @@ -445,7 +446,8 @@ mapper, TEST_VERIFIER, new PlanningMetrics(), - null + null, + EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER ); TestPhysicalOperationProviders physicalOperationProviders = testOperationProviders(testDataset); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index 85dd36ba0aaa..d4e786a9d9bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -46,6 +46,10 @@ return analyzer(indexResolution, TEST_VERIFIER); } + public static Analyzer analyzer(IndexResolution indexResolution, Map<String, IndexResolution> lookupResolution) { + return analyzer(indexResolution, lookupResolution, TEST_VERIFIER); + } + public static Analyzer analyzer(IndexResolution indexResolution, Verifier verifier) { return new Analyzer( new AnalyzerContext( EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), indexResolution, defaultLookupResolution(), defaultEnrichResolution() ), verifier ); } @@ -59,6 +63,19 @@ ); } + public static Analyzer analyzer(IndexResolution indexResolution, Map<String, IndexResolution> lookupResolution, Verifier verifier) { + return new Analyzer( + new AnalyzerContext( + EsqlTestUtils.TEST_CFG, + new EsqlFunctionRegistry(), + indexResolution, + lookupResolution, + defaultEnrichResolution() + ), + verifier + ); + } + public static Analyzer analyzer(IndexResolution indexResolution, Verifier verifier, Configuration config) { return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), indexResolution, defaultLookupResolution(), defaultEnrichResolution()), verifier @@ -111,7 +128,7 @@ } public static IndexResolution loadMapping(String resource, String indexName) { - EsIndex test = new EsIndex(indexName, EsqlTestUtils.loadMapping(resource)); + EsIndex test = new EsIndex(indexName, EsqlTestUtils.loadMapping(resource), Map.of(indexName, IndexMode.STANDARD)); return IndexResolution.valid(test); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 9c71f20dcde0..5d1ff43dfe31 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2139,7 +2139,7 @@ } public void testLookupJoinUnknownIndex() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); String errorMessage = "Unknown index [foobar]"; IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); @@ -2168,7 +2168,7 @@ } public void testLookupJoinUnknownField() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", 
EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name"; String errorMessage = "1:45: Unknown column [last_name] in right side of join"; @@ -2190,6 +2190,35 @@ public class AnalyzerTests extends ESTestCase { assertThat(e.getMessage(), containsString(errorMessage3 + "right side of join")); } + public void testLookupJoinIndexMode() { + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + + var indexResolution = AnalyzerTestUtils.expandedDefaultIndexResolution(); + var lookupResolution = AnalyzerTestUtils.defaultLookupResolution(); + var indexResolutionAsLookup = Map.of("test", indexResolution); + var lookupResolutionAsIndex = lookupResolution.get("languages_lookup"); + + analyze("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); + analyze( + "FROM languages_lookup | LOOKUP JOIN languages_lookup ON language_code", + AnalyzerTestUtils.analyzer(lookupResolutionAsIndex, lookupResolution) + ); + + VerificationException e = expectThrows( + VerificationException.class, + () -> analyze( + "FROM languages_lookup | EVAL languages = language_code | LOOKUP JOIN test ON languages", + AnalyzerTestUtils.analyzer(lookupResolutionAsIndex, indexResolutionAsLookup) + ) + ); + assertThat(e.getMessage(), containsString("1:70: invalid [test] resolution in lookup mode to an index in [standard] mode")); + e = expectThrows( + VerificationException.class, + () -> analyze("FROM test | LOOKUP JOIN test ON languages", AnalyzerTestUtils.analyzer(indexResolution, indexResolutionAsLookup)) + ); + assertThat(e.getMessage(), containsString("1:25: invalid [test] resolution in lookup mode to an index in [standard] mode")); + } + public void testImplicitCasting() { var e = expectThrows(VerificationException.class, () -> analyze(""" from test | eval x = concat("2024", "-04", "-01") + 1 day diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 205c8943d4e3..549ddce03c20 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.LoadMapping; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; @@ -111,6 +112,46 @@ public class ParsingTests extends ESTestCase { assertEquals("-1:-1: ESQL statement is too large [1000011 characters > 1000000]", error(query.toString())); } + public void testJoinOnConstant() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assertEquals( + "1:55: JOIN ON clause only supports fields at the moment, found [123]", + error("row languages = 1, gender = \"f\" | lookup join test on 123") + ); + assertEquals( + "1:55: JOIN ON clause only supports fields at the moment, found [\"abc\"]", + error("row languages = 1, gender = \"f\" | lookup join test on \"abc\"") + ); + assertEquals( + "1:55: JOIN ON clause only supports fields at 
the moment, found [false]", + error("row languages = 1, gender = \"f\" | lookup join test on false") + ); + } + + public void testJoinOnMultipleFields() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assertEquals( + "1:35: JOIN ON clause only supports one field at the moment, found [2]", + error("row languages = 1, gender = \"f\" | lookup join test on gender, languages") + ); + } + + public void testJoinTwiceOnTheSameField() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assertEquals( + "1:35: JOIN ON clause only supports one field at the moment, found [2]", + error("row languages = 1, gender = \"f\" | lookup join test on languages, languages") + ); + } + + public void testJoinTwiceOnTheSameField_TwoLookups() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assertEquals( + "1:80: JOIN ON clause only supports one field at the moment, found [2]", + error("row languages = 1, gender = \"f\" | lookup join test on languages | eval x = 1 | lookup join test on gender, gender") + ); + } + private String functionName(EsqlFunctionRegistry registry, Expression functionCall) { for (FunctionDefinition def : registry.listFunctions()) { if (functionCall.getClass().equals(def.clazz())) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index a1e29117a25d..43d764ab2007 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1974,7 +1974,7 @@ public class VerifierTests extends ESTestCase { } public void testLookupJoinDataTypeMismatch() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); @@ -1985,7 +1985,11 @@ public class VerifierTests extends ESTestCase { } private void query(String query) { - defaultAnalyzer.analyze(parser.createStatement(query)); + query(query, defaultAnalyzer); + } + + private void query(String query, Analyzer analyzer) { + analyzer.analyze(parser.createStatement(query)); } private String error(String query) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 406e27c1517e..928c849b847d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -1450,6 +1450,11 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var analyzer = makeAnalyzer("mapping-all-types.json"); // Check for every possible query data type for (DataType fieldDataType : fieldDataTypes) { + // TODO: semantic_text is not present in mapping-all-types.json so we skip it for now + if (fieldDataType == DataType.SEMANTIC_TEXT) { + continue; + } + var queryValue = randomQueryValue(fieldDataType); String fieldName = 
fieldDataType == DataType.DATETIME ? "date" : fieldDataType.name().toLowerCase(Locale.ROOT); @@ -1483,6 +1488,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { case KEYWORD -> randomAlphaOfLength(5); case IP -> NetworkAddress.format(randomIp(randomBoolean())); case TEXT -> randomAlphaOfLength(50); + case SEMANTIC_TEXT -> randomAlphaOfLength(5); case VERSION -> VersionUtils.randomVersion(random()).toString(); default -> throw new IllegalArgumentException("Unexpected type: " + dataType); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index cfb993a7dd73..17e158f088fb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -4906,7 +4906,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { } public void testPlanSanityCheckWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); var plan = optimizedPlan(""" FROM test @@ -5911,7 +5911,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); String query = """ FROM test @@ -5954,7 +5954,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnLeftSideField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); String query = """ FROM test @@ -5998,7 +5998,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownDisabledForLookupField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); String query = """ FROM test @@ -6043,7 +6043,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); String query = """ FROM test @@ -6096,7 +6096,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); 
String query = """ FROM test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 964dd4642d7c..c7bb6e49703e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -2331,7 +2331,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { } public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); // Do not assert serialization: // This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 31ec4663738f..60bdf4e7f73d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -1365,7 +1365,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( "FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code", Set.of("languages", "languages.*", "language_code", "language_code.*"), @@ -1374,7 +1374,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testLookupJoinKeep() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM employees @@ -1388,7 +1388,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testLookupJoinKeepWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM employees @@ -1402,7 +1402,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1415,7 +1415,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1429,7 +1429,7 @@ public class 
IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1454,7 +1454,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1481,7 +1481,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepAfterWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1495,7 +1495,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndex() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1509,7 +1509,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndexKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1524,7 +1524,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndexKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1550,7 +1550,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndexKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V7.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); assertFieldNames( """ FROM sample_data diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index b323efad2b4c..539cd0314a4d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -123,6 +123,7 @@ public class PlanExecutorMetricsTests extends ESTestCase { new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, + EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER, new ActionListener<>() { @Override public void onResponse(Result result) { @@ -152,6 +153,7 @@ public class PlanExecutorMetricsTests extends ESTestCase { new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, 
+ EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER, new ActionListener<>() { @Override public void onResponse(Result result) {} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index c7b3a9d42f57..3b0fc869c812 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -22,7 +22,6 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; @@ -59,7 +58,7 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateInferencePlugin.class); + return Arrays.asList(Utils.TestInferencePlugin.class); } public void testBulkOperations() throws Exception { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index d5c156d1d4f4..be6b3725b0f3 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; @@ -76,7 +76,7 @@ public class ModelRegistryIT extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return pluginList(ReindexPlugin.class, LocalStateInferencePlugin.class); + return pluginList(ReindexPlugin.class, InferencePlugin.class); } public void testStoreModel() throws Exception { diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 1c2240e8c521..53974657e4e2 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -34,7 +34,6 @@ module org.elasticsearch.inference { requires software.amazon.awssdk.retries.api; requires org.reactivestreams; requires org.elasticsearch.logging; - requires org.elasticsearch.sslconfig; exports org.elasticsearch.xpack.inference.action; exports 
org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 169c8f87043e..72fa840ad19b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; @@ -46,7 +45,6 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceDiagnosticsAction; @@ -56,7 +54,6 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; -import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceEndpointAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceDiagnosticsAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; @@ -122,6 +119,7 @@ import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; +import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.singletonList; @@ -155,7 +153,6 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final Settings settings; private final SetOnce<HttpRequestSender.Factory> httpFactory = new SetOnce<>(); private final SetOnce<AmazonBedrockRequestSender.Factory> amazonBedrockFactory = new SetOnce<>(); - private final SetOnce<HttpRequestSender.Factory> elasicInferenceServiceFactory = new SetOnce<>(); private final SetOnce<ServiceComponents> serviceComponents = new SetOnce<>(); private final SetOnce<ElasticInferenceServiceComponents> elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce<InferenceServiceRegistry> inferenceServiceRegistry = new SetOnce<>(); @@ -238,31 +235,31 @@ var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (isElasticInferenceServiceEnabled()) { - // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). - var elasticInferenceServiceHttpClientManager = HttpClientManager.create( - settings, - services.threadPool(), - services.clusterService(), - throttlerManager, - getSslService() - ); + // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing + // internal names like "eis" or "gateway". + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( - serviceComponents.get(), - elasticInferenceServiceHttpClientManager, - services.clusterService() - ); - elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); + String elasticInferenceUrl = null; - ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); + } + + if (elasticInferenceUrl != null) { elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( () -> List.of( context -> new ElasticInferenceService( - elasicInferenceServiceFactory.get(), + httpFactory.get(), serviceComponents.get(), elasticInferenceServiceComponents.get() )
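With the dedicated SSL-configured client manager gone, ElasticInferenceService is constructed from the plugin's shared httpFactory above, and the EIS node configuration shrinks to the two URL settings. A sketch of how those settings are read after this change, based on the ElasticInferenceServiceSettings hunk later in this patch (the URL value is illustrative, not a real endpoint):

    // Sketch: EIS URL resolution after this patch. The setting keys are real; the value is made up.
    Settings nodeSettings = Settings.builder()
        .put("xpack.inference.elastic.url", "https://eis.example.invalid")
        .build();
    ElasticInferenceServiceSettings eisSettings = new ElasticInferenceServiceSettings(nodeSettings);
    String url = eisSettings.getElasticInferenceServiceUrl(); // preferred setting, gated by the new feature flag
    String legacyUrl = eisSettings.getEisGatewayUrl();        // deprecated gateway setting, gated by the old flag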
@@ -385,21 +382,16 @@ @Override public List<Setting<?>> getSettings() { - ArrayList<Setting<?>> settings = new ArrayList<>(); - settings.addAll(HttpSettings.getSettingsDefinitions()); - settings.addAll(HttpClientManager.getSettingsDefinitions()); - settings.addAll(ThrottlerManager.getSettingsDefinitions()); - settings.addAll(RetrySettings.getSettingsDefinitions()); - settings.addAll(Truncator.getSettingsDefinitions()); - settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions()); - settings.add(SKIP_VALIDATE_AND_START); - - // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled. - if (isElasticInferenceServiceEnabled()) { - settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); - } - - return settings; + return Stream.of( + HttpSettings.getSettingsDefinitions(), + HttpClientManager.getSettingsDefinitions(), + ThrottlerManager.getSettingsDefinitions(), + RetrySettings.getSettingsDefinitions(), + ElasticInferenceServiceSettings.getSettingsDefinitions(), + Truncator.getSettingsDefinitions(), + RequestExecutorServiceSettings.getSettingsDefinitions(), + List.of(SKIP_VALIDATE_AND_START) + ).flatMap(Collection::stream).collect(Collectors.toList()); }
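The getSettings() rewrite above is a behavioral change as well as a cleanup: ElasticInferenceServiceSettings.getSettingsDefinitions() is now registered unconditionally rather than behind isElasticInferenceServiceEnabled(), presumably so nodes accept EIS settings even when neither feature flag is set. A hypothetical check of the new behavior (not a test from this patch):

    // Hypothetical assertion: the EIS URL setting is registered regardless of feature flags.
    InferencePlugin plugin = new InferencePlugin(Settings.EMPTY);
    boolean registered = plugin.getSettings().stream()
        .map(Setting::getKey)
        .anyMatch("xpack.inference.elastic.url"::equals);
    assert registered; // before this patch, this required a feature flag to be enabled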
@Override @@ -447,10 +439,7 @@ public List<RetrieverSpec<?>> getRetrievers() { return List.of( - new RetrieverSpec<>( - new ParseField(TextSimilarityRankBuilder.NAME), - (parser, context) -> TextSimilarityRankRetrieverBuilder.fromXContent(parser, context, getLicenseState()) - ), + new RetrieverSpec<>(new ParseField(TextSimilarityRankBuilder.NAME), TextSimilarityRankRetrieverBuilder::fromXContent), new RetrieverSpec<>(new ParseField(RandomRankBuilder.NAME), RandomRankRetrieverBuilder::fromXContent) ); } @@ -459,36 +448,4 @@ public Map<String, Highlighter> getHighlighters() { return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter()); } - - // Get Elastic Inference service URL based on feature flags to support transitioning - // to the new Elastic Inference Service URL. - private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { - String elasticInferenceUrl = null; - - if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); - } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - log.warn( - "Deprecated flag {} detected for enabling {}. 
Please use {}.", - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, - ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG - ); - elasticInferenceUrl = settings.getEisGatewayUrl(); - } - - return elasticInferenceUrl; - } - - protected Boolean isElasticInferenceServiceEnabled() { - return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); - } - - protected SSLService getSslService() { - return XPackPlugin.getSharedSslService(); - } - - protected XPackLicenseState getLicenseState() { - return XPackPlugin.getSharedLicenseState(); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index 6d09c9e67b36..e5d76b9bb557 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -7,14 +7,9 @@ package org.elasticsearch.xpack.inference.external.http; -import org.apache.http.config.Registry; -import org.apache.http.config.RegistryBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; -import org.apache.http.nio.conn.NoopIOSessionStrategy; -import org.apache.http.nio.conn.SchemeIOSessionStrategy; -import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.http.pool.PoolStats; @@ -26,7 +21,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; @@ -34,13 +28,11 @@ import java.io.IOException; import java.util.List; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX; public class HttpClientManager implements Closeable { private static final Logger logger = LogManager.getLogger(HttpClientManager.class); /** * The maximum number of total connections the connection pool can lease to all routes. - * The configuration applies to each instance of HTTPClientManager (max_total_connections=10 and instances=5 leads to 50 connections). * From googling around the connection pools maxTotal value should be close to the number of available threads. * * https://stackoverflow.com/questions/30989637/how-to-decide-optimal-settings-for-setmaxtotal-and-setdefaultmaxperroute @@ -55,7 +47,6 @@ public class HttpClientManager implements Closeable { /** * The max number of connections a single route can lease. - * This configuration applies to each instance of HttpClientManager. 
*/ public static final Setting MAX_ROUTE_CONNECTIONS = Setting.intSetting( "xpack.inference.http.max_route_connections", @@ -107,22 +98,6 @@ public class HttpClientManager implements Closeable { return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } - public static HttpClientManager create( - Settings settings, - ThreadPool threadPool, - ClusterService clusterService, - ThrottlerManager throttlerManager, - SSLService sslService - ) { - // Set the sslStrategy to ensure an encrypted connection, as Elastic Inference Service requires it. - SSLIOSessionStrategy sslioSessionStrategy = sslService.sslIOSessionStrategy( - sslService.getSSLConfiguration(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX) - ); - - PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(sslioSessionStrategy); - return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); - } - // Default for testing HttpClientManager( Settings settings, @@ -146,25 +121,6 @@ public class HttpClientManager implements Closeable { this.addSettingsUpdateConsumers(clusterService); } - private static PoolingNHttpClientConnectionManager createConnectionManager(SSLIOSessionStrategy sslStrategy) { - ConnectingIOReactor ioReactor; - try { - var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); - ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); - } catch (IOReactorException e) { - var message = "Failed to initialize HTTP client manager with SSL."; - logger.error(message, e); - throw new ElasticsearchException(message, e); - } - - Registry registry = RegistryBuilder.create() - .register("http", NoopIOSessionStrategy.INSTANCE) - .register("https", sslStrategy) - .build(); - - return new PoolingNHttpClientConnectionManager(ioReactor, registry); - } - private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index f54696895a81..46bebebff9c9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; @@ -22,6 +21,7 @@ import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; import java.util.List; @@ -47,7 +47,6 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder public static final ParseField INFERENCE_ID_FIELD = new ParseField("inference_id"); public static final 
ParseField INFERENCE_TEXT_FIELD = new ParseField("inference_text"); public static final ParseField FIELD_FIELD = new ParseField("field"); - public static final ParseField RANK_WINDOW_SIZE_FIELD = new ParseField("rank_window_size"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(TextSimilarityRankBuilder.NAME, args -> { @@ -73,11 +72,8 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder RetrieverBuilder.declareBaseParserFields(TextSimilarityRankBuilder.NAME, PARSER); } - public static TextSimilarityRankRetrieverBuilder fromXContent( - XContentParser parser, - RetrieverParserContext context, - XPackLicenseState licenceState - ) throws IOException { + public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) + throws IOException { if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED) == false) { throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); } @@ -86,7 +82,7 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder "[text_similarity_reranker] retriever composition feature is not supported by all nodes in the cluster" ); } - if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(licenceState) == false) { + if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { throw LicenseUtils.newComplianceException(TextSimilarityRankBuilder.NAME); } return PARSER.apply(parser, context); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 3177474ea8ca..bf94f072b6e0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -43,6 +43,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE; import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE; +import static org.elasticsearch.rest.RestController.ERROR_TRACE_DEFAULT; /** * A version of {@link org.elasticsearch.rest.action.RestChunkedToXContentListener} that reads from a {@link Flow.Publisher} and encodes @@ -161,7 +162,7 @@ public class ServerSentEventsRestActionListener implements ActionListener EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); - public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; - static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope @@ -35,27 +31,11 @@ public class ElasticInferenceServiceSettings { public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); + } - public static final SSLConfigurationSettings ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS = SSLConfigurationSettings.withPrefix( - ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX, - false - ); - - public static final Setting 
ELASTIC_INFERENCE_SERVICE_SSL_ENABLED = Setting.boolSetting( - ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX + "enabled", - true, - Setting.Property.NodeScope - ); - public static List> getSettingsDefinitions() { - ArrayList> settings = new ArrayList<>(); - settings.add(EIS_GATEWAY_URL); - settings.add(ELASTIC_INFERENCE_SERVICE_URL); - settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); - settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); - - return settings; + return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); } @Deprecated diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java index 92ed0b610322..54aa72f3e926 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.inference.InferencePlugin; import org.hamcrest.Matchers; @@ -29,7 +28,7 @@ public class SemanticTextClusterMetadataTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(XPackPlugin.class, InferencePlugin.class); + return List.of(InferencePlugin.class); } public void testCreateIndexWithSemanticTextField() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java deleted file mode 100644 index d1db5b8b12cc..000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; -import org.junit.After; -import org.junit.Before; - -import static org.hamcrest.Matchers.is; - -public class InferencePluginTests extends ESTestCase { - private InferencePlugin inferencePlugin; - - private Boolean elasticInferenceServiceEnabled = true; - - private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { - this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; - } - - @Before - public void setUp() throws Exception { - super.setUp(); - - Settings settings = Settings.builder().build(); - inferencePlugin = new InferencePlugin(settings) { - @Override - protected Boolean isElasticInferenceServiceEnabled() { - return elasticInferenceServiceEnabled; - } - }; - } - - @After - public void tearDown() throws Exception { - super.tearDown(); - } - - public void testElasticInferenceServiceSettingsPresent() throws Exception { - setElasticInferenceServiceEnabled(true); // enable elastic inference service - boolean anyMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .anyMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); - } - - public void testElasticInferenceServiceSettingsNotPresent() throws Exception { - setElasticInferenceServiceEnabled(false); // disable elastic inference service - boolean noneMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java deleted file mode 100644 index 68ea175bd987..000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.action.support.MappedActionFilter; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.inference.InferenceServiceExtension; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; -import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; -import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import static java.util.stream.Collectors.toList; - -public class LocalStateInferencePlugin extends LocalStateCompositeXPackPlugin { - private final InferencePlugin inferencePlugin; - - public LocalStateInferencePlugin(final Settings settings, final Path configPath) throws Exception { - super(settings, configPath); - LocalStateInferencePlugin thisVar = this; - this.inferencePlugin = new InferencePlugin(settings) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - - @Override - protected XPackLicenseState getLicenseState() { - return thisVar.getLicenseState(); - } - - @Override - public List getInferenceServiceFactories() { - return List.of( - TestSparseInferenceServiceExtension.TestInferenceService::new, - TestDenseInferenceServiceExtension.TestInferenceService::new - ); - } - }; - plugins.add(inferencePlugin); - } - - @Override - public List> getRetrievers() { - return this.filterPlugins(SearchPlugin.class).stream().flatMap(p -> p.getRetrievers().stream()).collect(toList()); - } - - @Override - public Map getMappers() { - return inferencePlugin.getMappers(); - } - - @Override - public Collection getMappedActionFilters() { - return inferencePlugin.getMappedActionFilters(); - } - -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index 0f322e64755b..9395ae222e9b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -142,6 +143,20 @@ public final class Utils { latch.await(); } + public static class TestInferencePlugin extends InferencePlugin { + public TestInferencePlugin(Settings settings) { + super(settings); + } + + @Override + public List getInferenceServiceFactories() { + return List.of( + TestSparseInferenceServiceExtension.TestInferenceService::new, + TestDenseInferenceServiceExtension.TestInferenceService::new + ); + } + } + public static Model getInvalidModel(String inferenceEntityId, String serviceName) { var mockConfigs = mock(ModelConfigurations.class); when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java index 24183b21f73e..1f58c4165056 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.inference.mapper; import org.elasticsearch.index.mapper.NonDynamicFieldMapperTests; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; import org.junit.Before; @@ -27,7 +26,7 @@ public class SemanticTextNonDynamicFieldMapperTests extends NonDynamicFieldMappe @Override protected Collection> getPlugins() { - return List.of(LocalStateInferencePlugin.class); + return List.of(Utils.TestInferencePlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java index daed03c198e0..6d6403b69ea1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import java.util.Collection; import java.util.List; @@ -40,7 +40,7 @@ public class TextSimilarityRankMultiNodeTests extends AbstractRerankerIT { @Override protected Collection> pluginsNeeded() { - return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); } public void testQueryPhaseShardThrowingAllShardsFail() throws Exception { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java index ba6924ba0ff3..084a7f3de4a5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java @@ -24,7 +24,8 @@ import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import 
org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.junit.Before; import java.io.IOException; @@ -46,7 +47,7 @@ public class TextSimilarityRankRetrieverTelemetryTests extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(InferencePlugin.class, XPackPlugin.class, TextSimilarityTestPlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index f81f2965c392..a042fca44fdb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.junit.Before; import java.util.Collection; @@ -108,7 +108,7 @@ public class TextSimilarityRankTests extends ESSingleNodeTestCase { @Override protected Collection<Class<? extends Plugin>> getPlugins() { - return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); } @Before diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java new file mode 100644 index 000000000000..b460c6abfeee --- /dev/null +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java @@ -0,0 +1,250 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.migrate.MigratePlugin; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; + +public class CreateIndexFromSourceActionIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(MigratePlugin.class, ReindexPlugin.class, MockTransportService.TestPlugin.class, DataStreamsPlugin.class); + } + + public void testDestIndexCreated() throws Exception { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + + // create from source + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertAcked( + client().execute(CreateIndexFromSourceAction.INSTANCE, new CreateIndexFromSourceAction.Request(sourceIndex, destIndex)) + ); + + try { + indicesAdmin().getIndex(new GetIndexRequest().indices(destIndex)).actionGet(); + } catch (IndexNotFoundException e) { + fail(); + } + } + + public void testSettingsCopiedFromSource() throws Exception { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + // start with a static setting + var numShards = randomIntBetween(1, 10); + var staticSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards).build(); + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings)).get(); + + // update with a dynamic setting + var numReplicas = randomIntBetween(0, 10); + var dynamicSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas).build(); + indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex)).actionGet(); + + // create from source + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertAcked( + client().execute(CreateIndexFromSourceAction.INSTANCE, new CreateIndexFromSourceAction.Request(sourceIndex, destIndex)) + ); + + // assert both static and dynamic settings set on dest index + var settingsResponse =
indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); + assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); + } + + public void testMappingsCopiedFromSource() { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + String mapping = """ + { + "_doc":{ + "dynamic":"strict", + "properties":{ + "foo1":{ + "type":"text" + } + } + } + } + """; + indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mapping)).actionGet(); + + // create from source + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertAcked( + client().execute(CreateIndexFromSourceAction.INSTANCE, new CreateIndexFromSourceAction.Request(sourceIndex, destIndex)) + ); + + var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(sourceIndex, destIndex)).actionGet(); + Map mappings = mappingsResponse.mappings(); + var destMappings = mappings.get(destIndex).sourceAsMap(); + var sourceMappings = mappings.get(sourceIndex).sourceAsMap(); + + assertEquals(sourceMappings, destMappings); + // sanity check specific value from dest mapping + assertEquals("text", XContentMapValues.extractValue("properties.foo1.type", destMappings)); + } + + public void testSettingsOverridden() throws Exception { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var numShardsSource = randomIntBetween(1, 10); + var numReplicasSource = randomIntBetween(0, 10); + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + var sourceSettings = Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShardsSource) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicasSource) + .build(); + indicesAdmin().create(new CreateIndexRequest(sourceIndex, sourceSettings)).get(); + + boolean overrideNumShards = randomBoolean(); + Settings settingsOverride = overrideNumShards + ? Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShardsSource + 1).build() + : Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicasSource + 1).build(); + + // create from source + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertAcked( + client().execute( + CreateIndexFromSourceAction.INSTANCE, + new CreateIndexFromSourceAction.Request(sourceIndex, destIndex, settingsOverride, Map.of()) + ) + ); + + // assert settings overridden + int expectedShards = overrideNumShards ? numShardsSource + 1 : numShardsSource; + int expectedReplicas = overrideNumShards ? 
numReplicasSource : numReplicasSource + 1; + var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + assertEquals(expectedShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); + assertEquals(expectedReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); + } + + public void testSettingsNullOverride() throws Exception { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + var sourceSettings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true).build(); + indicesAdmin().create(new CreateIndexRequest(sourceIndex, sourceSettings)).get(); + + Settings settingsOverride = Settings.builder().putNull(IndexMetadata.SETTING_BLOCKS_WRITE).build(); + + // create from source + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertAcked( + client().execute( + CreateIndexFromSourceAction.INSTANCE, + new CreateIndexFromSourceAction.Request(sourceIndex, destIndex, settingsOverride, Map.of()) + ) + ); + + // assert settings overridden + var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + assertNull(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE)); + } + + public void testMappingsOverridden() { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + String sourceMapping = """ + { + "_doc":{ + "dynamic":"strict", + "properties":{ + "foo1":{ + "type":"text" + }, + "foo2":{ + "type":"boolean" + } + } + } + } + """; + indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(sourceMapping)).actionGet(); + + String mappingOverrideStr = """ + { + "_doc":{ + "dynamic":"strict", + "properties":{ + "foo1":{ + "type":"integer" + }, + "foo3": { + "type":"keyword" + } + } + } + } + """; + var mappingOverride = XContentHelper.convertToMap(JsonXContent.jsonXContent, mappingOverrideStr, false); + + // create from source + var destIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertAcked( + client().execute( + CreateIndexFromSourceAction.INSTANCE, + new CreateIndexFromSourceAction.Request(sourceIndex, destIndex, Settings.EMPTY, mappingOverride) + ) + ); + + var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(destIndex)).actionGet(); + Map mappings = mappingsResponse.mappings(); + var destMappings = mappings.get(destIndex).sourceAsMap(); + + String expectedMappingStr = """ + { + "dynamic":"strict", + "properties":{ + "foo1":{ + "type":"integer" + }, + "foo2": { + "type":"boolean" + }, + "foo3": { + "type":"keyword" + } + } + } + """; + var expectedMapping = XContentHelper.convertToMap(JsonXContent.jsonXContent, expectedMappingStr, false); + assertEquals(expectedMapping, destMappings); + } +} diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java similarity index 98% rename from x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexIT.java rename to 
x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index e492f035da86..0ca58ecf0f0d 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -53,7 +53,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; import static org.hamcrest.Matchers.equalTo; -public class ReindexDatastreamIndexIT extends ESIntegTestCase { +public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { private static final String MAPPING = """ { @@ -126,12 +126,14 @@ public class ReindexDatastreamIndexIT extends ESIntegTestCase { assertHitCount(prepareSearch(response.getDestIndex()).setSize(0), numDocs); } - public void testSetSourceToReadOnly() throws Exception { + public void testSetSourceToBlockWrites() throws Exception { assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + var settings = randomBoolean() ? Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true).build() : Settings.EMPTY; + // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).get(); // call reindex client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index f42d05727b9f..d9dffdefafa2 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -34,6 +34,8 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction; import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamTransportAction; +import org.elasticsearch.xpack.migrate.action.CreateIndexFromSourceAction; +import org.elasticsearch.xpack.migrate.action.CreateIndexFromSourceTransportAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusTransportAction; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction; @@ -87,6 +89,7 @@ public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTas actions.add(new ActionHandler<>(GetMigrationReindexStatusAction.INSTANCE, GetMigrationReindexStatusTransportAction.class)); actions.add(new ActionHandler<>(CancelReindexDataStreamAction.INSTANCE, CancelReindexDataStreamTransportAction.class)); actions.add(new ActionHandler<>(ReindexDataStreamIndexAction.INSTANCE, ReindexDataStreamIndexTransportAction.class)); + actions.add(new ActionHandler<>(CreateIndexFromSourceAction.INSTANCE, CreateIndexFromSourceTransportAction.class)); } return actions; } diff --git 
a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java new file mode 100644 index 000000000000..d67eaee3d251 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class CreateIndexFromSourceAction extends ActionType { + + public static final String NAME = "indices:admin/index/create_from_source"; + + public static final ActionType INSTANCE = new CreateIndexFromSourceAction(); + + private CreateIndexFromSourceAction() { + super(NAME); + } + + public static class Request extends ActionRequest implements IndicesRequest { + + private final String sourceIndex; + private final String destIndex; + private final Settings settingsOverride; + private final Map mappingsOverride; + + public Request(String sourceIndex, String destIndex) { + this(sourceIndex, destIndex, Settings.EMPTY, Map.of()); + } + + public Request(String sourceIndex, String destIndex, Settings settingsOverride, Map mappingsOverride) { + Objects.requireNonNull(mappingsOverride); + this.sourceIndex = sourceIndex; + this.destIndex = destIndex; + this.settingsOverride = settingsOverride; + this.mappingsOverride = mappingsOverride; + } + + @SuppressWarnings("unchecked") + public Request(StreamInput in) throws IOException { + super(in); + this.sourceIndex = in.readString(); + this.destIndex = in.readString(); + this.settingsOverride = Settings.readSettingsFromStream(in); + this.mappingsOverride = (Map) in.readGenericValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(sourceIndex); + out.writeString(destIndex); + settingsOverride.writeTo(out); + out.writeGenericValue(mappingsOverride); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String getSourceIndex() { + return sourceIndex; + } + + public String getDestIndex() { + return destIndex; + } + + public Settings getSettingsOverride() { + return settingsOverride; + } + + public Map getMappingsOverride() { + return mappingsOverride; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(sourceIndex, request.sourceIndex) + && Objects.equals(destIndex, request.destIndex) + && Objects.equals(settingsOverride, request.settingsOverride) + && Objects.equals(mappingsOverride, 
request.mappingsOverride); + } + + @Override + public int hashCode() { + return Objects.hash(sourceIndex, destIndex, settingsOverride, mappingsOverride); + } + + @Override + public String[] indices() { + return new String[] { sourceIndex }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java new file mode 100644 index 000000000000..968b2220628a --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.migrate.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public class CreateIndexFromSourceTransportAction extends HandledTransportAction< + CreateIndexFromSourceAction.Request, + AcknowledgedResponse> { + private static final Logger logger = LogManager.getLogger(CreateIndexFromSourceTransportAction.class); + + private final ClusterService clusterService; + private final Client client; + private final IndexScopedSettings indexScopedSettings; + + @Inject + public CreateIndexFromSourceTransportAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client, + IndexScopedSettings indexScopedSettings + ) { + super( + CreateIndexFromSourceAction.NAME, + false, + transportService, + actionFilters, + CreateIndexFromSourceAction.Request::new, + transportService.getThreadPool().executor(ThreadPool.Names.GENERIC) + ); + this.clusterService = clusterService; + this.client = client; + this.indexScopedSettings = indexScopedSettings; + } + + @Override + protected void doExecute(Task task, 
CreateIndexFromSourceAction.Request request, ActionListener listener) { + + IndexMetadata sourceIndex = clusterService.state().getMetadata().index(request.getSourceIndex()); + + if (sourceIndex == null) { + listener.onFailure(new IndexNotFoundException(request.getSourceIndex())); + return; + } + + logger.debug("Creating destination index [{}] for source index [{}]", request.getDestIndex(), request.getSourceIndex()); + + Settings settings = Settings.builder() + // add source settings + .put(filterSettings(sourceIndex)) + // add override settings from request + .put(request.getSettingsOverride()) + .build(); + + Map mergeMappings; + try { + mergeMappings = mergeMappings(sourceIndex.mapping(), request.getMappingsOverride()); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + var createIndexRequest = new CreateIndexRequest(request.getDestIndex()).settings(settings); + if (mergeMappings.isEmpty() == false) { + createIndexRequest.mapping(mergeMappings); + } + + client.admin().indices().create(createIndexRequest, listener.map(response -> response)); + } + + private static Map toMap(@Nullable MappingMetadata sourceMapping) { + return Optional.ofNullable(sourceMapping) + .map(MappingMetadata::source) + .map(CompressedXContent::uncompressed) + .map(s -> XContentHelper.convertToMap(s, true, XContentType.JSON).v2()) + .orElse(Map.of()); + } + + private static Map mergeMappings(@Nullable MappingMetadata sourceMapping, Map mappingAddition) + throws IOException { + Map combinedMappingMap = new HashMap<>(toMap(sourceMapping)); + XContentHelper.update(combinedMappingMap, mappingAddition, true); + return combinedMappingMap; + } + + // Filter source index settings to subset of settings that can be included during reindex. + // Similar to the settings filtering done when reindexing for upgrade in Kibana + // https://github.com/elastic/kibana/blob/8a8363f02cc990732eb9cbb60cd388643a336bed/x-pack + // /plugins/upgrade_assistant/server/lib/reindexing/index_settings.ts#L155 + private Settings filterSettings(IndexMetadata sourceIndex) { + return MetadataCreateIndexService.copySettingsFromSource(false, sourceIndex.getSettings(), indexScopedSettings, Settings.builder()) + .build(); + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java index 00c81fdc9fbc..2e3fd1b76ed3 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java @@ -41,10 +41,6 @@ public class ReindexDataStreamIndexAction extends ActionType { private static final Logger logger = LogManager.getLogger(ReindexDataStreamIndexTransportAction.class); - - private static final Set SETTINGS_TO_ADD_BACK = Set.of(IndexMetadata.SETTING_BLOCKS_WRITE, IndexMetadata.SETTING_READ_ONLY); - private static final IndicesOptions IGNORE_MISSING_OPTIONS = IndicesOptions.fromOptions(true, true, false, false); private final ClusterService clusterService; private final Client client; - private final IndexScopedSettings indexScopedSettings; @Inject public ReindexDataStreamIndexTransportAction( TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, - Client client, - IndexScopedSettings indexScopedSettings + Client client ) { super( 
ReindexDataStreamIndexAction.NAME, @@ -70,7 +65,6 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio ); this.clusterService = clusterService; this.client = client; - this.indexScopedSettings = indexScopedSettings; } @Override @@ -97,20 +91,19 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio SubscribableListener.newForked(l -> setBlockWrites(sourceIndexName, l)) .andThen(l -> deleteDestIfExists(destIndexName, l)) - .andThen(l -> createIndex(sourceIndex, destIndexName, l)) + .andThen(l -> createIndex(sourceIndex, destIndexName, l)) .andThen(l -> reindex(sourceIndexName, destIndexName, l)) - .andThen(l -> updateSettings(settingsBefore, destIndexName, l)) + .andThen(l -> addBlockIfFromSource(WRITE, settingsBefore, destIndexName, l)) + .andThen(l -> addBlockIfFromSource(READ_ONLY, settingsBefore, destIndexName, l)) .andThenApply(ignored -> new ReindexDataStreamIndexAction.Response(destIndexName)) .addListener(listener); } private void setBlockWrites(String sourceIndexName, ActionListener listener) { logger.debug("Setting write block on source index [{}]", sourceIndexName); - final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), true).build(); - var updateSettingsRequest = new UpdateSettingsRequest(readOnlySettings, sourceIndexName); - client.admin().indices().updateSettings(updateSettingsRequest, new ActionListener<>() { + addBlockToIndex(WRITE, sourceIndexName, new ActionListener<>() { @Override - public void onResponse(AcknowledgedResponse response) { + public void onResponse(AddIndexBlockResponse response) { if (response.isAcknowledged()) { listener.onResponse(null); } else { @@ -122,7 +115,7 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio @Override public void onFailure(Exception e) { if (e instanceof ClusterBlockException || e.getCause() instanceof ClusterBlockException) { - // It's fine if read-only is already set + // It's fine if block-writes is already set listener.onResponse(null); } else { listener.onFailure(e); @@ -139,18 +132,23 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio client.admin().indices().delete(deleteIndexRequest, failIfNotAcknowledged(listener, errorMessage)); } - private void createIndex(IndexMetadata sourceIndex, String destIndexName, ActionListener listener) { + private void createIndex(IndexMetadata sourceIndex, String destIndexName, ActionListener listener) { logger.debug("Creating destination index [{}] for source index [{}]", destIndexName, sourceIndex.getIndex().getName()); - // Create destination with subset of source index settings that can be added before reindex - var settings = getPreSettings(sourceIndex); + // override read-only settings if they exist + var removeReadOnlyOverride = Settings.builder() + .putNull(IndexMetadata.SETTING_READ_ONLY) + .putNull(IndexMetadata.SETTING_BLOCKS_WRITE) + .build(); - var sourceMapping = sourceIndex.mapping(); - Map mapping = sourceMapping != null ? 
sourceMapping.rawSourceAsMap() : Map.of(); - var createIndexRequest = new CreateIndexRequest(destIndexName).settings(settings).mapping(mapping); - - var errorMessage = String.format(Locale.ROOT, "Could not create index [%s]", destIndexName); - client.admin().indices().create(createIndexRequest, failIfNotAcknowledged(listener, errorMessage)); + var request = new CreateIndexFromSourceAction.Request( + sourceIndex.getIndex().getName(), + destIndexName, + removeReadOnlyOverride, + Map.of() + ); + var errorMessage = String.format(Locale.ROOT, "Could not create index [%s]", request.getDestIndex()); + client.execute(CreateIndexFromSourceAction.INSTANCE, request, failIfNotAcknowledged(listener, errorMessage)); } private void reindex(String sourceIndexName, String destIndexName, ActionListener listener) { @@ -163,35 +161,18 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio client.execute(ReindexAction.INSTANCE, reindexRequest, listener); } - private void updateSettings(Settings settingsBefore, String destIndexName, ActionListener listener) { - logger.debug("Adding settings from source index that could not be added before reindex"); - - Settings postSettings = getPostSettings(settingsBefore); - if (postSettings.isEmpty()) { + private void addBlockIfFromSource( + IndexMetadata.APIBlock block, + Settings settingsBefore, + String destIndexName, + ActionListener listener + ) { + if (settingsBefore.getAsBoolean(block.settingName(), false)) { + var errorMessage = String.format(Locale.ROOT, "Add [%s] block to index [%s] was not acknowledged", block.name(), destIndexName); + addBlockToIndex(block, destIndexName, failIfNotAcknowledged(listener, errorMessage)); + } else { listener.onResponse(null); - return; } - - var updateSettingsRequest = new UpdateSettingsRequest(postSettings, destIndexName); - var errorMessage = String.format(Locale.ROOT, "Could not update settings on index [%s]", destIndexName); - client.admin().indices().updateSettings(updateSettingsRequest, failIfNotAcknowledged(listener, errorMessage)); - } - - // Filter source index settings to subset of settings that can be included during reindex. 
- // Similar to the settings filtering done when reindexing for upgrade in Kibana - // https://github.com/elastic/kibana/blob/8a8363f02cc990732eb9cbb60cd388643a336bed/x-pack - // /plugins/upgrade_assistant/server/lib/reindexing/index_settings.ts#L155 - private Settings getPreSettings(IndexMetadata sourceIndex) { - // filter settings that will be added back later - var filtered = sourceIndex.getSettings().filter(settingName -> SETTINGS_TO_ADD_BACK.contains(settingName) == false); - - // filter private and non-copyable settings - var builder = MetadataCreateIndexService.copySettingsFromSource(false, filtered, indexScopedSettings, Settings.builder()); - return builder.build(); - } - - private Settings getPostSettings(Settings settingsBefore) { - return settingsBefore.filter(SETTINGS_TO_ADD_BACK::contains); } public static String generateDestIndexName(String sourceIndex) { @@ -202,11 +183,16 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio ActionListener listener, String errorMessage ) { - return listener.delegateFailureAndWrap((delegate, response) -> { + return listener.delegateFailure((delegate, response) -> { if (response.isAcknowledged()) { delegate.onResponse(null); + } else { + delegate.onFailure(new ElasticsearchException(errorMessage)); } - throw new ElasticsearchException(errorMessage); }); } + + private void addBlockToIndex(IndexMetadata.APIBlock block, String index, ActionListener listener) { + client.admin().indices().execute(TransportAddIndexBlockAction.TYPE, new AddIndexBlockRequest(block, index), listener); + } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 2928f4b91e94..9c4a41beac86 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -9,8 +9,15 @@ package org.elasticsearch.xpack.migrate.task; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.rollover.RolloverAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; +import org.elasticsearch.action.support.CountDownActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -20,9 +27,13 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.migrate.action.ReindexDataStreamIndexAction; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.NoSuchElementException; import static 
org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.getOldIndexVersionPredicate; @@ -72,22 +83,111 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec reindexClient.execute(GetDataStreamAction.INSTANCE, request, ActionListener.wrap(response -> { List dataStreamInfos = response.getDataStreams(); if (dataStreamInfos.size() == 1) { - List indices = dataStreamInfos.getFirst().getDataStream().getIndices(); - List indicesToBeReindexed = indices.stream() - .filter(getOldIndexVersionPredicate(clusterService.state().metadata().getProject())) - .toList(); - reindexDataStreamTask.setPendingIndicesCount(indicesToBeReindexed.size()); - for (Index index : indicesToBeReindexed) { - reindexDataStreamTask.incrementInProgressIndicesCount(index.getName()); - // TODO This is just a placeholder. This is where the real data stream reindex logic will go - reindexDataStreamTask.reindexSucceeded(index.getName()); + DataStream dataStream = dataStreamInfos.getFirst().getDataStream(); + if (getOldIndexVersionPredicate(clusterService.state().metadata().getProject()).test(dataStream.getWriteIndex())) { + reindexClient.execute( + RolloverAction.INSTANCE, + new RolloverRequest(sourceDataStream, null), + ActionListener.wrap( + rolloverResponse -> reindexIndices(dataStream, reindexDataStreamTask, reindexClient, sourceDataStream), + e -> completeFailedPersistentTask(reindexDataStreamTask, e) + ) + ); + } else { + reindexIndices(dataStream, reindexDataStreamTask, reindexClient, sourceDataStream); } - - completeSuccessfulPersistentTask(reindexDataStreamTask); } else { completeFailedPersistentTask(reindexDataStreamTask, new ElasticsearchException("data stream does not exist")); } - }, reindexDataStreamTask::markAsFailed)); + }, exception -> completeFailedPersistentTask(reindexDataStreamTask, exception))); + } + + private void reindexIndices( + DataStream dataStream, + ReindexDataStreamTask reindexDataStreamTask, + ExecuteWithHeadersClient reindexClient, + String sourceDataStream + ) { + List indices = dataStream.getIndices(); + List indicesToBeReindexed = indices.stream() + .filter(getOldIndexVersionPredicate(clusterService.state().metadata().getProject())) + .toList(); + reindexDataStreamTask.setPendingIndicesCount(indicesToBeReindexed.size()); + // The CountDownActionListener is 1 more than the number of indices so that the count is not 0 if we have no indices + CountDownActionListener listener = new CountDownActionListener(indicesToBeReindexed.size() + 1, ActionListener.wrap(response1 -> { + completeSuccessfulPersistentTask(reindexDataStreamTask); + }, exception -> { completeFailedPersistentTask(reindexDataStreamTask, exception); })); + List indicesRemaining = Collections.synchronizedList(new ArrayList<>(indicesToBeReindexed)); + final int maxConcurrentIndices = 1; + for (int i = 0; i < maxConcurrentIndices; i++) { + maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, reindexClient, sourceDataStream, listener); + } + // This takes care of the additional latch count referenced above: + listener.onResponse(null); + } + + private void maybeProcessNextIndex( + List indicesRemaining, + ReindexDataStreamTask reindexDataStreamTask, + ExecuteWithHeadersClient reindexClient, + String sourceDataStream, + CountDownActionListener listener + ) { + if (indicesRemaining.isEmpty()) { + return; + } + Index index; + try { + index = indicesRemaining.removeFirst(); + } catch (NoSuchElementException e) { + return; + } + reindexDataStreamTask.incrementInProgressIndicesCount(index.getName()); 
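+ // Worker-style loop: when the reindex request below completes, its success handler calls
+ // maybeProcessNextIndex again to pull the next entry off the shared indicesRemaining queue,
+ // so at most maxConcurrentIndices requests are in flight at any time. Every index that is
+ // actually processed, whether it succeeds or fails, counts the CountDownActionListener down
+ // exactly once.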
+ reindexClient.execute( + ReindexDataStreamIndexAction.INSTANCE, + new ReindexDataStreamIndexAction.Request(index.getName()), + ActionListener.wrap(response1 -> { + updateDataStream(sourceDataStream, index.getName(), response1.getDestIndex(), ActionListener.wrap(unused -> { + reindexDataStreamTask.reindexSucceeded(index.getName()); + listener.onResponse(null); + maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, reindexClient, sourceDataStream, listener); + }, exception -> { + reindexDataStreamTask.reindexFailed(index.getName(), exception); + listener.onResponse(null); + }), reindexClient); + }, exception -> { + reindexDataStreamTask.reindexFailed(index.getName(), exception); + listener.onResponse(null); + }) + ); + } + + private void updateDataStream( + String dataStream, + String oldIndex, + String newIndex, + ActionListener listener, + ExecuteWithHeadersClient reindexClient + ) { + reindexClient.execute( + ModifyDataStreamsAction.INSTANCE, + new ModifyDataStreamsAction.Request( + TimeValue.MAX_VALUE, + TimeValue.MAX_VALUE, + List.of(DataStreamAction.removeBackingIndex(dataStream, oldIndex), DataStreamAction.addBackingIndex(dataStream, newIndex)) + ), + new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse response) { + listener.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + } + ); } private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) { @@ -106,6 +206,9 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec PersistentTasksCustomMetadata.PersistentTask persistentTask = persistentTasksCustomMetadata.getTask( reindexDataStreamTask.getPersistentTaskId() ); + if (persistentTask == null) { + return TimeValue.timeValueMillis(0); + } PersistentTaskState state = persistentTask.getState(); final long completionTime; if (state == null) { diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamEnrichedStatusTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamEnrichedStatusTests.java index acd8cd1a6add..993db1096aac 100644 --- a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamEnrichedStatusTests.java +++ b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamEnrichedStatusTests.java @@ -74,7 +74,7 @@ public class ReindexDataStreamEnrichedStatusTests extends AbstractWireSerializin } private Set randomSet(int minSize) { - return randomSet(minSize, 100, () -> randomAlphaOfLength(50)); + return randomSet(minSize, Math.max(minSize, 100), () -> randomAlphaOfLength(50)); } private List> randomErrorList() { diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamStatusTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamStatusTests.java index 47e2d02bee3b..ad47eb6a23cd 100644 --- a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamStatusTests.java +++ b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamStatusTests.java @@ -70,7 +70,7 @@ public class ReindexDataStreamStatusTests extends AbstractWireSerializingTestCas } private Set randomSet(int minSize) { - return randomSet(minSize, 100, () -> randomAlphaOfLength(50)); + return randomSet(minSize, Math.max(minSize, 100), () -> randomAlphaOfLength(50)); } 
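// The Math.max guard keeps this helper within the randomSet(min, max, supplier) contract,
// which requires min <= max: against the fixed upper bound of 100, a caller passing
// minSize = 150 would have requested between 150 and at most 100 elements, an impossible
// range, whereas randomSet(150, Math.max(150, 100), ...) degenerates to exactly 150 elements.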
private List> randomErrorList() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index ff1a1d19779d..bab012afc310 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -27,7 +27,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.security.Security; @@ -87,12 +86,6 @@ public class LocalStateMachineLearning extends LocalStateCompositeXPackPlugin { } }); plugins.add(new MockedRollupPlugin()); - plugins.add(new InferencePlugin(settings) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - }); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index 372b07ed8c95..8dce8a62eac2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -82,6 +82,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; @@ -160,7 +161,8 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase { DataStreamsPlugin.class, // To remove errors from parsing build in templates that contain scaled_float MapperExtrasPlugin.class, - Wildcard.class + Wildcard.class, + InferencePlugin.class ); } diff --git a/x-pack/plugin/rank-rrf/build.gradle b/x-pack/plugin/rank-rrf/build.gradle index 2c3f217243aa..b2d470c6618e 100644 --- a/x-pack/plugin/rank-rrf/build.gradle +++ b/x-pack/plugin/rank-rrf/build.gradle @@ -22,6 +22,7 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) testImplementation(testArtifact(project(':server'))) + clusterModules project(':modules:mapper-extras') clusterModules project(xpackModule('rank-rrf')) clusterModules project(xpackModule('inference')) clusterModules project(':modules:lang-painless') diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index f1171b74f746..c1447623dd5b 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -48,7 +48,6 @@ public final class RRFRetrieverBuilder extends CompoundRetrieverBuilder(buildBindRequest(config.settings(), false)); + this.bindRequest = 
new AtomicReference<>(buildBindRequest(config.settings())); this.useConnectionPool = config.getSetting(poolingEnabled); if (useConnectionPool) { this.connectionPool = createConnectionPool(config, serverSet, timeout, logger, bindRequest.get(), healthCheckDNSupplier); @@ -93,11 +92,9 @@ abstract class PoolingSessionFactory extends SessionFactory implements Releasabl * will perform a setting consistency validation and throw {@link SettingsException} in case of violation. * Due to legacy reasons and BWC, when {@code reloadRequest} is set to {@code false}, this method will only log a warning message. * - * @param reloadRequest {@code true} if this method is called during reloading of secure settings, - * {@code false} if it is called during bootstrapping. * @return A new {@link SimpleBindRequest} that contains configured bind DN and password. */ - private SimpleBindRequest buildBindRequest(Settings settings, boolean reloadRequest) { + private SimpleBindRequest buildBindRequest(Settings settings) { final byte[] bindPassword; final Setting<SecureString> legacyPasswordSetting = config.getConcreteSetting(LEGACY_BIND_PASSWORD); final Setting<SecureString> securePasswordSetting = config.getConcreteSetting(SECURE_BIND_PASSWORD); @@ -119,27 +116,13 @@ abstract class PoolingSessionFactory extends SessionFactory implements Releasabl return new SimpleBindRequest(); } else { if (bindPassword == null) { - if (reloadRequest) { - throw new SettingsException( - "[{}] is set but no bind password is specified. Without a corresponding bind password, " - + "all {} realm authentication will fail. Specify a bind password via [{}].", - RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN), - config.type(), - RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD) - ); - } else { - deprecationLogger.critical( - DeprecationCategory.SECURITY, - "bind_dn_set_without_password", - "[{}] is set but no bind password is specified. Without a corresponding bind password, " - + "all {} realm authentication will fail. Specify a bind password via [{}] or [{}]. " - + "In the next major release, nodes with incomplete bind credentials will fail to start.", - RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN), - config.type(), - RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD), - RealmSettings.getFullSettingKey(config, LEGACY_BIND_PASSWORD) - ); - } + throw new SettingsException( + "[{}] is set but no bind password is specified. Without a corresponding bind password, " + + "all {} realm authentication will fail.
Specify a bind password via [{}].", + RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN), + config.type(), + RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD) + ); } return new SimpleBindRequest(this.bindDn, bindPassword); } @@ -148,7 +131,7 @@ abstract class PoolingSessionFactory extends SessionFactory implements Releasabl @Override public void reload(Settings settings) { final SimpleBindRequest oldRequest = bindRequest.get(); - final SimpleBindRequest newRequest = buildBindRequest(settings, true); + final SimpleBindRequest newRequest = buildBindRequest(settings); if (bindRequestEquals(newRequest, oldRequest) == false) { if (bindRequest.compareAndSet(oldRequest, newRequest)) { if (connectionPool != null) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index acb4359b3732..5482d7711031 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.TestEnvironment; @@ -45,7 +46,6 @@ import java.util.List; import java.util.Locale; import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; -import static org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings.BIND_DN; import static org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD; import static org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings.SECURE_BIND_PASSWORD; import static org.hamcrest.Matchers.containsString; @@ -199,7 +199,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertDeprecationWarnings(config.identifier(), useAttribute, useLegacyBindPassword); } - public void testConstructorLogsErrorIfBindDnSetWithoutPassword() throws Exception { + public void testConstructorThrowsIfBindDnSetWithoutPassword() throws Exception { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "cn=William Bush,ou=people,o=sevenSeas"; @@ -216,19 +216,18 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { new ThreadContext(globalSettings) ); - try (LdapUserSearchSessionFactory ignored = getLdapUserSearchSessionFactory(config, sslService, threadPool)) { - assertCriticalWarnings( - String.format( - Locale.ROOT, - "[%s] is set but no bind password is specified. Without a corresponding bind password, " - + "all ldap realm authentication will fail. Specify a bind password via [%s] or [%s]. 
" - + "In the next major release, nodes with incomplete bind credentials will fail to start.", - RealmSettings.getFullSettingKey(config, BIND_DN), - RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD), - RealmSettings.getFullSettingKey(config, LEGACY_BIND_PASSWORD) - ) - ); - } + Exception ex = expectThrows(SettingsException.class, () -> getLdapUserSearchSessionFactory(config, sslService, threadPool)); + assertEquals( + String.format( + Locale.ROOT, + "[%s] is set but no bind password is specified. Without a corresponding bind password, " + + "all %s realm authentication will fail. Specify a bind password via [%s].", + RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN), + config.type(), + RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD) + ), + ex.getMessage() + ); } public void testConstructorThrowsIfBothLegacyAndSecureBindPasswordSet() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumerTests.java index 0a078a8ead20..ddb0c46bdad9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumerTests.java @@ -353,7 +353,7 @@ public final class DeprecationRoleDescriptorConsumerTests extends ESTestCase { private void addIndex(ProjectMetadata.Builder projectBuilder, String index, String... aliases) { final IndexMetadata.Builder indexprojectBuilder = IndexMetadata.builder(index) - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random()))) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion())) .numberOfShards(1) .numberOfReplicas(1); for (final String alias : aliases) { diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index 81eb82a52238..d4615260952d 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ -10,7 +10,8 @@ apply plugin: 'elasticsearch.rest-resources' dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) - javaRestTestImplementation project(path: ':test:fixtures:hdfs-fixture', configuration:"shadow") + javaRestTestCompileOnly project(path: ':test:fixtures:hdfs-fixture') + javaRestTestRuntimeOnly project(path: ':test:fixtures:hdfs-fixture', configuration:"shadow") javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" javaRestTestImplementation "org.slf4j:slf4j-simple:${versions.slf4j}" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml new file mode 100644 index 000000000000..fdb6746bbeed --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -0,0 +1,78 @@ +--- +setup: + - requires: + test_runner_features: [capabilities, contains] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [join_lookup_v8] + reason: 
"uses LOOKUP JOIN" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 5 + mappings: + properties: + key: + type: long + color: + type: keyword + - do: + indices.create: + index: test-lookup + body: + settings: + index: + mode: lookup + mappings: + properties: + key: + type: long + color: + type: keyword + - do: + bulk: + index: "test" + refresh: true + body: + - { "index": { } } + - { "key": 1, "color": "red" } + - { "index": { } } + - { "key": 2, "color": "blue" } + - do: + bulk: + index: "test-lookup" + refresh: true + body: + - { "index": { } } + - { "key": 1, "color": "cyan" } + - { "index": { } } + - { "key": 2, "color": "yellow" } + +--- +basic: + - do: + esql.query: + body: + query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup` ON key | LIMIT 3' + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [1, "cyan"]} + - match: {values.1: [2, "yellow"]} + +--- +non-lookup index: + - do: + esql.query: + body: + query: 'FROM test-lookup | SORT key | LOOKUP JOIN `test` ON key | LIMIT 3' + catch: "bad_request" + + - match: { error.type: "verification_exception" } + - contains: { error.reason: "Found 1 problem\nline 1:43: invalid [test] resolution in lookup mode to an index in [standard] mode" } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 40ad5bba29ba..58556dd420ca 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -11,15 +11,23 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Strings; +import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matchers; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.time.Instant; import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.upgrades.IndexingIT.assertCount; +import static org.hamcrest.Matchers.equalTo; public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { @@ -164,4 +172,152 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { } } + public void testUpgradeDataStream() throws Exception { + String dataStreamName = "reindex_test_data_stream"; + int numRollovers = 5; + if (CLUSTER_TYPE == ClusterType.OLD) { + createAndRolloverDataStream(dataStreamName, numRollovers); + } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { + upgradeDataStream(dataStreamName, numRollovers); + } + } + + private static void createAndRolloverDataStream(String dataStreamName, int numRollovers) throws IOException { + // We want to create a data stream and roll it over several times so that we have several indices to upgrade + final String template = """ + { + "settings":{ + "index": { + "mode": "time_series" + } + }, + "mappings":{ + "dynamic_templates": [ + { + "labels": { + "path_match": 
"pod.labels.*", + "mapping": { + "type": "keyword", + "time_series_dimension": true + } + } + } + ], + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "k8s": { + "properties": { + "pod": { + "properties": { + "name": { + "type": "keyword" + }, + "network": { + "properties": { + "tx": { + "type": "long" + }, + "rx": { + "type": "long" + } + } + } + } + } + } + } + } + } + } + """; + final String indexTemplate = """ + { + "index_patterns": ["$PATTERN"], + "template": $TEMPLATE, + "data_stream": { + } + }"""; + var putIndexTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_template"); + putIndexTemplateRequest.setJsonEntity(indexTemplate.replace("$TEMPLATE", template).replace("$PATTERN", dataStreamName)); + assertOK(client().performRequest(putIndexTemplateRequest)); + bulkLoadData(dataStreamName); + for (int i = 0; i < numRollovers; i++) { + rollover(dataStreamName); + bulkLoadData(dataStreamName); + } + } + + private void upgradeDataStream(String dataStreamName, int numRollovers) throws Exception { + Request reindexRequest = new Request("POST", "/_migration/reindex"); + reindexRequest.setJsonEntity(Strings.format(""" + { + "mode": "upgrade", + "source": { + "index": "%s" + } + }""", dataStreamName)); + Response reindexResponse = client().performRequest(reindexRequest); + assertOK(reindexResponse); + assertBusy(() -> { + Request statusRequest = new Request("GET", "_migration/reindex/" + dataStreamName + "/_status"); + Response statusResponse = client().performRequest(statusRequest); + Map statusResponseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + statusResponse.getEntity().getContent(), + false + ); + assertOK(statusResponse); + assertThat(statusResponseMap.get("complete"), equalTo(true)); + if (isOriginalClusterCurrent()) { + // If the original cluster was the same as this one, we don't want any indices reindexed: + assertThat(statusResponseMap.get("successes"), equalTo(0)); + } else { + assertThat(statusResponseMap.get("successes"), equalTo(numRollovers + 1)); + } + }, 60, TimeUnit.SECONDS); + Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel"); + Response cancelResponse = client().performRequest(cancelRequest); + assertOK(cancelResponse); + } + + private static void bulkLoadData(String dataStreamName) throws IOException { + final String bulk = """ + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "cat", "network": {"tx": 2001818691, "rx": 802133794}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "hamster", "network": {"tx": 2005177954, "rx": 801479970}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "cow", "network": {"tx": 2006223737, "rx": 802337279}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "rat", "network": {"tx": 2012916202, "rx": 803685721}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "dog", "network": {"tx": 1434521831, "rx": 530575198}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "tiger", "network": {"tx": 1434577921, "rx": 530600088}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "lion", "network": {"tx": 1434587694, "rx": 530604797}}}} + {"create": {}} + {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": 
{"name": "elephant", "network": {"tx": 1434595272, "rx": 530605511}}}} + """; + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + bulkRequest.setJsonEntity(bulk.replace("$now", formatInstant(Instant.now()))); + var response = client().performRequest(bulkRequest); + assertOK(response); + } + + static String formatInstant(Instant instant) { + return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); + } + + private static void rollover(String dataStreamName) throws IOException { + Request rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover"); + Response rolloverResponse = client().performRequest(rolloverRequest); + assertOK(rolloverResponse); + } }