diff --git a/.buildkite/scripts/get-latest-test-mutes.sh b/.buildkite/scripts/get-latest-test-mutes.sh index 5721e29f1b77..1dafcebec24b 100755 --- a/.buildkite/scripts/get-latest-test-mutes.sh +++ b/.buildkite/scripts/get-latest-test-mutes.sh @@ -1,6 +1,6 @@ #!/bin/bash -if [[ ! "${BUILDKITE_PULL_REQUEST:-}" || "${BUILDKITE_AGENT_META_DATA_PROVIDER:-}" == "k8s" ]]; then +if [[ "${BUILDKITE_PULL_REQUEST:-false}" == "false" || "${BUILDKITE_AGENT_META_DATA_PROVIDER:-}" == "k8s" ]]; then exit 0 fi diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 25cfae6c9803..632bae64389a 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.internal.test.TestUtil +import org.elasticsearch.gradle.OS /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one @@ -77,7 +78,7 @@ tasks.register("copyPainless", Copy) { } tasks.named("run").configure { - executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + (OS.current() == OS.WINDOWS ? 
'.exe' : '') args << "-Dplugins.dir=${buildDir}/plugins" << "-Dtests.index=${buildDir}/index" dependsOn "copyExpression", "copyPainless", configurations.nativeLib systemProperty 'es.nativelibs.path', TestUtil.getTestLibraryPath(file("../libs/native/libraries/build/platform/").toString()) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 9aab4a3e3210..d3259b960471 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -27,6 +27,7 @@ import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -71,12 +72,11 @@ public class EvalBenchmark { BigArrays.NON_RECYCLING_INSTANCE ); + private static final FoldContext FOLD_CONTEXT = FoldContext.small(); + private static final int BLOCK_LENGTH = 8 * 1024; - static final DriverContext driverContext = new DriverContext( - BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE) - ); + static final DriverContext driverContext = new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, blockFactory); static { // Smoke test all the expected values and force loading subclasses more like prod @@ -114,11 +114,12 @@ public class EvalBenchmark { return switch (operation) { case "abs" -> { FieldAttribute longField = longField(); - yield EvalMapper.toEvaluator(new 
Abs(Source.EMPTY, longField), layout(longField)).get(driverContext); + yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Abs(Source.EMPTY, longField), layout(longField)).get(driverContext); } case "add" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( + FOLD_CONTEXT, new Add(Source.EMPTY, longField, new Literal(Source.EMPTY, 1L, DataType.LONG)), layout(longField) ).get(driverContext); @@ -126,6 +127,7 @@ public class EvalBenchmark { case "add_double" -> { FieldAttribute doubleField = doubleField(); yield EvalMapper.toEvaluator( + FOLD_CONTEXT, new Add(Source.EMPTY, doubleField, new Literal(Source.EMPTY, 1D, DataType.DOUBLE)), layout(doubleField) ).get(driverContext); @@ -140,7 +142,8 @@ public class EvalBenchmark { lhs = new Add(Source.EMPTY, lhs, new Literal(Source.EMPTY, 1L, DataType.LONG)); rhs = new Add(Source.EMPTY, rhs, new Literal(Source.EMPTY, 1L, DataType.LONG)); } - yield EvalMapper.toEvaluator(new Case(Source.EMPTY, condition, List.of(lhs, rhs)), layout(f1, f2)).get(driverContext); + yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Case(Source.EMPTY, condition, List.of(lhs, rhs)), layout(f1, f2)) + .get(driverContext); } case "date_trunc" -> { FieldAttribute timestamp = new FieldAttribute( @@ -149,6 +152,7 @@ public class EvalBenchmark { new EsField("timestamp", DataType.DATETIME, Map.of(), true) ); yield EvalMapper.toEvaluator( + FOLD_CONTEXT, new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), DataType.TIME_DURATION), timestamp), layout(timestamp) ).get(driverContext); @@ -156,6 +160,7 @@ public class EvalBenchmark { case "equal_to_const" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( + FOLD_CONTEXT, new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataType.LONG)), layout(longField) ).get(driverContext); @@ -163,21 +168,21 @@ public class EvalBenchmark { case "long_equal_to_long" -> { FieldAttribute lhs = longField(); FieldAttribute rhs = longField(); - 
yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext); + yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext); } case "long_equal_to_int" -> { FieldAttribute lhs = longField(); FieldAttribute rhs = intField(); - yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext); + yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext); } case "mv_min", "mv_min_ascending" -> { FieldAttribute longField = longField(); - yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext); + yield EvalMapper.toEvaluator(FOLD_CONTEXT, new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext); } case "rlike" -> { FieldAttribute keywordField = keywordField(); RLike rlike = new RLike(Source.EMPTY, keywordField, new RLikePattern(".ar")); - yield EvalMapper.toEvaluator(rlike, layout(keywordField)).get(driverContext); + yield EvalMapper.toEvaluator(FOLD_CONTEXT, rlike, layout(keywordField)).get(driverContext); } default -> throw new UnsupportedOperationException(); }; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 6dfb337a22ac..24ba0740cfe2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:eef54b3a414aa53b98f0f8df2633aed83c3ba6230722769282925442968f0364", + 
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:dd66beec64a7f9b19c6c35a1195153b2b630a55e16ec71949ed5187c5947eea1", "-wolfi", "apk" ), diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java index ab28a66d9306..fac7d86701d5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java @@ -122,7 +122,7 @@ public class TestFixturesPlugin implements Plugin { composeExtension.getRemoveContainers().set(true); composeExtension.getCaptureContainersOutput() .set(EnumSet.of(LogLevel.INFO, LogLevel.DEBUG).contains(project.getGradle().getStartParameter().getLogLevel())); - composeExtension.getUseDockerComposeV2().set(false); + composeExtension.getUseDockerComposeV2().set(true); composeExtension.getExecutable().set(this.providerFactory.provider(() -> { String composePath = dockerSupport.get().getDockerAvailability().dockerComposePath(); LOGGER.debug("Docker Compose path: {}", composePath); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index c3b976894676..1e57d9fab7cf 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -187,20 +187,12 @@ class APMJvmOptions { static void extractSecureSettings(SecureSettings secrets, Map propertiesMap) { final Set settingNames = secrets.getSettingNames(); for (String key : List.of("api_key", "secret_token")) { - for (String prefix : List.of("telemetry.", "tracing.apm.")) { - if 
(settingNames.contains(prefix + key)) { - if (propertiesMap.containsKey(key)) { - throw new IllegalStateException( - Strings.format("Duplicate telemetry setting: [telemetry.%s] and [tracing.apm.%s]", key, key) - ); - } - - try (SecureString token = secrets.getString(prefix + key)) { - propertiesMap.put(key, token.toString()); - } + String prefix = "telemetry."; + if (settingNames.contains(prefix + key)) { + try (SecureString token = secrets.getString(prefix + key)) { + propertiesMap.put(key, token.toString()); } } - } } @@ -227,44 +219,12 @@ class APMJvmOptions { static Map extractApmSettings(Settings settings) throws UserException { final Map propertiesMap = new HashMap<>(); - // tracing.apm.agent. is deprecated by telemetry.agent. final String telemetryAgentPrefix = "telemetry.agent."; - final String deprecatedTelemetryAgentPrefix = "tracing.apm.agent."; final Settings telemetryAgentSettings = settings.getByPrefix(telemetryAgentPrefix); telemetryAgentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(telemetryAgentSettings.get(key)))); - final Settings apmAgentSettings = settings.getByPrefix(deprecatedTelemetryAgentPrefix); - for (String key : apmAgentSettings.keySet()) { - if (propertiesMap.containsKey(key)) { - throw new IllegalStateException( - Strings.format( - "Duplicate telemetry setting: [%s%s] and [%s%s]", - telemetryAgentPrefix, - key, - deprecatedTelemetryAgentPrefix, - key - ) - ); - } - propertiesMap.put(key, String.valueOf(apmAgentSettings.get(key))); - } - StringJoiner globalLabels = extractGlobalLabels(telemetryAgentPrefix, propertiesMap, settings); - if (globalLabels.length() == 0) { - globalLabels = extractGlobalLabels(deprecatedTelemetryAgentPrefix, propertiesMap, settings); - } else { - StringJoiner tracingGlobalLabels = extractGlobalLabels(deprecatedTelemetryAgentPrefix, propertiesMap, settings); - if (tracingGlobalLabels.length() != 0) { - throw new IllegalArgumentException( - "Cannot have global labels with tracing.agent 
prefix [" - + globalLabels - + "] and telemetry.apm.agent prefix [" - + tracingGlobalLabels - + "]" - ); - } - } if (globalLabels.length() > 0) { propertiesMap.put("global_labels", globalLabels.toString()); } @@ -274,7 +234,7 @@ class APMJvmOptions { if (propertiesMap.containsKey(key)) { throw new UserException( ExitCodes.CONFIG, - "Do not set a value for [tracing.apm.agent." + key + "], as this is configured automatically by Elasticsearch" + "Do not set a value for [telemetry.agent." + key + "], as this is configured automatically by Elasticsearch" ); } } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java index a7ba8eb11fbc..0e067afc1aa7 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java @@ -25,18 +25,15 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Function; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -82,109 +79,63 @@ public class APMJvmOptionsTests extends ESTestCase { } public void testExtractSecureSettings() { - MockSecureSettings duplicateSecureSettings = new MockSecureSettings(); + MockSecureSettings 
secureSettings = new MockSecureSettings(); + secureSettings.setString("telemetry.secret_token", "token"); + secureSettings.setString("telemetry.api_key", "key"); - for (String prefix : List.of("telemetry.", "tracing.apm.")) { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(prefix + "secret_token", "token"); - secureSettings.setString(prefix + "api_key", "key"); - - duplicateSecureSettings.setString(prefix + "api_key", "secret"); - - Map propertiesMap = new HashMap<>(); - APMJvmOptions.extractSecureSettings(secureSettings, propertiesMap); - - assertThat(propertiesMap, matchesMap(Map.of("secret_token", "token", "api_key", "key"))); - } - - Exception exception = expectThrows( - IllegalStateException.class, - () -> APMJvmOptions.extractSecureSettings(duplicateSecureSettings, new HashMap<>()) - ); - assertThat(exception.getMessage(), containsString("Duplicate telemetry setting")); - assertThat(exception.getMessage(), containsString("telemetry.api_key")); - assertThat(exception.getMessage(), containsString("tracing.apm.api_key")); + Map propertiesMap = new HashMap<>(); + APMJvmOptions.extractSecureSettings(secureSettings, propertiesMap); + assertThat(propertiesMap, matchesMap(Map.of("secret_token", "token", "api_key", "key"))); } public void testExtractSettings() throws UserException { - Function buildSettings = (prefix) -> Settings.builder() - .put(prefix + "server_url", "https://myurl:443") - .put(prefix + "service_node_name", "instance-0000000001"); - - for (String prefix : List.of("tracing.apm.agent.", "telemetry.agent.")) { - var name = "APM Tracing"; - var deploy = "123"; - var org = "456"; - var extracted = APMJvmOptions.extractApmSettings( - buildSettings.apply(prefix) - .put(prefix + "global_labels.deployment_name", name) - .put(prefix + "global_labels.deployment_id", deploy) - .put(prefix + "global_labels.organization_id", org) - .build() - ); - - assertThat( - extracted, - allOf( - hasEntry("server_url", 
"https://myurl:443"), - hasEntry("service_node_name", "instance-0000000001"), - hasEntry(equalTo("global_labels"), not(endsWith(","))), // test that we have collapsed all global labels into one - not(hasKey("global_labels.organization_id")) // tests that we strip out the top level label keys - ) - ); - - List labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); - assertThat(labels, hasSize(3)); - assertThat(labels, containsInAnyOrder("deployment_name=APM Tracing", "organization_id=" + org, "deployment_id=" + deploy)); - - // test replacing with underscores and skipping empty - name = "APM=Tracing"; - deploy = ""; - org = ",456"; - extracted = APMJvmOptions.extractApmSettings( - buildSettings.apply(prefix) - .put(prefix + "global_labels.deployment_name", name) - .put(prefix + "global_labels.deployment_id", deploy) - .put(prefix + "global_labels.organization_id", org) - .build() - ); - labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); - assertThat(labels, hasSize(2)); - assertThat(labels, containsInAnyOrder("deployment_name=APM_Tracing", "organization_id=_456")); - } - - IllegalStateException err = expectThrows( - IllegalStateException.class, - () -> APMJvmOptions.extractApmSettings( - Settings.builder() - .put("tracing.apm.agent.server_url", "https://myurl:443") - .put("telemetry.agent.server_url", "https://myurl-2:443") - .build() - ) - ); - assertThat(err.getMessage(), is("Duplicate telemetry setting: [telemetry.agent.server_url] and [tracing.apm.agent.server_url]")); - } - - public void testNoMixedLabels() { - String telemetryAgent = "telemetry.agent."; - String tracingAgent = "tracing.apm.agent."; - Settings settings = Settings.builder() - .put("tracing.apm.enabled", true) - .put(telemetryAgent + "server_url", "https://myurl:443") - .put(telemetryAgent + "service_node_name", "instance-0000000001") - .put(tracingAgent + "global_labels.deployment_id", "123") - .put(telemetryAgent + "global_labels.organization_id", 
"456") + Settings defaults = Settings.builder() + .put("telemetry.agent.server_url", "https://myurl:443") + .put("telemetry.agent.service_node_name", "instance-0000000001") .build(); - IllegalArgumentException err = assertThrows(IllegalArgumentException.class, () -> APMJvmOptions.extractApmSettings(settings)); + var name = "APM Tracing"; + var deploy = "123"; + var org = "456"; + var extracted = APMJvmOptions.extractApmSettings( + Settings.builder() + .put(defaults) + .put("telemetry.agent.global_labels.deployment_name", name) + .put("telemetry.agent.global_labels.deployment_id", deploy) + .put("telemetry.agent.global_labels.organization_id", org) + .build() + ); + assertThat( - err.getMessage(), - is( - "Cannot have global labels with tracing.agent prefix [organization_id=456] and" - + " telemetry.apm.agent prefix [deployment_id=123]" + extracted, + allOf( + hasEntry("server_url", "https://myurl:443"), + hasEntry("service_node_name", "instance-0000000001"), + hasEntry(equalTo("global_labels"), not(endsWith(","))), // test that we have collapsed all global labels into one + not(hasKey("global_labels.organization_id")) // tests that we strip out the top level label keys ) ); + + List labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + assertThat(labels, hasSize(3)); + assertThat(labels, containsInAnyOrder("deployment_name=APM Tracing", "organization_id=" + org, "deployment_id=" + deploy)); + + // test replacing with underscores and skipping empty + name = "APM=Tracing"; + deploy = ""; + org = ",456"; + extracted = APMJvmOptions.extractApmSettings( + Settings.builder() + .put(defaults) + .put("telemetry.agent.global_labels.deployment_name", name) + .put("telemetry.agent.global_labels.deployment_id", deploy) + .put("telemetry.agent.global_labels.organization_id", org) + .build() + ); + labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + assertThat(labels, hasSize(2)); + assertThat(labels, 
containsInAnyOrder("deployment_name=APM_Tracing", "organization_id=_456")); } private Path makeFakeAgentJar() throws IOException { diff --git a/docs/changelog/118188.yaml b/docs/changelog/118188.yaml new file mode 100644 index 000000000000..f24651231b7a --- /dev/null +++ b/docs/changelog/118188.yaml @@ -0,0 +1,5 @@ +pr: 118188 +summary: Check for early termination in Driver +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/118602.yaml b/docs/changelog/118602.yaml new file mode 100644 index 000000000000..a75c5dcf11da --- /dev/null +++ b/docs/changelog/118602.yaml @@ -0,0 +1,5 @@ +pr: 118602 +summary: Limit memory usage of `fold` +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/119227.yaml b/docs/changelog/119227.yaml new file mode 100644 index 000000000000..1e3d4f97a3d2 --- /dev/null +++ b/docs/changelog/119227.yaml @@ -0,0 +1,13 @@ +pr: 119227 +summary: Remove unfreeze REST endpoint +area: Indices APIs +type: breaking +issues: [] +breaking: + title: Remove unfreeze REST endpoint + area: REST API + details: >- + The `/{index}/_unfreeze` REST endpoint is no longer supported. This API was deprecated, and the corresponding + `/{index}/_freeze` endpoint was removed in 8.0. 
+ impact: None, since it is not possible to have a frozen index in a version which is readable by Elasticsearch 9.0 + notable: false diff --git a/docs/changelog/119575.yaml b/docs/changelog/119575.yaml new file mode 100644 index 000000000000..daa7e69118ac --- /dev/null +++ b/docs/changelog/119575.yaml @@ -0,0 +1,6 @@ +pr: 119575 +summary: Fix realtime get of nested fields with synthetic source +area: Mapping +type: bug +issues: + - 119553 diff --git a/docs/changelog/119679.yaml b/docs/changelog/119679.yaml new file mode 100644 index 000000000000..a3fb36bcd01c --- /dev/null +++ b/docs/changelog/119679.yaml @@ -0,0 +1,5 @@ +pr: 119679 +summary: Support mTLS for the Elastic Inference Service integration inside the inference API +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/119772.yaml b/docs/changelog/119772.yaml new file mode 100644 index 000000000000..58d483566b10 --- /dev/null +++ b/docs/changelog/119772.yaml @@ -0,0 +1,6 @@ +pr: 119772 +summary: ESQL Support IN operator for Date nanos +area: ES|QL +type: enhancement +issues: + - 118578 diff --git a/docs/changelog/119831.yaml b/docs/changelog/119831.yaml new file mode 100644 index 000000000000..61c09d7d54de --- /dev/null +++ b/docs/changelog/119831.yaml @@ -0,0 +1,5 @@ +pr: 119831 +summary: Run `TransportClusterGetSettingsAction` on local node +area: Infra/Settings +type: enhancement +issues: [] diff --git a/docs/changelog/119846.yaml b/docs/changelog/119846.yaml new file mode 100644 index 000000000000..9e7d99fe1be1 --- /dev/null +++ b/docs/changelog/119846.yaml @@ -0,0 +1,12 @@ +pr: 119846 +summary: Drop support for brackets from METADATA syntax +area: ES|QL +type: deprecation +issues: + - 115401 +deprecation: + title: Drop support for brackets from METADATA syntax + area: ES|QL + details: Please describe the details of this change for the release notes. You can + use asciidoc. 
+ impact: Please describe the impact of this change to users diff --git a/docs/changelog/119893.yaml b/docs/changelog/119893.yaml new file mode 100644 index 000000000000..35a46ce0940d --- /dev/null +++ b/docs/changelog/119893.yaml @@ -0,0 +1,5 @@ +pr: 119893 +summary: Add enterprise license check for Inference API actions +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/119922.yaml b/docs/changelog/119922.yaml new file mode 100644 index 000000000000..2fc9d9529c96 --- /dev/null +++ b/docs/changelog/119922.yaml @@ -0,0 +1,5 @@ +pr: 119922 +summary: "[Inference API] fix spell words: covertToString to convertToString" +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/119926.yaml b/docs/changelog/119926.yaml new file mode 100644 index 000000000000..3afafd5b2117 --- /dev/null +++ b/docs/changelog/119926.yaml @@ -0,0 +1,11 @@ +pr: 119926 +summary: "Deprecated tracing.apm.* settings got removed." +area: Infra/Metrics +type: breaking +issues: [] +breaking: + title: "Deprecated tracing.apm.* settings got removed." + area: Cluster and node setting + details: Deprecated `tracing.apm.*` settings got removed, use respective `telemetry.*` / `telemetry.tracing.*` settings instead. + impact: 9.x nodes will refuse to start if any such setting (including secret settings) is still present. 
+ notable: false diff --git a/docs/changelog/120014.yaml b/docs/changelog/120014.yaml new file mode 100644 index 000000000000..bef1f3ba4993 --- /dev/null +++ b/docs/changelog/120014.yaml @@ -0,0 +1,6 @@ +pr: 120014 +summary: Fix potential file leak in ES816BinaryQuantizedVectorsWriter +area: Search +type: bug +issues: + - 119981 diff --git a/docs/changelog/120020.yaml b/docs/changelog/120020.yaml new file mode 100644 index 000000000000..55a80187dbff --- /dev/null +++ b/docs/changelog/120020.yaml @@ -0,0 +1,5 @@ +pr: 120020 +summary: Resume Driver on cancelled or early finished +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/120038.yaml b/docs/changelog/120038.yaml new file mode 100644 index 000000000000..fe3a2ccccc09 --- /dev/null +++ b/docs/changelog/120038.yaml @@ -0,0 +1,5 @@ +pr: 120038 +summary: Run template simulation actions on local node +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/120042.yaml b/docs/changelog/120042.yaml new file mode 100644 index 000000000000..0093068ae989 --- /dev/null +++ b/docs/changelog/120042.yaml @@ -0,0 +1,5 @@ +pr: 120042 +summary: Match dot prefix of migrated DS backing index with the source index +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/120055.yaml b/docs/changelog/120055.yaml new file mode 100644 index 000000000000..05f66523d0ef --- /dev/null +++ b/docs/changelog/120055.yaml @@ -0,0 +1,5 @@ +pr: 120055 +summary: Optimize loading mappings when determining synthetic source usage and whether host.name can be sorted on. 
+area: Logs +type: enhancement +issues: [] diff --git a/docs/changelog/120062.yaml b/docs/changelog/120062.yaml new file mode 100644 index 000000000000..42e8d97f1744 --- /dev/null +++ b/docs/changelog/120062.yaml @@ -0,0 +1,6 @@ +pr: 120062 +summary: Update Text Similarity Reranker to Properly Handle Aliases +area: Ranking +type: bug +issues: + - 119617 diff --git a/docs/changelog/120084.yaml b/docs/changelog/120084.yaml new file mode 100644 index 000000000000..aafe490d79f1 --- /dev/null +++ b/docs/changelog/120084.yaml @@ -0,0 +1,5 @@ +pr: 120084 +summary: Improve how reindex data stream index action handles api blocks +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/120087.yaml b/docs/changelog/120087.yaml new file mode 100644 index 000000000000..8539640809b0 --- /dev/null +++ b/docs/changelog/120087.yaml @@ -0,0 +1,5 @@ +pr: 120087 +summary: Include `clusterApplyListener` in long cluster apply warnings +area: Cluster Coordination +type: enhancement +issues: [] diff --git a/docs/changelog/120133.yaml b/docs/changelog/120133.yaml new file mode 100644 index 000000000000..4ec88267a1bf --- /dev/null +++ b/docs/changelog/120133.yaml @@ -0,0 +1,6 @@ +pr: 120133 +summary: Use approximation to advance matched queries +area: Search +type: bug +issues: + - 120130 diff --git a/docs/changelog/120143.yaml b/docs/changelog/120143.yaml new file mode 100644 index 000000000000..7e8cd5a8ceae --- /dev/null +++ b/docs/changelog/120143.yaml @@ -0,0 +1,6 @@ +pr: 120143 +summary: Esql - support date nanos in date format function +area: ES|QL +type: enhancement +issues: + - 109994 diff --git a/docs/changelog/120193.yaml b/docs/changelog/120193.yaml new file mode 100644 index 000000000000..18858e81d9b6 --- /dev/null +++ b/docs/changelog/120193.yaml @@ -0,0 +1,5 @@ +pr: 120193 +summary: "Do not capture `ClusterChangedEvent` in `IndicesStore` call to #onClusterStateShardsClosed" +area: Store +type: bug +issues: [] diff --git a/docs/changelog/120198.yaml 
b/docs/changelog/120198.yaml new file mode 100644 index 000000000000..076a2be942a3 --- /dev/null +++ b/docs/changelog/120198.yaml @@ -0,0 +1,5 @@ +pr: 120198 +summary: Bump `TrialLicenseVersion` to allow starting new trial on 9.0 +area: License +type: enhancement +issues: [] diff --git a/docs/changelog/120200.yaml b/docs/changelog/120200.yaml new file mode 100644 index 000000000000..abde91aec0df --- /dev/null +++ b/docs/changelog/120200.yaml @@ -0,0 +1,5 @@ +pr: 120200 +summary: "[Connector API] Support hard deletes with new URL param in delete endpoint" +area: Extract&Transform +type: feature +issues: [] diff --git a/docs/reference/connector/apis/delete-connector-api.asciidoc b/docs/reference/connector/apis/delete-connector-api.asciidoc index f161a3c3b593..a324630cc8a5 100644 --- a/docs/reference/connector/apis/delete-connector-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-api.asciidoc @@ -13,7 +13,7 @@ beta::[] For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. -- -Soft-deletes a connector and removes associated sync jobs. +Deletes a connector and optionally removes associated sync jobs. Note: this action doesn't delete any API key, ingest pipeline or data index associated with the connector. These need to be removed manually. @@ -37,6 +37,9 @@ To get started with Connector APIs, check out <`:: (Required, string) +``:: +(Optional, boolean) If `true`, the connector doc is deleted. If `false`, connector doc is marked as deleted (soft deletion). Defaults to `false`. + `delete_sync_jobs`:: (Optional, boolean) A flag indicating if associated sync jobs should be also removed. Defaults to `false`. 
diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index 8ce8064a8161..adfd38478ab2 100644 --- a/docs/reference/esql/esql-limitations.asciidoc +++ b/docs/reference/esql/esql-limitations.asciidoc @@ -30,11 +30,11 @@ include::processing-commands/limit.asciidoc[tag=limitation] ** You can use `to_datetime` to cast to millisecond dates to use unsupported functions * `double` (`float`, `half_float`, `scaled_float` are represented as `double`) * `ip` -* `keyword` family including `keyword`, `constant_keyword`, and `wildcard` +* `keyword` <> including `keyword`, `constant_keyword`, and `wildcard` * `int` (`short` and `byte` are represented as `int`) * `long` * `null` -* `text` +* `text` <> including `text`, `semantic_text` and `match_only_text` * experimental:[] `unsigned_long` * `version` * Spatial types diff --git a/docs/reference/esql/functions/description/match.asciidoc b/docs/reference/esql/functions/description/match.asciidoc index 931fd5eb2f94..0724f0f108e3 100644 --- a/docs/reference/esql/functions/description/match.asciidoc +++ b/docs/reference/esql/functions/description/match.asciidoc @@ -2,4 +2,4 @@ *Description* -Use `MATCH` to perform a <> on the specified field. Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. Match can be used on text fields, as well as other field types like boolean, dates, and numeric types. For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. `MATCH` returns true if the provided query matches the row. +Use `MATCH` to perform a <> on the specified field. Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. Match can be used on fields from the text family like <> and <>, as well as other field types like keyword, boolean, dates, and numeric types. For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. `MATCH` returns true if the provided query matches the row. 
diff --git a/docs/reference/esql/functions/description/to_date_nanos.asciidoc b/docs/reference/esql/functions/description/to_date_nanos.asciidoc index 3fac7295f1be..955c19b43a12 100644 --- a/docs/reference/esql/functions/description/to_date_nanos.asciidoc +++ b/docs/reference/esql/functions/description/to_date_nanos.asciidoc @@ -4,4 +4,4 @@ Converts an input to a nanosecond-resolution date value (aka date_nanos). -NOTE: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch. +NOTE: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attepting to convertvalues outside of that range will result in null with a warning.. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch. diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index 3d96de05c840..f9c7f2f27d6f 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -1599,7 +1599,7 @@ "FROM sample_data \n| WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW()\n| STATS COUNT(*) BY bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW())", "FROM employees\n| WHERE hire_date >= \"1985-01-01T00:00:00Z\" AND hire_date < \"1986-01-01T00:00:00Z\"\n| STATS AVG(salary) BY bucket = BUCKET(hire_date, 20, \"1985-01-01T00:00:00Z\", \"1986-01-01T00:00:00Z\")\n| SORT bucket", "FROM employees\n| STATS s1 = b1 + 1, s2 = BUCKET(salary / 1000 + 999, 50.) 
+ 2 BY b1 = BUCKET(salary / 100 + 99, 50.), b2 = BUCKET(salary / 1000 + 999, 50.)\n| SORT b1, b2\n| KEEP s1, b1, s2, b2", - "FROM employees \n| STATS dates = VALUES(birth_date) BY b = BUCKET(birth_date + 1 HOUR, 1 YEAR) - 1 HOUR\n| EVAL d_count = MV_COUNT(dates)\n| SORT d_count\n| LIMIT 3" + "FROM employees\n| STATS dates = MV_SORT(VALUES(birth_date)) BY b = BUCKET(birth_date + 1 HOUR, 1 YEAR) - 1 HOUR\n| EVAL d_count = MV_COUNT(dates)\n| SORT d_count, b\n| LIMIT 3" ], "preview" : false, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json index 6e2738fafb96..f6f48e9df82b 100644 --- a/docs/reference/esql/functions/kibana/definition/date_format.json +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -4,6 +4,30 @@ "name" : "date_format", "description" : "Returns a string representation of a date, in the provided format.", "signatures" : [ + { + "params" : [ + { + "name" : "dateFormat", + "type" : "date", + "optional" : true, + "description" : "Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "date_nanos", + "optional" : true, + "description" : "Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, { "params" : [ { @@ -22,6 +46,24 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "keyword", + "optional" : true, + "description" : "Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`, the function returns `null`." 
+ }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, { "params" : [ { @@ -39,6 +81,24 @@ ], "variadic" : false, "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "text", + "optional" : true, + "description" : "Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`, the function returns `null`." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "keyword" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index d61534da81a6..eb206cb9ddf4 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "match", - "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on text fields, as well as other field types like boolean, dates, and numeric types.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", + "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json index 44233bbddb65..b58f9d5835a2 100644 --- a/docs/reference/esql/functions/kibana/definition/match_operator.json +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "operator", "name" : "match_operator", - "description" : "Performs a <> on the specified field. 
Returns true if the provided query matches the row.", + "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json index d9409bceb8e6..210b9608f9ef 100644 --- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json +++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json @@ -3,7 +3,7 @@ "type" : "eval", "name" : "to_date_nanos", "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).", - "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", + "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attempting to convert values outside of that range will result in null with a warning.
Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", "signatures" : [ { "params" : [ @@ -90,6 +90,6 @@ "returnType" : "date_nanos" } ], - "preview" : true, + "preview" : false, "snapshot_only" : false } diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md index 72258a168293..80bf84351c18 100644 --- a/docs/reference/esql/functions/kibana/docs/match.md +++ b/docs/reference/esql/functions/kibana/docs/match.md @@ -6,7 +6,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ Use `MATCH` to perform a <> on the specified field. Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. -Match can be used on text fields, as well as other field types like boolean, dates, and numeric types. +Match can be used on fields from the text family like <> and <>, +as well as other field types like keyword, boolean, dates, and numeric types. For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md index b0b619679808..98f55aacde0b 100644 --- a/docs/reference/esql/functions/kibana/docs/match_operator.md +++ b/docs/reference/esql/functions/kibana/docs/match_operator.md @@ -3,7 +3,15 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### MATCH_OPERATOR -Performs a <> on the specified field. Returns true if the provided query matches the row. +Use `MATCH` to perform a <> on the specified field. +Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. + +Match can be used on fields from the text family like <> and <>, +as well as other field types like keyword, boolean, dates, and numeric types. 
+ +For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. + +`MATCH` returns true if the provided query matches the row. ``` FROM books diff --git a/docs/reference/esql/functions/kibana/docs/to_date_nanos.md b/docs/reference/esql/functions/kibana/docs/to_date_nanos.md index 0294802485cc..1bce8d4fca83 100644 --- a/docs/reference/esql/functions/kibana/docs/to_date_nanos.md +++ b/docs/reference/esql/functions/kibana/docs/to_date_nanos.md @@ -5,4 +5,4 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### TO_DATE_NANOS Converts an input to a nanosecond-resolution date value (aka date_nanos). -Note: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch. +Note: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attempting to convert values outside of that range will result in null with a warning. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch. diff --git a/docs/reference/esql/functions/layout/to_date_nanos.asciidoc b/docs/reference/esql/functions/layout/to_date_nanos.asciidoc index 977a0ac969e5..2dfd13dac7e2 100644 --- a/docs/reference/esql/functions/layout/to_date_nanos.asciidoc +++ b/docs/reference/esql/functions/layout/to_date_nanos.asciidoc @@ -4,8 +4,6 @@ [[esql-to_date_nanos]] === `TO_DATE_NANOS` -preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release.
Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - *Syntax* [.text-center] diff --git a/docs/reference/esql/functions/type-conversion-functions.asciidoc b/docs/reference/esql/functions/type-conversion-functions.asciidoc index 9ac9ec290c07..bd70c2789dfa 100644 --- a/docs/reference/esql/functions/type-conversion-functions.asciidoc +++ b/docs/reference/esql/functions/type-conversion-functions.asciidoc @@ -18,6 +18,7 @@ * <> * experimental:[] <> * <> +* <> * <> * <> * <> @@ -37,6 +38,7 @@ include::layout/to_cartesianpoint.asciidoc[] include::layout/to_cartesianshape.asciidoc[] include::layout/to_dateperiod.asciidoc[] include::layout/to_datetime.asciidoc[] +include::layout/to_date_nanos.asciidoc[] include::layout/to_degrees.asciidoc[] include::layout/to_double.asciidoc[] include::layout/to_geopoint.asciidoc[] diff --git a/docs/reference/esql/functions/types/date_format.asciidoc b/docs/reference/esql/functions/types/date_format.asciidoc index b2e97dfa8835..c8f4942d98a6 100644 --- a/docs/reference/esql/functions/types/date_format.asciidoc +++ b/docs/reference/esql/functions/types/date_format.asciidoc @@ -5,6 +5,10 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== dateFormat | date | result +date | | keyword +date_nanos | | keyword keyword | date | keyword +keyword | date_nanos | keyword text | date | keyword +text | date_nanos | keyword |=== diff --git a/docs/reference/indices.asciidoc b/docs/reference/indices.asciidoc index ca7de396147a..b6b82422cbb4 100644 --- a/docs/reference/indices.asciidoc +++ b/docs/reference/indices.asciidoc @@ -24,7 +24,6 @@ index settings, aliases, mappings, and index templates. 
* <> * <> * <> -* <> * <> * <> * <> @@ -143,6 +142,5 @@ include::indices/shrink-index.asciidoc[] include::indices/simulate-index.asciidoc[] include::indices/simulate-template.asciidoc[] include::indices/split-index.asciidoc[] -include::indices/apis/unfreeze.asciidoc[] include::indices/update-settings.asciidoc[] include::indices/put-mapping.asciidoc[] diff --git a/docs/reference/indices/apis/unfreeze.asciidoc b/docs/reference/indices/apis/unfreeze.asciidoc deleted file mode 100644 index 5d04d44db744..000000000000 --- a/docs/reference/indices/apis/unfreeze.asciidoc +++ /dev/null @@ -1,61 +0,0 @@ -[role="xpack"] -[[unfreeze-index-api]] -=== Unfreeze index API -++++ -Unfreeze index -++++ - -[WARNING] -.Deprecated in 7.14 -==== -In 8.0, we removed the ability to freeze an index. In previous versions, -freezing an index reduced its memory overhead. However, frozen indices are no -longer useful due to -https://www.elastic.co/blog/significantly-decrease-your-elasticsearch-heap-memory-usage[recent -improvements in heap memory usage]. -You can use this API to unfreeze indices that were frozen in 7.x. Frozen indices -are not related to the frozen data tier. -==== - -.New API reference -[sidebar] --- -For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. --- - -Unfreezes an index. - -[[unfreeze-index-api-request]] -==== {api-request-title} - -`POST //_unfreeze` - -[[unfreeze-index-api-prereqs]] -==== {api-prereq-title} - -* If the {es} {security-features} are enabled, you must have the `manage` -<> for the target index or index alias. - -[[unfreeze-index-api-desc]] -==== {api-description-title} - -When a frozen index is unfrozen, the index goes through the normal recovery -process and becomes writeable again. - -[[unfreeze-index-api-path-parms]] -==== {api-path-parms-title} - -``:: - (Required, string) Identifier for the index. 
- -[[unfreeze-index-api-examples]] -==== {api-examples-title} - -The following example unfreezes an index: - -[source,console] --------------------------------------------------- -POST /my-index-000001/_unfreeze --------------------------------------------------- -// TEST[s/^/PUT my-index-000001\n/] -// TEST[skip:unable to ignore deprecation warning] diff --git a/docs/reference/indices/index-mgmt.asciidoc b/docs/reference/indices/index-mgmt.asciidoc index 73643dbfd4b3..131bc79faa40 100644 --- a/docs/reference/indices/index-mgmt.asciidoc +++ b/docs/reference/indices/index-mgmt.asciidoc @@ -43,7 +43,7 @@ For more information on managing indices, refer to <>. * To filter the list of indices, use the search bar or click a badge. Badges indicate if an index is a <>, a -<>, or <>. +<>, or <>. * To drill down into the index <>, <>, and statistics, diff --git a/docs/reference/inference/chat-completion-inference.asciidoc b/docs/reference/inference/chat-completion-inference.asciidoc new file mode 100644 index 000000000000..83a8f94634f2 --- /dev/null +++ b/docs/reference/inference/chat-completion-inference.asciidoc @@ -0,0 +1,417 @@ +[role="xpack"] +[[chat-completion-inference-api]] +=== Chat completion inference API + +Streams a chat completion response. + +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
+ + +[discrete] +[[chat-completion-inference-api-request]] +==== {api-request-title} + +`POST /_inference//_unified` + +`POST /_inference/chat_completion//_unified` + + +[discrete] +[[chat-completion-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `monitor_inference` <> +(the built-in `inference_admin` and `inference_user` roles grant this privilege) +* You must use a client that supports streaming. + + +[discrete] +[[chat-completion-inference-api-desc]] +==== {api-description-title} + +The chat completion {infer} API enables real-time responses for chat completion tasks by delivering answers incrementally, reducing response times during computation. +It only works with the `chat_completion` task type for `openai` and `elastic` {infer} services. + +[NOTE] +==== +The `chat_completion` task type is only available within the _unified API and only supports streaming. +==== + +[discrete] +[[chat-completion-inference-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +The unique identifier of the {infer} endpoint. + + +``:: +(Optional, string) +The type of {infer} task that the model performs. If included, this must be set to the value `chat_completion`. + + +[discrete] +[[chat-completion-inference-api-request-body]] +==== {api-request-body-title} + +`messages`:: +(Required, array of objects) A list of objects representing the conversation. +Requests should generally only add new messages from the user (role `user`). The other message roles (`assistant`, `system`, or `tool`) should generally only be copied from the response to a previous completion request, such that the messages array is built up throughout a conversation. ++ +.Assistant message +[%collapsible%closed] +===== +`content`:: +(Required unless `tool_calls` is specified, string or array of objects) +The contents of the message. 
++ +include::inference-shared.asciidoc[tag=chat-completion-schema-content-with-examples] ++ +`role`:: +(Required, string) +The role of the message author. This should be set to `assistant` for this type of message. ++ +`tool_calls`:: +(Optional, array of objects) +The tool calls generated by the model. ++ +.Examples +[%collapsible%closed] +====== +[source,js] +------------------------------------------------------------ +{ + "tool_calls": [ + { + "id": "call_KcAjWtAww20AihPHphUh46Gd", + "type": "function", + "function": { + "name": "get_current_weather", + "arguments": "{\"location\":\"Boston, MA\"}" + } + } + ] +} +------------------------------------------------------------ +// NOTCONSOLE +====== ++ +`id`::: +(Required, string) +The identifier of the tool call. ++ +`type`::: +(Required, string) +The type of tool call. This must be set to the value `function`. ++ +`function`::: +(Required, object) +The function that the model called. ++ +`name`:::: +(Required, string) +The name of the function to call. ++ +`arguments`:::: +(Required, string) +The arguments to call the function with in JSON format. +===== ++ +.System message +[%collapsible%closed] +===== +`content`::: +(Required, string or array of objects) +The contents of the message. ++ +include::inference-shared.asciidoc[tag=chat-completion-schema-content-with-examples] ++ +`role`::: +(Required, string) +The role of the message author. This should be set to `system` for this type of message. +===== ++ +.Tool message +[%collapsible%closed] +===== +`content`:: +(Required, string or array of objects) +The contents of the message. ++ +include::inference-shared.asciidoc[tag=chat-completion-schema-content-with-examples] ++ +`role`:: +(Required, string) +The role of the message author. This should be set to `tool` for this type of message. ++ +`tool_call_id`:: +(Required, string) +The tool call that this message is responding to. 
+===== ++ +.User message +[%collapsible%closed] +===== +`content`:: +(Required, string or array of objects) +The contents of the message. ++ +include::inference-shared.asciidoc[tag=chat-completion-schema-content-with-examples] ++ +`role`:: +(Required, string) +The role of the message author. This should be set to `user` for this type of message. +===== + +`model`:: +(Optional, string) +The ID of the model to use. By default, the model ID is set to the value included when creating the inference endpoint. + +`max_completion_tokens`:: +(Optional, integer) +The upper bound limit for the number of tokens that can be generated for a completion request. + +`stop`:: +(Optional, array of strings) +A sequence of strings to control when the model should stop generating additional tokens. + +`temperature`:: +(Optional, float) +The sampling temperature to use. + +`tools`:: +(Optional, array of objects) +A list of tools that the model can call. ++ +.Structure +[%collapsible%closed] +===== +`type`:: +(Required, string) +The type of tool, must be set to the value `function`. ++ +`function`:: +(Required, object) +The function definition. ++ +`description`::: +(Optional, string) +A description of what the function does. This is used by the model to choose when and how to call the function. ++ +`name`::: +(Required, string) +The name of the function. ++ +`parameters`::: +(Optional, object) +The parameters the function accepts. This should be formatted as a JSON object. ++ +`strict`::: +(Optional, boolean) +Whether to enable schema adherence when generating the function call.
+===== ++ +.Examples +[%collapsible%closed] +====== +[source,js] +------------------------------------------------------------ +{ + "tools": [ + { + "type": "function", + "function": { + "name": "get_price_of_item", + "description": "Get the current price of an item", + "parameters": { + "type": "object", + "properties": { + "item": { + "id": "12345" + }, + "unit": { + "type": "currency" + } + } + } + } + } + ] +} +------------------------------------------------------------ +// NOTCONSOLE +====== + +`tool_choice`:: +(Optional, string or object) +Controls which tool is called by the model. ++ +String representation::: +One of `auto`, `none`, or `required`. `auto` allows the model to choose between calling tools and generating a message. `none` causes the model to not call any tools. `required` forces the model to call one or more tools. ++ +Object representation::: ++ +.Structure +[%collapsible%closed] +===== +`type`:: +(Required, string) +The type of the tool. This must be set to the value `function`. ++ +`function`:: +(Required, object) ++ +`name`::: +(Required, string) +The name of the function to call. +===== ++ +.Examples +[%collapsible%closed] +===== +[source,js] +------------------------------------------------------------ +{ + "tool_choice": { + "type": "function", + "function": { + "name": "get_current_weather" + } + } +} +------------------------------------------------------------ +// NOTCONSOLE +===== + +`top_p`:: +(Optional, float) +Nucleus sampling, an alternative to sampling with temperature. + +[discrete] +[[chat-completion-inference-api-example]] +==== {api-examples-title} + +The following example performs a chat completion on the example question with streaming. + + +[source,console] +------------------------------------------------------------ +POST _inference/chat_completion/openai-completion/_stream +{ + "model": "gpt-4o", + "messages": [ + { + "role": "user", + "content": "What is Elastic?"
+ } + ] +} +------------------------------------------------------------ +// TEST[skip:TBD] + +The following example performs a chat completion using an Assistant message with `tool_calls`. + +[source,console] +------------------------------------------------------------ +POST _inference/chat_completion/openai-completion/_stream +{ + "messages": [ + { + "role": "assistant", + "content": "Let's find out what the weather is", + "tool_calls": [ <1> + { + "id": "call_KcAjWtAww20AihPHphUh46Gd", + "type": "function", + "function": { + "name": "get_current_weather", + "arguments": "{\"location\":\"Boston, MA\"}" + } + } + ] + }, + { <2> + "role": "tool", + "content": "The weather is cold", + "tool_call_id": "call_KcAjWtAww20AihPHphUh46Gd" + } + ] +} +------------------------------------------------------------ +// TEST[skip:TBD] + +<1> Each tool call needs a corresponding Tool message. +<2> The corresponding Tool message. + +The following example performs a chat completion using a User message with `tools` and `tool_choice`. + +[source,console] +------------------------------------------------------------ +POST _inference/chat_completion/openai-completion/_stream +{ + "messages": [ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "What's the price of a scarf?" 
+ } + ] + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_current_price", + "description": "Get the current price of an item", + "parameters": { + "type": "object", + "properties": { + "item": { + "id": "123" + } + } + } + } + } + ], + "tool_choice": { + "type": "function", + "function": { + "name": "get_current_price" + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + +The API returns the following response when a request is made to the OpenAI service: + + +[source,txt] +------------------------------------------------------------ +event: message +data: {"chat_completion":{"id":"chatcmpl-Ae0TWsy2VPnSfBbv5UztnSdYUMFP3","choices":[{"delta":{"content":"","role":"assistant"},"index":0}],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk"}} + +event: message +data: {"chat_completion":{"id":"chatcmpl-Ae0TWsy2VPnSfBbv5UztnSdYUMFP3","choices":[{"delta":{"content":"Elastic"},"index":0}],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk"}} + +event: message +data: {"chat_completion":{"id":"chatcmpl-Ae0TWsy2VPnSfBbv5UztnSdYUMFP3","choices":[{"delta":{"content":" is"},"index":0}],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk"}} + +(...) + +event: message +data: {"chat_completion":{"id":"chatcmpl-Ae0TWsy2VPnSfBbv5UztnSdYUMFP3","choices":[],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk","usage":{"completion_tokens":28,"prompt_tokens":16,"total_tokens":44}}} <1> + +event: message +data: [DONE] +------------------------------------------------------------ +// NOTCONSOLE + +<1> The last object message of the stream contains the token usage information.
diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index ca273afc478e..4f27409973ca 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -26,6 +26,7 @@ the following APIs to manage {infer} models and perform {infer}: * <> * <> * <> +* <> * <> [[inference-landscape]] @@ -34,9 +35,9 @@ image::images/inference-landscape.jpg[A representation of the Elastic inference An {infer} endpoint enables you to use the corresponding {ml} model without manual deployment and apply it to your data at ingestion time through -<>. +<>. -Choose a model from your provider or use ELSER – a retrieval model trained by +Choose a model from your provider or use ELSER – a retrieval model trained by Elastic –, then create an {infer} endpoint by the <>. Now use <> to perform <> on your data. @@ -67,7 +68,7 @@ The following list contains the default {infer} endpoints listed by `inference_i Use the `inference_id` of the endpoint in a <> field definition or when creating an <>. The API call will automatically download and deploy the model which might take a couple of minutes. Default {infer} enpoints have {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations] enabled. -For these models, the minimum number of allocations is `0`. +For these models, the minimum number of allocations is `0`. If there is no {infer} activity that uses the endpoint, the number of allocations will scale down to `0` automatically after 15 minutes. @@ -84,7 +85,7 @@ Returning a long document in search results is less useful than providing the mo Each chunk will include the text subpassage and the corresponding embedding generated from it. By default, documents are split into sentences and grouped in sections up to 250 words with 1 sentence overlap so that each chunk shares a sentence with the previous chunk. 
-Overlapping ensures continuity and prevents vital contextual information in the input text from being lost by a hard break. +Overlapping ensures continuity and prevents vital contextual information in the input text from being lost by a hard break. {es} uses the https://unicode-org.github.io/icu-docs/[ICU4J] library to detect word and sentence boundaries for chunking. https://unicode-org.github.io/icu/userguide/boundaryanalysis/#word-boundary[Word boundaries] are identified by following a series of rules, not just the presence of a whitespace character. @@ -135,6 +136,7 @@ PUT _inference/sparse_embedding/small_chunk_size include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] +include::chat-completion-inference.asciidoc[] include::put-inference.asciidoc[] include::stream-inference.asciidoc[] include::update-inference.asciidoc[] diff --git a/docs/reference/inference/inference-shared.asciidoc b/docs/reference/inference/inference-shared.asciidoc index da497c6581e5..b133c5408281 100644 --- a/docs/reference/inference/inference-shared.asciidoc +++ b/docs/reference/inference/inference-shared.asciidoc @@ -41,7 +41,7 @@ end::chunking-settings[] tag::chunking-settings-max-chunking-size[] Specifies the maximum size of a chunk in words. Defaults to `250`. -This value cannot be higher than `300` or lower than `20` (for `sentence` strategy) or `10` (for `word` strategy). +This value cannot be higher than `300` or lower than `20` (for `sentence` strategy) or `10` (for `word` strategy). end::chunking-settings-max-chunking-size[] tag::chunking-settings-overlap[] @@ -63,4 +63,48 @@ Specifies the chunking strategy. It could be either `sentence` or `word`. 
end::chunking-settings-strategy[] +tag::chat-completion-schema-content-with-examples[] +.Examples +[%collapsible%closed] +====== +String example +[source,js] +------------------------------------------------------------ +{ + "content": "Some string" +} +------------------------------------------------------------ +// NOTCONSOLE +Object example +[source,js] +------------------------------------------------------------ +{ + "content": [ + { + "text": "Some text", + "type": "text" + } + ] +} +------------------------------------------------------------ +// NOTCONSOLE +====== + +String representation::: +(Required, string) +The text content. ++ +Object representation::: +`text`:::: +(Required, string) +The text content. ++ +`type`:::: +(Required, string) +This must be set to the value `text`. +end::chat-completion-schema-content-with-examples[] + +tag::chat-completion-docs[] +For more information on how to use the `chat_completion` task type, please refer to the <>. +end::chat-completion-docs[] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index f0c15323863d..da07d1d3e7d8 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -42,7 +42,7 @@ include::inference-shared.asciidoc[tag=inference-id] include::inference-shared.asciidoc[tag=task-type] + -- -Refer to the service list in the <> for the available task types. +Refer to the service list in the <> for the available task types. -- @@ -61,7 +61,7 @@ The create {infer} API enables you to create an {infer} endpoint and configure a The following services are available through the {infer} API. -You can find the available task types next to the service name. +You can find the available task types next to the service name. 
Click the links to review the configuration details of the services: * <> (`completion`, `rerank`, `sparse_embedding`, `text_embedding`) @@ -73,10 +73,10 @@ Click the links to review the configuration details of the services: * <> (`rerank`, `sparse_embedding`, `text_embedding` - this service is for built-in models and models uploaded through Eland) * <> (`sparse_embedding`) * <> (`completion`, `text_embedding`) -* <> (`rerank`, `text_embedding`) +* <> (`rerank`, `text_embedding`) * <> (`text_embedding`) * <> (`text_embedding`) -* <> (`completion`, `text_embedding`) +* <> (`chat_completion`, `completion`, `text_embedding`) * <> (`text_embedding`) * <> (`text_embedding`, `rerank`) diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc index e4be7f18e09d..590f280b1c49 100644 --- a/docs/reference/inference/service-openai.asciidoc +++ b/docs/reference/inference/service-openai.asciidoc @@ -31,10 +31,18 @@ include::inference-shared.asciidoc[tag=task-type] -- Available task types: +* `chat_completion`, * `completion`, * `text_embedding`. -- +[NOTE] +==== +The `chat_completion` task type only supports streaming and only through the `_unified` API. + +include::inference-shared.asciidoc[tag=chat-completion-docs] +==== + [discrete] [[infer-service-openai-api-request-body]] ==== {api-request-body-title} @@ -61,7 +69,7 @@ include::inference-shared.asciidoc[tag=chunking-settings-strategy] `service`:: (Required, string) -The type of service supported for the specified task type. In this case, +The type of service supported for the specified task type. In this case, `openai`. 
`service_settings`:: @@ -176,4 +184,4 @@ PUT _inference/completion/openai-completion } } ------------------------------------------------------------ -// TEST[skip:TBD] \ No newline at end of file +// TEST[skip:TBD] diff --git a/docs/reference/inference/stream-inference.asciidoc b/docs/reference/inference/stream-inference.asciidoc index 42abb589f9af..4a3ce3190971 100644 --- a/docs/reference/inference/stream-inference.asciidoc +++ b/docs/reference/inference/stream-inference.asciidoc @@ -38,8 +38,12 @@ However, if you do not plan to use the {infer} APIs to use these models or if yo ==== {api-description-title} The stream {infer} API enables real-time responses for completion tasks by delivering answers incrementally, reducing response times during computation. -It only works with the `completion` task type. +It only works with the `completion` and `chat_completion` task types. +[NOTE] +==== +include::inference-shared.asciidoc[tag=chat-completion-docs] +==== [discrete] [[stream-inference-api-path-params]] diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc index d3a90f5d84e6..441c21629da5 100644 --- a/docs/reference/inference/update-inference.asciidoc +++ b/docs/reference/inference/update-inference.asciidoc @@ -19,9 +19,9 @@ However, if you do not plan to use the {infer} APIs to use these models or if yo [[update-inference-api-request]] ==== {api-request-title} -`POST _inference//_update` +`PUT _inference//_update` -`POST _inference///_update` +`PUT _inference///_update` [discrete] @@ -52,7 +52,7 @@ Click the links to review the service configuration details: * <> (`rerank`, `sparse_embedding`, `text_embedding` - this service is for built-in models and models uploaded through Eland) * <> (`sparse_embedding`) * <> (`completion`, `text_embedding`) -* <> (`rerank`, `text_embedding`) +* <> (`rerank`, `text_embedding`) * <> (`text_embedding`) * <> (`text_embedding`) * <> (`completion`, `text_embedding`) @@ 
-81,7 +81,7 @@ The following example shows how to update an API key of an {infer} endpoint call [source,console] ------------------------------------------------------------ -POST _inference/my-inference-endpoint/_update +PUT _inference/my-inference-endpoint/_update { "service_settings": { "api_key": "" diff --git a/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc b/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc index 82263c98e911..766886a0b48a 100644 --- a/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc +++ b/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc @@ -384,6 +384,7 @@ A collection of model size stats fields. `model_size_bytes`::: (integer) The size of the model in bytes. +This parameter applies only to PyTorch models. `required_native_memory_bytes`::: (integer) diff --git a/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc index 4f583319ca38..03777afbd6ee 100644 --- a/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc @@ -131,6 +131,7 @@ The free-text description of the trained model. `model_size_bytes`::: (integer) The estimated model size in bytes to keep the trained model in memory. +This parameter applies only to {dfanalytics} trained models. `estimated_operations`::: (integer) diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc index c24135a37091..27220f0d8514 100644 --- a/docs/reference/query-dsl/bool-query.asciidoc +++ b/docs/reference/query-dsl/bool-query.asciidoc @@ -13,21 +13,24 @@ occurrence types are: |======================================================================= |Occur |Description |`must` |The clause (query) must appear in matching documents and will -contribute to the score. +contribute to the score. 
Each query defined under a `must` acts as a logical "AND", returning only documents that match _all_ the specified queries. + +|`should` |The clause (query) should appear in the matching document. Each query defined under a `should` acts as a logical "OR", returning documents that match _any_ of the specified queries. |`filter` |The clause (query) must appear in matching documents. However unlike `must` the score of the query will be ignored. Filter clauses are executed in <>, meaning that scoring is ignored -and clauses are considered for caching. - -|`should` |The clause (query) should appear in the matching document. +and clauses are considered for caching. Each query defined under a `filter` acts as a logical "AND", returning only documents that match _all_ the specified queries. |`must_not` |The clause (query) must not appear in the matching documents. Clauses are executed in <> meaning that scoring is ignored and clauses are considered for caching. Because scoring is -ignored, a score of `0` for all documents is returned. +ignored, a score of `0` for all documents is returned. Each query defined under a `must_not` acts as a logical "NOT", returning only documents that do not match any of the specified queries. + |======================================================================= +The `must` and `should` clauses function as logical AND, OR operators, contributing to the scoring of results. However, these results will not be cached for faster retrieval. In contrast, the `filter` and `must_not` clauses are used to include or exclude results without impacting the score, unless used within a `constant_score` query. + The `bool` query takes a _more-matches-is-better_ approach, so the score from each matching `must` or `should` clause will be added together to provide the final `_score` for each document. 
diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index c3bf84fa600d..68ecc469c6cb 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -156,10 +156,16 @@ See <>. The freeze index API was removed in 8.0. // tag::frozen-removal-explanation[] Frozen indices are no longer useful due to -https://www.elastic.co/blog/significantly-decrease-your-elasticsearch-heap-memory-usage[recent -improvements in heap memory usage]. +https://www.elastic.co/blog/significantly-decrease-your-elasticsearch-heap-memory-usage[improvements +in heap memory usage]. // end::frozen-removal-explanation[] +[role="exclude",id="unfreeze-index-api"] +=== Unfreeze index API + +The unfreeze index API was removed in 9.0. +include::redirects.asciidoc[tag=frozen-removal-explanation] + [role="exclude",id="ilm-freeze"] === Freeze {ilm-init} action @@ -326,7 +332,7 @@ See <>. See <>. // [END] Security redirects -[roles="exclude",id="modules-scripting-stored-scripts"] +[role="exclude",id="modules-scripting-stored-scripts"] === Stored scripts See <> @@ -1749,8 +1755,10 @@ See <>. === Frozen indices // tag::frozen-index-redirect[] - -For API documentation, see <>. +Older versions of {es} provided the option to reduce the amount of data kept in memory for an index, at the expense of +increasing search latency. This was known as 'freezing' the index. +include::redirects.asciidoc[tag=frozen-removal-explanation] +The freeze index API was removed in 8.0, and the unfreeze index API was removed in 9.0. 
// end::frozen-index-redirect[] [role="exclude",id="best_practices"] diff --git a/docs/reference/search/search-your-data/search-across-clusters.asciidoc b/docs/reference/search/search-your-data/search-across-clusters.asciidoc index 5f9e92c57579..8d3768817e85 100644 --- a/docs/reference/search/search-your-data/search-across-clusters.asciidoc +++ b/docs/reference/search/search-your-data/search-across-clusters.asciidoc @@ -22,7 +22,7 @@ The following APIs support {ccs}: * experimental:[] <> * experimental:[] <> * experimental:[] <> -* experimental:[] <> +* experimental:[] <> [discrete] === Prerequisites diff --git a/docs/reference/sql/language/indices.asciidoc b/docs/reference/sql/language/indices.asciidoc index 1dee7f0840ad..1912a020ab0b 100644 --- a/docs/reference/sql/language/indices.asciidoc +++ b/docs/reference/sql/language/indices.asciidoc @@ -100,7 +100,7 @@ requires the keyword `LIKE` for SQL `LIKE` pattern. [[sql-index-frozen]] === Frozen Indices -By default, {es-sql} doesn't search <>. To +By default, {es-sql} doesn't search <>. To search frozen indices, use one of the following features: dedicated configuration parameter:: diff --git a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc index c07e92c05899..5d74ca66ee6b 100644 --- a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc +++ b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc @@ -78,35 +78,31 @@ A shard can become unassigned for several reasons. The following tips outline th most common causes and their solutions. [discrete] -[[fix-cluster-status-reenable-allocation]] -===== Re-enable shard allocation +[[fix-cluster-status-only-one-node]] +===== Single node cluster -You typically disable allocation during a <> or other -cluster maintenance. If you forgot to re-enable allocation afterward, {es} will -be unable to assign shards. 
To re-enable allocation, reset the -`cluster.routing.allocation.enable` cluster setting. +{es} will never assign a replica to the same node as the primary shard. A single-node cluster will always have yellow status. To change to green, set <> to 0 for all indices. -[source,console] ----- -PUT _cluster/settings -{ - "persistent" : { - "cluster.routing.allocation.enable" : null - } -} ----- - -See https://www.youtube.com/watch?v=MiKKUdZvwnI[this video] for walkthrough of troubleshooting "no allocations are allowed". +Therefore, if the number of replicas equals or exceeds the number of nodes, some shards won't be allocated. [discrete] [[fix-cluster-status-recover-nodes]] ===== Recover lost nodes Shards often become unassigned when a data node leaves the cluster. This can -occur for several reasons, ranging from connectivity issues to hardware failure. +occur for several reasons: + +* A manual node restart will cause a temporary unhealthy cluster state until the node recovers. + +* When a node becomes overloaded or fails, it can temporarily disrupt the cluster’s health, leading to an unhealthy state. Prolonged garbage collection (GC) pauses, caused by out-of-memory errors or high memory usage during intensive searches, can trigger this state. See <> for more JVM-related issues. + +* Network issues can prevent reliable node communication, causing shards to become out of sync. Check the logs for repeated messages about nodes leaving and rejoining the cluster. + After you resolve the issue and recover the node, it will rejoin the cluster. {es} will then automatically allocate any unassigned shards. +You can monitor this process by <>. The number of unallocated shards should progressively decrease until green status is reached. + To avoid wasting resources on temporary issues, {es} <> by one minute by default. If you've recovered a node and don’t want to wait for the delay period, you can call the <> or add a delete phase. 
If you no longer need to search the data, you @@ -219,11 +216,39 @@ watermark or set it to an explicit byte value. PUT _cluster/settings { "persistent": { - "cluster.routing.allocation.disk.watermark.low": "30gb" + "cluster.routing.allocation.disk.watermark.low": "90%", + "cluster.routing.allocation.disk.watermark.high": "95%" } } ---- -// TEST[s/"30gb"/null/] +// TEST[s/"90%"/null/] +// TEST[s/"95%"/null/] + +[IMPORTANT] +==== +This is usually a temporary solution and may cause instability if disk space is not freed up. +==== + +[discrete] +[[fix-cluster-status-reenable-allocation]] +===== Re-enable shard allocation + +You typically disable allocation during a <> or other +cluster maintenance. If you forgot to re-enable allocation afterward, {es} will +be unable to assign shards. To re-enable allocation, reset the +`cluster.routing.allocation.enable` cluster setting. + +[source,console] +---- +PUT _cluster/settings +{ + "persistent" : { + "cluster.routing.allocation.enable" : null + } +} +---- + +See https://www.youtube.com/watch?v=MiKKUdZvwnI[this video] for walkthrough of troubleshooting "no allocations are allowed". [discrete] [[fix-cluster-status-jvm]] @@ -271,4 +296,4 @@ POST _cluster/reroute // TEST[s/^/PUT my-index\n/] // TEST[catch:bad_request] -See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. \ No newline at end of file +See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. 
diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 8b03aeb17858..1e03c61df98e 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -13,10 +13,22 @@ import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.net.ContentHandlerFactory; +import java.net.DatagramPacket; +import java.net.DatagramSocket; import java.net.DatagramSocketImplFactory; import java.net.FileNameMap; +import java.net.InetAddress; +import java.net.MulticastSocket; +import java.net.NetworkInterface; +import java.net.Proxy; +import java.net.ProxySelector; +import java.net.ResponseCache; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.SocketAddress; import java.net.SocketImplFactory; import java.net.URL; +import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.List; @@ -167,4 +179,79 @@ public interface EntitlementChecker { void check$java_net_URLConnection$$setContentHandlerFactory(Class callerClass, ContentHandlerFactory fac); + //////////////////// + // + // Network access + // + void check$java_net_ProxySelector$$setDefault(Class callerClass, ProxySelector ps); + + void check$java_net_ResponseCache$$setDefault(Class callerClass, ResponseCache rc); + + void check$java_net_spi_InetAddressResolverProvider$(Class callerClass); + + void check$java_net_spi_URLStreamHandlerProvider$(Class callerClass); + + void check$java_net_URL$(Class callerClass, String protocol, String host, int port, String file, URLStreamHandler handler); + + void check$java_net_URL$(Class callerClass, URL context, String spec, URLStreamHandler handler); + + void check$java_net_DatagramSocket$bind(Class 
callerClass, DatagramSocket that, SocketAddress addr); + + void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, InetAddress addr); + + void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, SocketAddress addr); + + void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p); + + void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p); + + void check$java_net_DatagramSocket$joinGroup(Class callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni); + + void check$java_net_DatagramSocket$leaveGroup(Class callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni); + + void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, InetAddress addr); + + void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, SocketAddress addr, NetworkInterface ni); + + void check$java_net_MulticastSocket$leaveGroup(Class callerClass, MulticastSocket that, InetAddress addr); + + void check$java_net_MulticastSocket$leaveGroup(Class callerClass, MulticastSocket that, SocketAddress addr, NetworkInterface ni); + + void check$java_net_MulticastSocket$send(Class callerClass, MulticastSocket that, DatagramPacket p, byte ttl); + + // Binding/connecting ctor + void check$java_net_ServerSocket$(Class callerClass, int port); + + void check$java_net_ServerSocket$(Class callerClass, int port, int backlog); + + void check$java_net_ServerSocket$(Class callerClass, int port, int backlog, InetAddress bindAddr); + + void check$java_net_ServerSocket$accept(Class callerClass, ServerSocket that); + + void check$java_net_ServerSocket$implAccept(Class callerClass, ServerSocket that, Socket s); + + void check$java_net_ServerSocket$bind(Class callerClass, ServerSocket that, SocketAddress endpoint); + + void check$java_net_ServerSocket$bind(Class callerClass, ServerSocket that, 
SocketAddress endpoint, int backlog); + + // Binding/connecting ctors + void check$java_net_Socket$(Class callerClass, Proxy proxy); + + void check$java_net_Socket$(Class callerClass, String host, int port); + + void check$java_net_Socket$(Class callerClass, InetAddress address, int port); + + void check$java_net_Socket$(Class callerClass, String host, int port, InetAddress localAddr, int localPort); + + void check$java_net_Socket$(Class callerClass, InetAddress address, int port, InetAddress localAddr, int localPort); + + void check$java_net_Socket$(Class callerClass, String host, int port, boolean stream); + + void check$java_net_Socket$(Class callerClass, InetAddress host, int port, boolean stream); + + void check$java_net_Socket$bind(Class callerClass, Socket that, SocketAddress endpoint); + + void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint); + + void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint, int backlog); } diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/DummyImplementations.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/DummyImplementations.java index 6dbb684c7151..304aead1e2bf 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/DummyImplementations.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/DummyImplementations.java @@ -9,8 +9,19 @@ package org.elasticsearch.entitlement.qa.common; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.DatagramPacket; +import java.net.DatagramSocket; +import java.net.DatagramSocketImpl; import java.net.InetAddress; +import java.net.NetworkInterface; +import java.net.ServerSocket; import java.net.Socket; +import java.net.SocketAddress; +import java.net.SocketException; +import java.net.SocketImpl; import 
java.security.cert.Certificate; import java.text.BreakIterator; import java.text.Collator; @@ -290,6 +301,81 @@ class DummyImplementations { } } + private static class DummySocketImpl extends SocketImpl { + @Override + protected void create(boolean stream) {} + + @Override + protected void connect(String host, int port) {} + + @Override + protected void connect(InetAddress address, int port) {} + + @Override + protected void connect(SocketAddress address, int timeout) {} + + @Override + protected void bind(InetAddress host, int port) {} + + @Override + protected void listen(int backlog) {} + + @Override + protected void accept(SocketImpl s) {} + + @Override + protected InputStream getInputStream() { + return null; + } + + @Override + protected OutputStream getOutputStream() { + return null; + } + + @Override + protected int available() { + return 0; + } + + @Override + protected void close() {} + + @Override + protected void sendUrgentData(int data) {} + + @Override + public void setOption(int optID, Object value) {} + + @Override + public Object getOption(int optID) { + return null; + } + } + + static class DummySocket extends Socket { + DummySocket() throws SocketException { + super(new DummySocketImpl()); + } + } + + static class DummyServerSocket extends ServerSocket { + DummyServerSocket() { + super(new DummySocketImpl()); + } + } + + static class DummyBoundServerSocket extends ServerSocket { + DummyBoundServerSocket() { + super(new DummySocketImpl()); + } + + @Override + public boolean isBound() { + return true; + } + } + static class DummySSLSocketFactory extends SSLSocketFactory { @Override public Socket createSocket(String host, int port) { @@ -327,8 +413,77 @@ class DummyImplementations { } } + static class DummyDatagramSocket extends DatagramSocket { + DummyDatagramSocket() throws SocketException { + super(new DatagramSocketImpl() { + @Override + protected void create() throws SocketException {} + + @Override + protected void bind(int lport, InetAddress 
laddr) throws SocketException {} + + @Override + protected void send(DatagramPacket p) throws IOException {} + + @Override + protected int peek(InetAddress i) throws IOException { + return 0; + } + + @Override + protected int peekData(DatagramPacket p) throws IOException { + return 0; + } + + @Override + protected void receive(DatagramPacket p) throws IOException {} + + @Override + protected void setTTL(byte ttl) throws IOException {} + + @Override + protected byte getTTL() throws IOException { + return 0; + } + + @Override + protected void setTimeToLive(int ttl) throws IOException {} + + @Override + protected int getTimeToLive() throws IOException { + return 0; + } + + @Override + protected void join(InetAddress inetaddr) throws IOException {} + + @Override + protected void leave(InetAddress inetaddr) throws IOException {} + + @Override + protected void joinGroup(SocketAddress mcastaddr, NetworkInterface netIf) throws IOException {} + + @Override + protected void leaveGroup(SocketAddress mcastaddr, NetworkInterface netIf) throws IOException {} + + @Override + protected void close() {} + + @Override + public void setOption(int optID, Object value) throws SocketException {} + + @Override + public Object getOption(int optID) throws SocketException { + return null; + } + + @Override + protected void connect(InetAddress address, int port) throws SocketException {} + }); + } + } + private static RuntimeException unexpected() { return new IllegalStateException("This method isn't supposed to be called"); } - } diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java new file mode 100644 index 000000000000..c88d4ce2b11a --- /dev/null +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java @@ -0,0 +1,62 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.common; + +import org.elasticsearch.core.SuppressForbidden; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.ServerSocket; +import java.net.Socket; + +class NetworkAccessCheckActions { + + static void serverSocketAccept() throws IOException { + try (ServerSocket socket = new DummyImplementations.DummyBoundServerSocket()) { + try { + socket.accept(); + } catch (IOException e) { + // Our dummy socket cannot accept connections unless we tell the JDK how to create a socket for it. + // But Socket.setSocketImplFactory(); is one of the methods we always forbid, so we cannot use it. + // Still, we can check accept is called (allowed/denied), we don't care if it fails later for this + // known reason. 
+ assert e.getMessage().contains("client socket implementation factory not set"); + } + } + } + + static void serverSocketBind() throws IOException { + try (ServerSocket socket = new DummyImplementations.DummyServerSocket()) { + socket.bind(null); + } + } + + @SuppressForbidden(reason = "Testing entitlement check on forbidden action") + static void createSocketWithProxy() throws IOException { + try (Socket socket = new Socket(new Proxy(Proxy.Type.HTTP, new InetSocketAddress(0)))) { + assert socket.isBound() == false; + } + } + + static void socketBind() throws IOException { + try (Socket socket = new DummyImplementations.DummySocket()) { + socket.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } + + @SuppressForbidden(reason = "Testing entitlement check on forbidden action") + static void socketConnect() throws IOException { + try (Socket socket = new DummyImplementations.DummySocket()) { + socket.connect(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } +} diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java index 9869af4d8525..a156d20e3686 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java @@ -11,6 +11,7 @@ package org.elasticsearch.entitlement.qa.common; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.qa.common.DummyImplementations.DummyBreakIteratorProvider; import org.elasticsearch.entitlement.qa.common.DummyImplementations.DummyCalendarDataProvider; @@ 
-32,16 +33,25 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.io.UncheckedIOException; +import java.net.DatagramPacket; import java.net.DatagramSocket; -import java.net.DatagramSocketImpl; -import java.net.DatagramSocketImplFactory; import java.net.HttpURLConnection; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.MalformedURLException; +import java.net.NetworkInterface; +import java.net.ProxySelector; +import java.net.ResponseCache; import java.net.ServerSocket; import java.net.Socket; +import java.net.SocketException; import java.net.URL; import java.net.URLClassLoader; import java.net.URLConnection; +import java.net.URLStreamHandler; +import java.net.spi.InetAddressResolver; +import java.net.spi.InetAddressResolverProvider; +import java.net.spi.URLStreamHandlerProvider; import java.security.NoSuchAlgorithmException; import java.util.List; import java.util.Map; @@ -57,25 +67,26 @@ import static org.elasticsearch.entitlement.qa.common.RestEntitlementsCheckActio import static org.elasticsearch.entitlement.qa.common.RestEntitlementsCheckAction.CheckAction.forPlugins; import static org.elasticsearch.rest.RestRequest.Method.GET; +@SuppressWarnings("unused") public class RestEntitlementsCheckAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckAction.class); public static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); private final String prefix; - record CheckAction(Runnable action, boolean isAlwaysDeniedToPlugins) { + record CheckAction(CheckedRunnable action, boolean isAlwaysDeniedToPlugins) { /** * These cannot be granted to plugins, so our test plugins cannot test the "allowed" case. * Used both for always-denied entitlements as well as those granted only to the server itself. 
*/ - static CheckAction deniedToPlugins(Runnable action) { + static CheckAction deniedToPlugins(CheckedRunnable action) { return new CheckAction(action, true); } - static CheckAction forPlugins(Runnable action) { + static CheckAction forPlugins(CheckedRunnable action) { return new CheckAction(action, false); } - static CheckAction alwaysDenied(Runnable action) { + static CheckAction alwaysDenied(CheckedRunnable action) { return new CheckAction(action, true); } } @@ -125,15 +136,81 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), - entry("urlConnection_setContentHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory)) + entry("urlConnection_setContentHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory)), + + entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), + entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), + entry("createInetAddressResolverProvider", alwaysDenied(RestEntitlementsCheckAction::createInetAddressResolverProvider)), + entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), + entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), + entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), + entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), + entry("datagram_socket_connect", 
forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), + entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), + entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), + entry("datagram_socket_join_group", forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), + entry("datagram_socket_leave_group", forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), + + entry("create_socket_with_proxy", forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), + entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), + entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), + entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), + entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)) ); - private static void setDefaultSSLContext() { - try { - SSLContext.setDefault(SSLContext.getDefault()); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } + private static void createURLStreamHandlerProvider() { + var x = new URLStreamHandlerProvider() { + @Override + public URLStreamHandler createURLStreamHandler(String protocol) { + return null; + } + }; + } + + @SuppressWarnings("deprecation") + private static void createURLWithURLStreamHandler() throws MalformedURLException { + var x = new URL("http", "host", 1234, "file", new URLStreamHandler() { + @Override + protected URLConnection openConnection(URL u) { + return null; + } + }); + } + + @SuppressWarnings("deprecation") + private static void createURLWithURLStreamHandler2() throws MalformedURLException { + var x = new URL(null, "spec", new URLStreamHandler() { + @Override + protected URLConnection openConnection(URL u) { + return null; + } + }); + } + + private static void createInetAddressResolverProvider() { + var x = new InetAddressResolverProvider() { + @Override + public 
InetAddressResolver get(Configuration configuration) { + return null; + } + + @Override + public String name() { + return "TEST"; + } + }; + } + + private static void setDefaultResponseCache() { + ResponseCache.setDefault(null); + } + + private static void setDefaultProxySelector() { + ProxySelector.setDefault(null); + } + + private static void setDefaultSSLContext() throws NoSuchAlgorithmException { + SSLContext.setDefault(SSLContext.getDefault()); } private static void setDefaultHostnameVerifier() { @@ -159,28 +236,18 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { System.exit(123); } - private static void createClassLoader() { + private static void createClassLoader() throws IOException { try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) { logger.info("Created URLClassLoader [{}]", classLoader.getName()); - } catch (IOException e) { - throw new UncheckedIOException(e); } } - private static void processBuilder_start() { - try { - new ProcessBuilder("").start(); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static void processBuilder_start() throws IOException { + new ProcessBuilder("").start(); } - private static void processBuilder_startPipeline() { - try { - ProcessBuilder.startPipeline(List.of()); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static void processBuilder_startPipeline() throws IOException { + ProcessBuilder.startPipeline(List.of()); } private static void setHttpsConnectionProperties() { @@ -268,17 +335,8 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { @SuppressWarnings("deprecation") @SuppressForbidden(reason = "We're required to prevent calls to this forbidden API") - private static void datagramSocket$$setDatagramSocketImplFactory() { - try { - DatagramSocket.setDatagramSocketImplFactory(new DatagramSocketImplFactory() { - @Override - public DatagramSocketImpl 
createDatagramSocketImpl() { - throw new IllegalStateException(); - } - }); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static void datagramSocket$$setDatagramSocketImplFactory() throws IOException { + DatagramSocket.setDatagramSocketImplFactory(() -> { throw new IllegalStateException(); }); } private static void httpURLConnection$$setFollowRedirects() { @@ -287,22 +345,14 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { @SuppressWarnings("deprecation") @SuppressForbidden(reason = "We're required to prevent calls to this forbidden API") - private static void serverSocket$$setSocketFactory() { - try { - ServerSocket.setSocketFactory(() -> { throw new IllegalStateException(); }); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static void serverSocket$$setSocketFactory() throws IOException { + ServerSocket.setSocketFactory(() -> { throw new IllegalStateException(); }); } @SuppressWarnings("deprecation") @SuppressForbidden(reason = "We're required to prevent calls to this forbidden API") - private static void socket$$setSocketImplFactory() { - try { - Socket.setSocketImplFactory(() -> { throw new IllegalStateException(); }); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static void socket$$setSocketImplFactory() throws IOException { + Socket.setSocketImplFactory(() -> { throw new IllegalStateException(); }); } private static void url$$setURLStreamHandlerFactory() { @@ -317,6 +367,51 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { URLConnection.setContentHandlerFactory(__ -> { throw new IllegalStateException(); }); } + private static void bindDatagramSocket() throws SocketException { + try (var socket = new DatagramSocket(null)) { + socket.bind(null); + } + } + + @SuppressForbidden(reason = "testing entitlements") + private static void connectDatagramSocket() throws SocketException { + try (var socket = new 
DummyImplementations.DummyDatagramSocket()) { + socket.connect(new InetSocketAddress(1234)); + } + } + + private static void joinGroupDatagramSocket() throws IOException { + try (var socket = new DummyImplementations.DummyDatagramSocket()) { + socket.joinGroup( + new InetSocketAddress(InetAddress.getByAddress(new byte[] { (byte) 230, 0, 0, 1 }), 1234), + NetworkInterface.getByIndex(0) + ); + } + } + + private static void leaveGroupDatagramSocket() throws IOException { + try (var socket = new DummyImplementations.DummyDatagramSocket()) { + socket.leaveGroup( + new InetSocketAddress(InetAddress.getByAddress(new byte[] { (byte) 230, 0, 0, 1 }), 1234), + NetworkInterface.getByIndex(0) + ); + } + } + + @SuppressForbidden(reason = "testing entitlements") + private static void sendDatagramSocket() throws IOException { + try (var socket = new DummyImplementations.DummyDatagramSocket()) { + socket.send(new DatagramPacket(new byte[] { 0 }, 1, InetAddress.getLocalHost(), 1234)); + } + } + + @SuppressForbidden(reason = "testing entitlements") + private static void receiveDatagramSocket() throws IOException { + try (var socket = new DummyImplementations.DummyDatagramSocket()) { + socket.receive(new DatagramPacket(new byte[1], 1, InetAddress.getLocalHost(), 1234)); + } + } + public RestEntitlementsCheckAction(String prefix) { this.prefix = prefix; } diff --git a/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml index 30fc9f0abeec..05a94f09264a 100644 --- a/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml +++ b/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,3 +1,8 @@ ALL-UNNAMED: - create_class_loader - set_https_connection_properties + - network: + actions: + - listen + - accept + - connect diff --git 
a/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml index 0a25570a9f62..0d2c66c2daa2 100644 --- a/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml +++ b/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,3 +1,8 @@ org.elasticsearch.entitlement.qa.common: - create_class_loader - set_https_connection_properties + - network: + actions: + - listen + - accept + - connect diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index ba5ccbafa70a..9b621461403d 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -22,6 +22,7 @@ import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker import org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement; import org.elasticsearch.entitlement.runtime.policy.Entitlement; import org.elasticsearch.entitlement.runtime.policy.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import org.elasticsearch.entitlement.runtime.policy.PolicyParser; @@ -44,6 +45,9 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement.ACCEPT_ACTION; +import static org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement.CONNECT_ACTION; +import static 
org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement.LISTEN_ACTION; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; /** @@ -97,7 +101,15 @@ public class EntitlementInitialization { List.of( new Scope("org.elasticsearch.base", List.of(new CreateClassLoaderEntitlement())), new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())), - new Scope("org.elasticsearch.server", List.of(new ExitVMEntitlement(), new CreateClassLoaderEntitlement())) + new Scope( + "org.elasticsearch.server", + List.of( + new ExitVMEntitlement(), + new CreateClassLoaderEntitlement(), + new NetworkEntitlement(LISTEN_ACTION | CONNECT_ACTION | ACCEPT_ACTION) + ) + ), + new Scope("org.apache.httpcomponents.httpclient", List.of(new NetworkEntitlement(CONNECT_ACTION))) ) ); // agents run without a module, so this is a special hack for the apm agent diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 686fb73e10bc..695d1c574c7c 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,16 +10,29 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.net.ContentHandlerFactory; +import java.net.DatagramPacket; +import java.net.DatagramSocket; import java.net.DatagramSocketImplFactory; import java.net.FileNameMap; +import java.net.InetAddress; +import 
java.net.MulticastSocket; +import java.net.NetworkInterface; +import java.net.Proxy; +import java.net.ProxySelector; +import java.net.ResponseCache; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.SocketAddress; import java.net.SocketImplFactory; import java.net.URL; +import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.List; @@ -310,4 +323,185 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { public void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context) { policyManager.checkChangeJVMGlobalState(callerClass); } + + @Override + public void check$java_net_ProxySelector$$setDefault(Class callerClass, ProxySelector ps) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_net_ResponseCache$$setDefault(Class callerClass, ResponseCache rc) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_net_spi_InetAddressResolverProvider$(Class callerClass) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_net_spi_URLStreamHandlerProvider$(Class callerClass) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_net_URL$(Class callerClass, String protocol, String host, int port, String file, URLStreamHandler handler) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_net_URL$(Class callerClass, URL context, String spec, URLStreamHandler handler) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_net_DatagramSocket$bind(Class callerClass, DatagramSocket that, SocketAddress addr) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket 
that, InetAddress addr) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, SocketAddress addr) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p) { + var actions = NetworkEntitlement.CONNECT_ACTION; + if (p.getAddress().isMulticastAddress()) { + actions |= NetworkEntitlement.ACCEPT_ACTION; + } + policyManager.checkNetworkAccess(callerClass, actions); + } + + @Override + public void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_DatagramSocket$joinGroup(Class caller, DatagramSocket that, SocketAddress addr, NetworkInterface ni) { + policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_DatagramSocket$leaveGroup(Class caller, DatagramSocket that, SocketAddress addr, NetworkInterface ni) { + policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, InetAddress addr) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_MulticastSocket$joinGroup(Class caller, MulticastSocket that, SocketAddress addr, NetworkInterface ni) { + policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + 
@Override + public void check$java_net_MulticastSocket$leaveGroup(Class caller, MulticastSocket that, InetAddress addr) { + policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_MulticastSocket$leaveGroup(Class caller, MulticastSocket that, SocketAddress addr, NetworkInterface ni) { + policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_MulticastSocket$send(Class callerClass, MulticastSocket that, DatagramPacket p, byte ttl) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_ServerSocket$(Class callerClass, int port) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void check$java_net_ServerSocket$(Class callerClass, int port, int backlog) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void check$java_net_ServerSocket$(Class callerClass, int port, int backlog, InetAddress bindAddr) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void check$java_net_ServerSocket$accept(Class callerClass, ServerSocket that) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_ServerSocket$implAccept(Class callerClass, ServerSocket that, Socket s) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.ACCEPT_ACTION); + } + + @Override + public void check$java_net_ServerSocket$bind(Class callerClass, ServerSocket that, SocketAddress endpoint) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void 
check$java_net_ServerSocket$bind(Class callerClass, ServerSocket that, SocketAddress endpoint, int backlog) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void check$java_net_Socket$(Class callerClass, Proxy proxy) { + if (proxy.type() == Proxy.Type.SOCKS || proxy.type() == Proxy.Type.HTTP) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION); + } + } + + @Override + public void check$java_net_Socket$(Class callerClass, String host, int port) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$(Class callerClass, InetAddress address, int port) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$(Class callerClass, String host, int port, InetAddress localAddr, int localPort) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$(Class callerClass, InetAddress address, int port, InetAddress localAddr, int localPort) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$(Class callerClass, String host, int port, boolean stream) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$(Class callerClass, InetAddress host, int port, boolean stream) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$bind(Class callerClass, Socket that, SocketAddress endpoint) 
{ + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + } + + @Override + public void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION); + } + + @Override + public void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint, int backlog) { + policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlement.java new file mode 100644 index 000000000000..9b4035cee98d --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlement.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.core.Strings; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.StringJoiner; + +import static java.util.Map.entry; + +/** + * Describes a network entitlement (sockets) with actions. 
+ */ +public class NetworkEntitlement implements Entitlement { + + public static final int LISTEN_ACTION = 0x1; + public static final int CONNECT_ACTION = 0x2; + public static final int ACCEPT_ACTION = 0x4; + + static final String LISTEN = "listen"; + static final String CONNECT = "connect"; + static final String ACCEPT = "accept"; + + private static final Map ACTION_MAP = Map.ofEntries( + entry(LISTEN, LISTEN_ACTION), + entry(CONNECT, CONNECT_ACTION), + entry(ACCEPT, ACCEPT_ACTION) + ); + + private final int actions; + + @ExternalEntitlement(parameterNames = { "actions" }, esModulesOnly = false) + public NetworkEntitlement(List actionsList) { + + int actionsInt = 0; + + for (String actionString : actionsList) { + var action = ACTION_MAP.get(actionString); + if (action == null) { + throw new IllegalArgumentException("unknown network action [" + actionString + "]"); + } + if ((actionsInt & action) == action) { + throw new IllegalArgumentException(Strings.format("network action [%s] specified multiple times", actionString)); + } + actionsInt |= action; + } + + this.actions = actionsInt; + } + + public NetworkEntitlement(int actions) { + this.actions = actions; + } + + public static String printActions(int actions) { + var joiner = new StringJoiner(","); + for (var entry : ACTION_MAP.entrySet()) { + var action = entry.getValue(); + if ((actions & action) == action) { + joiner.add(entry.getKey()); + } + } + return joiner.toString(); + } + + /** + * For the actions to match, the actions present in this entitlement must be a superset + * of the actions required by a check. 
+ * There is only one "negative" case (action required by the check but not present in the entitlement), + * and it can be expressed efficiently via this truth table: + * this.actions | requiredActions | + * 0 | 0 | 0 + * 0 | 1 | 1 --> NOT this.action AND requiredActions + * 1 | 0 | 0 + * 1 | 1 | 0 + * + * @param requiredActions the actions required to be present for a check to pass + * @return true if requiredActions are present, false otherwise + */ + public boolean matchActions(int requiredActions) { + return (~this.actions & requiredActions) == 0; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkEntitlement that = (NetworkEntitlement) o; + return actions == that.actions; + } + + @Override + public int hashCode() { + return Objects.hash(actions); + } + + @Override + public String toString() { + return "NetworkEntitlement{actions=" + actions + '}'; + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 9c45f2d42f03..aeb54d5c1156 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -52,7 +52,11 @@ public class PolicyManager { } public Stream getEntitlements(Class entitlementClass) { - return entitlementsByType.get(entitlementClass).stream().map(entitlementClass::cast); + var entitlements = entitlementsByType.get(entitlementClass); + if (entitlements == null) { + return Stream.empty(); + } + return entitlements.stream().map(entitlementClass::cast); } } @@ -171,25 +175,67 @@ public class PolicyManager { }); } + /** + * Check for operations that can modify the way network operations are handled + */ + public void checkChangeNetworkHandling(Class 
callerClass) { + checkChangeJVMGlobalState(callerClass); + } + + /** + * Check for operations that can access sensitive network information, e.g. secrets, tokens or SSL sessions + */ + public void checkReadSensitiveNetworkInformation(Class callerClass) { + neverEntitled(callerClass, "access sensitive network information"); + } + private String operationDescription(String methodName) { // TODO: Use a more human-readable description. Perhaps share code with InstrumentationServiceImpl.parseCheckerMethodName return methodName.substring(methodName.indexOf('$')); } + public void checkNetworkAccess(Class callerClass, int actions) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass, NetworkEntitlement.class); + if (entitlements.getEntitlements(NetworkEntitlement.class).anyMatch(n -> n.matchActions(actions))) { + logger.debug( + () -> Strings.format( + "Entitled: class [%s], module [%s], entitlement [network], actions [%s]", + requestingClass, + requestingClass.getModule().getName(), + NetworkEntitlement.printActions(actions) + ) + ); + return; + } + throw new NotEntitledException( + Strings.format( + "Missing entitlement: class [%s], module [%s], entitlement [network], actions [%s]", + requestingClass, + requestingClass.getModule().getName(), + NetworkEntitlement.printActions(actions) + ) + ); + } + private void checkEntitlementPresent(Class callerClass, Class entitlementClass) { var requestingClass = requestingClass(callerClass); if (isTriviallyAllowed(requestingClass)) { return; } - ModuleEntitlements entitlements = getEntitlements(requestingClass); + ModuleEntitlements entitlements = getEntitlements(requestingClass, entitlementClass); if (entitlements.hasEntitlement(entitlementClass)) { logger.debug( () -> Strings.format( "Entitled: class [%s], module [%s], entitlement [%s]", requestingClass, requestingClass.getModule().getName(), - 
entitlementClass.getSimpleName() + PolicyParser.getEntitlementTypeName(entitlementClass) ) ); return; @@ -199,19 +245,22 @@ public class PolicyManager { "Missing entitlement: class [%s], module [%s], entitlement [%s]", requestingClass, requestingClass.getModule().getName(), - entitlementClass.getSimpleName() + PolicyParser.getEntitlementTypeName(entitlementClass) ) ); } - ModuleEntitlements getEntitlements(Class requestingClass) { - return moduleEntitlementsMap.computeIfAbsent(requestingClass.getModule(), m -> computeEntitlements(requestingClass)); + ModuleEntitlements getEntitlements(Class requestingClass, Class entitlementClass) { + return moduleEntitlementsMap.computeIfAbsent( + requestingClass.getModule(), + m -> computeEntitlements(requestingClass, entitlementClass) + ); } - private ModuleEntitlements computeEntitlements(Class requestingClass) { + private ModuleEntitlements computeEntitlements(Class requestingClass, Class entitlementClass) { Module requestingModule = requestingClass.getModule(); if (isServerModule(requestingModule)) { - return getModuleScopeEntitlements(requestingClass, serverEntitlements, requestingModule.getName()); + return getModuleScopeEntitlements(requestingClass, serverEntitlements, requestingModule.getName(), "server", entitlementClass); } // plugins @@ -225,7 +274,7 @@ public class PolicyManager { } else { scopeName = requestingModule.getName(); } - return getModuleScopeEntitlements(requestingClass, pluginEntitlements, scopeName); + return getModuleScopeEntitlements(requestingClass, pluginEntitlements, scopeName, pluginName, entitlementClass); } } @@ -241,11 +290,19 @@ public class PolicyManager { private ModuleEntitlements getModuleScopeEntitlements( Class callerClass, Map> scopeEntitlements, - String moduleName + String moduleName, + String component, + Class entitlementClass ) { var entitlements = scopeEntitlements.get(moduleName); if (entitlements == null) { - logger.warn("No applicable entitlement policy for module [{}], class 
[{}]", moduleName, callerClass); + logger.warn( + "No applicable entitlement policy for entitlement [{}] in [{}], module [{}], class [{}]", + PolicyParser.getEntitlementTypeName(entitlementClass), + component, + moduleName, + callerClass + ); return ModuleEntitlements.NONE; } return ModuleEntitlements.from(entitlements); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 013acf8f22fa..ac4d4afdd97f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -37,7 +37,8 @@ public class PolicyParser { private static final Map> EXTERNAL_ENTITLEMENTS = Stream.of( FileEntitlement.class, CreateClassLoaderEntitlement.class, - SetHttpsConnectionPropertiesEntitlement.class + SetHttpsConnectionPropertiesEntitlement.class, + NetworkEntitlement.class ).collect(Collectors.toUnmodifiableMap(PolicyParser::getEntitlementTypeName, Function.identity())); protected final XContentParser policyParser; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlementTests.java new file mode 100644 index 000000000000..91051d48c365 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlementTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.hamcrest.Matchers.is; + +public class NetworkEntitlementTests extends ESTestCase { + + public void testMatchesActions() { + var listenEntitlement = new NetworkEntitlement(List.of(NetworkEntitlement.LISTEN)); + var emptyEntitlement = new NetworkEntitlement(List.of()); + var connectAcceptEntitlement = new NetworkEntitlement(List.of(NetworkEntitlement.CONNECT, NetworkEntitlement.ACCEPT)); + + assertThat(listenEntitlement.matchActions(0), is(true)); + assertThat(listenEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION), is(true)); + assertThat(listenEntitlement.matchActions(NetworkEntitlement.ACCEPT_ACTION), is(false)); + assertThat(listenEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION), is(false)); + assertThat(listenEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); + assertThat(listenEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION), is(false)); + assertThat(listenEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); + + assertThat(connectAcceptEntitlement.matchActions(0), is(true)); + assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION), is(false)); + assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.ACCEPT_ACTION), is(true)); + assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION), is(true)); + 
assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); + assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION), is(false)); + assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(true)); + + assertThat(emptyEntitlement.matchActions(0), is(true)); + assertThat(emptyEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION), is(false)); + assertThat(emptyEntitlement.matchActions(NetworkEntitlement.ACCEPT_ACTION), is(false)); + assertThat(emptyEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION), is(false)); + assertThat(emptyEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); + assertThat(emptyEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION), is(false)); + assertThat(emptyEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); + } +} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index d22c2f598e34..092813be75cc 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.sameInstance; public class PolicyManagerTests extends ESTestCase { /** * A module you can use for test cases that don't actually care about the - * entitlements module. + * entitlement module. 
*/ private static Module NO_ENTITLEMENTS_MODULE; @@ -66,7 +66,11 @@ public class PolicyManagerTests extends ESTestCase { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for the unnamed module", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals( + "No policy for the unnamed module", + ModuleEntitlements.NONE, + policyManager.getEntitlements(callerClass, Entitlement.class) + ); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); } @@ -78,7 +82,7 @@ public class PolicyManagerTests extends ESTestCase { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for this plugin", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals("No policy for this plugin", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass, Entitlement.class)); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); } @@ -90,11 +94,11 @@ public class PolicyManagerTests extends ESTestCase { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass, Entitlement.class)); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); // A second time - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass, Entitlement.class)); // Nothing new in the map assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); @@ -112,7 +116,7 @@ public class PolicyManagerTests extends ESTestCase { // Any class from the current module (unnamed) will do var 
callerClass = this.getClass(); - var entitlements = policyManager.getEntitlements(callerClass); + var entitlements = policyManager.getEntitlements(callerClass, Entitlement.class); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); } @@ -126,7 +130,11 @@ public class PolicyManagerTests extends ESTestCase { var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); var requestingModule = mockServerClass.getModule(); - assertEquals("No policy for this module in server", ModuleEntitlements.NONE, policyManager.getEntitlements(mockServerClass)); + assertEquals( + "No policy for this module in server", + ModuleEntitlements.NONE, + policyManager.getEntitlements(mockServerClass, Entitlement.class) + ); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); } @@ -145,9 +153,8 @@ public class PolicyManagerTests extends ESTestCase { // So we use a random module in the boot layer, and a random class from that module (not java.base -- it is // loaded too early) to mimic a class that would be in the server module. 
var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); - var requestingModule = mockServerClass.getModule(); - var entitlements = policyManager.getEntitlements(mockServerClass); + var entitlements = policyManager.getEntitlements(mockServerClass, Entitlement.class); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat(entitlements.hasEntitlement(ExitVMEntitlement.class), is(true)); } @@ -167,9 +174,8 @@ public class PolicyManagerTests extends ESTestCase { var layer = createLayerForJar(jar, "org.example.plugin"); var mockPluginClass = layer.findLoader("org.example.plugin").loadClass("q.B"); - var requestingModule = mockPluginClass.getModule(); - var entitlements = policyManager.getEntitlements(mockPluginClass); + var entitlements = policyManager.getEntitlements(mockPluginClass, Entitlement.class); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat( entitlements.getEntitlements(FileEntitlement.class).toList(), @@ -189,11 +195,11 @@ public class PolicyManagerTests extends ESTestCase { // Any class from the current module (unnamed) will do var callerClass = this.getClass(); - var entitlements = policyManager.getEntitlements(callerClass); + var entitlements = policyManager.getEntitlements(callerClass, Entitlement.class); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); - var cachedResult = policyManager.moduleEntitlementsMap.values().stream().findFirst().get(); - var entitlementsAgain = policyManager.getEntitlements(callerClass); + var cachedResult = policyManager.moduleEntitlementsMap.values().stream().findFirst().orElseThrow(); + var entitlementsAgain = policyManager.getEntitlements(callerClass, Entitlement.class); // Nothing new in the map assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); diff 
--git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 4d17fc92e157..1e0c31d2280b 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -52,6 +52,22 @@ public class PolicyParserTests extends ESTestCase { assertEquals(expected, parsedPolicy); } + public void testParseNetwork() throws IOException { + Policy parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - network: + actions: + - listen + - accept + - connect + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + Policy expected = new Policy( + "test-policy.yaml", + List.of(new Scope("entitlement-module-name", List.of(new NetworkEntitlement(List.of("listen", "accept", "connect"))))) + ); + assertEquals(expected, parsedPolicy); + } + public void testParseCreateClassloader() throws IOException { Policy parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index 339a4ec24ca1..43447cfa21a6 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -92,14 +92,7 @@ public class APM extends Plugin implements NetworkPlugin, TelemetryPlugin { APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING, APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING, - APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES, - // The settings below are deprecated and are currently kept as fallback. 
- APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING, - APMAgentSettings.TRACING_APM_API_KEY_SETTING, - APMAgentSettings.TRACING_APM_ENABLED_SETTING, - APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING, - APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING, - APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES + APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES ); } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index f66683a787bc..8647761e2def 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -25,9 +25,7 @@ import java.security.PrivilegedAction; import java.util.List; import java.util.Objects; import java.util.Set; -import java.util.function.Function; -import static org.elasticsearch.common.settings.Setting.Property.Deprecated; import static org.elasticsearch.common.settings.Setting.Property.NodeScope; import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic; @@ -101,9 +99,6 @@ public class APMAgentSettings { private static final String TELEMETRY_SETTING_PREFIX = "telemetry."; - // The old legacy prefix - private static final String LEGACY_TRACING_APM_SETTING_PREFIX = "tracing.apm."; - /** * Allow-list of APM agent config keys users are permitted to configure. * @see APM Java Agent Configuration @@ -248,56 +243,24 @@ public class APMAgentSettings { public static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( TELEMETRY_SETTING_PREFIX + "agent.", - LEGACY_TRACING_APM_SETTING_PREFIX + "agent.", - (namespace, qualifiedKey) -> qualifiedKey.startsWith(LEGACY_TRACING_APM_SETTING_PREFIX) - ? 
concreteAgentSetting(namespace, qualifiedKey, NodeScope, OperatorDynamic, Deprecated) - : concreteAgentSetting(namespace, qualifiedKey, NodeScope, OperatorDynamic) + null, // no fallback + (namespace, qualifiedKey) -> concreteAgentSetting(namespace, qualifiedKey, NodeScope, OperatorDynamic) ); - /** - * @deprecated in favor of TELEMETRY_TRACING_NAMES_INCLUDE_SETTING. - */ - @Deprecated - public static final Setting> TRACING_APM_NAMES_INCLUDE_SETTING = Setting.stringListSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "names.include", - OperatorDynamic, - NodeScope, - Deprecated - ); - - public static final Setting> TELEMETRY_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( + public static final Setting> TELEMETRY_TRACING_NAMES_INCLUDE_SETTING = Setting.stringListSetting( TELEMETRY_SETTING_PREFIX + "tracing.names.include", - TRACING_APM_NAMES_INCLUDE_SETTING, - Function.identity(), OperatorDynamic, NodeScope ); - /** - * @deprecated in favor of TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING. - */ - @Deprecated - public static final Setting> TRACING_APM_NAMES_EXCLUDE_SETTING = Setting.stringListSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "names.exclude", - OperatorDynamic, - NodeScope, - Deprecated - ); - - public static final Setting> TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING = Setting.listSetting( + public static final Setting> TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING = Setting.stringListSetting( TELEMETRY_SETTING_PREFIX + "tracing.names.exclude", - TRACING_APM_NAMES_EXCLUDE_SETTING, - Function.identity(), OperatorDynamic, NodeScope ); - /** - * @deprecated in favor of TELEMETRY_TRACING_SANITIZE_FIELD_NAMES. 
- */ - @Deprecated - public static final Setting> TRACING_APM_SANITIZE_FIELD_NAMES = Setting.stringListSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "sanitize_field_names", + public static final Setting> TELEMETRY_TRACING_SANITIZE_FIELD_NAMES = Setting.stringListSetting( + TELEMETRY_SETTING_PREFIX + "tracing.sanitize_field_names", List.of( "password", "passwd", @@ -313,33 +276,12 @@ public class APMAgentSettings { "set-cookie" ), OperatorDynamic, - NodeScope, - Deprecated - ); - - public static final Setting> TELEMETRY_TRACING_SANITIZE_FIELD_NAMES = Setting.listSetting( - TELEMETRY_SETTING_PREFIX + "tracing.sanitize_field_names", - TRACING_APM_SANITIZE_FIELD_NAMES, - Function.identity(), - OperatorDynamic, NodeScope ); - /** - * @deprecated in favor of TELEMETRY_TRACING_ENABLED_SETTING. - */ - @Deprecated - public static final Setting TRACING_APM_ENABLED_SETTING = Setting.boolSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "enabled", - false, - OperatorDynamic, - NodeScope, - Deprecated - ); - public static final Setting TELEMETRY_TRACING_ENABLED_SETTING = Setting.boolSetting( TELEMETRY_SETTING_PREFIX + "tracing.enabled", - TRACING_APM_ENABLED_SETTING, + false, OperatorDynamic, NodeScope ); @@ -351,33 +293,13 @@ public class APMAgentSettings { NodeScope ); - /** - * @deprecated in favor of TELEMETRY_SECRET_TOKEN_SETTING. - */ - @Deprecated - public static final Setting TRACING_APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( - LEGACY_TRACING_APM_SETTING_PREFIX + "secret_token", - null, - Deprecated - ); - public static final Setting TELEMETRY_SECRET_TOKEN_SETTING = SecureSetting.secureString( TELEMETRY_SETTING_PREFIX + "secret_token", - TRACING_APM_SECRET_TOKEN_SETTING - ); - - /** - * @deprecated in favor of TELEMETRY_API_KEY_SETTING. 
- */ - @Deprecated - public static final Setting TRACING_APM_API_KEY_SETTING = SecureSetting.secureString( - LEGACY_TRACING_APM_SETTING_PREFIX + "api_key", - null, - Deprecated + null ); public static final Setting TELEMETRY_API_KEY_SETTING = SecureSetting.secureString( TELEMETRY_SETTING_PREFIX + "api_key", - TRACING_APM_API_KEY_SETTING + null ); } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java index a60048c82a3c..551667242092 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java @@ -11,8 +11,6 @@ package org.elasticsearch.telemetry.apm.internal; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.MockSecureSettings; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; @@ -21,21 +19,13 @@ import java.util.List; import java.util.Set; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_AGENT_SETTINGS; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_API_KEY_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_SECRET_TOKEN_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING; import static 
org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_API_KEY_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_ENABLED_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; import static org.mockito.Mockito.clearInvocations; import static org.mockito.Mockito.mock; @@ -70,14 +60,6 @@ public class APMAgentSettingsTests extends ESTestCase { } } - public void testEnableTracingUsingLegacySetting() { - Settings settings = Settings.builder().put(TRACING_APM_ENABLED_SETTING.getKey(), true).build(); - apmAgentSettings.initAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "true"); - assertWarnings("[tracing.apm.enabled] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - public void testEnableMetrics() { for (boolean tracingEnabled : List.of(true, false)) { clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService()); @@ -121,14 +103,6 @@ public class APMAgentSettingsTests extends ESTestCase { } } - public void testDisableTracingUsingLegacySetting() { - Settings settings = 
Settings.builder().put(TRACING_APM_ENABLED_SETTING.getKey(), false).build(); - apmAgentSettings.initAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "false"); - assertWarnings("[tracing.apm.enabled] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - public void testDisableMetrics() { for (boolean tracingEnabled : List.of(true, false)) { clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService()); @@ -181,70 +155,18 @@ public class APMAgentSettingsTests extends ESTestCase { verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); } - public void testSetAgentsSettingsWithLegacyPrefix() { - Settings settings = Settings.builder() - .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) - .put("tracing.apm.agent.span_compression_enabled", "true") - .build(); - apmAgentSettings.initAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "true"); - verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); - assertWarnings( - "[tracing.apm.agent.span_compression_enabled] setting was deprecated in Elasticsearch and will be removed in a future release." - ); - } - /** * Check that invalid or forbidden APM agent settings are rejected. 
*/ public void testRejectForbiddenOrUnknownAgentSettings() { - List prefixes = List.of(APM_AGENT_SETTINGS.getKey(), "tracing.apm.agent."); - for (String prefix : prefixes) { - Settings settings = Settings.builder().put(prefix + "unknown", "true").build(); - Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings)); - assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]")); - } + String prefix = APM_AGENT_SETTINGS.getKey(); + Settings settings = Settings.builder().put(prefix + "unknown", "true").build(); + Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings)); + assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]")); + // though, accept / ignore nested global_labels - for (String prefix : prefixes) { - Settings settings = Settings.builder().put(prefix + "global_labels.abc", "123").build(); - APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings); - - if (prefix.startsWith("tracing.apm.agent.")) { - assertWarnings( - "[tracing.apm.agent.global_labels.abc] setting was deprecated in Elasticsearch and will be removed in a future release." 
- ); - } - } - } - - public void testTelemetryTracingNamesIncludeFallback() { - Settings settings = Settings.builder().put(TRACING_APM_NAMES_INCLUDE_SETTING.getKey(), "abc,xyz").build(); - - List included = TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.get(settings); - - assertThat(included, containsInAnyOrder("abc", "xyz")); - assertWarnings("[tracing.apm.names.include] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - - public void testTelemetryTracingNamesExcludeFallback() { - Settings settings = Settings.builder().put(TRACING_APM_NAMES_EXCLUDE_SETTING.getKey(), "abc,xyz").build(); - - List included = TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.get(settings); - - assertThat(included, containsInAnyOrder("abc", "xyz")); - assertWarnings("[tracing.apm.names.exclude] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - - public void testTelemetryTracingSanitizeFieldNamesFallback() { - Settings settings = Settings.builder().put(TRACING_APM_SANITIZE_FIELD_NAMES.getKey(), "abc,xyz").build(); - - List included = TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(settings); - - assertThat(included, containsInAnyOrder("abc", "xyz")); - assertWarnings( - "[tracing.apm.sanitize_field_names] setting was deprecated in Elasticsearch and will be removed in a future release." 
- ); + var map = APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(Settings.builder().put(prefix + "global_labels.abc", "123").build()); + assertThat(map, hasEntry("global_labels.abc", "123")); } public void testTelemetryTracingSanitizeFieldNamesFallbackDefault() { @@ -252,28 +174,6 @@ public class APMAgentSettingsTests extends ESTestCase { assertThat(included, hasItem("password")); // and more defaults } - public void testTelemetrySecretTokenFallback() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(TRACING_APM_SECRET_TOKEN_SETTING.getKey(), "verysecret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - - try (SecureString secureString = TELEMETRY_SECRET_TOKEN_SETTING.get(settings)) { - assertEquals("verysecret", secureString.toString()); - } - assertWarnings("[tracing.apm.secret_token] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - - public void testTelemetryApiKeyFallback() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(TRACING_APM_API_KEY_SETTING.getKey(), "abc"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - - try (SecureString secureString = TELEMETRY_API_KEY_SETTING.get(settings)) { - assertEquals("abc", secureString.toString()); - } - assertWarnings("[tracing.apm.api_key] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - /** * Check that invalid or forbidden APM agent settings are rejected if their last part resembles an allowed setting. 
*/ diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 58d3e6613290..fb6f99570bff 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -182,7 +182,8 @@ public class DataStreamIT extends ESIntegTestCase { String backingIndex = barDataStream.getIndices().get(0).getName(); backingIndices.add(backingIndex); - GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)) + .actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); Map mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); @@ -190,7 +191,7 @@ public class DataStreamIT extends ESIntegTestCase { backingIndex = fooDataStream.getIndices().get(0).getName(); backingIndices.add(backingIndex); - getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)).actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); @@ -214,7 +215,7 @@ public class DataStreamIT extends ESIntegTestCase { backingIndex = fooRolloverResponse.getNewIndex(); backingIndices.add(backingIndex); - getIndexResponse = 
indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)).actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); @@ -222,7 +223,7 @@ public class DataStreamIT extends ESIntegTestCase { backingIndex = barRolloverResponse.getNewIndex(); backingIndices.add(backingIndex); - getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)).actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); @@ -245,7 +246,7 @@ public class DataStreamIT extends ESIntegTestCase { expectThrows( IndexNotFoundException.class, "Backing index '" + index + "' should have been deleted.", - () -> indicesAdmin().getIndex(new GetIndexRequest().indices(index)).actionGet() + () -> indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)).actionGet() ); } } @@ -479,7 +480,8 @@ public class DataStreamIT extends ESIntegTestCase { String backingIndex = getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices().get(0).getName(); assertThat(backingIndex, backingIndexEqualTo(dataStreamName, 1)); - GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(dataStreamName)).actionGet(); + GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(dataStreamName)) + .actionGet(); 
assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); assertThat( @@ -492,7 +494,7 @@ public class DataStreamIT extends ESIntegTestCase { assertThat(backingIndex, backingIndexEqualTo(dataStreamName, 2)); assertTrue(rolloverResponse.isRolledOver()); - getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)).actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); assertThat( @@ -518,7 +520,7 @@ public class DataStreamIT extends ESIntegTestCase { expectThrows( IndexNotFoundException.class, "Backing index '" + index.getName() + "' should have been deleted.", - () -> indicesAdmin().getIndex(new GetIndexRequest().indices(index.getName())).actionGet() + () -> indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index.getName())).actionGet() ); } } @@ -596,7 +598,7 @@ public class DataStreamIT extends ESIntegTestCase { verifyResolvability(dataStreamName, indicesAdmin().prepareGetFieldMappings(dataStreamName), false); verifyResolvability(dataStreamName, indicesAdmin().preparePutMapping(dataStreamName).setSource(""" {"_doc":{"properties": {"my_field":{"type":"keyword"}}}}""", XContentType.JSON), false); - verifyResolvability(dataStreamName, indicesAdmin().prepareGetMappings(dataStreamName), false); + verifyResolvability(dataStreamName, indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, dataStreamName), false); verifyResolvability( dataStreamName, indicesAdmin().prepareUpdateSettings(dataStreamName).setSettings(Settings.builder().put("index.number_of_replicas", 0)), @@ -606,7 +608,7 @@ public class DataStreamIT extends 
ESIntegTestCase { verifyResolvability(dataStreamName, clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, dataStreamName), false); verifyResolvability(dataStreamName, clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setIndices(dataStreamName), false); verifyResolvability(dataStreamName, client().prepareFieldCaps(dataStreamName).setFields("*"), false); - verifyResolvability(dataStreamName, indicesAdmin().prepareGetIndex().addIndices(dataStreamName), false); + verifyResolvability(dataStreamName, indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(dataStreamName), false); verifyResolvability(dataStreamName, indicesAdmin().prepareOpen(dataStreamName), false); verifyResolvability(dataStreamName, indicesAdmin().prepareClose(dataStreamName), true); verifyResolvability(aliasToDataStream, indicesAdmin().prepareClose(aliasToDataStream), true); @@ -643,7 +645,7 @@ public class DataStreamIT extends ESIntegTestCase { verifyResolvability(wildcardExpression, indicesAdmin().prepareGetFieldMappings(wildcardExpression), false); verifyResolvability(wildcardExpression, indicesAdmin().preparePutMapping(wildcardExpression).setSource(""" {"_doc":{"properties": {"my_field":{"type":"keyword"}}}}""", XContentType.JSON), false); - verifyResolvability(wildcardExpression, indicesAdmin().prepareGetMappings(wildcardExpression), false); + verifyResolvability(wildcardExpression, indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, wildcardExpression), false); verifyResolvability(wildcardExpression, indicesAdmin().prepareGetSettings(wildcardExpression), false); verifyResolvability( wildcardExpression, @@ -653,7 +655,7 @@ public class DataStreamIT extends ESIntegTestCase { verifyResolvability(wildcardExpression, clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, wildcardExpression), false); verifyResolvability(wildcardExpression, clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setIndices(wildcardExpression), false); verifyResolvability(wildcardExpression, 
client().prepareFieldCaps(wildcardExpression).setFields("*"), false); - verifyResolvability(wildcardExpression, indicesAdmin().prepareGetIndex().addIndices(wildcardExpression), false); + verifyResolvability(wildcardExpression, indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(wildcardExpression), false); verifyResolvability(wildcardExpression, indicesAdmin().prepareOpen(wildcardExpression), false); verifyResolvability(wildcardExpression, indicesAdmin().prepareClose(wildcardExpression), false); verifyResolvability( @@ -1180,7 +1182,7 @@ public class DataStreamIT extends ESIntegTestCase { DataStreamTimestampFieldMapper.NAME, Map.of("enabled", true) ); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("logs-foobar").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "logs-foobar").get(); assertThat(getMappingsResponse.getMappings().size(), equalTo(2)); assertThat(getMappingsResponse.getMappings().get(backingIndex1).getSourceAsMap(), equalTo(expectedMapping)); assertThat(getMappingsResponse.getMappings().get(backingIndex2).getSourceAsMap(), equalTo(expectedMapping)); @@ -1195,7 +1197,7 @@ public class DataStreamIT extends ESIntegTestCase { .setSource("{\"properties\":{\"my_field\":{\"type\":\"keyword\"}}}", XContentType.JSON) .get(); // The mappings of all backing indices should be updated: - getMappingsResponse = indicesAdmin().prepareGetMappings("logs-foobar").get(); + getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "logs-foobar").get(); assertThat(getMappingsResponse.getMappings().size(), equalTo(2)); assertThat(getMappingsResponse.getMappings().get(backingIndex1).getSourceAsMap(), equalTo(expectedMapping)); assertThat(getMappingsResponse.getMappings().get(backingIndex2).getSourceAsMap(), equalTo(expectedMapping)); @@ -1401,7 +1403,8 @@ public class DataStreamIT extends ESIntegTestCase { } private static void assertBackingIndex(String 
backingIndex, String timestampFieldPathInMapping, Map expectedMapping) { - GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)) + .actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); Map mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); @@ -1488,7 +1491,8 @@ public class DataStreamIT extends ESIntegTestCase { assertThat(getDataStreamsResponse.getDataStreams().get(2).getDataStream().getName(), equalTo("logs-foobaz2")); assertThat(getDataStreamsResponse.getDataStreams().get(3).getDataStream().getName(), equalTo("logs-foobaz3")); - GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices("logs-bar*")).actionGet(); + GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("logs-bar*")) + .actionGet(); assertThat(getIndexResponse.getIndices(), arrayWithSize(4)); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-barbaz")); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-barfoo")); @@ -1521,7 +1525,8 @@ public class DataStreamIT extends ESIntegTestCase { .actionGet(); assertThat(getDataStreamsResponse.getDataStreams(), hasSize(0)); - GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices("logs-foobar")).actionGet(); + GetIndexResponse getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("logs-foobar")) + .actionGet(); assertThat(getIndexResponse.getIndices(), arrayWithSize(1)); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-foobar")); 
assertThat(getIndexResponse.getSettings().get("logs-foobar").get(IndexMetadata.SETTING_NUMBER_OF_REPLICAS), equalTo("0")); @@ -1657,7 +1662,7 @@ public class DataStreamIT extends ESIntegTestCase { .actionGet(); String newBackingIndexName = getDataStreamResponse.getDataStreams().get(0).getDataStream().getWriteIndex().getName(); assertThat(newBackingIndexName, backingIndexEqualTo("potato-biscuit", 2)); - indicesAdmin().prepareGetIndex().addIndices(newBackingIndexName).get(); + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(newBackingIndexName).get(); } catch (Exception e) { logger.info("--> expecting second index to be created but it has not yet been created"); fail("expecting second index to exist"); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index f6c703b96888..40bde501f0bf 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -1304,7 +1304,7 @@ public class DataStreamsSnapshotsIT extends AbstractSnapshotIntegTestCase { assertEquals(RestStatus.OK, restoreSnapshotResponse.status()); assertThat(getDataStreamInfo("*"), hasSize(3)); - assertNotNull(client.admin().indices().prepareGetIndex().setIndices(indexName).get()); + assertNotNull(client.admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(indexName).get()); } public void testRestoreDataStreamAliasWithConflictingDataStream() throws Exception { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java index c08a3548127e..c02c7ea25b12 100644 --- 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java @@ -50,7 +50,7 @@ public class DataTierDataStreamIT extends ESIntegTestCase { .setWaitForActiveShards(0) .get() .getIndex(); - var idxSettings = indicesAdmin().prepareGetIndex().addIndices(index).get().getSettings().get(dsIndexName); + var idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(index).get().getSettings().get(dsIndexName); assertThat(DataTier.TIER_PREFERENCE_SETTING.get(idxSettings), equalTo(DataTier.DATA_HOT)); logger.info("--> waiting for {} to be yellow", index); @@ -62,7 +62,7 @@ public class DataTierDataStreamIT extends ESIntegTestCase { // new index name should have the rolled over name assertNotEquals(dsIndexName, rolledOverIndexName); - idxSettings = indicesAdmin().prepareGetIndex().addIndices(index).get().getSettings().get(rolledOverIndexName); + idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(index).get().getSettings().get(rolledOverIndexName); assertThat(DataTier.TIER_PREFERENCE_SETTING.get(idxSettings), equalTo(DataTier.DATA_HOT)); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java index aa6ecf35e06f..0bba93ee6ec3 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java @@ -341,7 +341,10 @@ public class ResolveClusterDataStreamIT extends AbstractMultiClustersTestCase { DataStream fooDataStream = getDataStreamResponse.getDataStreams().get(0).getDataStream(); String backingIndex = 
fooDataStream.getIndices().get(0).getName(); backingIndices.add(backingIndex); - GetIndexResponse getIndexResponse = client.admin().indices().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + GetIndexResponse getIndexResponse = client.admin() + .indices() + .getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)) + .actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); Map mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); @@ -377,7 +380,10 @@ public class ResolveClusterDataStreamIT extends AbstractMultiClustersTestCase { DataStream barDataStream = getDataStreamResponse.getDataStreams().get(0).getDataStream(); String backingIndex = barDataStream.getIndices().get(0).getName(); backingIndices.add(backingIndex); - GetIndexResponse getIndexResponse = client.admin().indices().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet(); + GetIndexResponse getIndexResponse = client.admin() + .indices() + .getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndex)) + .actionGet(); assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue()); assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true)); Map mappings = getIndexResponse.getMappings().get(backingIndex).getSourceAsMap(); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java index 2083807b1227..855644a09e0e 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java 
@@ -104,7 +104,7 @@ public class SystemDataStreamSnapshotIT extends AbstractSnapshotIntegTestCase { } { - GetIndexResponse indicesRemaining = indicesAdmin().prepareGetIndex().addIndices("_all").get(); + GetIndexResponse indicesRemaining = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("_all").get(); assertThat(indicesRemaining.indices(), arrayWithSize(0)); assertSystemDataStreamDoesNotExist(); } @@ -236,7 +236,7 @@ public class SystemDataStreamSnapshotIT extends AbstractSnapshotIntegTestCase { assertAcked(indicesAdmin().prepareDelete("my-index")); { - GetIndexResponse indicesRemaining = indicesAdmin().prepareGetIndex().addIndices("_all").get(); + GetIndexResponse indicesRemaining = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("_all").get(); assertThat(indicesRemaining.indices(), arrayWithSize(0)); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index aad68660d2e4..434a8bced889 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -155,7 +155,7 @@ public class TSDBIndexingIT extends ESSingleNodeTestCase { } // fetch end time - var getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndexName)).actionGet(); + var getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName)).actionGet(); Instant endTime = IndexSettings.TIME_SERIES_END_TIME.get(getIndexResponse.getSettings().get(backingIndexName)); // index another doc and verify index @@ -194,7 +194,7 @@ public class TSDBIndexingIT extends ESSingleNodeTestCase { var newBackingIndexName = rolloverResponse.getNewIndex(); // index and check target index is new - getIndexResponse 
= indicesAdmin().getIndex(new GetIndexRequest().indices(newBackingIndexName)).actionGet(); + getIndexResponse = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(newBackingIndexName)).actionGet(); Instant newStartTime = IndexSettings.TIME_SERIES_START_TIME.get(getIndexResponse.getSettings().get(newBackingIndexName)); Instant newEndTime = IndexSettings.TIME_SERIES_END_TIME.get(getIndexResponse.getSettings().get(newBackingIndexName)); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java index 17e9cca07a05..a76dac5db454 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java @@ -183,7 +183,7 @@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { } // validate index: - var getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest().indices(index)).actionGet(); + var getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)).actionGet(); assertThat(getIndexResponse.getSettings().get(index).get("index.routing_path"), equalTo("[attributes.*]")); // validate mapping var mapping = getIndexResponse.mappings().get(index).getSourceAsMap(); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index f090186480b7..8026ec641d04 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -9,10 +9,6 @@ package 
org.elasticsearch.datastreams; -import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; -import org.elasticsearch.action.datastreams.autosharding.DataStreamAutoShardingService; -import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; -import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -27,12 +23,7 @@ public class DataStreamFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( - DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE, // Added in 8.12 - LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER, // Added in 8.13 - DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE, - DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 - ); + return Set.of(); } @Override diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index cb7445705537..7d5f4bbee32b 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -197,8 +197,7 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlu settings, services.client(), services.clusterService(), - errorStoreInitialisationService.get(), - services.featureService() + errorStoreInitialisationService.get() ) ); dataLifecycleInitialisationService.set( diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java index 642fa4923e07..71575ee88aa7 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java @@ -19,8 +19,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; import org.elasticsearch.health.node.DslErrorInfo; import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; @@ -45,12 +43,10 @@ public class DataStreamLifecycleHealthInfoPublisher { Setting.Property.Dynamic, Setting.Property.NodeScope ); - public static final NodeFeature DSL_HEALTH_INFO_FEATURE = new NodeFeature("health.dsl.info", true); private final Client client; private final ClusterService clusterService; private final DataStreamLifecycleErrorStore errorStore; - private final FeatureService featureService; private volatile int signallingErrorRetryInterval; private volatile int maxNumberOfErrorsToPublish; @@ -58,13 +54,11 @@ public class DataStreamLifecycleHealthInfoPublisher { Settings settings, Client client, ClusterService clusterService, - DataStreamLifecycleErrorStore errorStore, - FeatureService featureService + DataStreamLifecycleErrorStore errorStore ) { this.client = client; this.clusterService = clusterService; this.errorStore = errorStore; - this.featureService = featureService; this.signallingErrorRetryInterval = DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING.get(settings); this.maxNumberOfErrorsToPublish = DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING.get(settings); } @@ -89,9 +83,6 @@ public class DataStreamLifecycleHealthInfoPublisher { * {@link 
org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService#DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING} */ public void publishDslErrorEntries(ActionListener actionListener) { - if (featureService.clusterHasFeature(clusterService.state(), DSL_HEALTH_INFO_FEATURE) == false) { - return; - } // fetching the entries that persist in the error store for more than the signalling retry interval // note that we're reporting this view into the error store on every publishing iteration List errorEntriesToSignal = errorStore.getErrorsInfo( diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index fc6a6ffa6ad3..9779a151c829 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -67,9 +67,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; -import org.elasticsearch.datastreams.DataStreamFeatures; import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; @@ -183,13 +181,7 @@ public class DataStreamLifecycleServiceTests extends ESTestCase { () -> now, errorStore, allocationService, - new DataStreamLifecycleHealthInfoPublisher( - Settings.EMPTY, - client, - clusterService, - errorStore, - new FeatureService(List.of(new DataStreamFeatures())) - ), + new DataStreamLifecycleHealthInfoPublisher(Settings.EMPTY, client, clusterService, errorStore), 
globalRetentionSettings ); clientDelegate = null; @@ -1487,13 +1479,7 @@ public class DataStreamLifecycleServiceTests extends ESTestCase { () -> now.getAndAdd(delta), errorStore, mock(AllocationService.class), - new DataStreamLifecycleHealthInfoPublisher( - Settings.EMPTY, - getTransportRequestsRecordingClient(), - clusterService, - errorStore, - new FeatureService(List.of(new DataStreamFeatures())) - ), + new DataStreamLifecycleHealthInfoPublisher(Settings.EMPTY, getTransportRequestsRecordingClient(), clusterService, errorStore), globalRetentionSettings ); assertThat(service.getLastRunDuration(), is(nullValue())); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java index cff6127e0729..f8a2ac3c6102 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java @@ -24,10 +24,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; import org.elasticsearch.health.node.DslErrorInfo; import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; @@ -40,7 +38,6 @@ import org.junit.Before; import java.util.HashSet; import java.util.List; -import 
java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; @@ -83,13 +80,7 @@ public class DataStreamLifecycleHealthInfoPublisherTests extends ESTestCase { final Client client = getTransportRequestsRecordingClient(); errorStore = new DataStreamLifecycleErrorStore(() -> now); - dslHealthInfoPublisher = new DataStreamLifecycleHealthInfoPublisher( - Settings.EMPTY, - client, - clusterService, - errorStore, - new FeatureService(List.of(new DataStreamFeatures())) - ); + dslHealthInfoPublisher = new DataStreamLifecycleHealthInfoPublisher(Settings.EMPTY, client, clusterService, errorStore); } @After @@ -105,16 +96,6 @@ public class DataStreamLifecycleHealthInfoPublisherTests extends ESTestCase { } errorStore.recordError("testIndex", new IllegalStateException("bad state")); ClusterState stateWithHealthNode = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); - stateWithHealthNode = ClusterState.builder(stateWithHealthNode) - .nodeFeatures( - Map.of( - node1.getId(), - Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), - node2.getId(), - Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) - ) - ) - .build(); ClusterServiceUtils.setState(clusterService, stateWithHealthNode); dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { @Override @@ -143,16 +124,6 @@ public class DataStreamLifecycleHealthInfoPublisherTests extends ESTestCase { errorStore.recordError("testIndex", new IllegalStateException("bad state")); ClusterState stateNoHealthNode = ClusterStateCreationUtils.state(node1, node1, null, allNodes); - stateNoHealthNode = ClusterState.builder(stateNoHealthNode) - .nodeFeatures( - Map.of( - node1.getId(), - Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), - node2.getId(), - Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) - ) - ) - .build(); ClusterServiceUtils.setState(clusterService, stateNoHealthNode); 
dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { @Override @@ -170,16 +141,6 @@ public class DataStreamLifecycleHealthInfoPublisherTests extends ESTestCase { public void testPublishDslErrorEntriesEmptyErrorStore() { // publishes the empty error store (this is the "back to healthy" state where all errors have been fixed) ClusterState state = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); - state = ClusterState.builder(state) - .nodeFeatures( - Map.of( - node1.getId(), - Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), - node2.getId(), - Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) - ) - ) - .build(); ClusterServiceUtils.setState(clusterService, state); dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { @Override diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 9ea3bfefabdf..884adb545810 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -300,9 +300,6 @@ index without timestamp with pipeline: --- dynamic templates: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -450,9 +447,6 @@ dynamic templates: --- dynamic templates - conflicting aliases: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - do: allowed_warnings: - "index template 
[my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -549,9 +543,6 @@ dynamic templates - conflicting aliases: --- dynamic templates - conflicting aliases with top-level field: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [otel] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -632,9 +623,6 @@ dynamic templates - conflicting aliases with top-level field: --- dynamic templates with nesting: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -810,10 +798,6 @@ dynamic templates with nesting: --- dynamic templates with incremental indexing: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -1038,9 +1022,6 @@ dynamic templates with incremental indexing: --- subobject in passthrough object auto flatten: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - do: allowed_warnings: - "index template 
[my-passthrough-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-passthrough-template] will take precedence during new index creation" @@ -1108,9 +1089,6 @@ enable subobjects in passthrough object: --- passthrough objects with duplicate priority: - - requires: - cluster_features: ["mapper.pass_through_priority"] - reason: support for priority in passthrough objects - do: catch: /has a conflicting param/ indices.put_index_template: @@ -1135,9 +1113,6 @@ passthrough objects with duplicate priority: --- dimensions with ignore_malformed and ignore_above: - - requires: - cluster_features: ["mapper.keyword_dimension_ignore_above"] - reason: support for ignore_above on keyword dimensions - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -1229,9 +1204,6 @@ dimensions with ignore_malformed and ignore_above: --- non string dimension fields: - - requires: - cluster_features: ["mapper.pass_through_priority", "routing.boolean_routing_path", "mapper.boolean_dimension"] - reason: support for priority in passthrough objects - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" @@ -1339,10 +1311,6 @@ non string dimension fields: --- multi value dimensions: - - requires: - cluster_features: ["routing.multi_value_routing_path"] - reason: support for multi-value dimensions - - do: allowed_warnings: - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template 
[my-dynamic-template] will take precedence during new index creation" diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java index aa48c73cf1d7..08efe87e6fde 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java @@ -160,11 +160,6 @@ public record DatabaseConfiguration(String id, String name, Provider provider) i if (provider instanceof Maxmind maxmind) { out.writeString(maxmind.accountId); } else { - /* - * The existence of a non-Maxmind providers is gated on the feature get_database_configuration_action.multi_node, and - * get_database_configuration_action.multi_node is only available on or after - * TransportVersions.INGEST_GEO_DATABASE_PROVIDERS. - */ assert false : "non-maxmind DatabaseConfiguration.Provider [" + provider.getWriteableName() + "]"; } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java index a2705655bc20..7233765bfeda 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.geoip.DatabaseNodeService; import 
org.elasticsearch.ingest.geoip.GeoIpTaskState; import org.elasticsearch.ingest.geoip.IngestGeoIpMetadata; @@ -41,8 +40,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.ingest.IngestGeoIpFeatures.GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE; - public class TransportGetDatabaseConfigurationAction extends TransportNodesAction< GetDatabaseConfigurationAction.Request, GetDatabaseConfigurationAction.Response, @@ -50,7 +47,6 @@ public class TransportGetDatabaseConfigurationAction extends TransportNodesActio GetDatabaseConfigurationAction.NodeResponse, List> { - private final FeatureService featureService; private final DatabaseNodeService databaseNodeService; @Inject @@ -59,7 +55,6 @@ public class TransportGetDatabaseConfigurationAction extends TransportNodesActio ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - FeatureService featureService, DatabaseNodeService databaseNodeService ) { super( @@ -70,39 +65,9 @@ public class TransportGetDatabaseConfigurationAction extends TransportNodesActio GetDatabaseConfigurationAction.NodeRequest::new, threadPool.executor(ThreadPool.Names.MANAGEMENT) ); - this.featureService = featureService; this.databaseNodeService = databaseNodeService; } - @Override - protected void doExecute( - Task task, - GetDatabaseConfigurationAction.Request request, - ActionListener listener - ) { - if (featureService.clusterHasFeature(clusterService.state(), GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE) == false) { - /* - * TransportGetDatabaseConfigurationAction used to be a TransportMasterNodeAction, and not all nodes in the cluster have been - * updated. So we don't want to send node requests to the other nodes because they will blow up. Instead, we just return - * the information that we used to return from the master node (it doesn't make any difference that this might not be the master - * node, because we're only reading the cluster state). 
Because older nodes only know about the Maxmind provider type, we filter - * out all others here to avoid causing problems on those nodes. - */ - newResponseAsync( - task, - request, - createActionContext(task, request).stream() - .filter(database -> database.database().provider() instanceof DatabaseConfiguration.Maxmind) - .toList(), - List.of(), - List.of(), - listener - ); - } else { - super.doExecute(task, request, listener); - } - } - protected List createActionContext(Task task, GetDatabaseConfigurationAction.Request request) { final Set ids; if (request.getDatabaseIds().length == 0) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java index 5dda55799209..6e3910f9d5e9 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.geoip.IngestGeoIpMetadata; import org.elasticsearch.ingest.geoip.direct.PutDatabaseConfigurationAction.Request; import org.elasticsearch.injection.guice.Inject; @@ -42,8 +41,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Optional; -import static org.elasticsearch.ingest.IngestGeoIpFeatures.PUT_DATABASE_CONFIGURATION_ACTION_IPINFO; - public class TransportPutDatabaseConfigurationAction extends TransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportPutDatabaseConfigurationAction.class); @@ -61,7 +58,6 @@ public class 
TransportPutDatabaseConfigurationAction extends TransportMasterNode } }; - private final FeatureService featureService; private final MasterServiceTaskQueue updateDatabaseConfigurationTaskQueue; @Inject @@ -70,8 +66,7 @@ public class TransportPutDatabaseConfigurationAction extends TransportMasterNode ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - FeatureService featureService + IndexNameExpressionResolver indexNameExpressionResolver ) { super( PutDatabaseConfigurationAction.NAME, @@ -84,7 +79,6 @@ public class TransportPutDatabaseConfigurationAction extends TransportMasterNode AcknowledgedResponse::readFrom, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.featureService = featureService; this.updateDatabaseConfigurationTaskQueue = clusterService.createTaskQueue( "update-geoip-database-configuration-state-update", Priority.NORMAL, @@ -96,18 +90,6 @@ public class TransportPutDatabaseConfigurationAction extends TransportMasterNode protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { final String id = request.getDatabase().id(); - // if this is an ipinfo configuration, then make sure the whole cluster supports that feature - if (request.getDatabase().provider() instanceof DatabaseConfiguration.Ipinfo - && featureService.clusterHasFeature(clusterService.state(), PUT_DATABASE_CONFIGURATION_ACTION_IPINFO) == false) { - listener.onFailure( - new IllegalArgumentException( - "Unable to use ipinfo database configurations in mixed-clusters with nodes that do not support feature " - + PUT_DATABASE_CONFIGURATION_ACTION_IPINFO.id() - ) - ); - return; - } - updateDatabaseConfigurationTaskQueue.submitTask( Strings.format("update-geoip-database-configuration-[%s]", id), new UpdateDatabaseConfigurationTask(listener, request.getDatabase()), diff --git 
a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml index a1104505bc24..007c82db4c92 100644 --- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml @@ -1,9 +1,3 @@ ---- -setup: - - requires: - cluster_features: ["geoip.downloader.database.configuration", "get_database_configuration_action.multi_node"] - reason: "geoip downloader database configuration APIs added in 8.15, and updated in 8.16 to return more results" - --- teardown: - do: diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml index fd73c715a5ac..094798476952 100644 --- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/50_ip_lookup_processor.yml @@ -1,9 +1,3 @@ -setup: - - requires: - cluster_features: - - "put_database_configuration_action.ipinfo" - reason: "ipinfo support added in 8.16" - --- "Test ip_location processor with defaults": - do: diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml index e2e9a1fdb5e2..47f09392df60 100644 --- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/60_ip_location_databases.yml @@ -1,10 +1,3 @@ ---- -setup: - - 
requires: - cluster_features: - - "put_database_configuration_action.ipinfo" - reason: "ip location downloader database configuration APIs added in 8.16 to support more types" - --- teardown: - do: diff --git a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java index 553e4696af31..a9ab0c02612f 100644 --- a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java +++ b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java @@ -16,6 +16,8 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; @@ -23,7 +25,6 @@ import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolStats; @@ -49,10 +50,6 @@ import static org.hamcrest.Matchers.startsWith; * threads that wait on a phaser. This lets us verify that operations on system indices * are being directed to other thread pools.

*/ -@TestLogging( - reason = "investigate", - value = "org.elasticsearch.kibana.KibanaThreadPoolIT:DEBUG,org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor:TRACE" -) public class KibanaThreadPoolIT extends ESIntegTestCase { private static final Logger logger = LogManager.getLogger(KibanaThreadPoolIT.class); @@ -68,6 +65,8 @@ public class KibanaThreadPoolIT extends ESIntegTestCase { .put("thread_pool.write.queue_size", 1) .put("thread_pool.get.size", 1) .put("thread_pool.get.queue_size", 1) + // a rejected GET may retry on an INITIALIZING shard (the target of a relocation) and unexpectedly succeed, so block rebalancing + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) .build(); } @@ -112,7 +111,12 @@ public class KibanaThreadPoolIT extends ESIntegTestCase { } public void testBlockedThreadPoolsRejectUserRequests() throws Exception { - assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); + assertAcked( + client().admin() + .indices() + .prepareCreate(USER_INDEX) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) // avoid retrying rejected actions + ); runWithBlockedThreadPools(this::assertThreadPoolsBlocked); diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml index 25088f51e2b5..1434450b65a6 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml @@ -221,9 +221,6 @@ setup: - close_to: {hits.hits.2._score: {value: 186.34454, error: 0.01}} --- "Test hamming distance fails on float": - - requires: - cluster_features: ["script.hamming"] - reason: "support for hamming distance added in 8.15" - do: headers: 
Content-Type: application/json diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml index cdd65ca0eb29..05a10ffdbccd 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml @@ -1,7 +1,5 @@ setup: - requires: - cluster_features: ["mapper.vectors.bit_vectors"] - reason: "support for bit vectors added in 8.15" test_runner_features: headers - do: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/151_dense_vector_byte_hamming.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/151_dense_vector_byte_hamming.yml index 373f048e7be7..a6c111be681f 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/151_dense_vector_byte_hamming.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/151_dense_vector_byte_hamming.yml @@ -1,7 +1,5 @@ setup: - requires: - cluster_features: ["script.hamming"] - reason: "support for hamming distance added in 8.15" test_runner_features: headers - do: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/190_term_statistics_script_score.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/190_term_statistics_script_score.yml index f82b844f0158..3a869640993f 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/190_term_statistics_script_score.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/190_term_statistics_script_score.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: ["script.term_stats"] - reason: "support 
for term stats has been added in 8.16" - - do: indices.create: index: test-index diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/191_term_statistics_function_score.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/191_term_statistics_function_score.yml index de4d6530f4a9..3a9c71e3c2ba 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/191_term_statistics_function_score.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/191_term_statistics_function_score.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: ["script.term_stats"] - reason: "support for term stats has been added in 8.16" - - do: indices.create: index: test-index diff --git a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml index 968e93cf9fc5..175abe183106 100644 --- a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml +++ b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml @@ -251,11 +251,6 @@ setup: --- "Usage stats": - - requires: - cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - do: cluster.stats: {} diff --git a/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml b/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml index e8c34a4b6a20..d2370919297a 100644 --- a/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml +++ b/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml @@ -234,11 +234,6 @@ setup: --- "Usage stats": - - requires: - 
cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - do: cluster.stats: {} diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java index 93915e8491d5..3d7c8dd15061 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java @@ -19,13 +19,15 @@ import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import java.util.Locale; + @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3MinioBasicCredentialsRestIT extends AbstractRepositoryS3RestTestCase { - private static final String PREFIX = getIdentifierPrefix("RepositoryS3MinioBasicCredentialsRestIT"); + private static final String PREFIX = getIdentifierPrefix("RepositoryS3MinioBasicCredentialsRestIT").toLowerCase(Locale.ROOT); private static final String BUCKET = PREFIX + "bucket"; - private static final String BASE_PATH = PREFIX + "base_path"; + private static final String BASE_PATH = PREFIX + "base-path"; private static final String ACCESS_KEY = PREFIX + "access-key"; private static final String SECRET_KEY = PREFIX + "secret-key"; private static final String CLIENT = "minio_client"; diff --git a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..4c42ec110a25 --- /dev/null +++ b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,4 @@ 
+ALL-UNNAMED: + - network: + actions: + - connect diff --git a/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..f1dc1fc7755e --- /dev/null +++ b/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,4 @@ +org.apache.httpcomponents.httpclient: + - network: + actions: + - connect # for URLHttpClient diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index cad839bed955..5876945cf93b 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -79,7 +79,7 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase if (doHandshake) { super.executeHandshake(node, channel, profile, listener); } else { - assert getVersion().equals(TransportVersion.current()); + assert version.equals(TransportVersion.current()); listener.onResponse(TransportVersions.MINIMUM_COMPATIBLE); } } diff --git a/muted-tests.yml b/muted-tests.yml index 2f652f87ab28..69e6ba22b84b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -52,9 +52,6 @@ tests: - class: org.elasticsearch.xpack.transform.integration.TransformIT method: testStopWaitForCheckpoint issue: https://github.com/elastic/elasticsearch/issues/106113 -- class: org.elasticsearch.kibana.KibanaThreadPoolIT - method: testBlockedThreadPoolsRejectUserRequests - issue: https://github.com/elastic/elasticsearch/issues/113939 - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformAgnosticVariant issue: https://github.com/elastic/elasticsearch/issues/113983 @@ -216,9 +213,6 @@ 
tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/sparse_vector_search/Test sparse_vector search with query vector and pruning config} issue: https://github.com/elastic/elasticsearch/issues/119548 -- class: org.elasticsearch.index.engine.LuceneSyntheticSourceChangesSnapshotTests - method: testSkipNonRootOfNestedDocuments - issue: https://github.com/elastic/elasticsearch/issues/119553 - class: org.elasticsearch.xpack.ml.integration.ForecastIT method: testOverflowToDisk issue: https://github.com/elastic/elasticsearch/issues/117740 @@ -227,50 +221,39 @@ tests: - class: org.elasticsearch.search.profile.dfs.DfsProfilerIT method: testProfileDfs issue: https://github.com/elastic/elasticsearch/issues/119711 -- class: org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizerTests - method: testSingleMatchFunctionFilterPushdownWithStringValues {default} - issue: https://github.com/elastic/elasticsearch/issues/119720 -- class: org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizerTests - method: testSingleMatchFunctionPushdownWithCasting {default} - issue: https://github.com/elastic/elasticsearch/issues/119722 -- class: org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizerTests - method: testSingleMatchOperatorFilterPushdownWithStringValues {default} - issue: https://github.com/elastic/elasticsearch/issues/119721 -- class: org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterIT - method: testBulkOperations {p0=false} - issue: https://github.com/elastic/elasticsearch/issues/119901 -- class: org.elasticsearch.xpack.inference.InferenceCrudIT - method: testGetServicesWithCompletionTaskType - issue: https://github.com/elastic/elasticsearch/issues/119959 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testSearchableSnapshotUpgrade {p0=[9.0.0, 8.18.0, 8.18.0]} - issue: https://github.com/elastic/elasticsearch/issues/119978 -- class: 
org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testSearchableSnapshotUpgrade {p0=[9.0.0, 9.0.0, 8.18.0]} - issue: https://github.com/elastic/elasticsearch/issues/119979 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testMountSearchableSnapshot {p0=[9.0.0, 8.18.0, 8.18.0]} - issue: https://github.com/elastic/elasticsearch/issues/119550 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testMountSearchableSnapshot {p0=[9.0.0, 9.0.0, 8.18.0]} - issue: https://github.com/elastic/elasticsearch/issues/119980 -- class: org.elasticsearch.index.codec.vectors.es816.ES816HnswBinaryQuantizedVectorsFormatTests - method: testRandomExceptions - issue: https://github.com/elastic/elasticsearch/issues/119981 - class: org.elasticsearch.multi_cluster.MultiClusterYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/119983 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testMountSearchableSnapshot {p0=[9.0.0, 9.0.0, 9.0.0]} - issue: https://github.com/elastic/elasticsearch/issues/119989 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testSearchableSnapshotUpgrade {p0=[9.0.0, 9.0.0, 9.0.0]} - issue: https://github.com/elastic/elasticsearch/issues/119990 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_unattended/Test unattended put and start} issue: https://github.com/elastic/elasticsearch/issues/120019 - class: org.elasticsearch.index.mapper.IntervalThrottlerTests method: testThrottling issue: https://github.com/elastic/elasticsearch/issues/120023 +- class: org.elasticsearch.xpack.ilm.actions.SearchableSnapshotActionIT + method: testUpdatePolicyToAddPhasesYieldsInvalidActionsToBeSkipped + issue: https://github.com/elastic/elasticsearch/issues/118406 +- class: 
org.elasticsearch.xpack.ml.integration.DatafeedJobsIT + issue: https://github.com/elastic/elasticsearch/issues/120088 +- class: org.elasticsearch.xpack.searchablesnapshots.minio.MinioSearchableSnapshotsIT + issue: https://github.com/elastic/elasticsearch/issues/120101 +- class: org.elasticsearch.repositories.s3.S3RepositoryThirdPartyTests + issue: https://github.com/elastic/elasticsearch/issues/120115 +- class: org.elasticsearch.repositories.s3.RepositoryS3MinioBasicCredentialsRestIT + issue: https://github.com/elastic/elasticsearch/issues/120117 +- class: org.elasticsearch.repositories.blobstore.testkit.analyze.MinioRepositoryAnalysisRestIT + issue: https://github.com/elastic/elasticsearch/issues/118548 +- class: org.elasticsearch.xpack.security.QueryableReservedRolesIT + method: testConfiguredReservedRolesAfterClosingAndOpeningIndex + issue: https://github.com/elastic/elasticsearch/issues/120127 +- class: org.elasticsearch.oldrepos.OldRepositoryAccessIT + method: testOldRepoAccess + issue: https://github.com/elastic/elasticsearch/issues/120148 +- class: org.elasticsearch.oldrepos.OldRepositoryAccessIT + method: testOldSourceOnlyRepoAccess + issue: https://github.com/elastic/elasticsearch/issues/120080 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + method: test {lookup-join.MvJoinKeyFromRow ASYNC} + issue: https://github.com/elastic/elasticsearch/issues/120242 # Examples: # diff --git a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..4c42ec110a25 --- /dev/null +++ b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,4 @@ +ALL-UNNAMED: + - network: + actions: + - connect diff --git a/plugins/mapper-annotated-text/src/main/java/module-info.java b/plugins/mapper-annotated-text/src/main/java/module-info.java index 13f2bd66418b..58aca0d2857f 100644 --- 
a/plugins/mapper-annotated-text/src/main/java/module-info.java +++ b/plugins/mapper-annotated-text/src/main/java/module-info.java @@ -15,6 +15,4 @@ module org.elasticsearch.index.mapper.annotatedtext { requires org.apache.lucene.highlighter; // exports nothing - - provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.index.mapper.annotatedtext.Features; } diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 33b5db1c4662..4b2006430b89 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -22,7 +22,6 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -64,8 +63,6 @@ import java.util.regex.Pattern; **/ public class AnnotatedTextFieldMapper extends FieldMapper { - public static final NodeFeature SYNTHETIC_SOURCE_SUPPORT = new NodeFeature("mapper.annotated_text.synthetic_source", true); - public static final String CONTENT_TYPE = "annotated_text"; private static Builder builder(FieldMapper in) { diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/Features.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/Features.java deleted file mode 100644 index 51a2d2bbe1d4..000000000000 --- 
a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/Features.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.index.mapper.annotatedtext; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -/** - * Provides features for annotated text mapper. - */ -public class Features implements FeatureSpecification { - @Override - public Set getFeatures() { - return Set.of( - AnnotatedTextFieldMapper.SYNTHETIC_SOURCE_SUPPORT // Added in 8.15 - ); - } -} diff --git a/plugins/mapper-annotated-text/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/plugins/mapper-annotated-text/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 1fc11da18fc3..000000000000 --- a/plugins/mapper-annotated-text/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,10 +0,0 @@ -# - # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - # or more contributor license agreements. Licensed under the "Elastic License - # 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - # Public License v 1"; you may not use this file except in compliance with, at - # your election, the "Elastic License 2.0", the "GNU Affero General Public - # License v3.0 only", or the "Server Side Public License, v 1". 
-# - -org.elasticsearch.index.mapper.annotatedtext.Features diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java index c2251910c312..435849821691 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java @@ -91,7 +91,7 @@ public class SizeMappingIT extends ESIntegTestCase { "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s", index ); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(index).get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, index).get(); Map mappingSource = getMappingsResponse.getMappings().get(index).getSourceAsMap(); assertThat(errMsg, mappingSource, hasKey("_size")); String sizeAsString = mappingSource.get("_size").toString(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index d9adec47ff48..30367bf55d8c 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -11,7 +11,6 @@ package org.elasticsearch.upgrades; import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.Request; import org.elasticsearch.cluster.metadata.DesiredNode; @@ -84,7 +83,7 @@ public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { 
randomDoubleProcessorCount(), ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() + null ) ) .toList(); @@ -96,7 +95,7 @@ public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() + null ); }).toList(); } diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java index ce514c5f1b1e..c48ae9ba1843 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java @@ -12,12 +12,15 @@ package org.elasticsearch.http; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.template.get.GetComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetComposableIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; +import org.elasticsearch.action.admin.indices.template.post.SimulateIndexTemplateAction; +import 
org.elasticsearch.action.admin.indices.template.post.SimulateTemplateAction; import org.elasticsearch.action.support.CancellableActionTestPlugin; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.RefCountingListener; @@ -81,6 +84,25 @@ public class RestActionCancellationIT extends HttpSmokeTestCase { runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_index_template"), GetComposableIndexTemplateAction.NAME); } + public void testSimulateTemplateCancellation() { + runRestActionCancellationTest( + new Request(HttpPost.METHOD_NAME, "/_index_template/_simulate/random_index_template"), + SimulateTemplateAction.NAME + ); + } + + public void testSimulateIndexTemplateCancellation() { + createIndex("test"); + runRestActionCancellationTest( + new Request(HttpPost.METHOD_NAME, "/_index_template/_simulate_index/test"), + SimulateIndexTemplateAction.NAME + ); + } + + public void testClusterGetSettingsCancellation() { + runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_cluster/settings"), ClusterGetSettingsAction.NAME); + } + private void runRestActionCancellationTest(Request request, String actionName) { final var node = usually() ? 
internalCluster().getRandomNodeName() : internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index d4843fb15288..4a5ceeb66f66 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -222,10 +222,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.mapping.validation.templates"] - reason: "ingest simulate index mapping validation added in 8.16" - - do: headers: Content-Type: application/json @@ -313,10 +309,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.component.template.substitutions"] - reason: "ingest simulate component template substitutions added in 8.16" - - do: headers: Content-Type: application/json @@ -494,10 +486,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.component.template.substitutions"] - reason: "ingest simulate component template substitutions added in 8.16" - - do: headers: Content-Type: application/json @@ -617,10 +605,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.component.template.substitutions"] - reason: "ingest simulate component template substitutions added in 8.16" - - do: headers: Content-Type: application/json @@ -816,10 +800,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.index.template.substitutions"] - reason: "ingest simulate index template substitutions added in 8.16" - - do: headers: Content-Type: application/json @@ -1010,10 +990,6 @@ setup: - headers - allowed_warnings - - requires: - 
cluster_features: ["simulate.index.template.substitutions"] - reason: "ingest simulate component template substitutions added in 8.16" - - do: headers: Content-Type: application/json @@ -1227,10 +1203,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.mapping.addition"] - reason: "ingest simulate mapping addition added in 8.17" - - do: headers: Content-Type: application/json @@ -1463,10 +1435,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.mapping.addition"] - reason: "ingest simulate mapping addition added in 8.17" - - do: indices.put_template: name: my-legacy-template @@ -1584,10 +1552,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.support.non.template.mapping"] - reason: "ingest simulate support for indices with mappings that didn't come from templates added in 8.17" - # A global match-everything legacy template is added to the cluster sometimes (rarely). We have to get rid of this template if it exists # because this test is making sure we get correct behavior when an index matches *no* template: - do: diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 1da8e906582b..e4b46b98cedd 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -85,4 +85,6 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: pre-filter across multiple aliases", "waiting for #118774 backport") task.skipTest("search.vectors/160_knn_query_missing_params/kNN search in a dis_max query - missing num_candidates", "waiting for #118774 backport") task.skipTest("search.highlight/30_max_analyzed_offset/Plain highlighter with max_analyzed_offset < 0 should FAIL", "semantics of test has changed") + task.skipTest("indices.create/10_basic/Create lookup index", "default auto_expand_replicas was removed") + task.skipTest("indices.create/10_basic/Create lookup index with 
one shard", "default auto_expand_replicas was removed") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json index 5862804257c6..5004ab8de697 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json @@ -26,7 +26,7 @@ }, "master_timeout":{ "type":"time", - "description":"Explicit operation timeout for connection to master node" + "description":"Timeout for waiting for new cluster state in case it is blocked" }, "timeout":{ "type":"time", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json index 96d477160b27..aa5a3dc0a791 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json @@ -28,6 +28,11 @@ ] }, "params": { + "hard": { + "type": "boolean", + "default": false, + "description": "If true, the connector doc is deleted. If false, connector doc is marked as deleted (soft-deleted)." + }, "delete_sync_jobs": { "type": "boolean", "default": false, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.unfreeze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.unfreeze.json deleted file mode 100644 index 2327519ff281..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.unfreeze.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "indices.unfreeze":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/unfreeze-index-api.html", - "description":"Unfreezes an index. When a frozen index is unfrozen, the index goes through the normal recovery process and becomes writeable again." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/{index}/_unfreeze", - "methods":[ - "POST" - ], - "parts":{ - "index":{ - "type":"string", - "description":"The name of the index to unfreeze" - } - }, - "deprecated":{ - "version":"7.14.0", - "description":"Frozen indices are deprecated because they provide no benefit given improvements in heap memory utilization. They will be removed in a future release." - } - } - ] - }, - "params":{ - "timeout":{ - "type":"time", - "description":"Explicit operation timeout" - }, - "master_timeout":{ - "type":"time", - "description":"Specify timeout for connection to master" - }, - "ignore_unavailable":{ - "type":"boolean", - "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" - }, - "allow_no_indices":{ - "type":"boolean", - "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" - }, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "default":"closed", - "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." - }, - "wait_for_active_shards":{ - "type":"string", - "description":"Sets the number of active shards to wait for before the operation returns." 
- } - } - } -} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/70_index_mode.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/70_index_mode.yml index 9da6d2c5f086..ce3f7f019839 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/70_index_mode.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/70_index_mode.yml @@ -1,9 +1,5 @@ --- setup: - - requires: - cluster_features: "mapper.query_index_mode" - reason: "require index_mode" - - do: indices.create: index: test_metrics diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml index 13f6ca58ea29..a0061272a2c2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml @@ -1014,10 +1014,6 @@ flattened field: --- flattened field with ignore_above: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -1070,10 +1066,6 @@ flattened field with ignore_above: --- flattened field with ignore_above and arrays: - - requires: - cluster_features: ["mapper.flattened.ignore_above_with_arrays_support"] - reason: requires support of ignore_above synthetic source with arrays - - do: indices.create: index: test @@ -1127,10 +1119,6 @@ flattened field with ignore_above and arrays: --- completion: - - requires: - cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml index 
414c24cfffd7..7b8f785a2cb9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml @@ -2,7 +2,6 @@ "Metrics object indexing": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] reason: requires supporting subobjects auto setting - do: @@ -69,7 +68,6 @@ "Root with metrics": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] reason: requires supporting subobjects auto setting - do: @@ -131,7 +129,6 @@ "Metrics object indexing with synthetic source": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] reason: added in 8.4.0 - do: @@ -201,7 +198,6 @@ "Root without subobjects with synthetic source": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] reason: added in 8.4.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml index d0e1759073e1..8645c91a51ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -171,7 +171,6 @@ index: test_lookup - match: { test_lookup.settings.index.number_of_shards: "1"} - - match: { test_lookup.settings.index.auto_expand_replicas: "0-all"} --- "Create lookup index with one shard": @@ -196,7 +195,6 @@ index: test_lookup - match: { test_lookup.settings.index.number_of_shards: "1"} - - match: { test_lookup.settings.index.auto_expand_replicas: "0-all"} --- "Create lookup index with two shards": 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index 5003f6df79a1..096ccbce9a58 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -5,10 +5,6 @@ setup: --- object with unmapped fields: - - requires: - cluster_features: ["mapper.track_ignored_source", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -56,10 +52,6 @@ object with unmapped fields: --- unmapped arrays: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -102,10 +94,6 @@ unmapped arrays: --- nested object with unmapped fields: - - requires: - cluster_features: ["mapper.track_ignored_source", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -153,10 +141,6 @@ nested object with unmapped fields: --- empty object with unmapped fields: - - requires: - cluster_features: ["mapper.track_ignored_source", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -314,10 +298,6 @@ disabled object contains array: --- disabled subobject: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -353,10 +333,6 @@ disabled subobject: --- disabled subobject with array: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -393,10 +369,6 @@ disabled subobject with array: --- mixed disabled and enabled objects: - - requires: - cluster_features: 
["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -441,7 +413,7 @@ mixed disabled and enabled objects: --- object with dynamic override: - requires: - cluster_features: ["mapper.ignored_source.dont_expand_dots", "mapper.bwc_workaround_9_0"] + cluster_features: ["mapper.ignored_source.dont_expand_dots"] reason: requires tracking ignored source - do: @@ -488,10 +460,6 @@ object with dynamic override: --- subobject with dynamic override: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -537,10 +505,6 @@ subobject with dynamic override: --- object array in object with dynamic override: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -592,10 +556,6 @@ object array in object with dynamic override: --- value array in object with dynamic override: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -636,10 +596,6 @@ value array in object with dynamic override: --- nested object: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -682,10 +638,6 @@ nested object: --- nested object next to regular: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -729,10 +681,6 @@ nested object next to regular: --- nested object with disabled: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -818,10 +766,6 @@ nested object with disabled: --- doubly nested object: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires 
tracking ignored source - - do: indices.create: index: test @@ -914,10 +858,6 @@ doubly nested object: --- subobjects auto: - - requires: - cluster_features: ["mapper.subobjects_auto", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source and supporting subobjects auto setting - - do: indices.create: index: test @@ -1003,10 +943,6 @@ subobjects auto: --- synthetic_source with copy_to: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1140,10 +1076,6 @@ synthetic_source with copy_to: --- synthetic_source with disabled doc_values: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires disabled doc_values support in synthetic source - - do: indices.create: index: test @@ -1224,10 +1156,6 @@ synthetic_source with disabled doc_values: --- fallback synthetic_source for text field: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires disabled doc_values support in synthetic source - - do: indices.create: index: test @@ -1259,10 +1187,6 @@ fallback synthetic_source for text field: --- synthetic_source with copy_to and ignored values: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1328,10 +1252,6 @@ synthetic_source with copy_to and ignored values: --- synthetic_source with copy_to field having values in source: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1392,10 +1312,6 @@ synthetic_source with copy_to field having values in source: --- synthetic_source with ignored source field using copy_to: - - requires: - cluster_features: 
["mapper.source.synthetic_source_copy_to_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1457,10 +1373,6 @@ synthetic_source with ignored source field using copy_to: --- synthetic_source with copy_to field from dynamic template having values in source: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1555,7 +1467,6 @@ synthetic_source with copy_to field from dynamic template having values in sourc --- synthetic_source with copy_to and invalid values for copy: - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_fix"] reason: requires copy_to support in synthetic source test_runner_features: "contains" @@ -1592,10 +1503,6 @@ synthetic_source with copy_to and invalid values for copy: --- synthetic_source with copy_to pointing inside object: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1697,10 +1604,6 @@ synthetic_source with copy_to pointing inside object: --- synthetic_source with copy_to pointing to ambiguous field: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1745,10 +1648,6 @@ synthetic_source with copy_to pointing to ambiguous field: --- synthetic_source with copy_to pointing to ambiguous field and subobjects false: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1794,10 +1693,6 @@ synthetic_source with copy_to pointing to ambiguous field and subobjects false: --- synthetic_source with copy_to pointing to ambiguous field and subobjects 
auto: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test @@ -1845,7 +1740,6 @@ synthetic_source with copy_to pointing to ambiguous field and subobjects auto: synthetic_source with copy_to pointing at dynamic field: - requires: test_runner_features: contains - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] reason: requires copy_to support in synthetic source - do: @@ -1931,10 +1825,6 @@ synthetic_source with copy_to pointing at dynamic field: --- synthetic_source with copy_to pointing inside dynamic object: - - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml index 095665e9337b..e51d527593d4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml @@ -5,10 +5,6 @@ setup: --- object param - store complex object: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -70,10 +66,6 @@ object param - store complex object: --- object param - object array: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -135,10 +127,6 @@ object param - object array: --- object param - object array within array: - - requires: - cluster_features: 
["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -179,10 +167,6 @@ object param - object array within array: --- object param - no object array: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -222,10 +206,6 @@ object param - no object array: --- object param - field ordering in object array: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -272,10 +252,6 @@ object param - field ordering in object array: --- object param - nested object array next to other fields: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -330,7 +306,7 @@ object param - nested object array next to other fields: --- object param - nested object with stored array: - requires: - cluster_features: ["mapper.ignored_source.always_store_object_arrays_in_nested", "mapper.bwc_workaround_9_0"] + cluster_features: ["mapper.ignored_source.always_store_object_arrays_in_nested"] reason: requires fix to object array handling - do: @@ -379,10 +355,6 @@ object param - nested object with stored array: --- index param - nested array within array: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -427,10 +399,6 @@ index param - nested array within array: --- # 112156 stored field under object with store_array_source: - - requires: - cluster_features: ["mapper.source.synthetic_source_stored_fields_advance_fix", "mapper.bwc_workaround_9_0"] - reason: requires bug fix to be implemented - - do: indices.create: 
index: test @@ -477,10 +445,6 @@ stored field under object with store_array_source: --- field param - keep root array: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -535,10 +499,6 @@ field param - keep root array: --- field param - keep nested array: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -605,7 +565,6 @@ field param - keep nested array: field param - keep root singleton fields: - requires: test_runner_features: close_to - cluster_features: ["mapper.synthetic_source_keep"] reason: requires keeping singleton source - do: @@ -695,7 +654,6 @@ field param - keep root singleton fields: field param - keep nested singleton fields: - requires: test_runner_features: close_to - cluster_features: ["mapper.synthetic_source_keep"] reason: requires keeping singleton source - do: @@ -776,10 +734,6 @@ field param - keep nested singleton fields: --- field param - nested array within array: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires tracking ignored source - - do: indices.create: index: test @@ -820,10 +774,6 @@ field param - nested array within array: --- index param - root arrays: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires keeping array source - - do: indices.create: index: test @@ -900,10 +850,6 @@ index param - root arrays: --- index param - dynamic root arrays: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -952,10 +898,6 @@ index param - dynamic root arrays: --- index param - object array within array: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -1001,10 +943,6 @@ 
index param - object array within array: --- index param - no object array: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -1045,10 +983,6 @@ index param - no object array: --- index param - field ordering: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -1095,10 +1029,6 @@ index param - field ordering: --- index param - nested arrays: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -1162,10 +1092,6 @@ index param - nested arrays: --- index param - nested object with stored array: - - requires: - cluster_features: ["mapper.synthetic_source_keep"] - reason: requires keeping array source - - do: indices.create: index: test @@ -1213,10 +1139,6 @@ index param - nested object with stored array: --- index param - flattened fields: - - requires: - cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] - reason: requires keeping array source - - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml index 3d82539944a9..89816be5ca8e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml @@ -453,8 +453,6 @@ --- "Composable index templates that include subobjects: auto at root": - requires: - cluster_features: ["mapper.subobjects_auto"] - reason: "https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" test_runner_features: "allowed_warnings" - do: @@ -504,8 +502,6 @@ --- 
"Composable index templates that include subobjects: auto on arbitrary field": - requires: - cluster_features: ["mapper.subobjects_auto"] - reason: "https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" test_runner_features: "allowed_warnings" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml index c88d638199db..d07d03cb7146 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml @@ -1,8 +1,5 @@ --- sort doc with nested object: - - requires: - cluster_features: ["mapper.index_sorting_on_nested"] - reason: uses index sorting on nested fields - do: indices.create: index: test @@ -66,9 +63,6 @@ sort doc with nested object: --- sort doc on nested field: - - requires: - cluster_features: [ "mapper.index_sorting_on_nested" ] - reason: uses index sorting on nested fields - do: catch: /cannot apply index sort to field \[nested_field\.foo\] under nested object \[nested_field\]/ indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index 07af3fb52b92..2a31b3bd387c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -312,7 +312,6 @@ override sort mode settings: --- override sort field using nested field type in sorting: - requires: - cluster_features: ["mapper.index_sorting_on_nested"] test_runner_features: [ capabilities ] capabilities: - method: PUT @@ -358,9 +357,6 @@ override sort field using nested field type in sorting: --- override sort field using nested field type: - - requires: - cluster_features: 
["mapper.index_sorting_on_nested"] - reason: "Support for index sorting on indexes with nested objects required" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/90_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/90_synthetic_source.yml index 084f104932d9..8485aba0ecc6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/90_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/90_synthetic_source.yml @@ -55,9 +55,6 @@ keyword: --- keyword with normalizer: - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: indices.create: index: test-keyword-with-normalizer diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml index 9d6e8da8c1e1..2a14c291d5d3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml @@ -417,7 +417,6 @@ - requires: test_runner_features: [arbitrary_key] - cluster_features: ["mapper.query_index_mode"] reason: "_ignored_source added to mappings" - do: @@ -511,10 +510,6 @@ --- "Lucene segment level fields stats": - - requires: - cluster_features: ["mapper.segment_level_fields_stats"] - reason: "segment level fields stats" - - do: indices.create: index: index1 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml index 3ec854e93d82..20e9d92a3608 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml @@ -1,8 +1,6 @@ --- "Allocation stats": - requires: - cluster_features: ["stats.include_disk_thresholds"] - reason: "fs watermark stats was added in 8.15.0" test_runner_features: [arbitrary_key] - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml index 3432a1e34c01..6ca17cc9cdce 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml @@ -520,10 +520,6 @@ setup: --- "Null bounds": - - requires: - cluster_features: ["mapper.range.null_values_off_by_one_fix"] - reason: fixed in 8.15.0 - - do: index: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/60_unified_matched_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/60_unified_matched_fields.yml index bd14fb182ac5..94db54d15294 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/60_unified_matched_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/60_unified_matched_fields.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: 'unified_highlighter_matched_fields' - reason: 'test requires unified highlighter to support matched_fields' - - do: indices.create: index: index1 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/101_knn_nested_search_bits.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/101_knn_nested_search_bits.yml index a3d920d903ae..bc4e262ea53c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/101_knn_nested_search_bits.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/101_knn_nested_search_bits.yml @@ -1,8 +1,6 @@ setup: - requires: - cluster_features: "mapper.vectors.bit_vectors" test_runner_features: close_to - reason: 'bit vectors added in 8.15' - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml index 3d4841a16d82..cffc12a8d24a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml @@ -59,9 +59,6 @@ setup: --- "Simple knn query": - - requires: - cluster_features: "search.vectors.k_param_supported" - reason: 'k param for knn as query is required' - do: search: index: my_index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml index f6538b573809..c92c88df9164 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml @@ -1,8 +1,6 @@ # test how knn query interact with other queries setup: - requires: - cluster_features: "search.vectors.k_param_supported" - reason: 'k param for knn as query is required' test_runner_features: close_to - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml index 3f81c0044d17..abde3e86dd05 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_bbq_hnsw.yml @@ -1,7 +1,4 @@ setup: - - requires: - cluster_features: "mapper.vectors.bbq" - reason: 'kNN float to better-binary quantization is required' - do: indices.create: index: bbq_hnsw diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_half_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_half_byte_quantized.yml index baf568762dd1..9b27aea4b1db 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_half_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_half_byte_quantized.yml @@ -1,7 +1,4 @@ setup: - - requires: - cluster_features: "mapper.vectors.int4_quantization" - reason: 'kNN float to half-byte quantization is required' - do: indices.create: index: hnsw_byte_quantized diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml index 0bc111576c2a..2541de7023bf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_bbq_flat.yml @@ -1,7 +1,4 @@ setup: - - requires: - cluster_features: "mapper.vectors.bbq" - reason: 'kNN float to better-binary quantization is required' - do: indices.create: index: bbq_flat diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml index 
0e0180e58fd9..f9f8d56e1d9c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml @@ -1,7 +1,4 @@ setup: - - requires: - cluster_features: "mapper.vectors.int4_quantization" - reason: 'kNN float to half-byte quantization is required' - do: indices.create: index: int4_flat diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml index 680433a5945f..ef2ae3ba7ee0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: "mapper.vectors.bit_vectors" - reason: 'mapper.vectors.bit_vectors' - - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit_flat.yml index 783f08a5d4ff..07261e6a30c7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit_flat.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: "mapper.vectors.bit_vectors" - reason: 'mapper.vectors.bit_vectors' - - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index 44d966b76f34..8915325c3a67 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -1128,10 +1128,6 @@ fetch geo_point: --- "Test with subobjects: auto": - - requires: - cluster_features: "mapper.subobjects_auto" - reason: requires support for subobjects auto setting - - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/530_ignore_above_stored_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/530_ignore_above_stored_source.yml index 1730a49f743d..7e00cbb01c58 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/530_ignore_above_stored_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/530_ignore_above_stored_source.yml @@ -1,8 +1,5 @@ --- ignore_above mapping level setting: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test @@ -42,9 +39,6 @@ ignore_above mapping level setting: --- ignore_above mapping level setting on arrays: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test @@ -84,9 +78,6 @@ ignore_above mapping level setting on arrays: --- ignore_above mapping overrides setting: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test @@ -128,9 +119,6 @@ ignore_above mapping overrides setting: --- ignore_above mapping overrides setting on arrays: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test @@ -172,9 +160,6 @@ ignore_above mapping overrides setting on 
arrays: --- date ignore_above index level setting: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml index 772c3c24170c..045f757b0830 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml @@ -5,9 +5,6 @@ setup: --- ignore_above mapping level setting: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test @@ -47,9 +44,6 @@ ignore_above mapping level setting: --- ignore_above mapping level setting on arrays: - - requires: - cluster_features: [ "mapper.flattened.ignore_above_with_arrays_support" ] - reason: requires support of ignore_above with arrays for flattened fields - do: indices.create: index: test @@ -90,9 +84,6 @@ ignore_above mapping level setting on arrays: --- ignore_above mapping overrides setting: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test @@ -135,9 +126,6 @@ ignore_above mapping overrides setting: --- ignore_above mapping overrides setting on arrays: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/550_ignore_above_invalid.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/550_ignore_above_invalid.yml index 3c29845871fe..6e711ee143b0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/550_ignore_above_invalid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/550_ignore_above_invalid.yml @@ -16,9 +16,6 @@ ignore_above index setting negative value: --- keyword ignore_above mapping setting negative value: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: catch: bad_request indices.create: @@ -32,9 +29,6 @@ keyword ignore_above mapping setting negative value: --- flattened ignore_above mapping setting negative value: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: catch: bad_request indices.create: @@ -48,9 +42,6 @@ flattened ignore_above mapping setting negative value: --- wildcard ignore_above mapping setting negative value: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: catch: bad_request indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml index a4a9b1aaecb2..71e0c2d147c1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml @@ -1,8 +1,5 @@ --- flattened ignore_above single-value field: - - requires: - cluster_features: [ "flattened.ignore_above_support" ] - reason: introduce ignore_above support in flattened fields - do: indices.create: index: test @@ -65,9 +62,6 @@ flattened ignore_above single-value field: --- 
flattened ignore_above multi-value field: - - requires: - cluster_features: [ "flattened.ignore_above_support" ] - reason: introduce ignore_above support in flattened fields - do: indices.create: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml index da0f00d96053..70a3b0253c78 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml @@ -119,10 +119,6 @@ setup: - skip: features: headers - - requires: - cluster_features: ["simulate.mapping.validation"] - reason: "ingest simulate index mapping validation added in 8.16" - - do: headers: Content-Type: application/json @@ -265,10 +261,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.mapping.validation.templates"] - reason: "ingest simulate index mapping validation added in 8.16" - - do: indices.put_template: name: v1_template @@ -401,10 +393,6 @@ setup: - headers - allowed_warnings - - requires: - cluster_features: ["simulate.mapping.validation.templates"] - reason: "ingest simulate index mapping validation added in 8.16" - - do: allowed_warnings: - "index template [my-template1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] will take precedence during new index creation" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml index 616afd3cf67a..1e841c8893fc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml @@ -122,8 
+122,6 @@ missing dimension on routing path field: multi-value routing path field succeeds: - requires: test_runner_features: close_to - cluster_features: ["routing.multi_value_routing_path"] - reason: support for multi-value dimensions - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index beba6f2752a1..5a5ae03ab938 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -65,9 +65,6 @@ setup: --- generates a consistent id: - - requires: - cluster_features: "tsdb.ts_routing_hash_doc_value_parse_byte_ref" - reason: _tsid routing hash doc value parsing has been fixed - do: bulk: refresh: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml index dae50704dd0d..a8d256bbc097 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml @@ -340,9 +340,6 @@ sort by tsid: --- aggs by index_mode: - - requires: - cluster_features: ["mapper.query_index_mode"] - reason: require _index_mode metadata field - do: search: index: test diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 420f6427a55e..68e65b16aa3a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -519,7 +519,7 @@ public class IndicesRequestIT extends ESIntegTestCase { public void testGetMappings() { 
interceptTransportActions(GetMappingsAction.NAME); - GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(randomIndicesOrAliases()); + GetMappingsRequest getMappingsRequest = new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(randomIndicesOrAliases()); internalCluster().coordOnlyNodeClient().admin().indices().getMappings(getMappingsRequest).actionGet(); clearInterceptedActions(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java index ea7cec710e31..e46a0e2ab65e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java @@ -71,7 +71,7 @@ public class AutoCreateSystemIndexIT extends ESIntegTestCase { CreateIndexRequest request = new CreateIndexRequest(PRIMARY_INDEX_NAME); client().execute(AutoCreateAction.INSTANCE, request).get(); - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices(PRIMARY_INDEX_NAME).get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(PRIMARY_INDEX_NAME).get(); assertThat(response.indices().length, is(1)); assertThat(response.aliases().size(), is(1)); assertThat(response.aliases().get(PRIMARY_INDEX_NAME).size(), is(1)); @@ -85,7 +85,7 @@ public class AutoCreateSystemIndexIT extends ESIntegTestCase { CreateIndexRequest request = new CreateIndexRequest(INDEX_NAME); client().execute(AutoCreateAction.INSTANCE, request).get(); - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices(PRIMARY_INDEX_NAME).get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(PRIMARY_INDEX_NAME).get(); assertThat(response.indices().length, is(1)); 
assertThat(response.aliases().size(), is(1)); assertThat(response.aliases().get(PRIMARY_INDEX_NAME).size(), is(1)); @@ -99,7 +99,7 @@ public class AutoCreateSystemIndexIT extends ESIntegTestCase { CreateIndexRequest request = new CreateIndexRequest(INDEX_NAME + "-2"); client().execute(AutoCreateAction.INSTANCE, request).get(); - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices(INDEX_NAME + "-2").get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(INDEX_NAME + "-2").get(); assertThat(response.indices().length, is(1)); assertThat(response.aliases().size(), is(1)); assertThat(response.aliases().get(INDEX_NAME + "-2").size(), is(1)); @@ -144,7 +144,9 @@ public class AutoCreateSystemIndexIT extends ESIntegTestCase { CreateIndexRequest request = new CreateIndexRequest(UnmanagedSystemIndexTestPlugin.SYSTEM_INDEX_NAME); client().execute(AutoCreateAction.INSTANCE, request).get(); - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices(UnmanagedSystemIndexTestPlugin.SYSTEM_INDEX_NAME).get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(UnmanagedSystemIndexTestPlugin.SYSTEM_INDEX_NAME) + .get(); assertThat(response.indices().length, is(1)); Settings settings = response.settings().get(UnmanagedSystemIndexTestPlugin.SYSTEM_INDEX_NAME); assertThat(settings, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 152d8405c312..20ef20867337 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -120,7 +120,7 @@ public class CreateIndexIT extends ESIntegTestCase { ) ); - GetMappingsResponse 
response = indicesAdmin().prepareGetMappings("test").get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); @@ -130,7 +130,7 @@ public class CreateIndexIT extends ESIntegTestCase { public void testEmptyNestedMappings() throws Exception { assertAcked(prepareCreate("test").setMapping(XContentFactory.jsonBuilder().startObject().endObject())); - GetMappingsResponse response = indicesAdmin().prepareGetMappings("test").get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); @@ -150,7 +150,7 @@ public class CreateIndexIT extends ESIntegTestCase { prepareCreate("test").setMapping(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()) ); - GetMappingsResponse response = indicesAdmin().prepareGetMappings("test").get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java index 867ca89f9e7f..b9dadf86c334 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java @@ -357,8 +357,9 @@ public class CreateSystemIndicesIT extends ESIntegTestCase { * Fetch the mappings and settings for {@link TestSystemIndexDescriptor#INDEX_NAME} and verify that they match the expected values. 
*/ private void assertMappingsAndSettings(String expectedMappings, String concreteIndex) { - final GetMappingsResponse getMappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(INDEX_NAME)) - .actionGet(); + final GetMappingsResponse getMappingsResponse = indicesAdmin().getMappings( + new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(INDEX_NAME) + ).actionGet(); final Map mappings = getMappingsResponse.getMappings(); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java index 3945c3a48f0a..ddd3c8e53773 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java @@ -46,7 +46,7 @@ public class GetIndexIT extends ESIntegTestCase { } public void testSimple() { - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices("idx").get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("idx").get(); String[] indices = response.indices(); assertThat(indices, notNullValue()); assertThat(indices.length, equalTo(1)); @@ -58,7 +58,7 @@ public class GetIndexIT extends ESIntegTestCase { public void testSimpleUnknownIndex() { try { - indicesAdmin().prepareGetIndex().addIndices("missing_idx").get(); + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("missing_idx").get(); fail("Expected IndexNotFoundException"); } catch (IndexNotFoundException e) { assertThat(e.getMessage(), is("no such index [missing_idx]")); @@ -66,7 +66,7 @@ public class GetIndexIT extends ESIntegTestCase { } public void testUnknownIndexWithAllowNoIndices() { - GetIndexResponse response = indicesAdmin().prepareGetIndex() + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) 
.addIndices("missing_idx") .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .get(); @@ -77,7 +77,7 @@ public class GetIndexIT extends ESIntegTestCase { } public void testEmpty() { - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices("empty_idx").get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("empty_idx").get(); String[] indices = response.indices(); assertThat(indices, notNullValue()); assertThat(indices.length, equalTo(1)); @@ -88,7 +88,10 @@ public class GetIndexIT extends ESIntegTestCase { } public void testSimpleMapping() { - GetIndexResponse response = runWithRandomFeatureMethod(indicesAdmin().prepareGetIndex().addIndices("idx"), Feature.MAPPINGS); + GetIndexResponse response = runWithRandomFeatureMethod( + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("idx"), + Feature.MAPPINGS + ); String[] indices = response.indices(); assertThat(indices, notNullValue()); assertThat(indices.length, equalTo(1)); @@ -99,7 +102,10 @@ public class GetIndexIT extends ESIntegTestCase { } public void testSimpleAlias() { - GetIndexResponse response = runWithRandomFeatureMethod(indicesAdmin().prepareGetIndex().addIndices("idx"), Feature.ALIASES); + GetIndexResponse response = runWithRandomFeatureMethod( + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("idx"), + Feature.ALIASES + ); String[] indices = response.indices(); assertThat(indices, notNullValue()); assertThat(indices.length, equalTo(1)); @@ -110,7 +116,10 @@ public class GetIndexIT extends ESIntegTestCase { } public void testSimpleSettings() { - GetIndexResponse response = runWithRandomFeatureMethod(indicesAdmin().prepareGetIndex().addIndices("idx"), Feature.SETTINGS); + GetIndexResponse response = runWithRandomFeatureMethod( + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("idx"), + Feature.SETTINGS + ); String[] indices = response.indices(); assertThat(indices, notNullValue()); 
assertThat(indices.length, equalTo(1)); @@ -127,7 +136,7 @@ public class GetIndexIT extends ESIntegTestCase { features.add(randomFrom(Feature.values())); } GetIndexResponse response = runWithRandomFeatureMethod( - indicesAdmin().prepareGetIndex().addIndices("idx"), + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("idx"), features.toArray(new Feature[features.size()]) ); String[] indices = response.indices(); @@ -158,7 +167,7 @@ public class GetIndexIT extends ESIntegTestCase { features.add(randomFrom(Feature.values())); } GetIndexResponse response = runWithRandomFeatureMethod( - indicesAdmin().prepareGetIndex().addIndices("empty_idx"), + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("empty_idx"), features.toArray(new Feature[features.size()]) ); String[] indices = response.indices(); @@ -182,7 +191,7 @@ public class GetIndexIT extends ESIntegTestCase { for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_READ_ONLY_ALLOW_DELETE)) { try { enableIndexBlock("idx", block); - GetIndexResponse response = indicesAdmin().prepareGetIndex() + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) .addIndices("idx") .addFeatures(Feature.MAPPINGS, Feature.ALIASES) .get(); @@ -200,7 +209,7 @@ public class GetIndexIT extends ESIntegTestCase { try { enableIndexBlock("idx", SETTING_BLOCKS_METADATA); assertBlocked( - indicesAdmin().prepareGetIndex().addIndices("idx").addFeatures(Feature.MAPPINGS, Feature.ALIASES), + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("idx").addFeatures(Feature.MAPPINGS, Feature.ALIASES), INDEX_METADATA_BLOCK ); } finally { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 1f34646269f1..9c6189f35874 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -838,7 +838,7 @@ public class RolloverIT extends ESIntegTestCase { assertBusy(() -> { try { - indicesAdmin().prepareGetIndex().addIndices(writeIndexPrefix + "000002").get(); + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(writeIndexPrefix + "000002").get(); } catch (Exception e) { logger.info("--> expecting second index to be created but it has not yet been created"); fail("expecting second index to exist"); @@ -857,7 +857,7 @@ public class RolloverIT extends ESIntegTestCase { }); // We should *NOT* have a third index, it should have rolled over *exactly* once - expectThrows(Exception.class, indicesAdmin().prepareGetIndex().addIndices(writeIndexPrefix + "000003")); + expectThrows(Exception.class, indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(writeIndexPrefix + "000003")); } public void testRolloverConcurrently() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index e45555b1dec1..2cd319d14832 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -55,7 +55,7 @@ public class BulkIntegrationIT extends ESIntegTestCase { bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); bulkBuilder.get(); assertBusy(() -> { - GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings().get(); + GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT).get(); 
assertTrue(mappingsResponse.getMappings().containsKey("logstash-2014.03.30")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java index e0ecc0815945..11dbe38a0824 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java @@ -258,11 +258,14 @@ public class SimpleClusterStateIT extends ESIntegTestCase { .setTimeout(TimeValue.timeValueMinutes(1)) ); ensureGreen(); // wait for green state, so its both green, and there are no more pending events - MappingMetadata masterMappingMetadata = indicesAdmin().prepareGetMappings("test").get().getMappings().get("test"); + MappingMetadata masterMappingMetadata = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test") + .get() + .getMappings() + .get("test"); for (Client client : clients()) { MappingMetadata mappingMetadata = client.admin() .indices() - .prepareGetMappings("test") + .prepareGetMappings(TEST_REQUEST_TIMEOUT, "test") .setLocal(true) .get() .getMappings() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/MetadataNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/MetadataNodesIT.java index 584653e6220d..f8f88fadd074 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/MetadataNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/MetadataNodesIT.java @@ -118,7 +118,7 @@ public class MetadataNodesIT extends ESIntegTestCase { ) .get(); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(index).get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, index).get(); assertNotNull( ((Map) 
(getMappingsResponse.getMappings().get(index).getSourceAsMap().get("properties"))).get("integer_field") ); @@ -149,7 +149,7 @@ public class MetadataNodesIT extends ESIntegTestCase { ) .get(); - getMappingsResponse = indicesAdmin().prepareGetMappings(index).get(); + getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, index).get(); assertNotNull( ((Map) (getMappingsResponse.getMappings().get(index).getSourceAsMap().get("properties"))).get("float_field") ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java index 6c7754932af6..f06810377771 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; @@ -33,8 +34,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; @@ -932,6 +935,102 @@ public class GetActionIT extends ESIntegTestCase { ); } + public void 
testRealTimeGetNestedFields() { + String index = "test"; + SourceFieldMapper.Mode sourceMode = randomFrom(SourceFieldMapper.Mode.values()); + assertAcked( + prepareCreate(index).setMapping("title", "type=keyword", "author", "type=nested") + .setSettings( + indexSettings(1, 0).put("index.refresh_interval", -1) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), sourceMode) + ) + ); + ensureGreen(); + String source0 = """ + { + "title": "t0", + "author": [ + { + "name": "a0" + } + ] + } + """; + prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("0").setSource(source0, XContentType.JSON).get(); + // start tracking translog locations + assertTrue(client().prepareGet(index, "0").setRealtime(true).get().isExists()); + String source1 = """ + { + "title": ["t1"], + "author": [ + { + "name": "a1" + } + ] + } + """; + prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("1").setSource(source1, XContentType.JSON).get(); + String source2 = """ + { + "title": ["t1", "t2"], + "author": [ + { + "name": "a1" + }, + { + "name": "a2" + } + ] + } + """; + prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("2").setSource(source2, XContentType.JSON).get(); + String source3 = """ + { + "title": ["t1", "t3", "t2"] + } + """; + prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("3").setSource(source3, XContentType.JSON).get(); + GetResponse translog1 = client().prepareGet(index, "1").setRealtime(true).get(); + GetResponse translog2 = client().prepareGet(index, "2").setRealtime(true).get(); + GetResponse translog3 = client().prepareGet(index, "3").setRealtime(true).get(); + assertTrue(translog1.isExists()); + assertTrue(translog2.isExists()); + assertTrue(translog3.isExists()); + switch (sourceMode) { + case STORED -> { + assertThat(translog1.getSourceAsBytesRef().utf8ToString(), equalTo(source1)); + assertThat(translog2.getSourceAsBytesRef().utf8ToString(), equalTo(source2)); + 
assertThat(translog3.getSourceAsBytesRef().utf8ToString(), equalTo(source3)); + } + case SYNTHETIC -> { + assertThat(translog1.getSourceAsBytesRef().utf8ToString(), equalTo(""" + {"author":{"name":"a1"},"title":"t1"}""")); + assertThat(translog2.getSourceAsBytesRef().utf8ToString(), equalTo(""" + {"author":[{"name":"a1"},{"name":"a2"}],"title":["t1","t2"]}""")); + assertThat(translog3.getSourceAsBytesRef().utf8ToString(), equalTo(""" + {"title":["t1","t2","t3"]}""")); + } + case DISABLED -> { + assertNull(translog1.getSourceAsBytesRef()); + assertNull(translog2.getSourceAsBytesRef()); + assertNull(translog3.getSourceAsBytesRef()); + } + } + assertFalse(client().prepareGet(index, "1").setRealtime(false).get().isExists()); + assertFalse(client().prepareGet(index, "2").setRealtime(false).get().isExists()); + assertFalse(client().prepareGet(index, "3").setRealtime(false).get().isExists()); + refresh(index); + GetResponse lucene1 = client().prepareGet(index, "1").setRealtime(randomBoolean()).get(); + GetResponse lucene2 = client().prepareGet(index, "2").setRealtime(randomBoolean()).get(); + GetResponse lucene3 = client().prepareGet(index, "3").setRealtime(randomBoolean()).get(); + assertTrue(lucene1.isExists()); + assertTrue(lucene2.isExists()); + assertTrue(lucene3.isExists()); + assertThat(translog1.getSourceAsBytesRef(), equalTo(lucene1.getSourceAsBytesRef())); + assertThat(translog2.getSourceAsBytesRef(), equalTo(lucene2.getSourceAsBytesRef())); + assertThat(translog3.getSourceAsBytesRef(), equalTo(lucene3.getSourceAsBytesRef())); + } + private void assertGetFieldsAlwaysWorks(String index, String docId, String[] fields) { assertGetFieldsAlwaysWorks(index, docId, fields, null); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java index 802ba04375c4..24af560f608d 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java @@ -97,7 +97,7 @@ public class HiddenIndexIT extends ESIntegTestCase { assertAcked(indicesAdmin().prepareCreate("a_hidden_index").setSettings(Settings.builder().put("index.hidden", true).build())); - GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings("a_hidden_index").get(); + GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "a_hidden_index").get(); assertThat(mappingsResponse.mappings().size(), is(1)); MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index"); assertNotNull(mappingMetadata); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java index 960ee2fd7ca6..6bca87ebd6e3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/LookupIndexModeIT.java @@ -49,7 +49,7 @@ public class LookupIndexModeIT extends ESIntegTestCase { assertAcked(client().admin().indices().execute(TransportCreateIndexAction.TYPE, createRequest)); Settings settings = client().admin().indices().prepareGetSettings("hosts").get().getIndexToSettings().get("hosts"); assertThat(settings.get("index.mode"), equalTo("lookup")); - assertThat(settings.get("index.auto_expand_replicas"), equalTo("0-all")); + assertNull(settings.get("index.auto_expand_replicas")); Map allHosts = Map.of( "192.168.1.2", "Windows", @@ -141,7 +141,6 @@ public class LookupIndexModeIT extends ESIntegTestCase { Settings settings = client().admin().indices().prepareGetSettings("lookup-2").get().getIndexToSettings().get("lookup-2"); assertThat(settings.get("index.mode"), equalTo("lookup")); assertThat(settings.get("index.number_of_shards"), 
equalTo("1")); - assertThat(settings.get("index.auto_expand_replicas"), equalTo("0-all")); ResizeRequest split = new ResizeRequest("lookup-3", "lookup-1"); split.setResizeType(ResizeType.SPLIT); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index 2dad8500a309..71b5ba58b578 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -96,7 +96,11 @@ public class DynamicMappingIT extends ESIntegTestCase { client().prepareIndex("index").setId("1").setSource("a.x", 1).get(); client().prepareIndex("index").setId("2").setSource("a.y", 2).get(); - Map mappings = indicesAdmin().prepareGetMappings("index").get().mappings().get("index").sourceAsMap(); + Map mappings = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "index") + .get() + .mappings() + .get("index") + .sourceAsMap(); assertTrue(new WriteField("properties.a", () -> mappings).exists()); assertTrue(new WriteField("properties.a.properties.x", () -> mappings).exists()); } @@ -183,7 +187,7 @@ public class DynamicMappingIT extends ESIntegTestCase { for (int i = 0; i < numberOfFieldsToCreate; ++i) { assertTrue(client().prepareGet("index", Integer.toString(i)).get().isExists()); } - GetMappingsResponse mappings = indicesAdmin().prepareGetMappings("index").get(); + GetMappingsResponse mappings = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "index").get(); MappingMetadata indexMappings = mappings.getMappings().get("index"); assertNotNull(indexMappings); Map typeMappingsMap = indexMappings.getSourceAsMap(); @@ -214,7 +218,11 @@ public class DynamicMappingIT extends ESIntegTestCase { for (int i = 0; i < numberOfDocsToCreate; ++i) { assertTrue(client().prepareGet("index", Integer.toString(i)).get().isExists()); } - Map index = 
indicesAdmin().prepareGetMappings("index").get().getMappings().get("index").getSourceAsMap(); + Map index = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "index") + .get() + .getMappings() + .get("index") + .getSourceAsMap(); for (int i = 0, j = 1; i < numberOfDocsToCreate; i++, j++) { assertThat(new WriteField("properties.field" + i + ".type", () -> index).get(null), is(oneOf("long", "float"))); assertThat(new WriteField("properties.field" + j + ".type", () -> index).get(null), is(oneOf("long", "float"))); @@ -808,7 +816,11 @@ public class DynamicMappingIT extends ESIntegTestCase { assertEquals(RestStatus.CREATED, indexResponse.status()); assertBusy(() -> { - Map mappings = indicesAdmin().prepareGetMappings("test").get().mappings().get("test").sourceAsMap(); + Map mappings = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test") + .get() + .mappings() + .get("test") + .sourceAsMap(); @SuppressWarnings("unchecked") Map properties = (Map) mappings.get("properties"); assertEquals(4, properties.size()); @@ -853,7 +865,11 @@ public class DynamicMappingIT extends ESIntegTestCase { assertEquals(RestStatus.CREATED, indexResponse.status()); assertBusy(() -> { - Map mappings = indicesAdmin().prepareGetMappings("test").get().mappings().get("test").sourceAsMap(); + Map mappings = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test") + .get() + .mappings() + .get("test") + .sourceAsMap(); Map properties = (Map) mappings.get("properties"); Map foo = (Map) properties.get("foo"); properties = (Map) foo.get("properties"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java index 4a7de4b0ebc2..1a51fc12fed8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java @@ -37,7 +37,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { public void testMultiFields() throws Exception { assertAcked(indicesAdmin().prepareCreate("my-index").setMapping(createTypeSource())); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("my-index").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); @@ -53,7 +53,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { assertAcked(indicesAdmin().preparePutMapping("my-index").setSource(createPutMappingSource())); - getMappingsResponse = indicesAdmin().prepareGetMappings("my-index").get(); + getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "my-index").get(); mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); mappingSource = mappingMetadata.sourceAsMap(); @@ -74,7 +74,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { public void testGeoPointMultiField() throws Exception { assertAcked(indicesAdmin().prepareCreate("my-index").setMapping(createMappingSource("geo_point"))); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("my-index").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); @@ -102,7 +102,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { public void testCompletionMultiField() throws Exception 
{ assertAcked(indicesAdmin().prepareCreate("my-index").setMapping(createMappingSource("completion"))); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("my-index").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); @@ -123,7 +123,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { public void testIpMultiField() throws Exception { assertAcked(indicesAdmin().prepareCreate("my-index").setMapping(createMappingSource("ip"))); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("my-index").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseCreationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseCreationIT.java index 92e5eb8e046b..88cca3308ac4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseCreationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseCreationIT.java @@ -54,7 +54,7 @@ public class PeerRecoveryRetentionLeaseCreationIT extends ESIntegTestCase { ensureGreen(INDEX_NAME); IndicesService service = internalCluster().getInstance(IndicesService.class, dataNode); - String uuid = indicesAdmin().getIndex(new GetIndexRequest().indices(INDEX_NAME)) + String uuid = indicesAdmin().getIndex(new 
GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(INDEX_NAME)) .actionGet() .getSetting(INDEX_NAME, IndexMetadata.SETTING_INDEX_UUID); Path path = service.indexService(new Index(INDEX_NAME, uuid)).getShard(0).shardPath().getShardStatePath(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 652f4e02ffbc..c53cf3b56f65 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -537,26 +537,26 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } verify(indicesAdmin().preparePutMapping("foo").setSource("field", "type=text"), false); - assertThat(indicesAdmin().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "foo").get().mappings().get("foo"), notNullValue()); verify(indicesAdmin().preparePutMapping("b*").setSource("field", "type=text"), false); - assertThat(indicesAdmin().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); - assertThat(indicesAdmin().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "bar").get().mappings().get("bar"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "barbaz").get().mappings().get("barbaz"), notNullValue()); verify(indicesAdmin().preparePutMapping("_all").setSource("field", "type=text"), false); - assertThat(indicesAdmin().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); - assertThat(indicesAdmin().prepareGetMappings("foobar").get().mappings().get("foobar"), notNullValue()); - 
assertThat(indicesAdmin().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); - assertThat(indicesAdmin().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "foo").get().mappings().get("foo"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "foobar").get().mappings().get("foobar"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "bar").get().mappings().get("bar"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "barbaz").get().mappings().get("barbaz"), notNullValue()); verify(indicesAdmin().preparePutMapping().setSource("field", "type=text"), false); - assertThat(indicesAdmin().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); - assertThat(indicesAdmin().prepareGetMappings("foobar").get().mappings().get("foobar"), notNullValue()); - assertThat(indicesAdmin().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); - assertThat(indicesAdmin().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "foo").get().mappings().get("foo"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "foobar").get().mappings().get("foobar"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "bar").get().mappings().get("bar"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "barbaz").get().mappings().get("barbaz"), notNullValue()); verify(indicesAdmin().preparePutMapping("c*").setSource("field", "type=text"), true); assertAcked(indicesAdmin().prepareClose("barbaz").get()); verify(indicesAdmin().preparePutMapping("barbaz").setSource("field", "type=text"), false); - 
assertThat(indicesAdmin().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + assertThat(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "barbaz").get().mappings().get("barbaz"), notNullValue()); } public static final class TestPlugin extends Plugin { @@ -664,7 +664,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } static GetMappingsRequestBuilder getMapping(String... indices) { - return indicesAdmin().prepareGetMappings(indices); + return indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, indices); } static GetSettingsRequestBuilder getSettings(String... indices) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexMappingUpdateServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexMappingUpdateServiceIT.java index de565605ff58..7264585337fc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexMappingUpdateServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexMappingUpdateServiceIT.java @@ -123,8 +123,9 @@ public class SystemIndexMappingUpdateServiceIT extends ESIntegTestCase { * Fetch the mappings and settings for {@link TestSystemIndexDescriptor#INDEX_NAME} and verify that they match the expected values. 
*/ private void assertMappingsAndSettings(String expectedMappings) { - final GetMappingsResponse getMappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(INDEX_NAME)) - .actionGet(); + final GetMappingsResponse getMappingsResponse = indicesAdmin().getMappings( + new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(INDEX_NAME) + ).actionGet(); final Map mappings = getMappingsResponse.getMappings(); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index 71fcb25c2e0b..e3092bda185f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -198,7 +198,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { try { enableIndexBlock("test", SETTING_BLOCKS_METADATA); - assertBlocked(indicesAdmin().prepareGetMappings(), INDEX_METADATA_BLOCK); + assertBlocked(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT), INDEX_METADATA_BLOCK); } finally { disableIndexBlock("test", SETTING_BLOCKS_METADATA); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetMappingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetMappingsIT.java index 20e59fab3bd0..023aa402b733 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetMappingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetMappingsIT.java @@ -42,7 +42,7 @@ public class SimpleGetMappingsIT extends ESIntegTestCase { public void testGetMappingsWhereThereAreNone() { createIndex("index"); - GetMappingsResponse response = indicesAdmin().prepareGetMappings().get(); + GetMappingsResponse response = 
indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT).get(); assertThat(response.mappings().containsKey("index"), equalTo(true)); assertEquals(MappingMetadata.EMPTY_MAPPINGS, response.mappings().get("index")); } @@ -70,19 +70,19 @@ public class SimpleGetMappingsIT extends ESIntegTestCase { assertThat(clusterHealth.isTimedOut(), equalTo(false)); // Get all mappings - GetMappingsResponse response = indicesAdmin().prepareGetMappings().get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT).get(); assertThat(response.mappings().size(), equalTo(2)); assertThat(response.mappings().get("indexa"), notNullValue()); assertThat(response.mappings().get("indexb"), notNullValue()); // Get all mappings, via wildcard support - response = indicesAdmin().prepareGetMappings("*").get(); + response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "*").get(); assertThat(response.mappings().size(), equalTo(2)); assertThat(response.mappings().get("indexa"), notNullValue()); assertThat(response.mappings().get("indexb"), notNullValue()); // Get mappings in indexa - response = indicesAdmin().prepareGetMappings("indexa").get(); + response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "indexa").get(); assertThat(response.mappings().size(), equalTo(1)); assertThat(response.mappings().get("indexa"), notNullValue()); } @@ -94,7 +94,7 @@ public class SimpleGetMappingsIT extends ESIntegTestCase { for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { enableIndexBlock("test", block); - GetMappingsResponse response = indicesAdmin().prepareGetMappings().get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT).get(); assertThat(response.mappings().size(), equalTo(1)); assertNotNull(response.mappings().get("test")); } finally { @@ -104,7 +104,7 @@ public class SimpleGetMappingsIT extends ESIntegTestCase { try { enableIndexBlock("test", 
SETTING_BLOCKS_METADATA); - assertBlocked(indicesAdmin().prepareGetMappings(), INDEX_METADATA_BLOCK); + assertBlocked(indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT), INDEX_METADATA_BLOCK); } finally { disableIndexBlock("test", SETTING_BLOCKS_METADATA); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 6f6e488d46b2..fa2598348a1c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -109,7 +109,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { assertThat(putMappingResponse.isAcknowledged(), equalTo(true)); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("test").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertThat(getMappingsResponse.mappings().get("test").source().toString(), equalTo(""" {"_doc":{"properties":{"body":{"type":"text"},"date":{"type":"integer"}}}}""")); } @@ -123,7 +123,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { assertThat(putMappingResponse.isAcknowledged(), equalTo(true)); - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("test").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertThat(getMappingsResponse.mappings().get("test").source().toString(), equalTo(""" {"_doc":{"properties":{"date":{"type":"integer"}}}}""")); } @@ -215,7 +215,10 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { .get(); assertThat(response.isAcknowledged(), equalTo(true)); - GetMappingsResponse getMappingResponse = 
client2.admin().indices().prepareGetMappings(indexName).get(); + GetMappingsResponse getMappingResponse = client2.admin() + .indices() + .prepareGetMappings(TEST_REQUEST_TIMEOUT, indexName) + .get(); MappingMetadata mappings = getMappingResponse.getMappings().get(indexName); @SuppressWarnings("unchecked") Map properties = (Map) mappings.getSourceAsMap().get("properties"); @@ -284,7 +287,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { * Waits for the given mapping type to exists on the master node. */ private void assertMappingOnMaster(final String index, final String... fieldNames) { - GetMappingsResponse response = indicesAdmin().prepareGetMappings(index).get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, index).get(); MappingMetadata mappings = response.getMappings().get(index); assertThat(mappings, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java index c3eda84ee9e5..b1fd483cb9be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java @@ -87,16 +87,25 @@ public class FeatureStateResetApiIT extends ESIntegTestCase { ); // verify that both indices are gone - Exception e1 = expectThrows(IndexNotFoundException.class, indicesAdmin().prepareGetIndex().addIndices(systemIndex1)); + Exception e1 = expectThrows( + IndexNotFoundException.class, + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(systemIndex1) + ); assertThat(e1.getMessage(), containsString("no such index")); - Exception e2 = expectThrows(IndexNotFoundException.class, indicesAdmin().prepareGetIndex().addIndices(associatedIndex)); + Exception e2 = expectThrows( + IndexNotFoundException.class, + 
indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(associatedIndex) + ); assertThat(e2.getMessage(), containsString("no such index")); - Exception e3 = expectThrows(IndexNotFoundException.class, indicesAdmin().prepareGetIndex().addIndices(systemIndex2)); + Exception e3 = expectThrows( + IndexNotFoundException.class, + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(systemIndex2) + ); assertThat(e3.getMessage(), containsString("no such index")); - GetIndexResponse response = indicesAdmin().prepareGetIndex().addIndices("my_index").get(); + GetIndexResponse response = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("my_index").get(); assertThat(response.getIndices(), arrayContaining("my_index")); } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index ffd3d3d883f3..a26632d5b796 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -427,22 +427,14 @@ module org.elasticsearch.server { provides org.elasticsearch.features.FeatureSpecification with - org.elasticsearch.action.admin.indices.stats.IndicesStatsFeatures, org.elasticsearch.action.bulk.BulkFeatures, org.elasticsearch.features.FeatureInfrastructureFeatures, - org.elasticsearch.health.HealthFeatures, - org.elasticsearch.cluster.metadata.MetadataFeatures, - org.elasticsearch.rest.RestFeatures, - org.elasticsearch.repositories.RepositoriesFeatures, - org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures, org.elasticsearch.index.mapper.MapperFeatures, org.elasticsearch.index.IndexFeatures, - org.elasticsearch.ingest.IngestGeoIpFeatures, org.elasticsearch.search.SearchFeatures, org.elasticsearch.script.ScriptFeatures, org.elasticsearch.search.retriever.RetrieversFeatures, - org.elasticsearch.reservedstate.service.FileSettingsFeatures, org.elasticsearch.action.admin.cluster.stats.ClusterStatsFeatures; 
uses org.elasticsearch.plugins.internal.SettingsExtension; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 237ad37982da..e735b3701ff0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -157,6 +157,7 @@ public class TransportVersions { public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_00_0); public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_00_0); public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_00_0); + public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_00_0); /* * WARNING: DO NOT MERGE INTO MAIN! diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 28ef18e98727..7c6abbe7c788 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -1016,7 +1016,7 @@ public class ActionModule extends AbstractModule { // Desired nodes registerHandler.accept(new RestGetDesiredNodesAction()); - registerHandler.accept(new RestUpdateDesiredNodesAction(clusterSupportsFeature)); + registerHandler.accept(new RestUpdateDesiredNodesAction()); registerHandler.accept(new RestDeleteDesiredNodesAction()); for (ActionPlugin plugin : actionPlugins) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java deleted file mode 100644 index 164fc816ad36..000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.action.admin.cluster.allocation; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -public class AllocationStatsFeatures implements FeatureSpecification { - public static final NodeFeature INCLUDE_DISK_THRESHOLD_SETTINGS = new NodeFeature("stats.include_disk_thresholds", true); - - @Override - public Set getFeatures() { - return Set.of(INCLUDE_DISK_THRESHOLD_SETTINGS); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java index d929fb457d5d..23bf22e08985 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -49,7 +48,6 @@ public class TransportGetAllocationStatsAction extends TransportMasterNodeReadAc private final AllocationStatsService 
allocationStatsService; private final DiskThresholdSettings diskThresholdSettings; - private final FeatureService featureService; @Inject public TransportGetAllocationStatsAction( @@ -58,8 +56,7 @@ public class TransportGetAllocationStatsAction extends TransportMasterNodeReadAc ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AllocationStatsService allocationStatsService, - FeatureService featureService + AllocationStatsService allocationStatsService ) { super( TYPE.name(), @@ -74,7 +71,6 @@ public class TransportGetAllocationStatsAction extends TransportMasterNodeReadAc ); this.allocationStatsService = allocationStatsService; this.diskThresholdSettings = new DiskThresholdSettings(clusterService.getSettings(), clusterService.getClusterSettings()); - this.featureService = featureService; } @Override @@ -92,10 +88,7 @@ public class TransportGetAllocationStatsAction extends TransportMasterNodeReadAc listener.onResponse( new Response( request.metrics().contains(Metric.ALLOCATIONS) ? allocationStatsService.stats() : Map.of(), - request.metrics().contains(Metric.FS) - && featureService.clusterHasFeature(clusterService.state(), AllocationStatsFeatures.INCLUDE_DISK_THRESHOLD_SETTINGS) - ? diskThresholdSettings - : null + request.metrics().contains(Metric.FS) ? 
diskThresholdSettings : null ) ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java index 7d70e83f6558..6ff5d347ab05 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java @@ -41,4 +41,9 @@ public class NodeCapability extends BaseNodeResponse { out.writeBoolean(supported); } + + @Override + public String toString() { + return "NodeCapability{supported=" + supported + '}'; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java index 9fede2ebb5be..beb0e1f927de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.node.capabilities; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -20,11 +19,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import 
org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; @@ -32,7 +29,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.List; -import java.util.Optional; import java.util.Set; public class TransportNodesCapabilitiesAction extends TransportNodesAction< @@ -45,7 +41,6 @@ public class TransportNodesCapabilitiesAction extends TransportNodesAction< public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); private final RestController restController; - private final FeatureService featureService; @Inject public TransportNodesCapabilitiesAction( @@ -53,8 +48,7 @@ public class TransportNodesCapabilitiesAction extends TransportNodesAction< ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - RestController restController, - FeatureService featureService + RestController restController ) { super( TYPE.name(), @@ -65,23 +59,6 @@ public class TransportNodesCapabilitiesAction extends TransportNodesAction< threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.restController = restController; - this.featureService = featureService; - } - - @Override - protected void doExecute(Task task, NodesCapabilitiesRequest request, ActionListener listener) { - if (featureService.clusterHasFeature(clusterService.state(), RestNodesCapabilitiesAction.CAPABILITIES_ACTION) == false) { - // not everything in the cluster supports capabilities. 
- // Therefore we don't support whatever it is we're being asked for - listener.onResponse(new NodesCapabilitiesResponse(clusterService.getClusterName(), List.of(), List.of()) { - @Override - public Optional isSupported() { - return Optional.of(false); - } - }); - } else { - super.doExecute(task, request, listener); - } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java index 7e3c38c73509..ca02d19749ae 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsAction.java @@ -13,13 +13,18 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; +import java.util.Map; import java.util.Objects; public class ClusterGetSettingsAction extends ActionType { @@ -34,25 +39,29 @@ public class ClusterGetSettingsAction extends ActionType { + public static class Request extends LocalClusterStateRequest { public Request(TimeValue masterNodeTimeout) { super(masterNodeTimeout); } + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC we must remain able to read 
these requests until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) public Request(StreamInput in) throws IOException { super(in); assert in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0); } @Override - public void writeTo(StreamOutput out) throws IOException { - assert out.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0); - super.writeTo(out); + public ActionRequestValidationException validate() { + return null; } @Override - public ActionRequestValidationException validate() { - return null; + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); } } @@ -79,20 +88,17 @@ public class ClusterGetSettingsAction extends ActionType { private final SettingsFilter settingsFilter; + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. 
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) + @SuppressWarnings("this-escape") @Inject public TransportClusterGetSettingsAction( TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, SettingsFilter settingsFilter, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver + ActionFilters actionFilters ) { super( ClusterGetSettingsAction.NAME, - false, - transportService, - clusterService, - threadPool, actionFilters, - ClusterGetSettingsAction.Request::new, - indexNameExpressionResolver, - ClusterGetSettingsAction.Response::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.settingsFilter = settingsFilter; + + transportService.registerRequestHandler( + actionName, + executor, + false, + true, + ClusterGetSettingsAction.Request::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, ClusterGetSettingsAction.Request request, ClusterState state, ActionListener listener ) throws Exception { + ((CancellableTask) task).ensureNotCancelled(); Metadata metadata = state.metadata(); listener.onResponse( new ClusterGetSettingsAction.Response( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java index 0d910cbfdeba..02b581ecbdda 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; -import 
org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -79,7 +79,7 @@ public final class MappingStats implements ToXContentFragment, Writeable { } AnalysisStats.countMapping(mappingCounts, indexMetadata); - var sourceMode = SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexMetadata.getSettings()); + var sourceMode = IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexMetadata.getSettings()); sourceModeUsageCount.merge(sourceMode.toString().toLowerCase(Locale.ENGLISH), 1, Integer::sum); } for (MappingMetadata mappingMetadata : project.getMappingsByHash().values()) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java index be7aaeec8f69..05c44b55cf8b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.master.info.ClusterInfoRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ArrayUtils; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; @@ -93,8 +94,8 @@ public class GetIndexRequest extends ClusterInfoRequest { private boolean humanReadable = false; private transient boolean includeDefaults = false; - public GetIndexRequest() { - super(IndicesOptions.strictExpandOpen()); + public GetIndexRequest(TimeValue masterTimeout) { + super(masterTimeout, IndicesOptions.strictExpandOpen()); } public GetIndexRequest(StreamInput in) throws IOException { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java index 51088124b084..18abb9e5c58e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java @@ -12,11 +12,12 @@ package org.elasticsearch.action.admin.indices.get; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.support.master.info.ClusterInfoRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.core.TimeValue; public class GetIndexRequestBuilder extends ClusterInfoRequestBuilder { - public GetIndexRequestBuilder(ElasticsearchClient client, String... indices) { - super(client, GetIndexAction.INSTANCE, new GetIndexRequest().indices(indices)); + public GetIndexRequestBuilder(ElasticsearchClient client, TimeValue masterTimeout, String... indices) { + super(client, GetIndexAction.INSTANCE, new GetIndexRequest(masterTimeout).indices(indices)); } public GetIndexRequestBuilder setFeatures(Feature... 
features) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java index dd4114c94717..84789d8a2acf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java @@ -12,6 +12,7 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.info.ClusterInfoRequest; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -21,7 +22,9 @@ import java.util.Map; public class GetMappingsRequest extends ClusterInfoRequest { - public GetMappingsRequest() {} + public GetMappingsRequest(TimeValue masterTimeout) { + super(masterTimeout); + } public GetMappingsRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java index 3f5413858139..a12ba4f60c26 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java @@ -11,13 +11,14 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.support.master.info.ClusterInfoRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.core.TimeValue; public class GetMappingsRequestBuilder extends 
ClusterInfoRequestBuilder< GetMappingsRequest, GetMappingsResponse, GetMappingsRequestBuilder> { - public GetMappingsRequestBuilder(ElasticsearchClient client, String... indices) { - super(client, GetMappingsAction.INSTANCE, new GetMappingsRequest().indices(indices)); + public GetMappingsRequestBuilder(ElasticsearchClient client, TimeValue masterTimeout, String... indices) { + super(client, GetMappingsAction.INSTANCE, new GetMappingsRequest(masterTimeout).indices(indices)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java index d2634bdc23ae..7917c311f7c3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java @@ -37,7 +37,6 @@ import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; @@ -59,8 +58,6 @@ public final class LazyRolloverAction extends ActionType { private static final Logger logger = LogManager.getLogger(LazyRolloverAction.class); - public static final NodeFeature DATA_STREAM_LAZY_ROLLOVER = new NodeFeature("data_stream.rollover.lazy", true); - public static final LazyRolloverAction INSTANCE = new LazyRolloverAction(); public static final String NAME = "indices:admin/data_stream/lazy_rollover"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java index 67b6df150c45..6106e620521f 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java @@ -12,7 +12,6 @@ package org.elasticsearch.action.admin.indices.stats; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import java.util.ArrayList; import java.util.HashMap; @@ -22,9 +21,6 @@ import java.util.Map; public class IndexStats implements Iterable { - // feature was effectively reverted but we still need to keep this constant around - public static final NodeFeature REVERTED_TIER_CREATION_DATE = new NodeFeature("stats.tier_creation_date", true); - private final String index; private final String uuid; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java deleted file mode 100644 index 558343db1023..000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.action.admin.indices.stats; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -public class IndicesStatsFeatures implements FeatureSpecification { - - @Override - public Set getFeatures() { - return Set.of(IndexStats.REVERTED_TIER_CREATION_DATE); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java index ce29d65ececf..003be58d1955 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java @@ -12,16 +12,20 @@ package org.elasticsearch.action.admin.indices.template.post; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; +import java.util.Map; import java.util.Objects; -public class SimulateIndexTemplateRequest extends MasterNodeReadRequest { +public class SimulateIndexTemplateRequest extends LocalClusterStateRequest { private String indexName; @@ -30,14 +34,18 @@ public class SimulateIndexTemplateRequest extends 
MasterNodeReadRequest headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + public String getIndexName() { return indexName; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index a521dac60e96..1a04b6e4d763 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -12,13 +12,11 @@ package org.elasticsearch.action.admin.indices.template.post; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; -import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.ResettableValue; import org.elasticsearch.cluster.metadata.Template; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -67,27 +65,11 @@ public class SimulateIndexTemplateResponse extends ActionResponse implements ToX return rolloverConfiguration; } - public SimulateIndexTemplateResponse(StreamInput in) throws IOException { - super(in); - resolvedTemplate = in.readOptionalWriteable(Template::new); - if (in.readBoolean()) { - int overlappingTemplatesCount = in.readInt(); - overlappingTemplates = Maps.newMapWithExpectedSize(overlappingTemplatesCount); - for (int i = 0; i < overlappingTemplatesCount; i++) { - String 
templateName = in.readString(); - overlappingTemplates.put(templateName, in.readStringCollectionAsList()); - } - } else { - this.overlappingTemplates = null; - } - rolloverConfiguration = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) - ? in.readOptionalWriteable(RolloverConfiguration::new) - : null; - if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { - in.readOptionalWriteable(DataStreamGlobalRetention::read); - } - } - + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC we must remain able to write these responses until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(resolvedTemplate); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java index 75cc72416a85..15015b910767 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java @@ -14,12 +14,16 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; +import java.util.Map; import java.util.Objects; /** @@ -35,7 +39,7 @@ public class SimulateTemplateAction extends ActionType { + public static class Request extends LocalClusterStateRequest { @Nullable private String templateName; @@ -44,26 +48,15 @@ public class SimulateTemplateAction extends ActionType headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + @Nullable public String getTemplateName() { return templateName; @@ -112,11 +100,6 @@ public class SimulateTemplateAction extends ActionType { @@ -78,14 +78,18 @@ public class TransportSimulateIndexTemplateAction extends TransportMasterNodeRea private final ClusterSettings clusterSettings; private final boolean isDslOnlyMode; + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. 
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) + @SuppressWarnings("this-escape") @Inject public TransportSimulateIndexTemplateAction( TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, MetadataIndexTemplateService indexTemplateService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry, IndicesService indicesService, SystemIndices systemIndices, @@ -93,13 +97,9 @@ public class TransportSimulateIndexTemplateAction extends TransportMasterNodeRea ) { super( SimulateIndexTemplateAction.NAME, - transportService, - clusterService, - threadPool, actionFilters, - SimulateIndexTemplateRequest::new, - indexNameExpressionResolver, - SimulateIndexTemplateResponse::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.indexTemplateService = indexTemplateService; @@ -109,10 +109,19 @@ public class TransportSimulateIndexTemplateAction extends TransportMasterNodeRea this.indexSettingProviders = indexSettingProviders.getIndexSettingProviders(); this.clusterSettings = clusterService.getClusterSettings(); this.isDslOnlyMode = isDataStreamsLifecycleOnlyMode(clusterService.getSettings()); + + transportService.registerRequestHandler( + actionName, + executor, + false, + true, + SimulateIndexTemplateRequest::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, SimulateIndexTemplateRequest request, ClusterState state, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java index 521316ac874e..555fa72af4a5 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java @@ -11,26 +11,26 @@ package org.elasticsearch.action.admin.indices.template.post; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.action.support.local.TransportLocalClusterStateAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -48,7 +48,7 @@ import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.fi * Handles simulating an index template either by name 
(looking it up in the * cluster state), or by a provided template configuration */ -public class TransportSimulateTemplateAction extends TransportMasterNodeReadAction< +public class TransportSimulateTemplateAction extends TransportLocalClusterStateAction< SimulateTemplateAction.Request, SimulateIndexTemplateResponse> { @@ -60,14 +60,18 @@ public class TransportSimulateTemplateAction extends TransportMasterNodeReadActi private final ClusterSettings clusterSettings; private final boolean isDslOnlyMode; + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) + @SuppressWarnings("this-escape") @Inject public TransportSimulateTemplateAction( TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, MetadataIndexTemplateService indexTemplateService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry, IndicesService indicesService, SystemIndices systemIndices, @@ -75,13 +79,9 @@ public class TransportSimulateTemplateAction extends TransportMasterNodeReadActi ) { super( SimulateTemplateAction.NAME, - transportService, - clusterService, - threadPool, actionFilters, - SimulateTemplateAction.Request::new, - indexNameExpressionResolver, - SimulateIndexTemplateResponse::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.indexTemplateService = indexTemplateService; @@ -91,10 +91,19 @@ public class TransportSimulateTemplateAction extends TransportMasterNodeReadActi this.indexSettingProviders = indexSettingProviders.getIndexSettingProviders(); this.clusterSettings = clusterService.getClusterSettings(); this.isDslOnlyMode = isDataStreamsLifecycleOnlyMode(clusterService.getSettings()); + + 
transportService.registerRequestHandler( + actionName, + executor, + false, + true, + SimulateTemplateAction.Request::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, SimulateTemplateAction.Request request, ClusterState state, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java index 998a3ada5d15..5851549977ea 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java @@ -14,24 +14,10 @@ import org.elasticsearch.features.NodeFeature; import java.util.Set; -import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_IGNORED_FIELDS; -import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS; -import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION; -import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION; -import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION_TEMPLATES; -import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING; public class BulkFeatures implements FeatureSpecification { public Set getFeatures() { - return Set.of( - SIMULATE_MAPPING_VALIDATION, - SIMULATE_MAPPING_VALIDATION_TEMPLATES, - SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS, - SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS, - SIMULATE_MAPPING_ADDITION, - SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING, - SIMULATE_IGNORED_FIELDS - ); + return Set.of(SIMULATE_IGNORED_FIELDS); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 7fbcd123013a..ab85b0b69791 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; import org.elasticsearch.indices.SystemIndices; @@ -81,7 +80,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { private static final Logger logger = LogManager.getLogger(TransportBulkAction.class); public static final String LAZY_ROLLOVER_ORIGIN = "lazy_rollover"; - private final FeatureService featureService; private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; private final OriginSettingClient rolloverClient; @@ -94,7 +92,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { TransportService transportService, ClusterService clusterService, IngestService ingestService, - FeatureService featureService, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -109,7 +106,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { transportService, clusterService, ingestService, - featureService, client, actionFilters, indexNameExpressionResolver, @@ -127,7 +123,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { TransportService transportService, ClusterService clusterService, IngestService ingestService, - FeatureService featureService, NodeClient client, 
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -145,7 +140,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { transportService, clusterService, ingestService, - featureService, client, actionFilters, indexNameExpressionResolver, @@ -165,7 +159,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { TransportService transportService, ClusterService clusterService, IngestService ingestService, - FeatureService featureService, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -191,7 +184,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { ); this.dataStreamFailureStoreSettings = dataStreamFailureStoreSettings; Objects.requireNonNull(relativeTimeProvider); - this.featureService = featureService; this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; this.rolloverClient = new OriginSettingClient(client, LAZY_ROLLOVER_ORIGIN); @@ -308,10 +300,6 @@ public class TransportBulkAction extends TransportAbstractBulkAction { index, projectState.metadata() ); - boolean lazyRolloverFeature = featureService.clusterHasFeature( - projectState.cluster(), - LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER - ); boolean lazyRolloverFailureStoreFeature = DataStream.isFailureStoreFeatureFlagEnabled(); Set indicesThatRequireAlias = new HashSet<>(); @@ -356,16 +344,12 @@ public class TransportBulkAction extends TransportAbstractBulkAction { } } // Determine which data streams and failure stores need to be rolled over. 
- if (lazyRolloverFeature) { - DataStream dataStream = projectState.metadata().dataStreams().get(request.index()); - if (dataStream != null) { - if (writeToFailureStore == false && dataStream.getBackingIndices().isRolloverOnWrite()) { - dataStreamsToBeRolledOver.add(request.index()); - } else if (lazyRolloverFailureStoreFeature - && writeToFailureStore - && dataStream.getFailureIndices().isRolloverOnWrite()) { - failureStoresToBeRolledOver.add(request.index()); - } + DataStream dataStream = projectState.metadata().dataStreams().get(request.index()); + if (dataStream != null) { + if (writeToFailureStore == false && dataStream.getBackingIndices().isRolloverOnWrite()) { + dataStreamsToBeRolledOver.add(request.index()); + } else if (lazyRolloverFailureStoreFeature && writeToFailureStore && dataStream.getFailureIndices().isRolloverOnWrite()) { + failureStoresToBeRolledOver.add(request.index()); } } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index 0f0352eb2bfa..1f336a5961ec 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -85,15 +85,6 @@ import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.fi * shards are not actually modified). 
*/ public class TransportSimulateBulkAction extends TransportAbstractBulkAction { - public static final NodeFeature SIMULATE_MAPPING_VALIDATION = new NodeFeature("simulate.mapping.validation", true); - public static final NodeFeature SIMULATE_MAPPING_VALIDATION_TEMPLATES = new NodeFeature("simulate.mapping.validation.templates", true); - public static final NodeFeature SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS = new NodeFeature( - "simulate.component.template.substitutions", - true - ); - public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions", true); - public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition", true); - public static final NodeFeature SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING = new NodeFeature("simulate.support.non.template.mapping", true); public static final NodeFeature SIMULATE_IGNORED_FIELDS = new NodeFeature("simulate.ignored.fields"); private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index 650a72a86e7c..beea9ef07c86 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -23,8 +23,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.Index; import java.util.List; @@ -43,8 +41,6 @@ public class DataStreamAutoShardingService { private static final Logger logger = 
LogManager.getLogger(DataStreamAutoShardingService.class); public static final String DATA_STREAMS_AUTO_SHARDING_ENABLED = "data_streams.auto_sharding.enabled"; - public static final NodeFeature DATA_STREAM_AUTO_SHARDING_FEATURE = new NodeFeature("data_stream.auto_sharding", true); - public static final Setting> DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING = Setting.listSetting( "data_streams.auto_sharding.excludes", List.of(), @@ -101,7 +97,6 @@ public class DataStreamAutoShardingService { ); private final ClusterService clusterService; private final boolean isAutoShardingEnabled; - private final FeatureService featureService; private final LongSupplier nowSupplier; private volatile TimeValue increaseShardsCooldown; private volatile TimeValue reduceShardsCooldown; @@ -109,12 +104,7 @@ public class DataStreamAutoShardingService { private volatile int maxWriteThreads; private volatile List dataStreamExcludePatterns; - public DataStreamAutoShardingService( - Settings settings, - ClusterService clusterService, - FeatureService featureService, - LongSupplier nowSupplier - ) { + public DataStreamAutoShardingService(Settings settings, ClusterService clusterService, LongSupplier nowSupplier) { this.clusterService = clusterService; this.isAutoShardingEnabled = settings.getAsBoolean(DATA_STREAMS_AUTO_SHARDING_ENABLED, false); this.increaseShardsCooldown = DATA_STREAMS_AUTO_SHARDING_INCREASE_SHARDS_COOLDOWN.get(settings); @@ -122,7 +112,6 @@ public class DataStreamAutoShardingService { this.minWriteThreads = CLUSTER_AUTO_SHARDING_MIN_WRITE_THREADS.get(settings); this.maxWriteThreads = CLUSTER_AUTO_SHARDING_MAX_WRITE_THREADS.get(settings); this.dataStreamExcludePatterns = DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.get(settings); - this.featureService = featureService; this.nowSupplier = nowSupplier; } @@ -167,15 +156,6 @@ public class DataStreamAutoShardingService { return NOT_APPLICABLE_RESULT; } - if (featureService.clusterHasFeature(state.cluster(), 
DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE) == false) { - logger.debug( - "Data stream auto sharding service cannot compute the optimal number of shards for data stream [{}] because the cluster " - + "doesn't have the auto sharding feature", - dataStream.getName() - ); - return NOT_APPLICABLE_RESULT; - } - if (dataStreamExcludePatterns.stream().anyMatch(pattern -> Regex.simpleMatch(pattern, dataStream.getName()))) { logger.debug( "Data stream [{}] is excluded from auto sharding via the [{}] setting", diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 80ffd305bad5..73e6a0306247 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -231,7 +231,7 @@ abstract class AbstractSearchAsyncAction exten } @Override - public final void run() { + protected final void run() { for (final SearchShardIterator iterator : toSkipShardsIts) { assert iterator.skip(); skipShard(iterator); @@ -286,7 +286,7 @@ abstract class AbstractSearchAsyncAction exten return true; } - protected void performPhaseOnShard(final int shardIndex, final SearchShardIterator shardIt, final SearchShardTarget shard) { + private void performPhaseOnShard(final int shardIndex, final SearchShardIterator shardIt, final SearchShardTarget shard) { if (throttleConcurrentRequests) { var pendingExecutions = pendingExecutionsPerNode.computeIfAbsent( shard.getNodeId(), @@ -349,7 +349,7 @@ abstract class AbstractSearchAsyncAction exten * of the next phase. If there are no successful operations in the context when this method is executed the search is aborted and * a response is returned to the user indicating that all shards have failed. 
*/ - protected void executeNextPhase(SearchPhase currentPhase, Supplier nextPhaseSupplier) { + protected void executeNextPhase(String currentPhase, Supplier nextPhaseSupplier) { /* This is the main search phase transition where we move to the next phase. If all shards * failed or if there was a failure and partial results are not allowed, then we immediately * fail. Otherwise we continue to the next phase. @@ -360,7 +360,7 @@ abstract class AbstractSearchAsyncAction exten Throwable cause = shardSearchFailures.length == 0 ? null : ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0]; - logger.debug(() -> "All shards failed for phase: [" + currentPhase.getName() + "]", cause); + logger.debug(() -> "All shards failed for phase: [" + currentPhase + "]", cause); onPhaseFailure(currentPhase, "all shards failed", cause); } else { Boolean allowPartialResults = request.allowPartialSearchResults(); @@ -373,7 +373,7 @@ abstract class AbstractSearchAsyncAction exten int numShardFailures = shardSearchFailures.length; shardSearchFailures = ExceptionsHelper.groupBy(shardSearchFailures); Throwable cause = ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0]; - logger.debug(() -> format("%s shards failed for phase: [%s]", numShardFailures, currentPhase.getName()), cause); + logger.debug(() -> format("%s shards failed for phase: [%s]", numShardFailures, currentPhase), cause); } onPhaseFailure(currentPhase, "Partial shards failure", null); } else { @@ -386,7 +386,7 @@ abstract class AbstractSearchAsyncAction exten successfulOps.get(), skippedOps.get(), getNumShards(), - currentPhase.getName() + currentPhase ); } onPhaseFailure(currentPhase, "Partial shards failure (" + discrepancy + " shards unavailable)", null); @@ -400,7 +400,7 @@ abstract class AbstractSearchAsyncAction exten .collect(Collectors.joining(",")); logger.trace( "[{}] Moving to next phase: [{}], based on results from: {} (cluster state version: {})", - 
currentPhase.getName(), + currentPhase, nextPhase.getName(), resultsFrom, clusterStateVersion @@ -413,11 +413,11 @@ abstract class AbstractSearchAsyncAction exten private void executePhase(SearchPhase phase) { try { phase.run(); - } catch (Exception e) { + } catch (RuntimeException e) { if (logger.isDebugEnabled()) { logger.debug(() -> format("Failed to execute [%s] while moving to [%s] phase", request, phase.getName()), e); } - onPhaseFailure(phase, "", e); + onPhaseFailure(phase.getName(), "", e); } } @@ -693,8 +693,8 @@ abstract class AbstractSearchAsyncAction exten * @param msg an optional message * @param cause the cause of the phase failure */ - public void onPhaseFailure(SearchPhase phase, String msg, Throwable cause) { - raisePhaseFailure(new SearchPhaseExecutionException(phase.getName(), msg, cause, buildShardFailures())); + public void onPhaseFailure(String phase, String msg, Throwable cause) { + raisePhaseFailure(new SearchPhaseExecutionException(phase, msg, cause, buildShardFailures())); } /** @@ -739,7 +739,7 @@ abstract class AbstractSearchAsyncAction exten * @see #onShardResult(SearchPhaseResult, SearchShardIterator) */ private void onPhaseDone() { // as a tribute to @kimchy aka. 
finishHim() - executeNextPhase(this, this::getNextPhase); + executeNextPhase(getName(), this::getNextPhase); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index cc8c4becea9a..faeb552530e4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -39,6 +39,9 @@ import java.util.function.Function; * @see CountedCollector#onFailure(int, SearchShardTarget, Exception) */ final class DfsQueryPhase extends SearchPhase { + + public static final String NAME = "dfs_query"; + private final SearchPhaseResults queryResult; private final List searchResults; private final AggregatedDfs dfs; @@ -56,7 +59,7 @@ final class DfsQueryPhase extends SearchPhase { Function, SearchPhase> nextPhaseFactory, AbstractSearchAsyncAction context ) { - super("dfs_query"); + super(NAME); this.progressListener = context.getTask().getProgressListener(); this.queryResult = queryResult; this.searchResults = searchResults; @@ -68,13 +71,13 @@ final class DfsQueryPhase extends SearchPhase { } @Override - public void run() { + protected void run() { // TODO we can potentially also consume the actual per shard results from the initial phase here in the aggregateDfs // to free up memory early final CountedCollector counter = new CountedCollector<>( queryResult, searchResults.size(), - () -> context.executeNextPhase(this, () -> nextPhaseFactory.apply(queryResult)), + () -> context.executeNextPhase(NAME, () -> nextPhaseFactory.apply(queryResult)), context ); @@ -106,7 +109,7 @@ final class DfsQueryPhase extends SearchPhase { response.setSearchProfileDfsPhaseResult(dfsResult.searchProfileDfsPhaseResult()); counter.onResult(response); } catch (Exception e) { - context.onPhaseFailure(DfsQueryPhase.this, "", e); + context.onPhaseFailure(NAME, "", e); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index e8d94c32bdcc..b0b3f1526592 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -31,12 +31,15 @@ import java.util.function.Supplier; * forwards to the next phase immediately. */ final class ExpandSearchPhase extends SearchPhase { + + static final String NAME = "expand"; + private final AbstractSearchAsyncAction context; private final SearchHits searchHits; private final Supplier nextPhase; ExpandSearchPhase(AbstractSearchAsyncAction context, SearchHits searchHits, Supplier nextPhase) { - super("expand"); + super(NAME); this.context = context; this.searchHits = searchHits; this.nextPhase = nextPhase; @@ -51,7 +54,7 @@ final class ExpandSearchPhase extends SearchPhase { } @Override - public void run() { + protected void run() { if (isCollapseRequest() == false || searchHits.getHits().length == 0) { onPhaseDone(); } else { @@ -123,7 +126,7 @@ final class ExpandSearchPhase extends SearchPhase { } private void phaseFailure(Exception ex) { - context.onPhaseFailure(this, "failed to expand hits", ex); + context.onPhaseFailure(NAME, "failed to expand hits", ex); } private static SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder options, CollapseBuilder innerCollapseBuilder) { @@ -168,6 +171,6 @@ final class ExpandSearchPhase extends SearchPhase { } private void onPhaseDone() { - context.executeNextPhase(this, nextPhase); + context.executeNextPhase(NAME, nextPhase); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java index d8671bcadf86..2e98d5019649 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java @@ -33,6 +33,9 @@ import java.util.stream.Collectors; * @see org.elasticsearch.index.mapper.LookupRuntimeFieldType */ final class FetchLookupFieldsPhase extends SearchPhase { + + static final String NAME = "fetch_lookup_fields"; + private final AbstractSearchAsyncAction context; private final SearchResponseSections searchResponse; private final AtomicArray queryResults; @@ -42,7 +45,7 @@ final class FetchLookupFieldsPhase extends SearchPhase { SearchResponseSections searchResponse, AtomicArray queryResults ) { - super("fetch_lookup_fields"); + super(NAME); this.context = context; this.searchResponse = searchResponse; this.queryResults = queryResults; @@ -74,7 +77,7 @@ final class FetchLookupFieldsPhase extends SearchPhase { } @Override - public void run() { + protected void run() { final List clusters = groupLookupFieldsByClusterAlias(searchResponse.hits); if (clusters.isEmpty()) { context.sendSearchResponse(searchResponse, queryResults); @@ -129,7 +132,7 @@ final class FetchLookupFieldsPhase extends SearchPhase { } } if (failure != null) { - context.onPhaseFailure(FetchLookupFieldsPhase.this, "failed to fetch lookup fields", failure); + context.onPhaseFailure(NAME, "failed to fetch lookup fields", failure); } else { context.sendSearchResponse(searchResponse, queryResults); } @@ -137,7 +140,7 @@ final class FetchLookupFieldsPhase extends SearchPhase { @Override public void onFailure(Exception e) { - context.onPhaseFailure(FetchLookupFieldsPhase.this, "failed to fetch lookup fields", e); + context.onPhaseFailure(NAME, "failed to fetch lookup fields", e); } }); } diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 8568b6091676..119cfcab7610 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -34,6 +34,9 @@ import java.util.function.BiFunction; * Then it reaches out to all relevant shards to fetch the topN hits. */ final class FetchSearchPhase extends SearchPhase { + + static final String NAME = "fetch"; + private final AtomicArray searchPhaseShardResults; private final BiFunction, SearchPhase> nextPhaseFactory; private final AbstractSearchAsyncAction context; @@ -70,7 +73,7 @@ final class FetchSearchPhase extends SearchPhase { @Nullable SearchPhaseController.ReducedQueryPhase reducedQueryPhase, BiFunction, SearchPhase> nextPhaseFactory ) { - super("fetch"); + super(NAME); if (context.getNumShards() != resultConsumer.getNumShards()) { throw new IllegalStateException( "number of shards must match the length of the query results but doesn't:" @@ -90,7 +93,7 @@ final class FetchSearchPhase extends SearchPhase { } @Override - public void run() { + protected void run() { context.execute(new AbstractRunnable() { @Override @@ -100,7 +103,7 @@ final class FetchSearchPhase extends SearchPhase { @Override public void onFailure(Exception e) { - context.onPhaseFailure(FetchSearchPhase.this, "", e); + context.onPhaseFailure(NAME, "", e); } }); } @@ -222,7 +225,7 @@ final class FetchSearchPhase extends SearchPhase { progressListener.notifyFetchResult(shardIndex); counter.onResult(result); } catch (Exception e) { - context.onPhaseFailure(FetchSearchPhase.this, "", e); + context.onPhaseFailure(NAME, "", e); } } @@ -269,7 +272,7 @@ final class FetchSearchPhase extends SearchPhase { AtomicArray fetchResultsArr, SearchPhaseController.ReducedQueryPhase reducedQueryPhase ) { - context.executeNextPhase(this, () -> { + context.executeNextPhase(NAME, () -> { var resp = SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr); context.addReleasable(resp); return nextPhaseFactory.apply(resp, searchPhaseShardResults); diff --git 
a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java index 199228c9f992..e9302883457e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java @@ -37,6 +37,8 @@ import java.util.List; */ public class RankFeaturePhase extends SearchPhase { + static final String NAME = "rank-feature"; + private static final Logger logger = LogManager.getLogger(RankFeaturePhase.class); private final AbstractSearchAsyncAction context; final SearchPhaseResults queryPhaseResults; @@ -51,7 +53,7 @@ public class RankFeaturePhase extends SearchPhase { AbstractSearchAsyncAction context, RankFeaturePhaseRankCoordinatorContext rankFeaturePhaseRankCoordinatorContext ) { - super("rank-feature"); + super(NAME); assert rankFeaturePhaseRankCoordinatorContext != null; this.rankFeaturePhaseRankCoordinatorContext = rankFeaturePhaseRankCoordinatorContext; if (context.getNumShards() != queryPhaseResults.getNumShards()) { @@ -71,7 +73,7 @@ public class RankFeaturePhase extends SearchPhase { } @Override - public void run() { + protected void run() { context.execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { @@ -84,7 +86,7 @@ public class RankFeaturePhase extends SearchPhase { @Override public void onFailure(Exception e) { - context.onPhaseFailure(RankFeaturePhase.this, "", e); + context.onPhaseFailure(NAME, "", e); } }); } @@ -139,7 +141,7 @@ public class RankFeaturePhase extends SearchPhase { progressListener.notifyRankFeatureResult(shardIndex); rankRequestCounter.onResult(response); } catch (Exception e) { - context.onPhaseFailure(RankFeaturePhase.this, "", e); + context.onPhaseFailure(NAME, "", e); } } @@ -194,7 +196,7 @@ public class RankFeaturePhase extends SearchPhase { @Override public void onFailure(Exception e) { - 
context.onPhaseFailure(RankFeaturePhase.this, "Computing updated ranks for results failed", e); + context.onPhaseFailure(NAME, "Computing updated ranks for results failed", e); } } ); @@ -239,6 +241,6 @@ public class RankFeaturePhase extends SearchPhase { } void moveToNextPhase(SearchPhaseResults phaseResults, SearchPhaseController.ReducedQueryPhase reducedQueryPhase) { - context.executeNextPhase(this, () -> new FetchSearchPhase(phaseResults, aggregatedDfs, context, reducedQueryPhase)); + context.executeNextPhase(NAME, () -> new FetchSearchPhase(phaseResults, aggregatedDfs, context, reducedQueryPhase)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 25d59a06664d..5c5c47b5fcc4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -10,6 +10,13 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TermStatistics; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -17,12 +24,16 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.dfs.AggregatedDfs; import 
org.elasticsearch.search.dfs.DfsKnnResults; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; @@ -93,12 +104,11 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction @Override protected SearchPhase getNextPhase() { final List dfsSearchResults = results.getAtomicArray().asList(); - final AggregatedDfs aggregatedDfs = SearchPhaseController.aggregateDfs(dfsSearchResults); - final List mergedKnnResults = SearchPhaseController.mergeKnnResults(getRequest(), dfsSearchResults); + final AggregatedDfs aggregatedDfs = aggregateDfs(dfsSearchResults); return new DfsQueryPhase( dfsSearchResults, aggregatedDfs, - mergedKnnResults, + mergeKnnResults(getRequest(), dfsSearchResults), queryPhaseResultConsumer, (queryResults) -> SearchQueryThenFetchAsyncAction.nextPhase(client, this, queryResults, aggregatedDfs), this @@ -109,4 +119,95 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction protected void onShardGroupFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) { progressListener.notifyQueryFailure(shardIndex, shardTarget, exc); } + + private static List mergeKnnResults(SearchRequest request, List dfsSearchResults) { + if (request.hasKnnSearch() == false) { + return null; + } + SearchSourceBuilder source = request.source(); + List> topDocsLists = new ArrayList<>(source.knnSearch().size()); + List> nestedPath = new ArrayList<>(source.knnSearch().size()); + for (int i = 0; i < source.knnSearch().size(); i++) { + topDocsLists.add(new ArrayList<>()); + nestedPath.add(new SetOnce<>()); + } + + for (DfsSearchResult dfsSearchResult : dfsSearchResults) { + if (dfsSearchResult.knnResults() != null) { + for (int i = 0; i < 
dfsSearchResult.knnResults().size(); i++) { + DfsKnnResults knnResults = dfsSearchResult.knnResults().get(i); + ScoreDoc[] scoreDocs = knnResults.scoreDocs(); + TotalHits totalHits = new TotalHits(scoreDocs.length, TotalHits.Relation.EQUAL_TO); + TopDocs shardTopDocs = new TopDocs(totalHits, scoreDocs); + SearchPhaseController.setShardIndex(shardTopDocs, dfsSearchResult.getShardIndex()); + topDocsLists.get(i).add(shardTopDocs); + nestedPath.get(i).trySet(knnResults.getNestedPath()); + } + } + } + + List mergedResults = new ArrayList<>(source.knnSearch().size()); + for (int i = 0; i < source.knnSearch().size(); i++) { + TopDocs mergedTopDocs = TopDocs.merge(source.knnSearch().get(i).k(), topDocsLists.get(i).toArray(new TopDocs[0])); + mergedResults.add(new DfsKnnResults(nestedPath.get(i).get(), mergedTopDocs.scoreDocs)); + } + return mergedResults; + } + + private static AggregatedDfs aggregateDfs(Collection results) { + Map termStatistics = new HashMap<>(); + Map fieldStatistics = new HashMap<>(); + long aggMaxDoc = 0; + for (DfsSearchResult lEntry : results) { + final Term[] terms = lEntry.terms(); + final TermStatistics[] stats = lEntry.termStatistics(); + assert terms.length == stats.length; + for (int i = 0; i < terms.length; i++) { + assert terms[i] != null; + if (stats[i] == null) { + continue; + } + TermStatistics existing = termStatistics.get(terms[i]); + if (existing != null) { + assert terms[i].bytes().equals(existing.term()); + termStatistics.put( + terms[i], + new TermStatistics( + existing.term(), + existing.docFreq() + stats[i].docFreq(), + existing.totalTermFreq() + stats[i].totalTermFreq() + ) + ); + } else { + termStatistics.put(terms[i], stats[i]); + } + + } + + assert lEntry.fieldStatistics().containsKey(null) == false; + for (var entry : lEntry.fieldStatistics().entrySet()) { + String key = entry.getKey(); + CollectionStatistics value = entry.getValue(); + if (value == null) { + continue; + } + assert key != null; + CollectionStatistics existing 
= fieldStatistics.get(key); + if (existing != null) { + CollectionStatistics merged = new CollectionStatistics( + key, + existing.maxDoc() + value.maxDoc(), + existing.docCount() + value.docCount(), + existing.sumTotalTermFreq() + value.sumTotalTermFreq(), + existing.sumDocFreq() + value.sumDocFreq() + ); + fieldStatistics.put(key, merged); + } else { + fieldStatistics.put(key, value); + } + } + aggMaxDoc += lEntry.maxDoc(); + } + return new AggregatedDfs(termStatistics, fieldStatistics, aggMaxDoc); + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index 7d849a72abf9..702369dc3839 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -9,25 +9,25 @@ package org.elasticsearch.action.search; import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.transport.Transport; -import java.io.IOException; import java.util.Objects; import java.util.function.Function; /** * Base class for all individual search phases like collecting distributed frequencies, fetching documents, querying shards. */ -abstract class SearchPhase implements CheckedRunnable { +abstract class SearchPhase { private final String name; protected SearchPhase(String name) { this.name = Objects.requireNonNull(name, "name must not be null"); } + protected abstract void run(); + /** * Returns the phases name. 
*/ diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 69e7fba4dd0d..f8736ab79690 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -9,20 +9,16 @@ package org.elasticsearch.action.search; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; -import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.TotalHits.Relation; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.lucene.Lucene; @@ -42,9 +38,6 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.dfs.AggregatedDfs; -import org.elasticsearch.search.dfs.DfsKnnResults; -import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; @@ -84,97 +77,6 @@ public final class SearchPhaseController { this.requestToAggReduceContextBuilder = 
requestToAggReduceContextBuilder; } - public static AggregatedDfs aggregateDfs(Collection results) { - Map termStatistics = new HashMap<>(); - Map fieldStatistics = new HashMap<>(); - long aggMaxDoc = 0; - for (DfsSearchResult lEntry : results) { - final Term[] terms = lEntry.terms(); - final TermStatistics[] stats = lEntry.termStatistics(); - assert terms.length == stats.length; - for (int i = 0; i < terms.length; i++) { - assert terms[i] != null; - if (stats[i] == null) { - continue; - } - TermStatistics existing = termStatistics.get(terms[i]); - if (existing != null) { - assert terms[i].bytes().equals(existing.term()); - termStatistics.put( - terms[i], - new TermStatistics( - existing.term(), - existing.docFreq() + stats[i].docFreq(), - existing.totalTermFreq() + stats[i].totalTermFreq() - ) - ); - } else { - termStatistics.put(terms[i], stats[i]); - } - - } - - assert lEntry.fieldStatistics().containsKey(null) == false; - for (var entry : lEntry.fieldStatistics().entrySet()) { - String key = entry.getKey(); - CollectionStatistics value = entry.getValue(); - if (value == null) { - continue; - } - assert key != null; - CollectionStatistics existing = fieldStatistics.get(key); - if (existing != null) { - CollectionStatistics merged = new CollectionStatistics( - key, - existing.maxDoc() + value.maxDoc(), - existing.docCount() + value.docCount(), - existing.sumTotalTermFreq() + value.sumTotalTermFreq(), - existing.sumDocFreq() + value.sumDocFreq() - ); - fieldStatistics.put(key, merged); - } else { - fieldStatistics.put(key, value); - } - } - aggMaxDoc += lEntry.maxDoc(); - } - return new AggregatedDfs(termStatistics, fieldStatistics, aggMaxDoc); - } - - public static List mergeKnnResults(SearchRequest request, List dfsSearchResults) { - if (request.hasKnnSearch() == false) { - return null; - } - SearchSourceBuilder source = request.source(); - List> topDocsLists = new ArrayList<>(source.knnSearch().size()); - List> nestedPath = new 
ArrayList<>(source.knnSearch().size()); - for (int i = 0; i < source.knnSearch().size(); i++) { - topDocsLists.add(new ArrayList<>()); - nestedPath.add(new SetOnce<>()); - } - - for (DfsSearchResult dfsSearchResult : dfsSearchResults) { - if (dfsSearchResult.knnResults() != null) { - for (int i = 0; i < dfsSearchResult.knnResults().size(); i++) { - DfsKnnResults knnResults = dfsSearchResult.knnResults().get(i); - ScoreDoc[] scoreDocs = knnResults.scoreDocs(); - TotalHits totalHits = new TotalHits(scoreDocs.length, Relation.EQUAL_TO); - TopDocs shardTopDocs = new TopDocs(totalHits, scoreDocs); - setShardIndex(shardTopDocs, dfsSearchResult.getShardIndex()); - topDocsLists.get(i).add(shardTopDocs); - nestedPath.get(i).trySet(knnResults.getNestedPath()); - } - } - } - - List mergedResults = new ArrayList<>(source.knnSearch().size()); - for (int i = 0; i < source.knnSearch().size(); i++) { - TopDocs mergedTopDocs = TopDocs.merge(source.knnSearch().get(i).k(), topDocsLists.get(i).toArray(new TopDocs[0])); - mergedResults.add(new DfsKnnResults(nestedPath.get(i).get(), mergedTopDocs.scoreDocs)); - } - return mergedResults; - } - /** * Returns a score doc array of top N search docs across all shards, followed by top suggest docs for each * named completion suggestion across all shards. If more than one named completion suggestion is specified in the @@ -496,38 +398,6 @@ public final class SearchPhaseController { ); } - /** - * Reduces the given query results and consumes all aggregations and profile results. 
- * @param queryResults a list of non-null query shard results - */ - static ReducedQueryPhase reducedScrollQueryPhase(Collection queryResults) { - AggregationReduceContext.Builder aggReduceContextBuilder = new AggregationReduceContext.Builder() { - @Override - public AggregationReduceContext forPartialReduction() { - throw new UnsupportedOperationException("Scroll requests don't have aggs"); - } - - @Override - public AggregationReduceContext forFinalReduction() { - throw new UnsupportedOperationException("Scroll requests don't have aggs"); - } - }; - final TopDocsStats topDocsStats = new TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE); - final List topDocs = new ArrayList<>(); - for (SearchPhaseResult sortedResult : queryResults) { - QuerySearchResult queryResult = sortedResult.queryResult(); - final TopDocsAndMaxScore td = queryResult.consumeTopDocs(); - assert td != null; - topDocsStats.add(td, queryResult.searchTimedOut(), queryResult.terminatedEarly()); - // make sure we set the shard index before we add it - the consumer didn't do that yet - if (td.topDocs.scoreDocs.length > 0) { - setShardIndex(td.topDocs, queryResult.getShardIndex()); - topDocs.add(td.topDocs); - } - } - return reducedQueryPhase(queryResults, null, topDocs, topDocsStats, 0, true, aggReduceContextBuilder, null, true); - } - /** * Reduces the given query results and consumes all aggregations and profile results. 
* @param queryResults a list of non-null query shard results diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index 2231f791384f..53da76d96e40 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -10,21 +10,27 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.TopDocs; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.internal.InternalScrollSearchRequest; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -40,7 +46,7 @@ import static org.elasticsearch.core.Strings.format; * fan out to nodes and execute the query part of the scroll request. Subclasses can for instance * run separate fetch phases etc. 
*/ -abstract class SearchScrollAsyncAction implements Runnable { +abstract class SearchScrollAsyncAction { protected final Logger logger; protected final ActionListener listener; protected final ParsedScrollId scrollId; @@ -229,7 +235,7 @@ abstract class SearchScrollAsyncAction implements R ) { return new SearchPhase("fetch") { @Override - public void run() { + protected void run() { sendResponse(queryPhase, fetchResults); } }; @@ -301,4 +307,48 @@ abstract class SearchScrollAsyncAction implements R protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { return searchTransportService.getConnection(clusterAlias, node); } + + /** + * Reduces the given query results and consumes all aggregations and profile results. + * @param queryResults a list of non-null query shard results + */ + protected static SearchPhaseController.ReducedQueryPhase reducedScrollQueryPhase(Collection queryResults) { + AggregationReduceContext.Builder aggReduceContextBuilder = new AggregationReduceContext.Builder() { + @Override + public AggregationReduceContext forPartialReduction() { + throw new UnsupportedOperationException("Scroll requests don't have aggs"); + } + + @Override + public AggregationReduceContext forFinalReduction() { + throw new UnsupportedOperationException("Scroll requests don't have aggs"); + } + }; + final SearchPhaseController.TopDocsStats topDocsStats = new SearchPhaseController.TopDocsStats( + SearchContext.TRACK_TOTAL_HITS_ACCURATE + ); + final List topDocs = new ArrayList<>(); + for (SearchPhaseResult sortedResult : queryResults) { + QuerySearchResult queryResult = sortedResult.queryResult(); + final TopDocsAndMaxScore td = queryResult.consumeTopDocs(); + assert td != null; + topDocsStats.add(td, queryResult.searchTimedOut(), queryResult.terminatedEarly()); + // make sure we set the shard index before we add it - the consumer didn't do that yet + if (td.topDocs.scoreDocs.length > 0) { + SearchPhaseController.setShardIndex(td.topDocs, 
queryResult.getShardIndex()); + topDocs.add(td.topDocs); + } + } + return SearchPhaseController.reducedQueryPhase( + queryResults, + null, + topDocs, + topDocsStats, + 0, + true, + aggReduceContextBuilder, + null, + true + ); + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java index 7b5ba21c8022..ba14b5bcd2cb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java @@ -51,7 +51,7 @@ final class SearchScrollQueryAndFetchAsyncAction extends SearchScrollAsyncAction @Override protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { - return sendResponsePhase(SearchPhaseController.reducedScrollQueryPhase(queryFetchResults.asList()), queryFetchResults); + return sendResponsePhase(reducedScrollQueryPhase(queryFetchResults.asList()), queryFetchResults); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index 8c33e3ca7da4..29822e596356 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -65,10 +65,8 @@ final class SearchScrollQueryThenFetchAsyncAction extends SearchScrollAsyncActio protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { return new SearchPhase("fetch") { @Override - public void run() { - final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = SearchPhaseController.reducedScrollQueryPhase( - queryResults.asList() - ); + protected void run() { + final SearchPhaseController.ReducedQueryPhase 
reducedQueryPhase = reducedScrollQueryPhase(queryResults.asList()); ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs().scoreDocs(); if (scoreDocs.length == 0) { sendResponse(reducedQueryPhase, fetchResults); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 36ca0fba9437..6c95a3c8fd43 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -270,7 +270,7 @@ public class TransportOpenPointInTimeAction extends HandledTransportAction new SearchScrollQueryThenFetchAsyncAction( logger, clusterService, diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 943b03588d4b..634a103e9754 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import java.io.IOException; @@ -27,11 +28,22 @@ public abstract class ClusterInfoRequest listener) { @@ -349,8 +349,8 @@ public class IndicesAdminClient implements ElasticsearchClient { execute(GetIndexAction.INSTANCE, request, listener); } - public GetIndexRequestBuilder prepareGetIndex() { - return new GetIndexRequestBuilder(this); + public GetIndexRequestBuilder prepareGetIndex(TimeValue masterTimeout) { + return new GetIndexRequestBuilder(this, masterTimeout); } public ActionFuture 
clearCache(final ClearIndicesCacheRequest request) { diff --git a/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java b/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java index 9e3497601fb5..f632ba33f772 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java @@ -47,7 +47,8 @@ public interface RemoteClusterClient { /** * Obtain a connection to the remote cluster for use with the {@link #execute} override that allows to specify the connection. Useful - * for cases where you need to inspect {@link Transport.Connection#getVersion} before deciding the exact remote action to invoke. + * for cases where you need to inspect {@link Transport.Connection#getTransportVersion} before deciding the exact remote action to + * invoke. */ void getConnection(@Nullable Request request, ActionListener listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetention.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetention.java index 673960c71339..17267525d4bd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetention.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetention.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.NodeFeature; import java.io.IOException; @@ -23,7 +22,6 @@ import java.io.IOException; */ public record DataStreamGlobalRetention(@Nullable TimeValue defaultRetention, @Nullable TimeValue maxRetention) implements Writeable { - public static final NodeFeature GLOBAL_RETENTION = new NodeFeature("data_stream.lifecycle.global_retention", true); public 
static final TimeValue MIN_RETENTION_VALUE = TimeValue.timeValueSeconds(10); /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index 353f17fe0e00..8366083b1907 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -46,9 +45,6 @@ import static org.elasticsearch.node.NodeRoleSettings.NODE_ROLES_SETTING; public final class DesiredNode implements Writeable, ToXContentObject, Comparable { - public static final NodeFeature RANGE_FLOAT_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.range_float_processors"); - public static final NodeFeature DESIRED_NODE_VERSION_DEPRECATED = new NodeFeature("desired_node.version_deprecated", true); - public static final TransportVersion RANGE_FLOAT_PROCESSORS_SUPPORT_TRANSPORT_VERSION = TransportVersions.V_8_3_0; private static final ParseField SETTINGS_FIELD = new ParseField("settings"); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 1e76d7965e36..4fa3ed1bb0d5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -72,7 +72,6 @@ import org.elasticsearch.index.IndexVersions; import 
org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.indices.IndexCreationException; import org.elasticsearch.indices.IndicesService; @@ -245,6 +244,9 @@ public class MetadataCreateIndexService { * Validate the name for an index or alias against some static rules. */ public static void validateIndexOrAliasName(String index, BiFunction exceptionCtor) { + if (index == null || index.isEmpty()) { + throw exceptionCtor.apply(index, "must not be empty"); + } if (Strings.validFileName(index) == false) { throw exceptionCtor.apply(index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); } @@ -1657,7 +1659,7 @@ public class MetadataCreateIndexService { private static final Set UNMODIFIABLE_SETTINGS_DURING_RESIZE = Set.of( IndexSettings.MODE.getKey(), - SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), + IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java deleted file mode 100644 index 49bd38330e3a..000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.metadata; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -public class MetadataFeatures implements FeatureSpecification { - @Override - public Set getFeatures() { - return Set.of(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java index e38cd677991f..24b14a46c878 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java @@ -54,8 +54,6 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.expectValueT */ public abstract class IndexRouting { - static final NodeFeature BOOLEAN_ROUTING_PATH = new NodeFeature("routing.boolean_routing_path", true); - static final NodeFeature MULTI_VALUE_ROUTING_PATH = new NodeFeature("routing.multi_value_routing_path", true); static final NodeFeature LOGSB_ROUTE_ON_SORT_FIELDS = new NodeFeature("routing.logsb_route_on_sort_fields"); /** diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingFeatures.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingFeatures.java index 1545fdf90d11..461ac50e1efc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingFeatures.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingFeatures.java @@ -18,7 +18,7 @@ public class RoutingFeatures implements FeatureSpecification { @Override 
public Set getFeatures() { - return Set.of(IndexRouting.BOOLEAN_ROUTING_PATH, IndexRouting.MULTI_VALUE_ROUTING_PATH); + return Set.of(); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 7b2514868530..5d01775b43a5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -277,8 +277,7 @@ public class AllocationService { } /** - * unassigned an shards that are associated with nodes that are no longer part of the cluster, potentially promoting replicas - * if needed. + * Unassign any shards that are associated with nodes that are no longer part of the cluster, potentially promoting replicas if needed. */ public ClusterState disassociateDeadNodes(ClusterState clusterState, boolean reroute, String reason) { RoutingAllocation allocation = createRoutingAllocation(clusterState, currentNanoTime()); @@ -290,7 +289,7 @@ public class AllocationService { clusterState = buildResultAndLogHealthChange(clusterState, allocation, reason); } if (reroute) { - return reroute(clusterState, reason, rerouteCompletionIsNotRequired());// this is not triggered by a user request + return reroute(clusterState, reason, rerouteCompletionIsNotRequired() /* this is not triggered by a user request */); } else { return clusterState; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java index 0745c45b2183..16cbf41ee1bf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java @@ -19,6 +19,10 @@ import 
java.util.Objects; /** * The desired balance of the cluster, indicating which nodes should hold a copy of each shard. * + * @param lastConvergedIndex Identifies what input data the balancer computation round used to produce this {@link DesiredBalance}. See + * {@link DesiredBalanceInput#index()} for details. Each reroute request in the same master term is assigned a + * strictly increasing sequence number. A new master term restarts the index values from zero. The balancer, + * which runs async to reroute, uses the latest request's data to compute the desired balance. * @param assignments a set of the (persistent) node IDs to which each {@link ShardId} should be allocated * @param weightsPerNode The node weights calculated based on * {@link org.elasticsearch.cluster.routing.allocation.allocator.WeightFunction#calculateNodeWeight} @@ -31,8 +35,11 @@ public record DesiredBalance( ) { enum ComputationFinishReason { + /** Computation ran to completion */ CONVERGED, + /** Computation exited and published early because a new cluster event occurred that affects computation */ YIELD_TO_NEW_INPUT, + /** Computation stopped and published early to avoid delaying new shard assignment */ STOP_EARLY } @@ -44,6 +51,7 @@ public record DesiredBalance( * The placeholder value for {@link DesiredBalance} when the node stands down as master. */ public static final DesiredBalance NOT_MASTER = new DesiredBalance(-2, Map.of()); + /** * The starting value for {@link DesiredBalance} when the node becomes the master. */ @@ -57,6 +65,10 @@ public record DesiredBalance( return Objects.equals(a.assignments, b.assignments) == false; } + /** + * Returns the sum of shard movements needed to reach the new desired balance. Doesn't count new shard copies as a move, nor removal or + * unassignment of a shard copy. 
+ */ public static int shardMovements(DesiredBalance old, DesiredBalance updated) { var intersection = Sets.intersection(old.assignments().keySet(), updated.assignments().keySet()); int movements = 0; @@ -70,8 +82,15 @@ public record DesiredBalance( return movements; } + /** + * Returns the number of shard movements needed to reach the new shard assignment. Doesn't count new shard copies as a move, nor removal + * or unassignment of a shard copy. + */ private static int shardMovements(ShardAssignment old, ShardAssignment updated) { - var movements = Math.min(0, old.assigned() - updated.assigned());// compensate newly started shards + // A shard move should retain the same number of assigned nodes, just swap out one node for another. We will compensate for newly + // started shards -- adding a shard copy is not a move -- by initializing the count with a negative value so that incrementing later + // for a new node zeros out. + var movements = Math.min(0, old.assigned() - updated.assigned()); for (String nodeId : updated.nodeIds()) { if (old.nodeIds().contains(nodeId) == false) { movements++; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java index 3b22221ea7db..03630c284fa3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java @@ -415,11 +415,14 @@ public class DesiredBalanceComputer { } private static Map collectShardAssignments(RoutingNodes routingNodes) { - final var entries = routingNodes.getAssignedShards().entrySet(); - assert entries.stream().flatMap(t -> t.getValue().stream()).allMatch(ShardRouting::started) : routingNodes; - final Map res = Maps.newHashMapWithExpectedSize(entries.size()); - for (var shardAndAssignments 
: entries) { - res.put(shardAndAssignments.getKey(), ShardAssignment.ofAssignedShards(shardAndAssignments.getValue())); + final var allAssignedShards = routingNodes.getAssignedShards().entrySet(); + assert allAssignedShards.stream().flatMap(t -> t.getValue().stream()).allMatch(ShardRouting::started) : routingNodes; + final Map res = Maps.newHashMapWithExpectedSize(allAssignedShards.size()); + for (var shardIdAndShardRoutings : allAssignedShards) { + res.put( + shardIdAndShardRoutings.getKey(), + ShardAssignment.createFromAssignedShardRoutingsList(shardIdAndShardRoutings.getValue()) + ); } return res; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index bf4900b8d04b..85beb1498d11 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -55,6 +55,10 @@ public class DesiredBalanceReconciler { private static final Logger logger = LogManager.getLogger(DesiredBalanceReconciler.class); + /** + * The minimum interval that log messages will be written if the number of undesired shard allocations reaches the percentage of total + * shards set by {@link #UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING}. + */ public static final Setting UNDESIRED_ALLOCATIONS_LOG_INTERVAL_SETTING = Setting.timeSetting( "cluster.routing.allocation.desired_balance.undesired_allocations.log_interval", TimeValue.timeValueHours(1), @@ -63,6 +67,10 @@ public class DesiredBalanceReconciler { Setting.Property.NodeScope ); + /** + * Warning log messages may be periodically written if the number of shards that are on undesired nodes reaches this percentage setting. 
+ * Works together with {@link #UNDESIRED_ALLOCATIONS_LOG_INTERVAL_SETTING} to log on a periodic basis. + */ public static final Setting UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING = Setting.doubleSetting( "cluster.routing.allocation.desired_balance.undesired_allocations.threshold", 0.1, @@ -97,6 +105,13 @@ public class DesiredBalanceReconciler { this.nodeAllocationStatsAndWeightsCalculator = nodeAllocationStatsAndWeightsCalculator; } + /** + * Applies a desired shard allocation to the routing table by initializing and relocating shards in the cluster state. + * + * @param desiredBalance The new desired cluster shard allocation + * @param allocation Cluster state information with which to make decisions, contains routing table metadata that will be modified to + * reach the given desired balance. + */ public void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { var nodeIds = allocation.routingNodes().getAllNodeIds(); allocationOrdering.retainNodes(nodeIds); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index dec2590915fb..c37a2e6aa7be 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.AllocationService.RerouteStrategy; import 
org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; @@ -56,11 +57,30 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { private final ShardsAllocator delegateAllocator; private final ThreadPool threadPool; + /** + * This is a callback to run {@link AllocationService#executeWithRoutingAllocation(ClusterState, String, RerouteStrategy)}, which + * produces a new ClusterState with the changes made by {@link DesiredBalanceReconciler#reconcile}. The {@link RerouteStrategy} provided + * to the callback calls into {@link #desiredBalanceReconciler} for the changes. The {@link #masterServiceTaskQueue} will publish the + * new cluster state after the cluster state is constructed by the {@link ReconcileDesiredBalanceExecutor}. + */ private final DesiredBalanceReconcilerAction reconciler; private final DesiredBalanceComputer desiredBalanceComputer; + /** + * Reconciliation ({@link DesiredBalanceReconciler#reconcile(DesiredBalance, RoutingAllocation)}) takes the {@link DesiredBalance} + * output of {@link DesiredBalanceComputer#compute} and identifies how shards need to be added, moved or removed to go from the current + * cluster shard allocation to the new desired allocation. + */ private final DesiredBalanceReconciler desiredBalanceReconciler; private final ContinuousComputation desiredBalanceComputation; - private final PendingListenersQueue queue; + /** + * Saves and runs listeners after DesiredBalance computations complete. + */ + private final PendingListenersQueue pendingListenersQueue; + /** + * Each reroute request gets assigned a monotonically increasing sequence number. Many reroute requests may arrive before the balancer + * asynchronously runs a computation. The balancer will use the latest request and save this sequence number to track back to the + * request. 
+ */ private final AtomicLong indexGenerator = new AtomicLong(-1); private final ConcurrentLinkedQueue> pendingDesiredBalanceMoves = new ConcurrentLinkedQueue<>(); private final MasterServiceTaskQueue masterServiceTaskQueue; @@ -199,7 +219,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { return "DesiredBalanceShardsAllocator#allocate"; } }; - this.queue = new PendingListenersQueue(); + this.pendingListenersQueue = new PendingListenersQueue(); this.masterServiceTaskQueue = clusterService.createTaskQueue( "reconcile-desired-balance", Priority.URGENT, @@ -235,7 +255,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { var index = indexGenerator.incrementAndGet(); logger.debug("Executing allocate for [{}]", index); - queue.add(index, listener); + pendingListenersQueue.add(index, listener); // This can only run on master, so unset not-master if exists if (currentDesiredBalanceRef.compareAndSet(DesiredBalance.NOT_MASTER, DesiredBalance.BECOME_MASTER_INITIAL)) { logger.debug("initialized desired balance for becoming master"); @@ -378,7 +398,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { private void onNoLongerMaster() { if (indexGenerator.getAndSet(-1) != -1) { currentDesiredBalanceRef.set(DesiredBalance.NOT_MASTER); - queue.completeAllAsNotMaster(); + pendingListenersQueue.completeAllAsNotMaster(); pendingDesiredBalanceMoves.clear(); desiredBalanceReconciler.clear(); desiredBalanceMetrics.zeroAllMetrics(); @@ -428,7 +448,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { batchExecutionContext.initialState(), createReconcileAllocationAction(latest.getTask().desiredBalance) ); - latest.success(() -> queue.complete(latest.getTask().desiredBalance.lastConvergedIndex())); + latest.success(() -> pendingListenersQueue.complete(latest.getTask().desiredBalance.lastConvergedIndex())); return newState; } } @@ -447,7 +467,7 @@ public class DesiredBalanceShardsAllocator 
implements ShardsAllocator { // only for tests - in production, this happens after reconciliation protected final void completeToLastConvergedIndex() { - queue.complete(currentDesiredBalanceRef.get().lastConvergedIndex()); + pendingListenersQueue.complete(currentDesiredBalanceRef.get().lastConvergedIndex()); } private void recordTime(CounterMetric metric, Runnable action) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/PendingListenersQueue.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/PendingListenersQueue.java index e1b58cf79ac0..5b14277f2c65 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/PendingListenersQueue.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/PendingListenersQueue.java @@ -24,6 +24,10 @@ import java.util.Queue; import static org.elasticsearch.cluster.service.ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME; import static org.elasticsearch.cluster.service.MasterService.MASTER_UPDATE_THREAD_NAME; +/** + * Registers listeners with an `index` number ({@link #add(long, ActionListener)}) and then completes them whenever the latest index number + * is greater or equal to a listener's index value ({@link #complete(long)}). 
+ */ public class PendingListenersQueue { private static final Logger logger = LogManager.getLogger(PendingListenersQueue.class); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardAssignment.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardAssignment.java index 4fb9137cb454..2bd1b9bb2bb6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardAssignment.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardAssignment.java @@ -17,6 +17,14 @@ import java.util.Set; import static java.util.Collections.unmodifiableSet; +/** + * Simple shard assignment summary of shard copies for a particular index shard. + * + * @param nodeIds The node IDs of nodes holding a shard copy. + * @param total The total number of shard copies. + * @param unassigned The number of unassigned shard copies. + * @param ignored The number of ignored shard copies. + */ public record ShardAssignment(Set nodeIds, int total, int unassigned, int ignored) { public ShardAssignment { @@ -28,9 +36,13 @@ public record ShardAssignment(Set nodeIds, int total, int unassigned, in return nodeIds.size(); } - public static ShardAssignment ofAssignedShards(List routings) { + /** + * Helper method to instantiate a new ShardAssignment from a given list of ShardRouting instances. Assumes all shards are assigned. 
+ */ + public static ShardAssignment createFromAssignedShardRoutingsList(List routings) { var nodeIds = new LinkedHashSet(); for (ShardRouting routing : routings) { + assert routing.unassignedInfo() == null : "Expected assigned shard copies only, unassigned info: " + routing.unassignedInfo(); nodeIds.add(routing.currentNodeId()); } return new ShardAssignment(unmodifiableSet(nodeIds), routings.size(), 0, 0); diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 5327380d5cba..d2369139ebe1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -159,6 +159,31 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements } } + private record TimedListener(ActionListener listener, Recorder recorder) implements ActionListener { + + @Override + public void onResponse(Void response) { + try (Releasable ignored = recorder.record("listener.onResponse")) { + listener.onResponse(null); + } catch (Exception e) { + assert false : e; + logger.error("exception thrown by listener.onResponse", e); + } + } + + @Override + public void onFailure(Exception e) { + assert e != null; + try (Releasable ignored = recorder.record("listener.onFailure")) { + listener.onFailure(e); + } catch (Exception inner) { + e.addSuppressed(inner); + assert false : e; + logger.error(() -> "exception thrown by listener.onFailure", e); + } + } + } + @Override protected synchronized void doStop() { for (Map.Entry onGoingTimeout : timeoutClusterStateListeners.entrySet()) { @@ -395,12 +420,14 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements final long startTimeMillis = threadPool.relativeTimeInMillis(); final Recorder stopWatch = new Recorder(threadPool, slowTaskThreadDumpTimeout); + final 
TimedListener timedListener = new TimedListener(clusterApplyListener, stopWatch); final ClusterState newClusterState; try { try (Releasable ignored = stopWatch.record("running task [" + source + ']')) { newClusterState = updateFunction.apply(previousClusterState); } } catch (Exception e) { + timedListener.onFailure(e); TimeValue executionTime = getTimeSince(startTimeMillis); logger.trace( () -> format( @@ -413,15 +440,14 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements e ); warnAboutSlowTaskIfNeeded(executionTime, source, stopWatch); - clusterApplyListener.onFailure(e); return; } if (previousClusterState == newClusterState) { + timedListener.onResponse(null); TimeValue executionTime = getTimeSince(startTimeMillis); logger.debug("processing [{}]: took [{}] no change in cluster state", source, executionTime); warnAboutSlowTaskIfNeeded(executionTime, source, stopWatch); - clusterApplyListener.onResponse(null); } else { if (logger.isTraceEnabled()) { logger.debug("cluster state updated, version [{}], source [{}]\n{}", newClusterState.version(), source, newClusterState); @@ -431,6 +457,7 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements try { setIsApplyingClusterState(); applyChanges(previousClusterState, newClusterState, source, stopWatch); + timedListener.onResponse(null); TimeValue executionTime = getTimeSince(startTimeMillis); logger.debug( "processing [{}]: took [{}] done applying updated cluster state (version: {}, uuid: {})", @@ -440,8 +467,11 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements newClusterState.stateUUID() ); warnAboutSlowTaskIfNeeded(executionTime, source, stopWatch); - clusterApplyListener.onResponse(null); } catch (Exception e) { + // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we + // continue we will retry with the same cluster state but that might not help. 
+ assert applicationMayFail(); + timedListener.onFailure(e); TimeValue executionTime = getTimeSince(startTimeMillis); if (logger.isTraceEnabled()) { logger.warn(() -> format(""" @@ -461,10 +491,6 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements e ); } - // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we - // continue we will retry with the same cluster state but that might not help. - assert applicationMayFail(); - clusterApplyListener.onFailure(e); } finally { clearIsApplyingClusterState(); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index afee2491672e..3c1f53ca4a2c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.FsDirectoryFactory; import org.elasticsearch.index.store.Store; @@ -191,7 +190,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, - SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING, + IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING, IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING, InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT, diff --git 
a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index aec9c108d898..16c6844f4640 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -1727,7 +1727,7 @@ public class Setting implements ToXContentObject { * * @param key the key for the setting * @param defaultValue the default value for this setting - * @param properties properties properties for this setting like scope, filtering... + * @param properties properties for this setting like scope, filtering... * @return the setting object */ public static Setting memorySizeSetting(String key, ByteSizeValue defaultValue, Property... properties) { diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java deleted file mode 100644 index 72fc955320b9..000000000000 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.health; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -public class HealthFeatures implements FeatureSpecification { - - public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator", true); - - @Override - public Set getFeatures() { - return Set.of(SUPPORTS_EXTENDED_REPOSITORY_INDICATOR); - } -} diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index aab9e972cba7..113e789727f0 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.health.node.selection.HealthNode; @@ -62,7 +61,6 @@ public class LocalHealthMonitor implements ClusterStateListener { private final ClusterService clusterService; private final ThreadPool threadPool; private final Client client; - private final FeatureService featureService; private volatile TimeValue monitorInterval; private volatile boolean enabled; @@ -88,7 +86,6 @@ public class LocalHealthMonitor implements ClusterStateListener { ClusterService clusterService, ThreadPool threadPool, Client client, - FeatureService featureService, List> healthTrackers ) { this.threadPool = threadPool; @@ -96,7 +93,6 @@ public class LocalHealthMonitor implements ClusterStateListener { 
this.enabled = HealthNodeTaskExecutor.ENABLED_SETTING.get(settings); this.clusterService = clusterService; this.client = client; - this.featureService = featureService; this.healthTrackers = healthTrackers; } @@ -105,17 +101,9 @@ public class LocalHealthMonitor implements ClusterStateListener { ClusterService clusterService, ThreadPool threadPool, Client client, - FeatureService featureService, List> healthTrackers ) { - LocalHealthMonitor localHealthMonitor = new LocalHealthMonitor( - settings, - clusterService, - threadPool, - client, - featureService, - healthTrackers - ); + LocalHealthMonitor localHealthMonitor = new LocalHealthMonitor(settings, clusterService, threadPool, client, healthTrackers); localHealthMonitor.registerListeners(); return localHealthMonitor; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index bf8f590b9f9a..d9a62f713050 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -623,10 +623,7 @@ public enum IndexMode { } } if (indexMode == LOOKUP) { - return Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all") - .build(); + return Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build(); } else { return Settings.EMPTY; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 68f334b10ea5..284140460a43 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; -import 
org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.SourceFieldMapper; @@ -52,7 +51,6 @@ import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_IGNORE_ import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING; import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING; import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING; -import static org.elasticsearch.index.mapper.SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING; /** * This class encapsulates all index level settings and handles settings updates. @@ -655,48 +653,6 @@ public final class IndexSettings { Property.Final ); - public static final Setting RECOVERY_USE_SYNTHETIC_SOURCE_SETTING = Setting.boolSetting( - "index.recovery.use_synthetic_source", - false, - new Setting.Validator<>() { - @Override - public void validate(Boolean value) {} - - @Override - public void validate(Boolean enabled, Map, Object> settings) { - if (enabled == false) { - return; - } - - // Verify if synthetic source is enabled on the index; fail if it is not - var indexMode = (IndexMode) settings.get(MODE); - if (indexMode.defaultSourceMode() != SourceFieldMapper.Mode.SYNTHETIC) { - var sourceMode = (SourceFieldMapper.Mode) settings.get(INDEX_MAPPER_SOURCE_MODE_SETTING); - if (sourceMode != SourceFieldMapper.Mode.SYNTHETIC) { - throw new IllegalArgumentException( - String.format( - Locale.ROOT, - "The setting [%s] is only permitted when [%s] is set to [%s]. 
Current mode: [%s].", - RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), - INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), - SourceFieldMapper.Mode.SYNTHETIC.name(), - sourceMode.name() - ) - ); - } - } - } - - @Override - public Iterator> settings() { - List> res = List.of(INDEX_MAPPER_SOURCE_MODE_SETTING, MODE); - return res.iterator(); - } - }, - Property.IndexScope, - Property.Final - ); - /** * Returns true if TSDB encoding is enabled. The default is true */ @@ -753,6 +709,60 @@ public final class IndexSettings { Property.ServerlessPublic ); + public static final Setting INDEX_MAPPER_SOURCE_MODE_SETTING = Setting.enumSetting( + SourceFieldMapper.Mode.class, + settings -> { + final IndexMode indexMode = IndexSettings.MODE.get(settings); + return indexMode.defaultSourceMode().name(); + }, + "index.mapping.source.mode", + value -> {}, + Setting.Property.Final, + Setting.Property.IndexScope + ); + + public static final Setting RECOVERY_USE_SYNTHETIC_SOURCE_SETTING = Setting.boolSetting( + "index.recovery.use_synthetic_source", + false, + new Setting.Validator<>() { + @Override + public void validate(Boolean value) {} + + @Override + public void validate(Boolean enabled, Map, Object> settings) { + if (enabled == false) { + return; + } + + // Verify if synthetic source is enabled on the index; fail if it is not + var indexMode = (IndexMode) settings.get(MODE); + if (indexMode.defaultSourceMode() != SourceFieldMapper.Mode.SYNTHETIC) { + var sourceMode = (SourceFieldMapper.Mode) settings.get(INDEX_MAPPER_SOURCE_MODE_SETTING); + if (sourceMode != SourceFieldMapper.Mode.SYNTHETIC) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "The setting [%s] is only permitted when [%s] is set to [%s]. 
Current mode: [%s].", + RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), + INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), + SourceFieldMapper.Mode.SYNTHETIC.name(), + sourceMode.name() + ) + ); + } + } + } + + @Override + public Iterator> settings() { + List> res = List.of(INDEX_MAPPER_SOURCE_MODE_SETTING, MODE); + return res.iterator(); + } + }, + Property.IndexScope, + Property.Final + ); + /** * Legacy index setting, kept for 7.x BWC compatibility. This setting has no effect in 8.x. Do not use. * TODO: Remove in 9.0 @@ -806,8 +816,6 @@ public final class IndexSettings { } } - public static final NodeFeature IGNORE_ABOVE_INDEX_LEVEL_SETTING = new NodeFeature("mapper.ignore_above_index_level_setting", true); - private final Index index; private final IndexVersion version; private final Logger logger; diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 8d3d1bde316e..40839d8e1878 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -819,7 +819,7 @@ public class InternalEngine extends Engine { ) throws IOException { assert get.isReadFromTranslog(); translogGetCount.incrementAndGet(); - final TranslogDirectoryReader inMemoryReader = new TranslogDirectoryReader( + final DirectoryReader inMemoryReader = TranslogDirectoryReader.create( shardId, index, mappingLookup, @@ -3161,7 +3161,7 @@ public class InternalEngine extends Engine { final Translog.Snapshot snapshot; if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled()) { snapshot = new LuceneSyntheticSourceChangesSnapshot( - engineConfig.getMapperService().mappingLookup(), + engineConfig.getMapperService(), searcher, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE, maxChunkSize, @@ -3173,6 +3173,7 @@ public class InternalEngine extends Engine { ); } else { snapshot = new 
LuceneChangesSnapshot( + engineConfig.getMapperService(), searcher, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE, fromSeqNo, diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index d4466cbc17c5..30c6c639b9cf 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.core.Assertions; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.transport.Transports; @@ -46,6 +47,7 @@ public final class LuceneChangesSnapshot extends SearchBasedChangesSnapshot { /** * Creates a new "translog" snapshot from Lucene for reading operations whose seq# in the specified range. 
* + * @param mapperService the mapper service for this index * @param engineSearcher the internal engine searcher which will be taken over if the snapshot is opened successfully * @param searchBatchSize the number of documents should be returned by each search * @param fromSeqNo the min requesting seq# - inclusive @@ -56,6 +58,7 @@ public final class LuceneChangesSnapshot extends SearchBasedChangesSnapshot { * @param indexVersionCreated the version on which this index was created */ public LuceneChangesSnapshot( + MapperService mapperService, Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, @@ -65,7 +68,7 @@ public final class LuceneChangesSnapshot extends SearchBasedChangesSnapshot { boolean accessStats, IndexVersion indexVersionCreated ) throws IOException { - super(engineSearcher, searchBatchSize, fromSeqNo, toSeqNo, requiredFullRange, accessStats, indexVersionCreated); + super(mapperService, engineSearcher, searchBatchSize, fromSeqNo, toSeqNo, requiredFullRange, accessStats, indexVersionCreated); this.creationThread = Assertions.ENABLED ? 
Thread.currentThread() : null; this.singleConsumer = singleConsumer; this.parallelArray = new ParallelArray(this.searchBatchSize); @@ -214,20 +217,24 @@ public final class LuceneChangesSnapshot extends SearchBasedChangesSnapshot { if (leaf.reader() instanceof SequentialStoredFieldsLeafReader) { storedFieldsReader = ((SequentialStoredFieldsLeafReader) leaf.reader()).getSequentialStoredFieldsReader(); storedFieldsReaderOrd = leaf.ord; + setNextSourceMetadataReader(leaf); } else { storedFieldsReader = null; storedFieldsReaderOrd = -1; } } } + if (storedFieldsReader != null) { assert singleConsumer : "Sequential access optimization must not be enabled for multiple consumers"; assert parallelArray.useSequentialStoredFieldsReader; assert storedFieldsReaderOrd == leaf.ord : storedFieldsReaderOrd + " != " + leaf.ord; storedFieldsReader.document(segmentDocID, fields); } else { + setNextSourceMetadataReader(leaf); leaf.reader().storedFields().document(segmentDocID, fields); } + final BytesReference source = fields.source() != null ? addSourceMetadata(fields.source(), segmentDocID) : null; final Translog.Operation op; final boolean isTombstone = parallelArray.isTombStone[docIndex]; @@ -241,7 +248,6 @@ public final class LuceneChangesSnapshot extends SearchBasedChangesSnapshot { op = new Translog.Delete(id, seqNo, primaryTerm, version); assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { - final BytesReference source = fields.source(); if (source == null) { // TODO: Callers should ask for the range that source should be retained. Thus we should always // check for the existence source once we make peer-recovery to send ops after the local checkpoint. 
diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java index 08508103181e..20154c20b363 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java @@ -13,12 +13,11 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.util.ArrayUtil; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; -import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMetrics; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.translog.Translog; @@ -66,7 +65,7 @@ public class LuceneSyntheticSourceChangesSnapshot extends SearchBasedChangesSnap private final Deque operationQueue = new LinkedList<>(); public LuceneSyntheticSourceChangesSnapshot( - MappingLookup mappingLookup, + MapperService mapperService, Engine.Searcher engineSearcher, int searchBatchSize, long maxMemorySizeInBytes, @@ -76,13 +75,13 @@ public class LuceneSyntheticSourceChangesSnapshot extends SearchBasedChangesSnap boolean accessStats, IndexVersion indexVersionCreated ) throws IOException { - super(engineSearcher, searchBatchSize, fromSeqNo, toSeqNo, requiredFullRange, accessStats, indexVersionCreated); + super(mapperService, engineSearcher, searchBatchSize, fromSeqNo, toSeqNo, requiredFullRange, accessStats, indexVersionCreated); // a MapperService#updateMapping(...) 
of empty index may not have been invoked and then mappingLookup is empty - assert engineSearcher.getDirectoryReader().maxDoc() == 0 || mappingLookup.isSourceSynthetic() + assert engineSearcher.getDirectoryReader().maxDoc() == 0 || mapperService.mappingLookup().isSourceSynthetic() : "either an empty index or synthetic source must be enabled for proper functionality."; // ensure we can buffer at least one document this.maxMemorySizeInBytes = maxMemorySizeInBytes > 0 ? maxMemorySizeInBytes : 1; - this.sourceLoader = mappingLookup.newSourceLoader(null, SourceFieldMetrics.NOOP); + this.sourceLoader = mapperService.mappingLookup().newSourceLoader(null, SourceFieldMetrics.NOOP); Set storedFields = sourceLoader.requiredStoredFields(); this.storedFieldLoader = StoredFieldLoader.create(false, storedFields); this.lastSeenSeqNo = fromSeqNo - 1; @@ -194,6 +193,7 @@ public class LuceneSyntheticSourceChangesSnapshot extends SearchBasedChangesSnap leafFieldLoader = storedFieldLoader.getLoader(leafReaderContext, null); leafSourceLoader = sourceLoader.leaf(leafReaderContext.reader(), null); + setNextSourceMetadataReader(leafReaderContext); } int segmentDocID = docRecord.docID() - docBase; leafFieldLoader.advanceTo(segmentDocID); @@ -229,17 +229,16 @@ public class LuceneSyntheticSourceChangesSnapshot extends SearchBasedChangesSnap return null; } } - BytesReference source = sourceLoader.source(fieldLoader, segmentDocID).internalSourceRef(); + var sourceBytes = addSourceMetadata(sourceLoader.source(fieldLoader, segmentDocID).internalSourceRef(), segmentDocID); return new Translog.Index( fieldLoader.id(), docRecord.seqNo(), docRecord.primaryTerm(), docRecord.version(), - source, + sourceBytes, fieldLoader.routing(), -1 // autogenerated timestamp ); } } - } diff --git a/server/src/main/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshot.java index 191125c59705..8a96d4a2a252 100644 --- 
a/server/src/main/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshot.java @@ -22,12 +22,17 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldCollectorManager; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SeqNoFieldMapper; +import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.search.lookup.Source; import java.io.Closeable; import java.io.IOException; @@ -44,6 +49,7 @@ public abstract class SearchBasedChangesSnapshot implements Translog.Snapshot, C private final IndexVersion indexVersionCreated; private final IndexSearcher indexSearcher; + private final ValueFetcher sourceMetadataFetcher; private final Closeable onClose; protected final long fromSeqNo, toSeqNo; @@ -67,6 +73,7 @@ public abstract class SearchBasedChangesSnapshot implements Translog.Snapshot, C * @param indexVersionCreated Version of the index when it was created. */ protected SearchBasedChangesSnapshot( + MapperService mapperService, Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, @@ -103,6 +110,19 @@ public abstract class SearchBasedChangesSnapshot implements Translog.Snapshot, C this.accessStats = accessStats; this.totalHits = accessStats ? 
indexSearcher.count(rangeQuery(fromSeqNo, toSeqNo, indexVersionCreated)) : -1; + this.sourceMetadataFetcher = createSourceMetadataValueFetcher(mapperService, indexSearcher); + } + + private ValueFetcher createSourceMetadataValueFetcher(MapperService mapperService, IndexSearcher searcher) { + if (mapperService.mappingLookup().inferenceFields().isEmpty()) { + return null; + } + var mapper = (InferenceMetadataFieldsMapper) mapperService.mappingLookup() + .getMapping() + .getMetadataMapperByName(InferenceMetadataFieldsMapper.NAME); + return mapper != null + ? mapper.fieldType().valueFetcher(mapperService.mappingLookup(), mapperService.getBitSetProducer(), searcher) + : null; } /** @@ -184,6 +204,45 @@ public abstract class SearchBasedChangesSnapshot implements Translog.Snapshot, C return results; } + /** + * Sets the reader context to enable reading metadata that was removed from the {@code _source}. + * This method sets up the {@code sourceMetadataFetcher} with the provided {@link LeafReaderContext}, + * ensuring it is ready to fetch metadata for subsequent operations. + * + *

Note: This method should be called before {@link #addSourceMetadata(BytesReference, int)} at the start of every leaf + * to ensure the metadata fetcher is properly initialized.

+ */ + protected void setNextSourceMetadataReader(LeafReaderContext context) { + if (sourceMetadataFetcher != null) { + sourceMetadataFetcher.setNextReader(context); + } + } + + /** + * Creates a new {@link Source} object by combining the provided {@code originalSource} + * with additional metadata fields. If the {@code sourceMetadataFetcher} is null or no metadata + * fields are fetched, the original source is returned unchanged. + * + * @param originalSourceBytes the original source bytes + * @param segmentDocID the document ID used to fetch metadata fields + * @return a new {@link Source} instance containing the original data and additional metadata, + * or the original source if no metadata is added + * @throws IOException if an error occurs while fetching metadata values + */ + protected BytesReference addSourceMetadata(BytesReference originalSourceBytes, int segmentDocID) throws IOException { + if (sourceMetadataFetcher == null) { + return originalSourceBytes; + } + var originalSource = Source.fromBytes(originalSourceBytes); + List values = sourceMetadataFetcher.fetchValues(originalSource, segmentDocID, List.of()); + if (values.isEmpty()) { + return originalSourceBytes; + } + var map = originalSource.source(); + map.put(InferenceMetadataFieldsMapper.NAME, values.get(0)); + return Source.fromMap(map, originalSource.sourceContentType()).internalSourceRef(); + } + static IndexSearcher newIndexSearcher(Engine.Searcher engineSearcher) throws IOException { return new IndexSearcher(Lucene.wrapAllDocsLive(engineSearcher.getDirectoryReader())); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index 0928b4500e6d..ac5bf31c2b73 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -9,6 +9,7 @@ package 
org.elasticsearch.index.engine; +import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; @@ -46,6 +47,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.fieldvisitor.FieldNamesProvidingStoredFieldsVisitor; @@ -76,9 +80,9 @@ import java.util.concurrent.atomic.AtomicReference; * into an in-memory Lucene segment that is created on-demand. */ final class TranslogDirectoryReader extends DirectoryReader { - private final TranslogLeafReader leafReader; + private final LeafReader leafReader; - TranslogDirectoryReader( + static DirectoryReader create( ShardId shardId, Translog.Index operation, MappingLookup mappingLookup, @@ -86,11 +90,39 @@ final class TranslogDirectoryReader extends DirectoryReader { EngineConfig engineConfig, Runnable onSegmentCreated ) throws IOException { - this(new TranslogLeafReader(shardId, operation, mappingLookup, documentParser, engineConfig, onSegmentCreated)); + final Directory directory = new ByteBuffersDirectory(); + boolean success = false; + try { + final LeafReader leafReader; + // When using synthetic source, the translog operation must always be reindexed into an in-memory Lucene to ensure consistent + // output for realtime-get operations. However, this can degrade the performance of realtime-get and update operations. + // If slight inconsistencies in realtime-get operations are acceptable, the translog operation can be reindexed lazily. 
+ if (mappingLookup.isSourceSynthetic()) { + onSegmentCreated.run(); + leafReader = createInMemoryReader(shardId, engineConfig, directory, documentParser, mappingLookup, false, operation); + } else { + leafReader = new TranslogLeafReader( + shardId, + operation, + mappingLookup, + documentParser, + engineConfig, + directory, + onSegmentCreated + ); + } + var directoryReader = ElasticsearchDirectoryReader.wrap(new TranslogDirectoryReader(directory, leafReader), shardId); + success = true; + return directoryReader; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(directory); + } + } } - private TranslogDirectoryReader(TranslogLeafReader leafReader) throws IOException { - super(leafReader.directory, new LeafReader[] { leafReader }, null); + private TranslogDirectoryReader(Directory directory, LeafReader leafReader) throws IOException { + super(directory, new LeafReader[] { leafReader }, null); this.leafReader = leafReader; } @@ -139,12 +171,13 @@ final class TranslogDirectoryReader extends DirectoryReader { return leafReader.getReaderCacheHelper(); } - static DirectoryReader createInMemoryReader( + private static LeafReader createInMemoryReader( ShardId shardId, EngineConfig engineConfig, Directory directory, DocumentParser documentParser, MappingLookup mappingLookup, + boolean rootDocOnly, Translog.Index operation ) { final ParsedDocument parsedDocs = documentParser.parseDocument( @@ -159,12 +192,21 @@ final class TranslogDirectoryReader extends DirectoryReader { IndexWriterConfig.OpenMode.CREATE ).setCodec(engineConfig.getCodec()); try (IndexWriter writer = new IndexWriter(directory, writeConfig)) { - writer.addDocument(parsedDocs.rootDoc()); + final int numDocs; + if (rootDocOnly) { + numDocs = 1; + writer.addDocument(parsedDocs.rootDoc()); + } else { + numDocs = parsedDocs.docs().size(); + writer.addDocuments(parsedDocs.docs()); + } final DirectoryReader reader = open(writer); - if (reader.leaves().size() != 1 || 
reader.leaves().get(0).reader().numDocs() != 1) { + if (reader.leaves().size() != 1 || reader.leaves().get(0).reader().numDocs() != numDocs) { reader.close(); throw new IllegalStateException( - "Expected a single document segment; " + "Expected a single segment with " + + numDocs + + " documents, " + "but [" + reader.leaves().size() + " segments with " @@ -172,7 +214,33 @@ final class TranslogDirectoryReader extends DirectoryReader { + " documents" ); } - return reader; + LeafReader leafReader = reader.leaves().get(0).reader(); + return new SequentialStoredFieldsLeafReader(leafReader) { + @Override + protected void doClose() throws IOException { + IOUtils.close(super::doClose, directory); + } + + @Override + public CacheHelper getCoreCacheHelper() { + return leafReader.getCoreCacheHelper(); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return leafReader.getReaderCacheHelper(); + } + + @Override + public StoredFieldsReader getSequentialStoredFieldsReader() { + return Lucene.segmentReader(leafReader).getFieldsReader().getMergeInstance(); + } + + @Override + protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { + return reader; + } + }; } catch (IOException e) { throw new EngineException(shardId, "failed to create an in-memory segment for get [" + operation.id() + "]", e); } @@ -259,6 +327,7 @@ final class TranslogDirectoryReader extends DirectoryReader { MappingLookup mappingLookup, DocumentParser documentParser, EngineConfig engineConfig, + Directory directory, Runnable onSegmentCreated ) { this.shardId = shardId; @@ -267,7 +336,7 @@ final class TranslogDirectoryReader extends DirectoryReader { this.documentParser = documentParser; this.engineConfig = engineConfig; this.onSegmentCreated = onSegmentCreated; - this.directory = new ByteBuffersDirectory(); + this.directory = directory; this.uid = Uid.encodeId(operation.id()); } @@ -279,7 +348,15 @@ final class TranslogDirectoryReader extends DirectoryReader { 
ensureOpen(); reader = delegate.get(); if (reader == null) { - var indexReader = createInMemoryReader(shardId, engineConfig, directory, documentParser, mappingLookup, operation); + var indexReader = createInMemoryReader( + shardId, + engineConfig, + directory, + documentParser, + mappingLookup, + true, + operation + ); reader = indexReader.leaves().get(0).reader(); final LeafReader existing = delegate.getAndSet(reader); assert existing == null; @@ -443,7 +520,7 @@ final class TranslogDirectoryReader extends DirectoryReader { SourceFieldMapper mapper = mappingLookup.getMapping().getMetadataMapperByClass(SourceFieldMapper.class); if (mapper != null) { try { - sourceBytes = mapper.applyFilters(mappingLookup, sourceBytes, null); + sourceBytes = mapper.applyFilters(mappingLookup, sourceBytes, null, true); } catch (IOException e) { throw new IOException("Failed to reapply filters after reading from translog", e); } @@ -464,7 +541,12 @@ final class TranslogDirectoryReader extends DirectoryReader { @Override protected synchronized void doClose() throws IOException { - IOUtils.close(delegate.get(), directory); + final LeafReader leaf = delegate.get(); + if (leaf != null) { + leaf.close(); + } else { + directory.close(); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java index 00de13b8e8d8..4170d06c4d6e 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java @@ -10,7 +10,6 @@ package org.elasticsearch.index.engine; import org.apache.lucene.search.similarities.BM25Similarity; -import org.apache.lucene.store.ByteBuffersDirectory; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.mapper.DocumentParser; import org.elasticsearch.index.mapper.MappingLookup; @@ -52,10 
+51,7 @@ public abstract class TranslogOperationAsserter { final ShardId shardId = engineConfig.getShardId(); final MappingLookup mappingLookup = engineConfig.getMapperService().mappingLookup(); final DocumentParser documentParser = engineConfig.getMapperService().documentParser(); - try ( - var directory = new ByteBuffersDirectory(); - var reader = TranslogDirectoryReader.createInMemoryReader(shardId, engineConfig, directory, documentParser, mappingLookup, op) - ) { + try (var reader = TranslogDirectoryReader.create(shardId, op, mappingLookup, documentParser, engineConfig, () -> {})) { final Engine.Searcher searcher = new Engine.Searcher( "assert_translog", reader, @@ -66,7 +62,7 @@ public abstract class TranslogOperationAsserter { ); try ( LuceneSyntheticSourceChangesSnapshot snapshot = new LuceneSyntheticSourceChangesSnapshot( - mappingLookup, + engineConfig.getMapperService(), searcher, LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, Integer.MAX_VALUE, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 9de463ec5f6f..77de1654cf4b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -62,8 +61,6 @@ public class BooleanFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "boolean"; - static final NodeFeature BOOLEAN_DIMENSION = new NodeFeature("mapper.boolean_dimension", true); - 
public static class Values { public static final BytesRef TRUE = new BytesRef("T"); public static final BytesRef FALSE = new BytesRef("F"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 068a9828809d..cf0c355a22e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -12,7 +12,6 @@ package org.elasticsearch.index.mapper; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSortConfig; @@ -22,8 +21,6 @@ import org.elasticsearch.index.IndexVersions; import java.util.List; public class DocumentMapper { - static final NodeFeature INDEX_SORTING_ON_NESTED = new NodeFeature("mapper.index_sorting_on_nested", true); - private final String type; private final CompressedXContent mappingSource; private final MappingLookup mappingLookup; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index e03494dcb592..bdb3d97d4c18 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -56,7 +56,6 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper { public static final TypeParser PARSER = new FixedTypeParser(context -> new IgnoredSourceFieldMapper(context.getIndexSettings())); - static final NodeFeature TRACK_IGNORED_SOURCE = new NodeFeature("mapper.track_ignored_source", true); static final NodeFeature 
DONT_EXPAND_DOTS_IN_IGNORED_SOURCE = new NodeFeature("mapper.ignored_source.dont_expand_dots"); static final NodeFeature IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD = new NodeFeature( "mapper.ignored_source_as_top_level_metadata_array_field" diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java index e539c07caef6..9708753926e1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -31,8 +30,6 @@ import java.util.List; public class IndexModeFieldMapper extends MetadataFieldMapper { - static final NodeFeature QUERYING_INDEX_MODE = new NodeFeature("mapper.query_index_mode", true); - public static final String NAME = "_index_mode"; public static final String CONTENT_TYPE = "_index_mode"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index a26a4bb80d50..bdcf9bf98279 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import 
org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -89,9 +88,6 @@ public final class KeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "keyword"; - static final NodeFeature KEYWORD_DIMENSION_IGNORE_ABOVE = new NodeFeature("mapper.keyword_dimension_ignore_above", true); - static final NodeFeature KEYWORD_NORMALIZER_SYNTHETIC_SOURCE = new NodeFeature("mapper.keyword_normalizer_synthetic_source", true); - public static class Defaults { public static final FieldType FIELD_TYPE; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index f293ced122d2..bafa74b662f0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.FieldType; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.StringLiteralDeduplicator; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -30,8 +29,6 @@ import java.util.function.Function; public abstract class Mapper implements ToXContentFragment, Iterable { - public static final NodeFeature SYNTHETIC_SOURCE_KEEP_FEATURE = new NodeFeature("mapper.synthetic_source_keep", true); - public static final String SYNTHETIC_SOURCE_KEEP_PARAM = "synthetic_source_keep"; // Only relevant for synthetic source mode. 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 5dbaf0e0f40a..8e669a91fd9e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -11,9 +11,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper; -import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.util.Set; @@ -28,33 +25,7 @@ public class MapperFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( - BWC_WORKAROUND_9_0, - IgnoredSourceFieldMapper.TRACK_IGNORED_SOURCE, - PassThroughObjectMapper.PASS_THROUGH_PRIORITY, - RangeFieldMapper.NULL_VALUES_OFF_BY_ONE_FIX, - SourceFieldMapper.SYNTHETIC_SOURCE_FALLBACK, - DenseVectorFieldMapper.INT4_QUANTIZATION, - DenseVectorFieldMapper.BIT_VECTORS, - DocumentMapper.INDEX_SORTING_ON_NESTED, - KeywordFieldMapper.KEYWORD_DIMENSION_IGNORE_ABOVE, - IndexModeFieldMapper.QUERYING_INDEX_MODE, - NodeMappingStats.SEGMENT_LEVEL_FIELDS_STATS, - BooleanFieldMapper.BOOLEAN_DIMENSION, - ObjectMapper.SUBOBJECTS_AUTO, - ObjectMapper.SUBOBJECTS_AUTO_FIXES, - KeywordFieldMapper.KEYWORD_NORMALIZER_SYNTHETIC_SOURCE, - SourceFieldMapper.SYNTHETIC_SOURCE_STORED_FIELDS_ADVANCE_FIX, - Mapper.SYNTHETIC_SOURCE_KEEP_FEATURE, - SourceFieldMapper.SYNTHETIC_SOURCE_WITH_COPY_TO_AND_DOC_VALUES_FALSE_SUPPORT, - SourceFieldMapper.SYNTHETIC_SOURCE_COPY_TO_FIX, - FlattenedFieldMapper.IGNORE_ABOVE_SUPPORT, - IndexSettings.IGNORE_ABOVE_INDEX_LEVEL_SETTING, - SourceFieldMapper.SYNTHETIC_SOURCE_COPY_TO_INSIDE_OBJECTS_FIX, - TimeSeriesRoutingHashFieldMapper.TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF, - 
FlattenedFieldMapper.IGNORE_ABOVE_WITH_ARRAYS_SUPPORT, - DenseVectorFieldMapper.BBQ_FORMAT - ); + return Set.of(BWC_WORKAROUND_9_0); } public static final NodeFeature CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX = new NodeFeature( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java b/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java index e7ca7367832b..0987c6dfb8c8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,8 +27,6 @@ import java.util.Objects; */ public class NodeMappingStats implements Writeable, ToXContentFragment { - public static final NodeFeature SEGMENT_LEVEL_FIELDS_STATS = new NodeFeature("mapper.segment_level_fields_stats", true); - private static final class Fields { static final String MAPPINGS = "mappings"; static final String TOTAL_COUNT = "total_count"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index e734a8e5b437..86ce4fbb7483 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import 
org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -52,9 +51,6 @@ public class ObjectMapper extends Mapper { public static final String CONTENT_TYPE = "object"; static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; - static final NodeFeature SUBOBJECTS_AUTO = new NodeFeature("mapper.subobjects_auto", true); - // No-op. All uses of this feature were reverted but node features can't be removed. - static final NodeFeature SUBOBJECTS_AUTO_FIXES = new NodeFeature("mapper.subobjects_auto_fixes", true); /** * Enhances the previously boolean option for subobjects support with an intermediate mode `auto` that uses diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index d16acab11a50..fbf8dd453803 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -10,7 +10,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.Explicit; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xcontent.XContentBuilder; @@ -39,8 +38,6 @@ public class PassThroughObjectMapper extends ObjectMapper { public static final String CONTENT_TYPE = "passthrough"; public static final String PRIORITY_PARAM_NAME = "priority"; - static final NodeFeature PASS_THROUGH_PRIORITY = new NodeFeature("mapper.pass_through_priority", true); - public static class Builder extends ObjectMapper.Builder { // Controls whether subfields are configured as time-series dimensions. 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 36f61311ddfc..461ad74a9434 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -51,7 +51,6 @@ import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD; /** A {@link FieldMapper} for indexing numeric and date ranges, and creating queries */ public class RangeFieldMapper extends FieldMapper { - public static final NodeFeature NULL_VALUES_OFF_BY_ONE_FIX = new NodeFeature("mapper.range.null_values_off_by_one_fix", true); public static final NodeFeature DATE_RANGE_INDEXING_FIX = new NodeFeature("mapper.range.date_range_indexing_fix"); public static final boolean DEFAULT_INCLUDE_UPPER = true; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 29acdf200692..6a06d8ba4df2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,7 +18,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; @@ -27,8 +26,11 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.engine.SearchBasedChangesSnapshot; import org.elasticsearch.index.query.QueryShardException; import 
org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.fetch.FetchContext; +import org.elasticsearch.search.fetch.subphase.FetchSourcePhase; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.lookup.SourceFilter; import org.elasticsearch.xcontent.XContentType; @@ -41,20 +43,6 @@ import java.util.List; import java.util.Locale; public class SourceFieldMapper extends MetadataFieldMapper { - public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback", true); - public static final NodeFeature SYNTHETIC_SOURCE_STORED_FIELDS_ADVANCE_FIX = new NodeFeature( - "mapper.source.synthetic_source_stored_fields_advance_fix", - true - ); - public static final NodeFeature SYNTHETIC_SOURCE_WITH_COPY_TO_AND_DOC_VALUES_FALSE_SUPPORT = new NodeFeature( - "mapper.source.synthetic_source_with_copy_to_and_doc_values_false", - true - ); - public static final NodeFeature SYNTHETIC_SOURCE_COPY_TO_FIX = new NodeFeature("mapper.source.synthetic_source_copy_to_fix", true); - public static final NodeFeature SYNTHETIC_SOURCE_COPY_TO_INSIDE_OBJECTS_FIX = new NodeFeature( - "mapper.source.synthetic_source_copy_to_inside_objects_fix", - true - ); public static final NodeFeature REMOVE_SYNTHETIC_SOURCE_ONLY_VALIDATION = new NodeFeature( "mapper.source.remove_synthetic_source_only_validation" ); @@ -70,11 +58,6 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final String LOSSY_PARAMETERS_ALLOWED_SETTING_NAME = "index.lossy.source-mapping-parameters"; - public static final Setting INDEX_MAPPER_SOURCE_MODE_SETTING = Setting.enumSetting(SourceFieldMapper.Mode.class, settings -> { - final IndexMode indexMode = IndexSettings.MODE.get(settings); - return indexMode.defaultSourceMode().name(); - }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); - public static final String DEPRECATION_WARNING = "Configuring source 
mode in mappings is deprecated and will be removed " + "in future versions. Use [index.mapping.source.mode] index setting instead."; @@ -264,8 +247,8 @@ public class SourceFieldMapper extends MetadataFieldMapper { private Mode resolveSourceMode() { // If the `index.mapping.source.mode` exists it takes precedence to determine the source mode for `_source` // otherwise the mode is determined according to `_source.mode`. - if (INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings)) { - return INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings); + if (IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings)) { + return IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings); } // If `_source.mode` is not set we need to apply a default according to index mode. @@ -297,7 +280,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { return DEFAULT; } - final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); + final Mode settingSourceMode = IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); // Needed for bwc so that "mode" is not serialized in case of standard index with stored source. 
if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; @@ -406,9 +389,14 @@ public class SourceFieldMapper extends MetadataFieldMapper { @Override public void preParse(DocumentParserContext context) throws IOException { - BytesReference originalSource = context.sourceToParse().source(); + int originalSourceLength = context.sourceToParse().source().length(); XContentType contentType = context.sourceToParse().getXContentType(); - final BytesReference adaptedSource = applyFilters(context.mappingLookup(), originalSource, contentType); + BytesReference originalSource = removeInferenceMetadataFields( + context.mappingLookup(), + context.sourceToParse().source(), + contentType + ); + final BytesReference adaptedSource = applyFilters(context.mappingLookup(), originalSource, contentType, false); if (adaptedSource != null) { final BytesRef ref = adaptedSource.toBytesRef(); @@ -427,7 +415,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { * This size is used in {@link LuceneSyntheticSourceChangesSnapshot} to control memory * usage during the recovery process when loading a batch of synthetic sources. */ - context.doc().add(new NumericDocValuesField(RECOVERY_SOURCE_SIZE_NAME, ref.length)); + context.doc().add(new NumericDocValuesField(RECOVERY_SOURCE_SIZE_NAME, originalSourceLength)); } else { context.doc().add(new StoredField(RECOVERY_SOURCE_NAME, ref.bytes, ref.offset, ref.length)); context.doc().add(new NumericDocValuesField(RECOVERY_SOURCE_NAME, 1)); @@ -435,22 +423,48 @@ public class SourceFieldMapper extends MetadataFieldMapper { } } - @Nullable - public BytesReference applyFilters( - @Nullable MappingLookup mappingLookup, + /** + * Removes the {@link InferenceMetadataFieldsMapper} content from the {@code _source} if it is present. + * This metadata is regenerated at query or snapshot recovery time using stored fields and doc values. + * + *

For details on how the metadata is re-added, see:

+ *
    + *
  • {@link SearchBasedChangesSnapshot#addSourceMetadata(BytesReference, int)}
  • + *
  • {@link FetchSourcePhase#getProcessor(FetchContext)}
  • + *
+ */ + private BytesReference removeInferenceMetadataFields( + MappingLookup mappingLookup, @Nullable BytesReference originalSource, @Nullable XContentType contentType + ) { + if (originalSource != null + && InferenceMetadataFieldsMapper.isEnabled(mappingLookup) + && mappingLookup.inferenceFields().isEmpty() == false) { + return Source.fromBytes(originalSource, contentType) + .filter(new SourceFilter(new String[] {}, new String[] { InferenceMetadataFieldsMapper.NAME })) + .internalSourceRef(); + } else { + return originalSource; + } + } + + @Nullable + public BytesReference applyFilters( + MappingLookup mappingLookup, + @Nullable BytesReference originalSource, + @Nullable XContentType contentType, + boolean removeMetadataFields ) throws IOException { if (stored() == false || originalSource == null) { return null; } var modSourceFilter = sourceFilter; - if (mappingLookup != null + if (removeMetadataFields && InferenceMetadataFieldsMapper.isEnabled(mappingLookup) && mappingLookup.inferenceFields().isEmpty() == false) { - /** - * Removes {@link InferenceMetadataFieldsMapper} content from _source. - * This content is re-generated at query time (if requested) using stored fields and doc values. + /* + * Removes the {@link InferenceMetadataFieldsMapper} content from the {@code _source}. */ String[] modExcludes = new String[excludes != null ? 
excludes.length + 1 : 1]; if (excludes != null) { @@ -482,11 +496,11 @@ public class SourceFieldMapper extends MetadataFieldMapper { } public static boolean isSynthetic(IndexSettings indexSettings) { - return INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings.getSettings()) == SourceFieldMapper.Mode.SYNTHETIC; + return IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings.getSettings()) == SourceFieldMapper.Mode.SYNTHETIC; } public static boolean isStored(IndexSettings indexSettings) { - return INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings.getSettings()) == Mode.STORED; + return IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings.getSettings()) == Mode.STORED; } public boolean isDisabled() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java index 2a7069c5a52e..4a6ba5d1fa80 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java @@ -15,7 +15,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ByteUtils; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldData; @@ -47,10 +46,6 @@ public class TimeSeriesRoutingHashFieldMapper extends MetadataFieldMapper { public static final TimeSeriesRoutingHashFieldMapper INSTANCE = new TimeSeriesRoutingHashFieldMapper(); public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesRoutingHashFieldMapper()); - static final NodeFeature TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF = new NodeFeature( - 
"tsdb.ts_routing_hash_doc_value_parse_byte_ref", - true - ); public static final DocValueFormat TS_ROUTING_HASH_DOC_VALUE_FORMAT = TimeSeriesRoutingHashFieldType.DOC_VALUE_FORMAT; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index fc3f297f9725..7ef12f6dd30d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -112,12 +111,6 @@ import static org.elasticsearch.index.IndexSettings.IGNORE_ABOVE_SETTING; */ public final class FlattenedFieldMapper extends FieldMapper { - public static final NodeFeature IGNORE_ABOVE_SUPPORT = new NodeFeature("flattened.ignore_above_support", true); - public static final NodeFeature IGNORE_ABOVE_WITH_ARRAYS_SUPPORT = new NodeFeature( - "mapper.flattened.ignore_above_with_arrays_support", - true - ); - public static final String CONTENT_TYPE = "flattened"; public static final String KEYED_FIELD_SUFFIX = "._keyed"; public static final String KEYED_IGNORED_VALUES_FIELD_SUFFIX = "._keyed._ignored"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 3e0656205b97..5edff48577ef 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -38,7 +38,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat; @@ -108,10 +107,6 @@ public class DenseVectorFieldMapper extends FieldMapper { return Math.abs(magnitude - 1.0f) > EPS; } - public static final NodeFeature INT4_QUANTIZATION = new NodeFeature("mapper.vectors.int4_quantization", true); - public static final NodeFeature BIT_VECTORS = new NodeFeature("mapper.vectors.bit_vectors", true); - public static final NodeFeature BBQ_FORMAT = new NodeFeature("mapper.vectors.bbq", true); - public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index fa5e95324338..bfe2291c75e1 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import 
org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNode; @@ -179,8 +180,12 @@ public final class IndicesStore implements ClusterStateListener, Closeable { ); switch (shardDeletionCheckResult) { case FOLDER_FOUND_CAN_DELETE: + var clusterState = event.state(); + var clusterName = clusterState.getClusterName(); + var nodes = clusterState.nodes(); + var clusterStateVersion = clusterState.getVersion(); indicesClusterStateService.onClusterStateShardsClosed( - () -> deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable) + () -> deleteShardIfExistElseWhere(clusterName, nodes, clusterStateVersion, indexShardRoutingTable) ); break; case NO_FOLDER_FOUND: @@ -224,14 +229,18 @@ public final class IndicesStore implements ClusterStateListener, Closeable { return true; } - private void deleteShardIfExistElseWhere(ClusterState state, IndexShardRoutingTable indexShardRoutingTable) { + private void deleteShardIfExistElseWhere( + ClusterName clusterName, + DiscoveryNodes nodes, + long clusterStateVersion, + IndexShardRoutingTable indexShardRoutingTable + ) { List> requests = new ArrayList<>(indexShardRoutingTable.size()); String indexUUID = indexShardRoutingTable.shardId().getIndex().getUUID(); - ClusterName clusterName = state.getClusterName(); for (int copy = 0; copy < indexShardRoutingTable.size(); copy++) { ShardRouting shardRouting = indexShardRoutingTable.shard(copy); assert shardRouting.started() : "expected started shard but was " + shardRouting; - DiscoveryNode currentNode = state.nodes().get(shardRouting.currentNodeId()); + DiscoveryNode currentNode = nodes.get(shardRouting.currentNodeId()); requests.add( new Tuple<>(currentNode, new ShardActiveRequest(clusterName, indexUUID, shardRouting.shardId(), deleteShardTimeout)) ); @@ -239,7 +248,7 @@ public final class IndicesStore implements ClusterStateListener, Closeable { ShardActiveResponseHandler responseHandler = new ShardActiveResponseHandler( 
indexShardRoutingTable.shardId(), - state.getVersion(), + clusterStateVersion, requests.size() ); for (Tuple request : requests) { diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index f319157828df..17e77be43bd1 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -29,7 +29,8 @@ public enum TaskType implements Writeable { public boolean isAnyOrSame(TaskType other) { return true; } - }; + }, + CHAT_COMPLETION; public static final String NAME = "task_type"; diff --git a/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java b/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java index 5bf5182af336..56c08f2b2fb8 100644 --- a/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java +++ b/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java @@ -13,7 +13,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.XContentBuilder; @@ -36,10 +35,6 @@ public final class EnterpriseGeoIpTask { } public static final String ENTERPRISE_GEOIP_DOWNLOADER = "enterprise-geoip-downloader"; - public static final NodeFeature GEOIP_DOWNLOADER_DATABASE_CONFIGURATION = new NodeFeature( - "geoip.downloader.database.configuration", - true - ); public static class EnterpriseGeoIpTaskParams implements PersistentTaskParams { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java b/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java deleted file mode 100644 index 
7c12b180b460..000000000000 --- a/server/src/main/java/org/elasticsearch/ingest/IngestGeoIpFeatures.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -import static org.elasticsearch.ingest.EnterpriseGeoIpTask.GEOIP_DOWNLOADER_DATABASE_CONFIGURATION; - -public class IngestGeoIpFeatures implements FeatureSpecification { - - public static final NodeFeature GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE = new NodeFeature( - "get_database_configuration_action.multi_node", - true - ); - - public static final NodeFeature PUT_DATABASE_CONFIGURATION_ACTION_IPINFO = new NodeFeature( - "put_database_configuration_action.ipinfo", - true - ); - - public Set getFeatures() { - return Set.of( - GEOIP_DOWNLOADER_DATABASE_CONFIGURATION, - GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE, - PUT_DATABASE_CONFIGURATION_ACTION_IPINFO - ); - } -} diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index f4afb095450e..1ff6f4b265d4 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1219,7 +1219,6 @@ class NodeConstruction { DataStreamAutoShardingService dataStreamAutoShardingService = new DataStreamAutoShardingService( settings, clusterService, - featureService, 
threadPool::absoluteTimeInMillis ); dataStreamAutoShardingService.init(); @@ -1370,7 +1369,7 @@ class NodeConstruction { var serverHealthIndicatorServices = Stream.of( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), - new RepositoryIntegrityHealthIndicatorService(clusterService, featureService), + new RepositoryIntegrityHealthIndicatorService(clusterService), new DiskHealthIndicatorService(clusterService, featureService), new ShardsCapacityHealthIndicatorService(clusterService, featureService), fileSettingsHealthIndicatorService @@ -1395,14 +1394,7 @@ class NodeConstruction { new DiskHealthTracker(nodeService, clusterService), new RepositoriesHealthTracker(repositoriesService) ); - LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( - settings, - clusterService, - threadPool, - client, - featureService, - healthTrackers - ); + LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create(settings, clusterService, threadPool, client, healthTrackers); HealthInfoCache nodeHealthOverview = HealthInfoCache.create(clusterService); return b -> { diff --git a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java index de56ead9b5ab..1a169699d413 100644 --- a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java +++ b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java @@ -22,9 +22,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; -import org.elasticsearch.reservedstate.service.FileSettingsFeatures; import org.elasticsearch.reservedstate.service.FileSettingsService; import 
org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.transport.BindTransportException; @@ -280,22 +278,7 @@ public class ReadinessService extends AbstractLifecycleComponent implements Clus // protected to allow mock service to override protected boolean areFileSettingsApplied(ClusterState clusterState) { ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (fileSettingsMetadata == null) { - // In order to block readiness on file settings being applied, we need to know that the master node has written an initial - // version, or a marker that file settings don't exist. When upgrading from a version that did not have file settings, the - // current master node may not be the first node upgraded. To be safe, we wait to consider file settings application for - // readiness until the whole cluster supports file settings. Note that this only applies when no reserved state metadata - // exists, so either we are starting up a current cluster (and the feature will be found) or we are upgrading from - // a version before file settings existed (before 8.4). 
- return supportsFileSettings(clusterState) == false; - } else { - return fileSettingsMetadata.version().equals(ReservedStateMetadata.NO_VERSION) == false; - } - } - - @SuppressForbidden(reason = "need to check file settings support on exact cluster state") - private boolean supportsFileSettings(ClusterState clusterState) { - return clusterState.clusterFeatures().clusterHasFeature(clusterState.nodes(), FileSettingsFeatures.FILE_SETTINGS_SUPPORTED); + return fileSettingsMetadata != null && fileSettingsMetadata.version().equals(ReservedStateMetadata.NO_VERSION) == false; } private void setReady(boolean ready) { diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesFeatures.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesFeatures.java deleted file mode 100644 index b6dea6a2003f..000000000000 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesFeatures.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.repositories; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -public class RepositoriesFeatures implements FeatureSpecification { - public static final NodeFeature SUPPORTS_REPOSITORIES_USAGE_STATS = new NodeFeature("repositories.supports_usage_stats", true); - - @Override - public Set getFeatures() { - return Set.of(SUPPORTS_REPOSITORIES_USAGE_STATS); - } -} diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java deleted file mode 100644 index a60f525be988..000000000000 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsFeatures.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.reservedstate.service; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -public class FileSettingsFeatures implements FeatureSpecification { - - // Although file settings were supported starting in 8.4.0, this is really about whether file settings - // are used in readiness. 
- public static final NodeFeature FILE_SETTINGS_SUPPORTED = new NodeFeature("file_settings", true); - - @Override - public Set getFeatures() { - return Set.of(FILE_SETTINGS_SUPPORTED); - } -} diff --git a/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java b/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java index 07cc73f4da2b..31758be719a6 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java +++ b/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java @@ -11,7 +11,6 @@ package org.elasticsearch.rest; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.MediaType; import org.elasticsearch.xcontent.ParsedMediaType; @@ -27,7 +26,6 @@ class RestCompatibleVersionHelper { /** * @return The requested API version, or {@link Optional#empty()} if there was no explicit version in the request. */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) static Optional getCompatibleVersion( @Nullable ParsedMediaType acceptHeader, @Nullable ParsedMediaType contentTypeHeader, @@ -52,8 +50,7 @@ class RestCompatibleVersionHelper { if (hasContent) { // content-type version must be current or prior - // This can be uncommented once all references to RestApiVersion.V_7 are removed - /*if (contentTypeVersion > RestApiVersion.current().major || contentTypeVersion < RestApiVersion.minimumSupported().major) { + if (contentTypeVersion > RestApiVersion.current().major || contentTypeVersion < RestApiVersion.minimumSupported().major) { throw new ElasticsearchStatusException( "Content-Type version must be either version {} or {}, but found {}. 
Content-Type={}", RestStatus.BAD_REQUEST, @@ -62,7 +59,7 @@ class RestCompatibleVersionHelper { contentTypeVersion, contentTypeHeader ); - }*/ + } // if both accept and content-type are sent, the version must match if (contentTypeVersion != acceptVersion) { throw new ElasticsearchStatusException( diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java deleted file mode 100644 index e72b30526c8e..000000000000 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.rest; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; - -import java.util.Set; - -import static org.elasticsearch.search.fetch.subphase.highlight.DefaultHighlighter.UNIFIED_HIGHLIGHTER_MATCHED_FIELDS; - -public class RestFeatures implements FeatureSpecification { - @Override - public Set getFeatures() { - return Set.of( - RestNodesCapabilitiesAction.CAPABILITIES_ACTION, - RestNodesCapabilitiesAction.LOCAL_ONLY_CAPABILITIES, - UNIFIED_HIGHLIGHTER_MATCHED_FIELDS - ); - } -} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index ca9e4abcaeec..477cc1acb831 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -11,15 +11,16 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import 
java.io.IOException; @@ -52,19 +53,14 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { return "cluster_get_settings_action"; } - private static void setUpRequestParams(MasterNodeReadRequest clusterRequest, RestRequest request) { - clusterRequest.local(request.paramAsBoolean("local", clusterRequest.local())); - } - @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); ClusterGetSettingsAction.Request clusterSettingsRequest = new ClusterGetSettingsAction.Request(getMasterNodeTimeout(request)); + RestUtils.consumeDeprecatedLocalParameter(request); - setUpRequestParams(clusterSettingsRequest, request); - - return channel -> client.execute( + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute( ClusterGetSettingsAction.INSTANCE, clusterSettingsRequest, new RestToXContentListener(channel).map( diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java index 265cdd5979ad..7d660e527a81 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -12,7 +12,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -30,8 +29,6 @@ import static org.elasticsearch.rest.RestUtils.getTimeout; 
@ServerlessScope(Scope.INTERNAL) public class RestNodesCapabilitiesAction extends BaseRestHandler { - public static final NodeFeature CAPABILITIES_ACTION = new NodeFeature("rest.capabilities_action", true); - public static final NodeFeature LOCAL_ONLY_CAPABILITIES = new NodeFeature("rest.local_only_capabilities", true); private static final Set SUPPORTED_QUERY_PARAMETERS = Set.of( "timeout", "method", diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java index ec8bb6285bdd..da7a7d3379ee 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java @@ -14,16 +14,13 @@ import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesReq import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.List; -import java.util.function.Predicate; import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @@ -33,11 +30,6 @@ public class RestUpdateDesiredNodesAction extends BaseRestHandler { private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateDesiredNodesAction.class); private static final String VERSION_DEPRECATION_MESSAGE = "[version removal] Specifying node_version in desired nodes requests 
is deprecated."; - private final Predicate clusterSupportsFeature; - - public RestUpdateDesiredNodesAction(Predicate clusterSupportsFeature) { - this.clusterSupportsFeature = clusterSupportsFeature; - } @Override public String getName() { @@ -67,14 +59,8 @@ public class RestUpdateDesiredNodesAction extends BaseRestHandler { ); } - if (clusterSupportsFeature.test(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED)) { - if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) { - deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE); - } - } else { - if (updateDesiredNodesRequest.getNodes().stream().anyMatch(n -> n.hasVersion() == false)) { - throw new XContentParseException("[node_version] field is required and must have a valid value"); - } + if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) { + deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE); } return restChannel -> client.execute( diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index 9be3462e97e0..e49efc80250a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -48,11 +48,10 @@ public class RestGetIndicesAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final GetIndexRequest getIndexRequest = new GetIndexRequest(); + final GetIndexRequest getIndexRequest = new GetIndexRequest(getMasterNodeTimeout(request)); getIndexRequest.indices(indices); 
getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local())); - getIndexRequest.masterNodeTimeout(getMasterNodeTimeout(request)); getIndexRequest.humanReadable(request.paramAsBoolean("human", false)); getIndexRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); getIndexRequest.features(GetIndexRequest.Feature.fromRequest(request)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 27620fa750ea..f9e02646041a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -51,11 +50,9 @@ public class RestGetMappingAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); + final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(getMasterNodeTimeout(request)); getMappingsRequest.indices(indices); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); - final TimeValue timeout = 
getMasterNodeTimeout(request); - getMappingsRequest.masterNodeTimeout(timeout); getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local())); final HttpChannel httpChannel = request.getHttpChannel(); return channel -> new RestCancellableNodeClient(client, httpChannel).admin() diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java index 4cc010c15ffb..1689d234387c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; @@ -41,8 +42,10 @@ public class RestSimulateIndexTemplateAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - SimulateIndexTemplateRequest simulateIndexTemplateRequest = new SimulateIndexTemplateRequest(request.param("name")); - simulateIndexTemplateRequest.masterNodeTimeout(getMasterNodeTimeout(request)); + SimulateIndexTemplateRequest simulateIndexTemplateRequest = new SimulateIndexTemplateRequest( + getMasterNodeTimeout(request), + request.param("name") + ); simulateIndexTemplateRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); if (request.hasContent()) { TransportPutComposableIndexTemplateAction.Request indexTemplateRequest = new TransportPutComposableIndexTemplateAction.Request( @@ -57,7 +60,7 @@ public class 
RestSimulateIndexTemplateAction extends BaseRestHandler { simulateIndexTemplateRequest.indexTemplateRequest(indexTemplateRequest); } - return channel -> client.execute( + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute( SimulateIndexTemplateAction.INSTANCE, simulateIndexTemplateRequest, new RestToXContentListener<>(channel) diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java index cc2e3136a416..47b68e24a9ed 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; @@ -39,8 +40,10 @@ public class RestSimulateTemplateAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - SimulateTemplateAction.Request simulateRequest = new SimulateTemplateAction.Request(); - simulateRequest.templateName(request.param("name")); + SimulateTemplateAction.Request simulateRequest = new SimulateTemplateAction.Request( + getMasterNodeTimeout(request), + request.param("name") + ); simulateRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); if (request.hasContent()) { TransportPutComposableIndexTemplateAction.Request indexTemplateRequest = new TransportPutComposableIndexTemplateAction.Request( @@ -54,8 +57,11 @@ public class RestSimulateTemplateAction extends BaseRestHandler { 
simulateRequest.indexTemplateRequest(indexTemplateRequest); } - simulateRequest.masterNodeTimeout(getMasterNodeTimeout(request)); - return channel -> client.execute(SimulateTemplateAction.INSTANCE, simulateRequest, new RestToXContentListener<>(channel)); + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute( + SimulateTemplateAction.INSTANCE, + simulateRequest, + new RestToXContentListener<>(channel) + ); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptFeatures.java b/server/src/main/java/org/elasticsearch/script/ScriptFeatures.java index 88756ddbf4ed..cd781e42379d 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptFeatures.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptFeatures.java @@ -17,6 +17,6 @@ import java.util.Set; public final class ScriptFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of(VectorScoreScriptUtils.HAMMING_DISTANCE_FUNCTION, ScriptTermStats.TERM_STAT_FEATURE); + return Set.of(); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java index 82f6e972e126..9c51afce3a49 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java @@ -16,7 +16,6 @@ import org.apache.lucene.index.TermStates; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.util.CachedSupplier; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.search.internal.ContextIndexSearcher; import java.io.IOException; @@ -30,8 +29,6 @@ import java.util.function.Supplier; */ public class ScriptTermStats { - public static final NodeFeature TERM_STAT_FEATURE = new NodeFeature("script.term_stats", true); - private final IntSupplier docIdSupplier; private final Term[] terms; 
private final IndexSearcher searcher; diff --git a/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java index 9b4d105eea10..bdebdcc1eecb 100644 --- a/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java +++ b/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java @@ -10,7 +10,6 @@ package org.elasticsearch.script; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.script.field.vectors.DenseVector; import org.elasticsearch.script.field.vectors.DenseVectorDocValuesField; @@ -21,8 +20,6 @@ import java.util.List; public class VectorScoreScriptUtils { - public static final NodeFeature HAMMING_DISTANCE_FUNCTION = new NodeFeature("script.hamming", true); - public static class DenseVectorFunction { protected final ScoreScript scoreScript; protected final DenseVectorDocValuesField field; diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 553511346b18..98dd7f9388c1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -11,7 +11,6 @@ package org.elasticsearch.search; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import java.util.Set; @@ -21,7 +20,7 @@ public final class SearchFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); + return Set.of(LUCENE_10_0_0_UPGRADE); } public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled"); diff 
--git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 8c21abe4180e..6d47493e4d06 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -1402,9 +1402,6 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R } } else if (token == XContentParser.Token.START_OBJECT) { if (RETRIEVER.match(currentFieldName, parser.getDeprecationHandler())) { - if (clusterSupportsFeature.test(RetrieverBuilder.RETRIEVERS_SUPPORTED) == false) { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a START_OBJECT in [retriever]."); - } retrieverBuilder = RetrieverBuilder.parseTopLevelRetrieverBuilder( parser, new RetrieverParserContext(searchUsage, clusterSupportsFeature) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 5220dadec7a1..53116ff8f6eb 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -9,10 +9,12 @@ package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; +import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.fetch.FetchContext; @@ -52,8 +54,9 @@ public final class MatchedQueriesPhase implements FetchSubPhase { ); } return new 
FetchSubPhaseProcessor() { + record ScorerAndIterator(Scorer scorer, DocIdSetIterator approximation, TwoPhaseIterator twoPhase) {} - final Map matchingIterators = new HashMap<>(); + final Map matchingIterators = new HashMap<>(); @Override public void setNextReader(LeafReaderContext readerContext) throws IOException { @@ -63,7 +66,14 @@ public final class MatchedQueriesPhase implements FetchSubPhase { if (ss != null) { Scorer scorer = ss.get(0L); if (scorer != null) { - matchingIterators.put(entry.getKey(), scorer); + final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); + final DocIdSetIterator iterator; + if (twoPhase == null) { + iterator = scorer.iterator(); + } else { + iterator = twoPhase.approximation(); + } + matchingIterators.put(entry.getKey(), new ScorerAndIterator(scorer, iterator, twoPhase)); } } } @@ -73,13 +83,13 @@ public final class MatchedQueriesPhase implements FetchSubPhase { public void process(HitContext hitContext) throws IOException { Map matches = new LinkedHashMap<>(); int doc = hitContext.docId(); - for (Map.Entry entry : matchingIterators.entrySet()) { - Scorer scorer = entry.getValue(); - if (scorer.iterator().docID() < doc) { - scorer.iterator().advance(doc); + for (Map.Entry entry : matchingIterators.entrySet()) { + ScorerAndIterator query = entry.getValue(); + if (query.approximation.docID() < doc) { + query.approximation.advance(doc); } - if (scorer.iterator().docID() == doc) { - matches.put(entry.getKey(), scorer.score()); + if (query.approximation.docID() == doc && (query.twoPhase == null || query.twoPhase.matches())) { + matches.put(entry.getKey(), query.scorer.score()); } } hitContext.hit().matchedQueries(matches); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java index b16617e2eb4d..c47f815c1863 100644 --- 
a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.text.Text; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -51,8 +50,6 @@ import static org.elasticsearch.lucene.search.uhighlight.CustomUnifiedHighlighte public class DefaultHighlighter implements Highlighter { - public static final NodeFeature UNIFIED_HIGHLIGHTER_MATCHED_FIELDS = new NodeFeature("unified_highlighter_matched_fields", true); - @Override public boolean canHighlight(MappedFieldType fieldType) { return true; diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java index 9eb0170af5ef..d899a390d8be 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java @@ -102,14 +102,6 @@ public class SearchLookup implements SourceProvider { this.fieldLookupProvider = searchLookup.fieldLookupProvider; } - private SearchLookup(SearchLookup searchLookup, SourceProvider sourceProvider, Set fieldChain) { - this.fieldChain = Collections.unmodifiableSet(fieldChain); - this.sourceProvider = sourceProvider; - this.fieldTypeLookup = searchLookup.fieldTypeLookup; - this.fieldDataLookup = searchLookup.fieldDataLookup; - this.fieldLookupProvider = searchLookup.fieldLookupProvider; - } - /** * Creates a copy of the current {@link SearchLookup} that looks fields up in the same way, but also tracks field references * in order to detect cycles and 
prevent resolving fields that depend on more than {@link #MAX_FIELD_CHAIN_DEPTH} other fields. @@ -153,7 +145,4 @@ public class SearchLookup implements SourceProvider { return sourceProvider.getSource(ctx, doc); } - public SearchLookup swapSourceProvider(SourceProvider sourceProvider) { - return new SearchLookup(this, sourceProvider, fieldChain); - } } diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java index e64bbe3c39d7..4374c06da365 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java @@ -83,7 +83,7 @@ public final class RankFeatureShardPhase { // FetchSearchResult#shardResult() SearchHits hits = fetchSearchResult.hits(); RankFeatureShardResult featureRankShardResult = (RankFeatureShardResult) rankFeaturePhaseRankShardContext - .buildRankFeatureShardResult(hits, searchContext.shardTarget().getShardId().id()); + .buildRankFeatureShardResult(hits, searchContext.request().shardRequestIndex()); // save the result in the search context // need to add profiling info as well available from fetch if (featureRankShardResult != null) { diff --git a/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java index ecc03d05b28a..737d2aa397c3 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java @@ -10,8 +10,6 @@ package org.elasticsearch.search.retriever; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.BoolQueryBuilder; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -46,7 +44,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr public final class KnnRetrieverBuilder extends RetrieverBuilder { public static final String NAME = "knn"; - public static final NodeFeature KNN_RETRIEVER_SUPPORTED = new NodeFeature("knn_retriever_supported", true); public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField K_FIELD = new ParseField("k"); @@ -103,9 +100,6 @@ public final class KnnRetrieverBuilder extends RetrieverBuilder { } public static KnnRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { - if (context.clusterSupportsFeature(KNN_RETRIEVER_SUPPORTED) == false) { - throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + NAME + "]"); - } return PARSER.apply(parser, context); } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java index 357555cc5994..ce852a44c28e 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.SuggestingErrorOnUnknown; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -50,8 +49,6 @@ import java.util.Objects; */ public abstract class RetrieverBuilder implements Rewriteable, ToXContent { - public static final NodeFeature RETRIEVERS_SUPPORTED = new 
NodeFeature("retrievers_supported", true); - public static final ParseField PRE_FILTER_FIELD = new ParseField("filter"); public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score"); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieversFeatures.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieversFeatures.java index 74a8b30c8e7d..bfd6f572a9e6 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieversFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieversFeatures.java @@ -22,10 +22,6 @@ public class RetrieversFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( - RetrieverBuilder.RETRIEVERS_SUPPORTED, - StandardRetrieverBuilder.STANDARD_RETRIEVER_SUPPORTED, - KnnRetrieverBuilder.KNN_RETRIEVER_SUPPORTED - ); + return Set.of(); } } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/StandardRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/StandardRetrieverBuilder.java index 2ffb9e3a9802..3ca74dc133d4 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/StandardRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/StandardRetrieverBuilder.java @@ -9,8 +9,6 @@ package org.elasticsearch.search.retriever; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -39,7 +37,6 @@ import java.util.Objects; public final class StandardRetrieverBuilder extends RetrieverBuilder implements ToXContent { public static final String NAME = "standard"; - public static final NodeFeature STANDARD_RETRIEVER_SUPPORTED = new NodeFeature("standard_retriever_supported", true); public static final ParseField QUERY_FIELD = new 
ParseField("query"); public static final ParseField SEARCH_AFTER_FIELD = new ParseField("search_after"); @@ -81,9 +78,6 @@ public final class StandardRetrieverBuilder extends RetrieverBuilder implements } public static StandardRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { - if (context.clusterSupportsFeature(STANDARD_RETRIEVER_SUPPORTED) == false) { - throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + NAME + "]"); - } return PARSER.apply(parser, context); } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index 193191658af0..565fd7325a5a 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; @@ -56,8 +55,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr * {@link org.apache.lucene.search.KnnByteVectorQuery}. 
*/ public class KnnVectorQueryBuilder extends AbstractQueryBuilder { - public static final NodeFeature K_PARAM_SUPPORTED = new NodeFeature("search.vectors.k_param_supported", true); - public static final String NAME = "knn"; private static final int NUM_CANDS_LIMIT = 10_000; private static final float NUM_CANDS_MULTIPLICATIVE_FACTOR = 1.5f; diff --git a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java index 4be1ee9ddc51..a6a7b458c216 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java @@ -13,9 +13,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -100,11 +98,9 @@ public class RepositoryIntegrityHealthIndicatorService implements HealthIndicato ); private final ClusterService clusterService; - private final FeatureService featureService; - public RepositoryIntegrityHealthIndicatorService(ClusterService clusterService, FeatureService featureService) { + public RepositoryIntegrityHealthIndicatorService(ClusterService clusterService) { this.clusterService = clusterService; - this.featureService = featureService; } @Override @@ -175,15 +171,8 @@ public class RepositoryIntegrityHealthIndicatorService implements HealthIndicato || invalidRepositories.isEmpty() == false) { healthStatus = 
YELLOW; } else if (repositoriesHealthByNode.isEmpty()) { - clusterHasFeature = featureService.clusterHasFeature( - clusterState, - HealthFeatures.SUPPORTS_EXTENDED_REPOSITORY_INDICATOR - ) == false; - if (clusterHasFeature) { - healthStatus = GREEN; - } else { - healthStatus = UNKNOWN; - } + clusterHasFeature = false; + healthStatus = UNKNOWN; } else { healthStatus = GREEN; } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 9d848b292ac5..8eb5bf47160c 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -72,7 +72,6 @@ import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -160,7 +159,7 @@ public final class RestoreService implements ClusterStateApplier { SETTING_CREATION_DATE, SETTING_HISTORY_UUID, IndexSettings.MODE.getKey(), - SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), + IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index f55e3740aaa8..37a3ec586d10 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -27,7 +27,6 @@ import 
org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.node.Node; import org.elasticsearch.node.ReportingService; import org.elasticsearch.telemetry.metric.Instrument; @@ -120,13 +119,7 @@ public class ThreadPool implements ReportingService, Scheduler, public static final String THREAD_POOL_METRIC_NAME_REJECTED = ".threads.rejected.total"; public enum ThreadPoolType { - @Deprecated(forRemoval = true) - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // no longer used, remove in v9 - DIRECT("direct"), FIXED("fixed"), - @Deprecated(forRemoval = true) - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // no longer used, remove in v9 - FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), SCALING("scaling"); private final String type; diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index 00146844043b..faafdf7d71e3 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -114,7 +114,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements protected final NetworkService networkService; protected final Set profileSettingsSet; protected final boolean rstOnClose; - private final TransportVersion version; private final CircuitBreakerService circuitBreakerService; private final ConcurrentMap profileBoundAddresses = newConcurrentMap(); @@ -148,7 +147,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements ) { this.settings = settings; this.profileSettingsSet = getProfileSettings(settings); - this.version = version; this.threadPool = threadPool; this.circuitBreakerService = circuitBreakerService; 
this.networkService = networkService; @@ -199,11 +197,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements ); } - @Override - public TransportVersion getVersion() { - return version; - } - public StatsTracker getStatsTracker() { return statsTracker; } diff --git a/server/src/main/java/org/elasticsearch/transport/Transport.java b/server/src/main/java/org/elasticsearch/transport/Transport.java index a47763583574..a1d35ce3f255 100644 --- a/server/src/main/java/org/elasticsearch/transport/Transport.java +++ b/server/src/main/java/org/elasticsearch/transport/Transport.java @@ -50,10 +50,6 @@ public interface Transport extends LifecycleComponent { return false; } - default TransportVersion getVersion() { - return TransportVersion.current(); - } - /** * The address the transport is bound on. */ diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index a43b1e7440bd..c4fdf05f5640 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -41,9 +41,14 @@ final class TransportHandshaker { * ignores the body of the request. After the handshake, the OutboundHandler uses the min(local,remote) protocol version for all later * messages. * - * This version supports two handshake protocols, v6080099 and v7170099, which respectively have the same message structure as the - * transport protocols of v6.8.0 and v7.17.0. This node only sends v7170099 requests, but it can send a valid response to any v6080099 - * requests that it receives. + * This version supports three handshake protocols, v6080099, v7170099 and v8800000, which respectively have the same message structure + * as the transport protocols of v6.8.0, v7.17.0, and v8.18.0. 
This node only sends v7170099 requests, but it can send a valid response + * to any v6080099 or v8800000 requests that it receives. + * + * Note that these are not really TransportVersion constants as used elsewhere in ES, they're independent things that just happen to be + * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did + * rely on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer + * true. * * Here are some example messages, broken down to show their structure: * @@ -79,7 +84,7 @@ final class TransportHandshaker { * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) * * - * ## v7170099 Request: + * ## v7170099 and v8800000 Requests: * * 45 53 -- 'ES' marker * 00 00 00 31 -- total message length @@ -98,7 +103,7 @@ final class TransportHandshaker { * 04 -- payload length * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) * - * ## v7170099 Response: + * ## v7170099 and v8800000 Responses: * * 45 53 -- 'ES' marker * 00 00 00 17 -- total message length @@ -116,9 +121,14 @@ final class TransportHandshaker { * [3] Parent task ID should be empty; see org.elasticsearch.tasks.TaskId.writeTo for its structure. 
*/ - static final TransportVersion EARLIEST_HANDSHAKE_VERSION = TransportVersion.fromId(6080099); - static final TransportVersion REQUEST_HANDSHAKE_VERSION = TransportVersions.MINIMUM_COMPATIBLE; - static final Set ALLOWED_HANDSHAKE_VERSIONS = Set.of(EARLIEST_HANDSHAKE_VERSION, REQUEST_HANDSHAKE_VERSION); + static final TransportVersion V7_HANDSHAKE_VERSION = TransportVersion.fromId(6_08_00_99); + static final TransportVersion V8_HANDSHAKE_VERSION = TransportVersion.fromId(7_17_00_99); + static final TransportVersion V9_HANDSHAKE_VERSION = TransportVersion.fromId(8_800_00_0); + static final Set ALLOWED_HANDSHAKE_VERSIONS = Set.of( + V7_HANDSHAKE_VERSION, + V8_HANDSHAKE_VERSION, + V9_HANDSHAKE_VERSION + ); static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake"; private final ConcurrentMap pendingHandshakes = new ConcurrentHashMap<>(); @@ -156,7 +166,7 @@ final class TransportHandshaker { ); boolean success = false; try { - handshakeRequestSender.sendRequest(node, channel, requestId, REQUEST_HANDSHAKE_VERSION); + handshakeRequestSender.sendRequest(node, channel, requestId, V8_HANDSHAKE_VERSION); threadPool.schedule( () -> handler.handleLocalException(new ConnectTransportException(node, "handshake_timeout[" + timeout + "]")), diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 8fa188efbd4a..a4f606b54827 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -7,21 +7,13 @@ # License v3.0 only", or the "Server Side Public License, v 1". 
# -org.elasticsearch.action.admin.indices.stats.IndicesStatsFeatures org.elasticsearch.action.bulk.BulkFeatures org.elasticsearch.features.FeatureInfrastructureFeatures -org.elasticsearch.health.HealthFeatures -org.elasticsearch.cluster.metadata.MetadataFeatures -org.elasticsearch.rest.RestFeatures -org.elasticsearch.repositories.RepositoriesFeatures -org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures org.elasticsearch.index.IndexFeatures org.elasticsearch.index.mapper.MapperFeatures -org.elasticsearch.ingest.IngestGeoIpFeatures org.elasticsearch.search.SearchFeatures org.elasticsearch.search.retriever.RetrieversFeatures org.elasticsearch.script.ScriptFeatures -org.elasticsearch.reservedstate.service.FileSettingsFeatures org.elasticsearch.cluster.routing.RoutingFeatures org.elasticsearch.action.admin.cluster.stats.ClusterStatsFeatures diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsActionTests.java index f3d8f8860ba8..d9bf3e0e99c8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsActionTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.routing.allocation.AllocationStatsService; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsTests; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ClusterServiceUtils; @@ -35,8 +34,6 @@ import java.util.Set; import static org.hamcrest.Matchers.anEmptyMap; 
import static org.hamcrest.Matchers.not; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -48,7 +45,6 @@ public class TransportGetAllocationStatsActionTests extends ESTestCase { private ClusterService clusterService; private TransportService transportService; private AllocationStatsService allocationStatsService; - private FeatureService featureService; private TransportGetAllocationStatsAction action; @@ -67,15 +63,13 @@ public class TransportGetAllocationStatsActionTests extends ESTestCase { Set.of() ); allocationStatsService = mock(AllocationStatsService.class); - featureService = mock(FeatureService.class); action = new TransportGetAllocationStatsAction( transportService, clusterService, threadPool, new ActionFilters(Set.of()), null, - allocationStatsService, - featureService + allocationStatsService ); } @@ -99,8 +93,6 @@ public class TransportGetAllocationStatsActionTests extends ESTestCase { ); when(allocationStatsService.stats()).thenReturn(Map.of(randomIdentifier(), NodeAllocationStatsTests.randomNodeAllocationStats())); - when(featureService.clusterHasFeature(any(ClusterState.class), eq(AllocationStatsFeatures.INCLUDE_DISK_THRESHOLD_SETTINGS))) - .thenReturn(true); var future = new PlainActionFuture(); action.masterOperation(mock(Task.class), request, ClusterState.EMPTY_STATE, future); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusActionTests.java index 8a51963097da..9fef4c4ed328 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusActionTests.java @@ -13,8 +13,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptorUtils; import org.elasticsearch.indices.SystemIndices; @@ -31,7 +31,8 @@ import static org.hamcrest.Matchers.hasSize; public class TransportGetFeatureUpgradeStatusActionTests extends ESTestCase { public static String TEST_SYSTEM_INDEX_PATTERN = ".test*"; - private static final IndexVersion TEST_OLD_VERSION = IndexVersion.fromId(6000099); + // Version just before MINIMUM_COMPATIBLE in order to check that UpgradeStatus.MIGRATION_NEEDED is set correctly + private static final IndexVersion TEST_OLD_VERSION = IndexVersion.fromId(IndexVersions.MINIMUM_COMPATIBLE.id() - 1); private static final ClusterState CLUSTER_STATE = getClusterState(); private static final SystemIndices.Feature FEATURE = getFeature(); @@ -85,8 +86,6 @@ public class TransportGetFeatureUpgradeStatusActionTests extends ESTestCase { .numberOfReplicas(0) .build(); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - // Once we start testing 9.x, we should update this test to use a 7.x "version created" IndexMetadata indexMetadata2 = IndexMetadata.builder(".test-index-2") .settings(Settings.builder().put("index.version.created", TEST_OLD_VERSION).build()) .numberOfShards(1) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsSerializationTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsSerializationTests.java deleted file 
mode 100644 index 5954de058698..000000000000 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsSerializationTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.action.admin.cluster.settings; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.AbstractWireSerializingTestCase; - -public class ClusterGetSettingsSerializationTests extends AbstractWireSerializingTestCase { - @Override - protected Writeable.Reader instanceReader() { - return ClusterGetSettingsAction.Response::new; - } - - @Override - protected ClusterGetSettingsAction.Response createTestInstance() { - final Settings persistentSettings = Settings.builder() - .put("persistent.foo.filtered", "bar") - .put("persistent.foo.non_filtered", "baz") - .build(); - - final Settings transientSettings = Settings.builder() - .put("transient.foo.filtered", "bar") - .put("transient.foo.non_filtered", "baz") - .build(); - - final Settings allSettings = Settings.builder().put(persistentSettings).put(transientSettings).build(); - - return new ClusterGetSettingsAction.Response(persistentSettings, transientSettings, allSettings); - } - - @Override - protected ClusterGetSettingsAction.Response mutateInstance(ClusterGetSettingsAction.Response instance) { - final Settings otherSettings = Settings.builder().put("random.setting", randomAlphaOfLength(randomIntBetween(1, 12))).build(); - return switch (between(0, 2)) { 
- case 0 -> new ClusterGetSettingsAction.Response(otherSettings, instance.transientSettings(), instance.settings()); - case 1 -> new ClusterGetSettingsAction.Response(instance.persistentSettings(), otherSettings, instance.settings()); - case 2 -> new ClusterGetSettingsAction.Response(instance.persistentSettings(), instance.transientSettings(), otherSettings); - default -> throw new IllegalStateException("Unexpected switch value"); - }; - } -} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsTests.java index 47f3f71bcc11..58341b7d7fb9 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsTests.java @@ -12,17 +12,18 @@ package org.elasticsearch.action.admin.cluster.settings; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.List; +import java.util.Map; import static org.mockito.Mockito.mock; @@ -54,10 +55,8 @@ public class ClusterGetSettingsTests extends ESTestCase { TransportClusterGetSettingsAction action = new TransportClusterGetSettingsAction( transportService, mock(ClusterService.class), - threadPool, filter, - 
mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class) + mock(ActionFilters.class) ); final Settings persistentSettings = Settings.builder() @@ -74,7 +73,8 @@ public class ClusterGetSettingsTests extends ESTestCase { final ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); final PlainActionFuture future = new PlainActionFuture<>(); - action.masterOperation(null, null, clusterState, future); + final var task = new CancellableTask(1, "test", ClusterGetSettingsAction.NAME, "", null, Map.of()); + action.localClusterStateOperation(task, null, clusterState, future); assertTrue(future.isDone()); final ClusterGetSettingsAction.Response response = future.get(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java index fa2f350ecc93..844fe0b41b74 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java @@ -17,8 +17,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.script.Script; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -586,7 +586,7 @@ public class MappingStatsTests extends AbstractWireSerializingTestCase { - - @Override - protected Writeable.Reader instanceReader() { - return SimulateIndexTemplateRequest::new; - } - - @Override - protected SimulateIndexTemplateRequest createTestInstance() { - 
SimulateIndexTemplateRequest req = new SimulateIndexTemplateRequest(randomAlphaOfLength(10)); - TransportPutComposableIndexTemplateAction.Request newTemplateRequest = new TransportPutComposableIndexTemplateAction.Request( - randomAlphaOfLength(4) - ); - newTemplateRequest.indexTemplate(ComposableIndexTemplateTests.randomInstance()); - req.indexTemplateRequest(newTemplateRequest); - req.includeDefaults(randomBoolean()); - return req; - } - - @Override - protected SimulateIndexTemplateRequest mutateInstance(SimulateIndexTemplateRequest instance) { - return randomValueOtherThan(instance, this::createTestInstance); - } +public class SimulateIndexTemplateRequestTests extends ESTestCase { public void testIndexNameCannotBeNullOrEmpty() { - expectThrows(IllegalArgumentException.class, () -> new SimulateIndexTemplateRequest((String) null)); - expectThrows(IllegalArgumentException.class, () -> new SimulateIndexTemplateRequest("")); + expectThrows(IllegalArgumentException.class, () -> new SimulateIndexTemplateRequest(TEST_REQUEST_TIMEOUT, null)); + expectThrows(IllegalArgumentException.class, () -> new SimulateIndexTemplateRequest(TEST_REQUEST_TIMEOUT, "")); } public void testAddingGlobalTemplateWithHiddenIndexSettingIsIllegal() { @@ -60,7 +36,7 @@ public class SimulateIndexTemplateRequestTests extends AbstractWireSerializingTe TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request("test"); request.indexTemplate(globalTemplate); - SimulateIndexTemplateRequest simulateRequest = new SimulateIndexTemplateRequest("testing"); + SimulateIndexTemplateRequest simulateRequest = new SimulateIndexTemplateRequest(TEST_REQUEST_TIMEOUT, "testing"); simulateRequest.indexTemplateRequest(request); ActionRequestValidationException validationException = simulateRequest.validate(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateRequestTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateRequestTests.java index 14ebf260d3bf..6163566a5e59 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateRequestTests.java @@ -12,49 +12,17 @@ package org.elasticsearch.action.admin.indices.template.post; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplateTests; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Template; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.util.List; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SimulateTemplateRequestTests extends AbstractWireSerializingTestCase { - - @Override - protected Writeable.Reader instanceReader() { - return SimulateTemplateAction.Request::new; - } - - @Override - protected SimulateTemplateAction.Request createTestInstance() { - SimulateTemplateAction.Request req = new SimulateTemplateAction.Request(randomAlphaOfLength(10)); - TransportPutComposableIndexTemplateAction.Request newTemplateRequest = new TransportPutComposableIndexTemplateAction.Request( - randomAlphaOfLength(4) - ); - newTemplateRequest.indexTemplate(ComposableIndexTemplateTests.randomInstance()); - req.indexTemplateRequest(newTemplateRequest); - req.includeDefaults(randomBoolean()); - return req; - } - - @Override - protected SimulateTemplateAction.Request 
mutateInstance(SimulateTemplateAction.Request instance) { - return randomValueOtherThan(instance, this::createTestInstance); - } - - public void testIndexNameCannotBeNullOrEmpty() { - expectThrows(IllegalArgumentException.class, () -> new SimulateTemplateAction.Request((String) null)); - expectThrows( - IllegalArgumentException.class, - () -> new SimulateTemplateAction.Request((TransportPutComposableIndexTemplateAction.Request) null) - ); - } +public class SimulateTemplateRequestTests extends ESTestCase { public void testAddingGlobalTemplateWithHiddenIndexSettingIsIllegal() { Template template = new Template(Settings.builder().put(IndexMetadata.SETTING_INDEX_HIDDEN, true).build(), null, null); @@ -63,7 +31,7 @@ public class SimulateTemplateRequestTests extends AbstractWireSerializingTestCas TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request("test"); request.indexTemplate(globalTemplate); - SimulateTemplateAction.Request simulateRequest = new SimulateTemplateAction.Request("testing"); + SimulateTemplateAction.Request simulateRequest = new SimulateTemplateAction.Request(TEST_REQUEST_TIMEOUT, "testing"); simulateRequest.indexTemplateRequest(request); ActionRequestValidationException validationException = simulateRequest.validate(); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index a4185675efe6..94cef3c84de2 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -158,7 +158,6 @@ public class TransportBulkActionIngestTests extends ESTestCase { transportService, TransportBulkActionIngestTests.this.clusterService, ingestService, - mockFeatureService, new NodeClient(Settings.EMPTY, TransportBulkActionIngestTests.this.threadPool), 
new ActionFilters(Collections.emptySet()), TestIndexNameExpressionResolver.newInstance(), diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index cf1bed1bd3d9..fdc0abf5880f 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -121,7 +121,6 @@ public class TransportBulkActionTests extends ESTestCase { transportService, TransportBulkActionTests.this.clusterService, null, - mockFeatureService, new NodeClient(Settings.EMPTY, TransportBulkActionTests.this.threadPool), new ActionFilters(Collections.emptySet()), new Resolver(), diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index b3cad671db15..8515ef36b538 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -248,7 +248,6 @@ public class TransportBulkActionTookTests extends ESTestCase { transportService, clusterService, null, - null, client, actionFilters, indexNameExpressionResolver, diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 8c071e81ab6e..29a5c4b7fcd4 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -28,9 +28,6 @@ import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersion; @@ -82,12 +79,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { service = new DataStreamAutoShardingService( Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true).build(), clusterService, - new FeatureService(List.of(new FeatureSpecification() { - @Override - public Set getFeatures() { - return Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE); - } - })), () -> now ); dataStreamName = randomAlphaOfLengthBetween(10, 100); @@ -115,14 +106,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -131,12 +114,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { DataStreamAutoShardingService disabledAutoshardingService = new DataStreamAutoShardingService( Settings.EMPTY, clusterService, - new FeatureService(List.of(new FeatureSpecification() { - @Override - public Set getFeatures() { - return Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE); - } - })), System::currentTimeMillis ); @@ -144,46 +121,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { 
assertThat(autoShardingResult, is(NOT_APPLICABLE_RESULT)); } - { - // cluster doesn't have feature - ClusterState stateNoFeature = ClusterState.builder(ClusterName.DEFAULT) - .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures(Map.of("n1", Set.of(), "n2", Set.of())) - .metadata(Metadata.builder()) - .build(); - - Settings settings = Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true).build(); - DataStreamAutoShardingService noFeatureService = new DataStreamAutoShardingService( - settings, - clusterService, - new FeatureService(List.of()), - () -> now - ); - - AutoShardingResult autoShardingResult = noFeatureService.calculate(stateNoFeature.projectState(), dataStream, 2.0); - assertThat(autoShardingResult, is(NOT_APPLICABLE_RESULT)); - } - - { - Settings settings = Settings.builder() - .put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true) - .putList( - DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), - List.of("foo", dataStreamName + "*") - ) - .build(); - // patterns are configured to exclude the current data stream - DataStreamAutoShardingService noFeatureService = new DataStreamAutoShardingService( - settings, - clusterService, - new FeatureService(List.of()), - () -> now - ); - - AutoShardingResult autoShardingResult = noFeatureService.calculate(state.projectState(projectId), dataStream, 2.0); - assertThat(autoShardingResult, is(NOT_APPLICABLE_RESULT)); - } - { // null write load passed AutoShardingResult autoShardingResult = service.calculate(state.projectState(projectId), dataStream, null); @@ -212,14 +149,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - 
Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -252,14 +181,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -292,14 +213,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -332,14 +245,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -374,14 +279,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = 
ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -424,14 +321,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -472,14 +361,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) .putProjectMetadata(builder.build()) .build(); @@ -514,14 +395,6 @@ public class DataStreamAutoShardingServiceTests extends ESTestCase { builder.put(dataStream); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1")).add(DiscoveryNodeUtils.create("n2"))) - .nodeFeatures( - Map.of( - "n1", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()), - "n2", - Set.of(DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE.id()) - ) - ) 
.putProjectMetadata(builder.build()) .build(); diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 71bf2a47cfa4..47ff4ca6f060 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -207,12 +207,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { List> nodeLookups = new ArrayList<>(); ArraySearchPhaseResults phaseResults = phaseResults(requestIds, nodeLookups, 0); AbstractSearchAsyncAction action = createAction(searchRequest, phaseResults, listener, false, new AtomicLong()); - action.onPhaseFailure(new SearchPhase("test") { - @Override - public void run() { - - } - }, "message", null); + action.onPhaseFailure("test", "message", null); assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class)); SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException) exception.get(); assertEquals("message", searchPhaseExecutionException.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 193855a4c835..bf62973b9b05 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -141,7 +141,7 @@ public class DfsQueryPhaseTests extends ESTestCase { ) { DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override - public void run() throws IOException { + protected void run() { responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); @@ -227,7 +227,7 @@ public class 
DfsQueryPhaseTests extends ESTestCase { ) { DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override - public void run() throws IOException { + protected void run() { responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); @@ -315,7 +315,7 @@ public class DfsQueryPhaseTests extends ESTestCase { ) { DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override - public void run() throws IOException { + protected void run() { responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 5fb70500d515..65fdec96c92f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -119,7 +119,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { try { ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override - public void run() { + protected void run() { try (var sections = new SearchResponseSections(hits, null, null, false, null, null, 1)) { mockSearchPhaseContext.sendSearchResponse(sections, null); } @@ -210,7 +210,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { try { ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override - public void run() { + protected void run() { try (var sections = new SearchResponseSections(hits, null, null, false, null, null, 1)) { mockSearchPhaseContext.sendSearchResponse(sections, null); } @@ -246,7 +246,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { try { ExpandSearchPhase 
phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override - public void run() { + protected void run() { try (var sections = new SearchResponseSections(hits, null, null, false, null, null, 1)) { mockSearchPhaseContext.sendSearchResponse(sections, null); } @@ -286,7 +286,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { SearchHits hits = SearchHits.empty(new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override - public void run() { + protected void run() { mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); } }); @@ -336,7 +336,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { try { ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override - public void run() { + protected void run() { mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); } }); @@ -385,7 +385,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { try { ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override - public void run() { + protected void run() { mockSearchPhaseContext.sendSearchResponse( new SearchResponseSections(hits, null, null, false, null, null, 1), new AtomicArray<>(0) diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index dda20dfb37e9..fd60621c7e40 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -488,7 +488,7 @@ public class FetchSearchPhaseTests extends ESTestCase { reducedQueryPhase, 
(searchResponse, scrollId) -> new SearchPhase("test") { @Override - public void run() { + protected void run() { mockSearchPhaseContext.sendSearchResponse(searchResponse, null); latch.countDown(); } @@ -764,7 +764,7 @@ public class FetchSearchPhaseTests extends ESTestCase { ) { return (searchResponse, scrollId) -> new SearchPhase("test") { @Override - public void run() { + protected void run() { mockSearchPhaseContext.sendSearchResponse(searchResponse, null); } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index cf65d756811a..e8e12300c23e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -113,7 +113,7 @@ public final class MockSearchPhaseContext extends AbstractSearchAsyncAction nextPhaseSupplier) { + public void executeNextPhase(String currentPhase, Supplier nextPhaseSupplier) { var nextPhase = nextPhaseSupplier.get(); try { nextPhase.run(); } catch (Exception e) { - onPhaseFailure(nextPhase, "phase failed", e); + onPhaseFailure(nextPhase.getName(), "phase failed", e); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 2361beb7ad03..7e9e6f623cab 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -140,7 +140,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { assertTrue(searchPhaseDidRun.compareAndSet(false, true)); latch.countDown(); } @@ -255,7 +255,7 @@ public class SearchAsyncActionTests extends 
ESTestCase { protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { assertTrue(searchPhaseDidRun.compareAndSet(false, true)); latch.countDown(); } @@ -363,7 +363,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { for (int i = 0; i < results.getNumShards(); i++) { TestSearchPhaseResult result = results.getAtomicArray().get(i); assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); @@ -497,7 +497,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { throw new RuntimeException("boom"); } }; @@ -608,7 +608,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { assertTrue(searchPhaseDidRun.compareAndSet(false, true)); latch.countDown(); } @@ -688,7 +688,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { assertTrue(searchPhaseDidRun.compareAndSet(false, true)); latch.countDown(); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 6357155793fd..f005f862720f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -207,7 +207,7 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { protected SearchPhase 
getNextPhase() { return new SearchPhase("test") { @Override - public void run() { + protected void run() { latch.countDown(); } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java index 8e4b8db77bb1..1952e9c26c57 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -86,7 +85,7 @@ public class SearchScrollAsyncActionTests extends ESTestCase { assertEquals(1, movedCounter.incrementAndGet()); return new SearchPhase("test") { @Override - public void run() throws IOException { + protected void run() { latch.countDown(); } }; @@ -183,7 +182,7 @@ public class SearchScrollAsyncActionTests extends ESTestCase { assertEquals(1, movedCounter.incrementAndGet()); return new SearchPhase("TEST_PHASE") { @Override - public void run() throws IOException { + protected void run() { throw new IllegalArgumentException("BOOM"); } }; @@ -261,7 +260,7 @@ public class SearchScrollAsyncActionTests extends ESTestCase { assertEquals(1, movedCounter.incrementAndGet()); return new SearchPhase("test") { @Override - public void run() throws IOException { + protected void run() { latch.countDown(); } }; @@ -343,7 +342,7 @@ public class SearchScrollAsyncActionTests extends ESTestCase { assertEquals(1, movedCounter.incrementAndGet()); return new SearchPhase("test") { @Override - public void run() throws IOException { + protected void run() { latch.countDown(); } }; diff --git 
a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 2b0331458ee3..e2bcc8e95aea 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -614,6 +614,9 @@ public class MetadataCreateIndexServiceTests extends ESTestCase { validateIndexName(checkerService, "..", "must not be '.' or '..'"); validateIndexName(checkerService, "foo:bar", "must not contain ':'"); + + validateIndexName(checkerService, "", "must not be empty"); + validateIndexName(checkerService, null, "must not be empty"); })); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceTests.java index 760900817780..2c15addfe217 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceTests.java @@ -46,19 +46,19 @@ public class DesiredBalanceTests extends ESTestCase { ); assertThat( - "1 shard movements when existing shard is moved and new shard copy is unassigned", + "1 shard movements when an existing shard copy is moved and new shard copy is unassigned", shardMovements(new ShardAssignment(Set.of("a", "b"), 2, 0, 0), new ShardAssignment(Set.of("a", "c"), 3, 1, 0)), equalTo(1) ); assertThat( - "1 shard movement", + "1 shard movement when an existing shard copy is moved", shardMovements(new ShardAssignment(Set.of("a", "b"), 2, 0, 0), new ShardAssignment(Set.of("a", "c"), 2, 0, 0)), equalTo(1) ); assertThat( - "2 shard movement", + "2 shard movements when both shard copies are move to new nodes", shardMovements(new 
ShardAssignment(Set.of("a", "b"), 2, 0, 0), new ShardAssignment(Set.of("c", "d"), 2, 0, 0)), equalTo(2) ); @@ -77,10 +77,10 @@ public class DesiredBalanceTests extends ESTestCase { } private static int shardMovements(ShardAssignment old, ShardAssignment updated) { - return DesiredBalance.shardMovements(of(old), of(updated)); + return DesiredBalance.shardMovements(createDesiredBalanceWith(old), createDesiredBalanceWith(updated)); } - private static DesiredBalance of(ShardAssignment assignment) { + private static DesiredBalance createDesiredBalanceWith(ShardAssignment assignment) { return new DesiredBalance(1, Map.of(new ShardId("index", "_na_", 0), assignment)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java index 7c1c954e7b4e..e6f50ef42365 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java @@ -66,6 +66,12 @@ public class ClusterApplierServiceTests extends ESTestCase { assertThat(Thread.currentThread().getName(), containsString(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME)); return currentTimeMillis; } + + @Override + public long rawRelativeTimeInMillis() { + assertThat(Thread.currentThread().getName(), containsString(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME)); + return currentTimeMillis; + } }; clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); allowClusterStateApplicationFailure = false; @@ -207,15 +213,33 @@ public class ClusterApplierServiceTests extends ESTestCase { ); mockLog.addExpectation( new MockLog.SeenEventExpectation( - "test4", + "test3", ClusterApplierService.class.getCanonicalName(), Level.WARN, "*cluster state applier task [test3] took [34s] which is above the warn threshold of [*]: " + "[running task 
[test3]] took [*" ) ); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "test4", + ClusterApplierService.class.getCanonicalName(), + Level.WARN, + "*cluster state applier task [test4] took [36s] which is above the warn threshold of [*]: " + + "[running task [test4]] took [*" + ) + ); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "test5", + ClusterApplierService.class.getCanonicalName(), + Level.WARN, + "*cluster state applier task [test5] took [38s] which is above the warn threshold of [*]: " + + "[running task [test5]] took [*" + ) + ); - final CountDownLatch latch = new CountDownLatch(4); + final CountDownLatch latch = new CountDownLatch(6); final CountDownLatch processedFirstTask = new CountDownLatch(1); currentTimeMillis = randomLongBetween(0L, Long.MAX_VALUE / 2); clusterApplierService.runOnApplierThread( @@ -266,9 +290,39 @@ public class ClusterApplierServiceTests extends ESTestCase { } } ); + clusterApplierService.runOnApplierThread("test4", Priority.HIGH, currentState -> { + // do nothing (testing that onResponse is included in timing) + }, new ActionListener<>() { + + @Override + public void onResponse(Void unused) { + advanceTime(TimeValue.timeValueSeconds(36).millis()); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail(); + } + }); + clusterApplierService.runOnApplierThread("test5", Priority.HIGH, currentState -> { + throw new IllegalArgumentException("Testing that onFailure is included in timing"); + }, new ActionListener<>() { + + @Override + public void onResponse(Void unused) { + fail(); + } + + @Override + public void onFailure(Exception e) { + advanceTime(TimeValue.timeValueSeconds(38).millis()); + latch.countDown(); + } + }); // Additional update task to make sure all previous logging made it to the loggerName // We don't check logging for this on since there is no guarantee that it will occur before our check - clusterApplierService.runOnApplierThread("test4", Priority.HIGH, 
currentState -> {}, new ActionListener<>() { + clusterApplierService.runOnApplierThread("test6", Priority.HIGH, currentState -> {}, new ActionListener<>() { @Override public void onResponse(Void ignored) { latch.countDown(); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java index de1990361e76..8f62f8d33eac 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java @@ -12,7 +12,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -116,8 +115,6 @@ public class CompatibleNamedXContentRegistryTests extends ESTestCase { } } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - @AwaitsFix(bugUrl = "this can be re-enabled once our rest api version is bumped to V_9") public void testNotCompatibleRequest() throws IOException { NamedXContentRegistry registry = new NamedXContentRegistry( List.of( diff --git a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java index 7a63934b4810..eca30fdce2cf 100644 --- a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java @@ -24,8 +24,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RelativeByteSizeValue; import 
org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.health.node.selection.HealthNode; @@ -119,18 +117,9 @@ public class LocalHealthMonitorTests extends ESTestCase { client = mock(Client.class); - FeatureService featureService = new FeatureService(List.of(new HealthFeatures())); - mockHealthTracker = new MockHealthTracker(); - localHealthMonitor = LocalHealthMonitor.create( - Settings.EMPTY, - clusterService, - threadPool, - client, - featureService, - List.of(mockHealthTracker) - ); + localHealthMonitor = LocalHealthMonitor.create(Settings.EMPTY, clusterService, threadPool, client, List.of(mockHealthTracker)); } @After diff --git a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java index 51dadd815454..3069589f9556 100644 --- a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksService; @@ -78,7 +77,7 @@ public class HealthNodeTaskExecutorTests extends ESTestCase { localNodeId = clusterService.localNode().getId(); persistentTasksService = mock(PersistentTasksService.class); settings = Settings.builder().build(); - featureService = new 
FeatureService(List.of(new HealthFeatures())); + featureService = new FeatureService(List.of()); clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java index 4d97235c5fae..61bd5323b5b4 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java @@ -437,32 +437,35 @@ class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { float[] centroid, float cDotC ) throws IOException { - long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES); - final IndexOutput tempQuantizedVectorData = segmentWriteState.directory.createTempOutput( - binarizedVectorData.getName(), - "temp", - segmentWriteState.context - ); - final IndexOutput tempScoreQuantizedVectorData = segmentWriteState.directory.createTempOutput( - binarizedVectorData.getName(), - "score_temp", - segmentWriteState.context - ); - IndexInput binarizedDataInput = null; - IndexInput binarizedScoreDataInput = null; - boolean success = false; - int descritizedDimension = BQVectorUtils.discretize(fieldInfo.getVectorDimension(), 64); - BinaryQuantizer quantizer = new BinaryQuantizer( + final long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES); + final int descritizedDimension = BQVectorUtils.discretize(fieldInfo.getVectorDimension(), 64); + final BinaryQuantizer quantizer = new BinaryQuantizer( fieldInfo.getVectorDimension(), descritizedDimension, fieldInfo.getVectorSimilarityFunction() ); + + IndexOutput tempQuantizedVectorData = null; + IndexOutput tempScoreQuantizedVectorData = null; + final DocsWithFieldSet docsWithField; + boolean success = false; + 
try { + tempQuantizedVectorData = segmentWriteState.directory.createTempOutput( + binarizedVectorData.getName(), + "temp", + segmentWriteState.context + ); + tempScoreQuantizedVectorData = segmentWriteState.directory.createTempOutput( + binarizedVectorData.getName(), + "score_temp", + segmentWriteState.context + ); FloatVectorValues floatVectorValues = KnnVectorsWriter.MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState); if (fieldInfo.getVectorSimilarityFunction() == COSINE) { floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues); } - DocsWithFieldSet docsWithField = writeBinarizedVectorAndQueryData( + docsWithField = writeBinarizedVectorAndQueryData( tempQuantizedVectorData, tempScoreQuantizedVectorData, floatVectorValues, @@ -470,13 +473,30 @@ class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { quantizer ); CodecUtil.writeFooter(tempQuantizedVectorData); - IOUtils.close(tempQuantizedVectorData); + CodecUtil.writeFooter(tempScoreQuantizedVectorData); + success = true; + } finally { + if (success) { + IOUtils.close(tempQuantizedVectorData, tempScoreQuantizedVectorData); + } else { + IOUtils.closeWhileHandlingException(tempQuantizedVectorData, tempScoreQuantizedVectorData); + if (tempQuantizedVectorData != null) { + IOUtils.deleteFilesIgnoringExceptions(segmentWriteState.directory, tempQuantizedVectorData.getName()); + } + if (tempScoreQuantizedVectorData != null) { + IOUtils.deleteFilesIgnoringExceptions(segmentWriteState.directory, tempScoreQuantizedVectorData.getName()); + } + } + } + + IndexInput binarizedDataInput = null; + IndexInput binarizedScoreDataInput = null; + success = false; + try { binarizedDataInput = segmentWriteState.directory.openInput(tempQuantizedVectorData.getName(), segmentWriteState.context); binarizedVectorData.copyBytes(binarizedDataInput, binarizedDataInput.length() - CodecUtil.footerLength()); - long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset; + final long 
vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset; CodecUtil.retrieveChecksum(binarizedDataInput); - CodecUtil.writeFooter(tempScoreQuantizedVectorData); - IOUtils.close(tempScoreQuantizedVectorData); binarizedScoreDataInput = segmentWriteState.directory.openInput( tempScoreQuantizedVectorData.getName(), segmentWriteState.context @@ -490,10 +510,9 @@ class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { cDotC, docsWithField ); - success = true; final IndexInput finalBinarizedDataInput = binarizedDataInput; final IndexInput finalBinarizedScoreDataInput = binarizedScoreDataInput; - OffHeapBinarizedVectorValues vectorValues = new OffHeapBinarizedVectorValues.DenseOffHeapVectorValues( + final OffHeapBinarizedVectorValues vectorValues = new OffHeapBinarizedVectorValues.DenseOffHeapVectorValues( fieldInfo.getVectorDimension(), docsWithField.cardinality(), centroid, @@ -503,7 +522,7 @@ class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { vectorsScorer, finalBinarizedDataInput ); - RandomVectorScorerSupplier scorerSupplier = vectorsScorer.getRandomVectorScorerSupplier( + final RandomVectorScorerSupplier scorerSupplier = vectorsScorer.getRandomVectorScorerSupplier( fieldInfo.getVectorSimilarityFunction(), new OffHeapBinarizedQueryVectorValues( finalBinarizedScoreDataInput, @@ -513,22 +532,20 @@ class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { ), vectorValues ); + final String tempQuantizedVectorDataName = tempQuantizedVectorData.getName(); + final String tempScoreQuantizedVectorDataName = tempScoreQuantizedVectorData.getName(); + success = true; return new BinarizedCloseableRandomVectorScorerSupplier(scorerSupplier, vectorValues, () -> { IOUtils.close(finalBinarizedDataInput, finalBinarizedScoreDataInput); IOUtils.deleteFilesIgnoringExceptions( segmentWriteState.directory, - tempQuantizedVectorData.getName(), - tempScoreQuantizedVectorData.getName() + tempQuantizedVectorDataName, + 
tempScoreQuantizedVectorDataName ); }); } finally { if (success == false) { - IOUtils.closeWhileHandlingException( - tempQuantizedVectorData, - tempScoreQuantizedVectorData, - binarizedDataInput, - binarizedScoreDataInput - ); + IOUtils.closeWhileHandlingException(binarizedDataInput, binarizedScoreDataInput); IOUtils.deleteFilesIgnoringExceptions( segmentWriteState.directory, tempQuantizedVectorData.getName(), diff --git a/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java index 5863d2f93296..a1a119d416f0 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java @@ -12,7 +12,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.index.NoMergePolicy; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; @@ -21,7 +21,7 @@ import java.io.IOException; public class LuceneChangesSnapshotTests extends SearchBasedChangesSnapshotTests { @Override protected Translog.Snapshot newRandomSnapshot( - MappingLookup mappingLookup, + MapperService mapperService, Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, @@ -32,6 +32,7 @@ public class LuceneChangesSnapshotTests extends SearchBasedChangesSnapshotTests IndexVersion indexVersionCreated ) throws IOException { return new LuceneChangesSnapshot( + mapperService, engineSearcher, searchBatchSize, fromSeqNo, diff --git a/server/src/test/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshotTests.java b/server/src/test/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshotTests.java 
index 2a6c3428d6d4..2beeffef7fbd 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshotTests.java @@ -13,27 +13,25 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.translog.Translog; import java.io.IOException; -import static org.elasticsearch.index.mapper.SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING; - public class LuceneSyntheticSourceChangesSnapshotTests extends SearchBasedChangesSnapshotTests { @Override protected Settings indexSettings() { return Settings.builder() .put(super.indexSettings()) - .put(INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()) .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) .build(); } @Override protected Translog.Snapshot newRandomSnapshot( - MappingLookup mappingLookup, + MapperService mapperService, Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, @@ -44,7 +42,7 @@ public class LuceneSyntheticSourceChangesSnapshotTests extends SearchBasedChange IndexVersion indexVersionCreated ) throws IOException { return new LuceneSyntheticSourceChangesSnapshot( - mappingLookup, + mapperService, engineSearcher, searchBatchSize, randomLongBetween(0, ByteSizeValue.ofBytes(Integer.MAX_VALUE).getBytes()), diff --git a/server/src/test/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshotTests.java 
b/server/src/test/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshotTests.java index 9cfa7321973a..ed11ac7fd05c 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshotTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.store.Store; @@ -46,7 +46,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { } protected abstract Translog.Snapshot newRandomSnapshot( - MappingLookup mappingLookup, + MapperService mapperService, Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, @@ -117,7 +117,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); try ( Translog.Snapshot snapshot = newRandomSnapshot( - engine.engineConfig.getMapperService().mappingLookup(), + engine.engineConfig.getMapperService(), searcher, between(1, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, @@ -137,7 +137,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); try ( Translog.Snapshot snapshot = newRandomSnapshot( - engine.engineConfig.getMapperService().mappingLookup(), + engine.engineConfig.getMapperService(), searcher, between(1, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, @@ -163,7 +163,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { 
Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); try ( Translog.Snapshot snapshot = newRandomSnapshot( - engine.engineConfig.getMapperService().mappingLookup(), + engine.engineConfig.getMapperService(), searcher, between(1, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, @@ -182,7 +182,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); try ( Translog.Snapshot snapshot = newRandomSnapshot( - engine.engineConfig.getMapperService().mappingLookup(), + engine.engineConfig.getMapperService(), searcher, between(1, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, @@ -206,7 +206,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); try ( Translog.Snapshot snapshot = newRandomSnapshot( - engine.engineConfig.getMapperService().mappingLookup(), + engine.engineConfig.getMapperService(), searcher, between(1, SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, @@ -270,7 +270,7 @@ public abstract class SearchBasedChangesSnapshotTests extends EngineTestCase { final boolean accessStats = randomBoolean(); try ( Translog.Snapshot snapshot = newRandomSnapshot( - engine.engineConfig.getMapperService().mappingLookup(), + engine.engineConfig.getMapperService(), searcher, between(1, 100), 0, diff --git a/server/src/test/java/org/elasticsearch/index/engine/TranslogOperationAsserterTests.java b/server/src/test/java/org/elasticsearch/index/engine/TranslogOperationAsserterTests.java index b764bce464d1..d0455c14bd78 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/TranslogOperationAsserterTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/TranslogOperationAsserterTests.java @@ -24,8 +24,6 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import static 
org.elasticsearch.index.mapper.SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING; - public class TranslogOperationAsserterTests extends EngineTestCase { @Override @@ -33,7 +31,7 @@ public class TranslogOperationAsserterTests extends EngineTestCase { return Settings.builder() .put(super.indexSettings()) .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .put(INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()) .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) .build(); } @@ -57,10 +55,10 @@ public class TranslogOperationAsserterTests extends EngineTestCase { EngineConfig config = engine.config(); Settings.Builder settings = Settings.builder().put(config.getIndexSettings().getSettings()); if (useSyntheticSource) { - settings.put(INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()); + settings.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()); settings.put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true); } else { - settings.put(INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.name()); + settings.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.name()); settings.put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), false); } IndexMetadata imd = IndexMetadata.builder(config.getIndexSettings().getIndexMetadata()).settings(settings).build(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java index 907a1a15721d..c17c0d10410f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java @@ -54,12 +54,14 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { } public void testGetMappings() { - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings().get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT).get(); assertExpectedMappings(getMappingsResponse.mappings()); } public void testGetIndex() { - GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex().setFeatures(GetIndexRequest.Feature.MAPPINGS).get(); + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setFeatures(GetIndexRequest.Feature.MAPPINGS) + .get(); assertExpectedMappings(getIndexResponse.mappings()); } @@ -71,7 +73,7 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { assertFieldMappings(mappings.get("filtered"), FILTERED_FLAT_FIELDS); // double check that submitting the filtered mappings to an unfiltered index leads to the same get field mappings output // as the one coming from a filtered index with same mappings - GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings("filtered").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "filtered").get(); MappingMetadata filtered = getMappingsResponse.getMappings().get("filtered"); assertAcked(indicesAdmin().prepareCreate("test").setMapping(filtered.getSourceAsMap())); GetFieldMappingsResponse response = indicesAdmin().prepareGetFieldMappings("test").setFields("*").get(); @@ -98,7 +100,7 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { assertFieldCaps(filtered, filteredFields); // double check that submitting the filtered mappings to an unfiltered index leads to the same field_caps output // as the one coming from a filtered index with same mappings - GetMappingsResponse 
getMappingsResponse = indicesAdmin().prepareGetMappings("filtered").get(); + GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "filtered").get(); MappingMetadata filteredMapping = getMappingsResponse.getMappings().get("filtered"); assertAcked(indicesAdmin().prepareCreate("test").setMapping(filteredMapping.getSourceAsMap())); FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet(); @@ -155,7 +157,7 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { private void assertMappingsAreValid(Map sourceAsMap) { // check that the returned filtered mappings are still valid mappings by submitting them and retrieving them back assertAcked(indicesAdmin().prepareCreate("test").setMapping(sourceAsMap)); - GetMappingsResponse testMappingsResponse = indicesAdmin().prepareGetMappings("test").get(); + GetMappingsResponse testMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertEquals(1, testMappingsResponse.getMappings().size()); // the mappings are returned unfiltered for this index, yet they are the same as the previous ones that were returned filtered assertFiltered(testMappingsResponse.getMappings().get("test")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperConfigurationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperConfigurationTests.java index 8646e1b66dcb..1ba5f423d4b0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperConfigurationTests.java @@ -12,6 +12,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.DirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; +import 
org.elasticsearch.index.IndexSettings; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -130,7 +131,7 @@ public class IgnoredSourceFieldMapperConfigurationTests extends MapperServiceTes for (var entry : customSettings.entrySet()) { settings.put(entry.getKey(), entry.getValue()); } - settings.put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC); + settings.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC); return createMapperService(settings.build(), mapping(mapping)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NonDynamicFieldMapperTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/NonDynamicFieldMapperTestCase.java index 6ea41763cbcc..b669c717719b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NonDynamicFieldMapperTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NonDynamicFieldMapperTestCase.java @@ -43,7 +43,7 @@ public abstract class NonDynamicFieldMapperTestCase extends ESSingleNodeTestCase """, getMapping()); var resp = client().admin().indices().prepareCreate("test").setMapping(mapping).get(); assertTrue(resp.isAcknowledged()); - var mappingsResp = client().admin().indices().prepareGetMappings("test").get(); + var mappingsResp = client().admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); var mappingMetadata = mappingsResp.getMappings().get("test"); var fieldType = XContentMapValues.extractValue("properties.field.type", mappingMetadata.getSourceAsMap()); assertThat(fieldType, equalTo(getTypeName())); @@ -149,7 +149,7 @@ public abstract class NonDynamicFieldMapperTestCase extends ESSingleNodeTestCase var resp = client().prepareIndex("test1").setSource("field", "hello world").get(); assertThat(resp.status(), equalTo(RestStatus.CREATED)); - var mappingsResp = client().admin().indices().prepareGetMappings("test1").get(); + var mappingsResp 
= client().admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test1").get(); var mappingMetadata = mappingsResp.getMappings().get("test1"); var fieldType = XContentMapValues.extractValue("properties.field.type", mappingMetadata.getSourceAsMap()); assertThat(fieldType, equalTo(getTypeName())); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index b7693513a434..bc560d94b8f5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -427,7 +427,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { { Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()) .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) .build(); IllegalArgumentException exc = expectThrows( @@ -470,7 +470,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { public void testRecoverySourceWithSyntheticSource() throws IOException { { Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.toString()) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.toString()) .build(); MapperService mapperService = createMapperService(settings, topMapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); @@ -480,7 +480,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { } { Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.toString()) + 
.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.toString()) .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) .build(); MapperService mapperService = createMapperService(settings, topMapping(b -> {})); @@ -539,7 +539,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) .build(); final MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); @@ -549,7 +549,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) .build(); final MapperService mapperService = createMapperService(settings, mappings); final DocumentMapper docMapper = mapperService.documentMapper(); @@ -559,7 +559,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) .build(); final MapperService mapperService = createMapperService(settings, mappings); 
final DocumentMapper docMapper = mapperService.documentMapper(); @@ -571,7 +571,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) .build(); final MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); @@ -581,7 +581,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) .build(); final MapperService mapperService = createMapperService(settings, mappings); final DocumentMapper docMapper = mapperService.documentMapper(); @@ -591,7 +591,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) .build(); var ex = expectThrows(MapperParsingException.class, () -> createMapperService(settings, mappings)); assertEquals("Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode", ex.getMessage()); @@ -613,7 +613,7 @@ public class SourceFieldMapperTests extends 
MetadataMapperTestCase { """; final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "routing_field") .build(); final MapperService mapperService = createMapperService(settings, mappings); @@ -635,7 +635,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { """; final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "routing_field") .build(); final MapperService mapperService = createMapperService(settings, mappings); @@ -657,7 +657,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { """; final Settings settings = Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "routing_field") .build(); var ex = expectThrows(MapperParsingException.class, () -> createMapperService(settings, mappings)); @@ -668,7 +668,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), 
SourceFieldMapper.Mode.SYNTHETIC) .build(); final MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); @@ -677,7 +677,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED) .build(); final MapperService mapperService = createMapperService(settings, mappings); final DocumentMapper docMapper = mapperService.documentMapper(); @@ -686,7 +686,7 @@ public class SourceFieldMapperTests extends MetadataMapperTestCase { { final XContentBuilder mappings = topMapping(b -> {}); final Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.DISABLED) .build(); final MapperService mapperService = createMapperService(settings, mappings); final DocumentMapper docMapper = mapperService.documentMapper(); diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index 15febaa8db2a..8a84b0ec59b5 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; -import org.elasticsearch.reservedstate.service.FileSettingsFeatures; import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.ESTestCase; 
import org.elasticsearch.test.MockLog; @@ -48,7 +47,6 @@ import java.net.UnknownHostException; import java.nio.channels.ServerSocketChannel; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Set; import static org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata.ErrorKind.TRANSIENT; @@ -60,7 +58,6 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private ThreadPool threadpool; private Environment env; private FakeHttpTransport httpTransport; - private static final Set nodeFeatures = Set.of(FileSettingsFeatures.FILE_SETTINGS_SUPPORTED.id()); private static Metadata emptyReservedStateMetadata; static { @@ -310,26 +307,6 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient readinessService.close(); } - public void testFileSettingsMixedCluster() throws Exception { - readinessService.start(); - - // initially the service isn't ready because initial cluster state has not been applied yet - assertFalse(readinessService.ready()); - - ClusterState noFileSettingsState = ClusterState.builder(noFileSettingsState()) - // the master node is upgraded to support file settings, but existing node2 is not - .nodeFeatures(Map.of(httpTransport.node.getId(), nodeFeatures)) - .build(); - ClusterChangedEvent event = new ClusterChangedEvent("test", noFileSettingsState, emptyState()); - readinessService.clusterChanged(event); - - // when upgrading from nodes before file settings exist, readiness should return true once a master is elected - assertTrue(readinessService.ready()); - - readinessService.stop(); - readinessService.close(); - } - private ClusterState emptyState() { return ClusterState.builder(new ClusterName("cluster")) .nodes( @@ -347,7 +324,6 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient .masterNodeId(httpTransport.node.getId()) .localNodeId(httpTransport.node.getId()) ) - .nodeFeatures(Map.of(httpTransport.node.getId(), 
nodeFeatures, "node2", nodeFeatures)) .build(); } } diff --git a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java index 040ab9fd5c2e..028438b5e926 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java @@ -210,12 +210,15 @@ public class RestCompatibleVersionHelperTests extends ESTestCase { assertThat( e.getMessage(), equalTo( - "A compatible version is required on both Content-Type and Accept headers if either one has requested a " - + "compatible version and the compatible versions must match. " - + "Accept=" - + acceptHeader(PREVIOUS_VERSION) - + ", Content-Type=" - + contentTypeHeader(OBSOLETE_VERSION) + "Content-Type version must be either version " + + CURRENT_VERSION + + " or " + + PREVIOUS_VERSION + + ", but found " + + OBSOLETE_VERSION + + ". 
" + + "Content-Type=" + + acceptHeader(OBSOLETE_VERSION) ) ); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java index 16e066e486de..fe602d2854c8 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java @@ -1215,7 +1215,7 @@ public class SearchServiceSingleNodeTests extends ESSingleNodeTestCase { // ok } - expectThrows(IndexNotFoundException.class, () -> indicesAdmin().prepareGetIndex().setIndices("index").get()); + expectThrows(IndexNotFoundException.class, () -> indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("index").get()); assertEquals(0, service.getActiveContexts()); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 0f73c367ff2e..1bcc89b68c14 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -574,11 +574,10 @@ public class HighlightBuilderTests extends ESTestCase { assertEquals("pre_tags are set but post_tags are not set", e.getCause().getCause().getMessage()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/119723") public void testInvalidMaxAnalyzedOffset() throws IOException { XContentParseException e = expectParseThrows( XContentParseException.class, - "{ \"max_analyzed_offset\" : " + randomIntBetween(-100, -1) + "}" + "{ \"max_analyzed_offset\" : " + randomIntBetween(-100, -2) + "}" ); assertThat(e.getMessage(), containsString("[highlight] failed to parse field [" + MAX_ANALYZED_OFFSET_FIELD.toString() + "]")); 
assertThat(e.getCause().getMessage(), containsString("[max_analyzed_offset] must be a positive integer, or -1")); diff --git a/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java b/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java index da28b0eff441..2724b86f9acd 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java +++ b/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java @@ -11,6 +11,7 @@ package org.elasticsearch.search.retriever; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -88,10 +89,7 @@ public class KnnRetrieverBuilderParsingTests extends AbstractXContentTestCase nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == KnnRetrieverBuilder.KNN_RETRIEVER_SUPPORTED - ) + new RetrieverParserContext(new SearchUsage(), Predicates.never()) ); } diff --git a/server/src/test/java/org/elasticsearch/search/retriever/RetrieverBuilderVersionTests.java b/server/src/test/java/org/elasticsearch/search/retriever/RetrieverBuilderVersionTests.java deleted file mode 100644 index 6448d11de2e4..000000000000 --- a/server/src/test/java/org/elasticsearch/search/retriever/RetrieverBuilderVersionTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.search.retriever; - -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.json.JsonXContent; - -import java.io.IOException; -import java.util.List; - -/** Tests retrievers validate on their own {@link NodeFeature} */ -public class RetrieverBuilderVersionTests extends ESTestCase { - - public void testRetrieverVersions() throws IOException { - try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"retriever\":{\"standard\":{}}}")) { - SearchSourceBuilder ssb = new SearchSourceBuilder(); - ParsingException iae = expectThrows(ParsingException.class, () -> ssb.parseXContent(parser, true, nf -> false)); - assertEquals("Unknown key for a START_OBJECT in [retriever].", iae.getMessage()); - } - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"retriever\":{\"standard\":{}}}")) { - SearchSourceBuilder ssb = new SearchSourceBuilder(); - ParsingException iae = expectThrows( - ParsingException.class, - () -> ssb.parseXContent(parser, true, nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED) - ); - assertEquals("unknown retriever [standard]", iae.getMessage()); - } - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"retriever\":{\"standard\":{}}}")) { - 
SearchSourceBuilder ssb = new SearchSourceBuilder(); - ssb.parseXContent( - parser, - true, - nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == StandardRetrieverBuilder.STANDARD_RETRIEVER_SUPPORTED - ); - } - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"retriever\":{\"knn\":{}}}")) { - SearchSourceBuilder ssb = new SearchSourceBuilder(); - ParsingException iae = expectThrows( - ParsingException.class, - () -> ssb.parseXContent(parser, true, nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED) - ); - assertEquals("unknown retriever [knn]", iae.getMessage()); - } - - try ( - XContentParser parser = createParser( - JsonXContent.jsonXContent, - "{\"retriever\":{\"knn\":{\"field\": \"test\", \"k\": 2, \"num_candidates\": 5, \"query_vector\": [1, 2, 3]}}}" - ) - ) { - SearchSourceBuilder ssb = new SearchSourceBuilder(); - ssb.parseXContent( - parser, - true, - nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == KnnRetrieverBuilder.KNN_RETRIEVER_SUPPORTED - ); - } - } - - @Override - protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); - } -} diff --git a/server/src/test/java/org/elasticsearch/search/retriever/StandardRetrieverBuilderParsingTests.java b/server/src/test/java/org/elasticsearch/search/retriever/StandardRetrieverBuilderParsingTests.java index eacd949077bc..979c588089f5 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/StandardRetrieverBuilderParsingTests.java +++ b/server/src/test/java/org/elasticsearch/search/retriever/StandardRetrieverBuilderParsingTests.java @@ -12,6 +12,7 @@ package org.elasticsearch.search.retriever; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.query.BoolQueryBuilder; import 
org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -100,10 +101,7 @@ public class StandardRetrieverBuilderParsingTests extends AbstractXContentTestCa protected StandardRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { return (StandardRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( parser, - new RetrieverParserContext( - new SearchUsage(), - nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == StandardRetrieverBuilder.STANDARD_RETRIEVER_SUPPORTED - ) + new RetrieverParserContext(new SearchUsage(), Predicates.never()) ); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java index cc21ade31471..cadd9d5196f6 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.Diagnosis.Resource.Type; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.SimpleHealthIndicatorDetails; @@ -37,7 +36,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.Stream; import static org.elasticsearch.cluster.node.DiscoveryNode.DISCOVERY_NODE_COMPARATOR; @@ -382,10 +380,7 @@ public class RepositoryIntegrityHealthIndicatorServiceTests extends ESTestCase { } private ClusterState createClusterStateWith(RepositoriesMetadata metadata) { - 
var features = Set.of(HealthFeatures.SUPPORTS_EXTENDED_REPOSITORY_INDICATOR.id()); - var builder = ClusterState.builder(new ClusterName("test-cluster")) - .nodes(DiscoveryNodes.builder().add(node1).add(node2).build()) - .nodeFeatures(Map.of(node1.getId(), features, node2.getId(), features)); + var builder = ClusterState.builder(new ClusterName("test-cluster")).nodes(DiscoveryNodes.builder().add(node1).add(node2).build()); if (metadata != null) { builder.metadata(Metadata.builder().putCustom(RepositoriesMetadata.TYPE, metadata)); } @@ -399,7 +394,7 @@ public class RepositoryIntegrityHealthIndicatorServiceTests extends ESTestCase { private RepositoryIntegrityHealthIndicatorService createRepositoryIntegrityHealthIndicatorService(ClusterState clusterState) { var clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); - return new RepositoryIntegrityHealthIndicatorService(clusterService, featureService); + return new RepositoryIntegrityHealthIndicatorService(clusterService); } private SimpleHealthIndicatorDetails createDetails(int total, int corruptedCount, List corrupted, int unknown, int invalid) { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 9500c66245ea..97dedebef0cd 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -243,9 +243,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class SnapshotResiliencyTests extends ESTestCase { @@ -2114,8 +2112,6 @@ public class 
SnapshotResiliencyTests extends ESTestCase { } ); recoverySettings = new RecoverySettings(settings, clusterSettings); - FeatureService mockFeatureService = mock(FeatureService.class); - when(mockFeatureService.clusterHasFeature(any(), any())).thenReturn(true); mockTransport = new DisruptableMockTransport(node, deterministicTaskQueue) { @Override protected ConnectionStatus getConnectionStatus(DiscoveryNode destination) { @@ -2410,7 +2406,6 @@ public class SnapshotResiliencyTests extends ESTestCase { FailureStoreMetrics.NOOP, TestProjectResolvers.singleProjectOnly() ), - mockFeatureService, client, actionFilters, indexNameExpressionResolver, diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index 889afab00e83..9b56cd3bde53 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.transport.InboundDecoder.ChannelType; @@ -27,6 +28,7 @@ import java.util.ArrayList; import static org.elasticsearch.common.bytes.ReleasableBytesReferenceStreamInputTests.wrapAsReleasable; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.instanceOf; @@ -124,12 +126,12 @@ public class InboundDecoderTests extends ESTestCase { } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // can delete test in v9 public void 
testDecodePreHeaderSizeVariableInt() throws IOException { - // TODO: Can delete test on 9.0 Compression.Scheme compressionScheme = randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.DEFLATE, null); String action = "test-request"; long requestId = randomNonNegativeLong(); - final TransportVersion preHeaderVariableInt = TransportHandshaker.EARLIEST_HANDSHAKE_VERSION; + final TransportVersion preHeaderVariableInt = TransportHandshaker.V7_HANDSHAKE_VERSION; final String contentValue = randomAlphaOfLength(100); // 8.0 is only compatible with handshakes on a pre-variable int version final OutboundMessage message = new OutboundMessage.Request( @@ -182,13 +184,13 @@ public class InboundDecoderTests extends ESTestCase { } } - public void testDecodeHandshakeCompatibility() throws IOException { + public void testDecodeHandshakeV7Compatibility() throws IOException { String action = "test-request"; long requestId = randomNonNegativeLong(); final String headerKey = randomAlphaOfLength(10); final String headerValue = randomAlphaOfLength(20); threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.EARLIEST_HANDSHAKE_VERSION; + TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; OutboundMessage message = new OutboundMessage.Request( threadContext, new TestRequest(randomAlphaOfLength(100)), @@ -223,6 +225,55 @@ public class InboundDecoderTests extends ESTestCase { } + public void testDecodeHandshakeV8Compatibility() throws IOException { + doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, null); + doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, Compression.Scheme.DEFLATE); + } + + public void testDecodeHandshakeV9Compatibility() throws IOException { + doHandshakeCompatibilityTest(TransportHandshaker.V9_HANDSHAKE_VERSION, null); + doHandshakeCompatibilityTest(TransportHandshaker.V9_HANDSHAKE_VERSION, Compression.Scheme.DEFLATE); + } + + private void 
doHandshakeCompatibilityTest(TransportVersion transportVersion, Compression.Scheme compressionScheme) throws IOException { + String action = "test-request"; + long requestId = randomNonNegativeLong(); + final String headerKey = randomAlphaOfLength(10); + final String headerValue = randomAlphaOfLength(20); + threadContext.putHeader(headerKey, headerValue); + OutboundMessage message = new OutboundMessage.Request( + threadContext, + new TestRequest(randomAlphaOfLength(100)), + transportVersion, + action, + requestId, + true, + compressionScheme + ); + + try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { + final BytesReference bytes = message.serialize(os); + int totalHeaderSize = TcpHeader.headerSize(transportVersion); + + InboundDecoder decoder = new InboundDecoder(recycler); + final ArrayList fragments = new ArrayList<>(); + final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); + int bytesConsumed = decoder.decode(releasable1, fragments::add); + assertThat(bytesConsumed, greaterThan(totalHeaderSize)); + assertTrue(releasable1.hasReferences()); + + final Header header = (Header) fragments.get(0); + assertEquals(requestId, header.getRequestId()); + assertEquals(transportVersion, header.getVersion()); + assertEquals(compressionScheme == Compression.Scheme.DEFLATE, header.isCompressed()); + assertTrue(header.isHandshake()); + assertTrue(header.isRequest()); + assertFalse(header.needsToReadVariableHeader()); + assertEquals(headerValue, header.getRequestHeaders().get(headerKey)); + fragments.clear(); + } + } + public void testClientChannelTypeFailsDecodingRequests() throws Exception { String action = "test-request"; long requestId = randomNonNegativeLong(); @@ -236,13 +287,18 @@ public class InboundDecoderTests extends ESTestCase { } } // a request + final var isHandshake = randomBoolean(); + final var version = isHandshake + ? 
randomFrom(TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS) + : TransportVersionUtils.randomCompatibleVersion(random()); + logger.info("--> version = {}", version); OutboundMessage message = new OutboundMessage.Request( threadContext, new TestRequest(randomAlphaOfLength(100)), - TransportHandshaker.REQUEST_HANDSHAKE_VERSION, + version, action, requestId, - randomBoolean(), + isHandshake, randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.LZ4, null) ); @@ -259,9 +315,9 @@ public class InboundDecoderTests extends ESTestCase { try (InboundDecoder decoder = new InboundDecoder(recycler, randomFrom(ChannelType.SERVER, ChannelType.MIX))) { final ArrayList fragments = new ArrayList<>(); int bytesConsumed = decoder.decode(wrapAsReleasable(bytes), fragments::add); - int totalHeaderSize = TcpHeader.headerSize(TransportVersion.current()) + bytes.getInt( - TcpHeader.VARIABLE_HEADER_SIZE_POSITION - ); + int totalHeaderSize = TcpHeader.headerSize(version) + (version.onOrAfter(TransportVersions.V_7_6_0) + ? bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION) + : 0); assertEquals(totalHeaderSize, bytesConsumed); final Header header = (Header) fragments.get(0); assertEquals(requestId, header.getRequestId()); @@ -281,12 +337,16 @@ public class InboundDecoderTests extends ESTestCase { } } // a response + final var isHandshake = randomBoolean(); + final var version = isHandshake + ? 
randomFrom(TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS) + : TransportVersionUtils.randomCompatibleVersion(random()); OutboundMessage message = new OutboundMessage.Response( threadContext, new TestResponse(randomAlphaOfLength(100)), - TransportHandshaker.REQUEST_HANDSHAKE_VERSION, + version, requestId, - randomBoolean(), + isHandshake, randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.LZ4, null) ); @@ -301,9 +361,9 @@ public class InboundDecoderTests extends ESTestCase { try (InboundDecoder decoder = new InboundDecoder(recycler, randomFrom(ChannelType.CLIENT, ChannelType.MIX))) { final ArrayList fragments = new ArrayList<>(); int bytesConsumed = decoder.decode(wrapAsReleasable(bytes), fragments::add); - int totalHeaderSize = TcpHeader.headerSize(TransportVersion.current()) + bytes.getInt( - TcpHeader.VARIABLE_HEADER_SIZE_POSITION - ); + int totalHeaderSize = TcpHeader.headerSize(version) + (version.onOrAfter(TransportVersions.V_7_6_0) + ? bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION) + : 0); assertEquals(totalHeaderSize, bytesConsumed); final Header header = (Header) fragments.get(0); assertEquals(requestId, header.getRequestId()); @@ -399,7 +459,7 @@ public class InboundDecoderTests extends ESTestCase { final String headerKey = randomAlphaOfLength(10); final String headerValue = randomAlphaOfLength(20); threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.EARLIEST_HANDSHAKE_VERSION; + TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; OutboundMessage message = new OutboundMessage.Request( threadContext, new TestRequest(randomAlphaOfLength(100)), @@ -488,23 +548,16 @@ public class InboundDecoderTests extends ESTestCase { } public void testCheckHandshakeCompatibility() { - try { - InboundDecoder.checkHandshakeVersionCompatibility(randomFrom(TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS)); - } catch (IllegalStateException e) { - throw new AssertionError(e); - } + for 
(final var allowedHandshakeVersion : TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS) { + InboundDecoder.checkHandshakeVersionCompatibility(allowedHandshakeVersion); // should not throw - var invalid = TransportVersion.fromId(TransportHandshaker.EARLIEST_HANDSHAKE_VERSION.id() - 1); - try { - InboundDecoder.checkHandshakeVersionCompatibility(invalid); - fail(); - } catch (IllegalStateException expected) { + var invalid = TransportVersion.fromId(allowedHandshakeVersion.id() + randomFrom(-1, +1)); assertEquals( "Received message from unsupported version: [" + invalid + "] allowed versions are: " + TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS, - expected.getMessage() + expectThrows(IllegalStateException.class, () -> InboundDecoder.checkHandshakeVersionCompatibility(invalid)).getMessage() ); } } diff --git a/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java index 8ff7c511fa48..47e0dc1f61ca 100644 --- a/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java @@ -12,6 +12,7 @@ package org.elasticsearch.transport; import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -34,6 +35,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Streams; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.TransportVersionUtils; @@ -133,11 +135,14 @@ public class OutboundHandlerTests extends ESTestCase 
{ public void testSendRequest() throws IOException { ThreadContext threadContext = threadPool.getThreadContext(); - TransportVersion version = TransportHandshaker.REQUEST_HANDSHAKE_VERSION; String action = "handshake"; long requestId = randomLongBetween(0, 300); boolean isHandshake = randomBoolean(); - boolean compress = randomBoolean(); + TransportVersion version = isHandshake + ? randomFrom(TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS) + : TransportVersionUtils.randomCompatibleVersion(random()); + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // drop the version.onOrAfter() in v9 + boolean compress = version.onOrAfter(TransportVersions.MINIMUM_COMPATIBLE) && randomBoolean(); String value = "message"; threadContext.putHeader("header", "header_value"); TestRequest request = new TestRequest(value); @@ -204,11 +209,14 @@ public class OutboundHandlerTests extends ESTestCase { public void testSendResponse() throws IOException { ThreadContext threadContext = threadPool.getThreadContext(); - TransportVersion version = TransportHandshaker.REQUEST_HANDSHAKE_VERSION; String action = "handshake"; long requestId = randomLongBetween(0, 300); boolean isHandshake = randomBoolean(); - boolean compress = randomBoolean(); + TransportVersion version = isHandshake + ? 
randomFrom(TransportHandshaker.ALLOWED_HANDSHAKE_VERSIONS) + : TransportVersionUtils.randomCompatibleVersion(random()); + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // drop the version.onOrAfter() in v9 + boolean compress = version.onOrAfter(TransportVersions.MINIMUM_COMPATIBLE) && randomBoolean(); String value = "message"; threadContext.putHeader("header", "header_value"); @@ -269,8 +277,8 @@ public class OutboundHandlerTests extends ESTestCase { public void testErrorResponse() throws IOException { ThreadContext threadContext = threadPool.getThreadContext(); - TransportVersion version = TransportHandshaker.REQUEST_HANDSHAKE_VERSION; - String action = "handshake"; + TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); + String action = "not-a-handshake"; long requestId = randomLongBetween(0, 300); threadContext.putHeader("header", "header_value"); ElasticsearchException error = new ElasticsearchException("boom"); @@ -322,7 +330,7 @@ public class OutboundHandlerTests extends ESTestCase { } public void testSendErrorAfterFailToSendResponse() throws Exception { - TransportVersion version = TransportHandshaker.REQUEST_HANDSHAKE_VERSION; + TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); String action = randomAlphaOfLength(10); long requestId = randomLongBetween(0, 300); var response = new ReleasbleTestResponse(randomAlphaOfLength(10)) { diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index 26b76c798b2f..2eceaa8e421e 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -37,6 +37,8 @@ public class TransportHandshakerTests extends ESTestCase { private TestThreadPool threadPool; private TransportHandshaker.HandshakeRequestSender requestSender; + private 
static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V8_HANDSHAKE_VERSION; + @Override public void setUp() throws Exception { super.setUp(); @@ -64,7 +66,7 @@ public class TransportHandshakerTests extends ESTestCase { long reqId = randomLongBetween(1, 10); handshaker.sendHandshake(reqId, node, channel, new TimeValue(30, TimeUnit.SECONDS), versionFuture); - verify(requestSender).sendRequest(node, channel, reqId, TransportHandshaker.REQUEST_HANDSHAKE_VERSION); + verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); assertFalse(versionFuture.isDone()); @@ -87,7 +89,7 @@ public class TransportHandshakerTests extends ESTestCase { long reqId = randomLongBetween(1, 10); handshaker.sendHandshake(reqId, node, channel, new TimeValue(30, TimeUnit.SECONDS), new PlainActionFuture<>()); - verify(requestSender).sendRequest(node, channel, reqId, TransportHandshaker.REQUEST_HANDSHAKE_VERSION); + verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current()); BytesStreamOutput currentHandshakeBytes = new BytesStreamOutput(); @@ -123,7 +125,7 @@ public class TransportHandshakerTests extends ESTestCase { long reqId = randomLongBetween(1, 10); handshaker.sendHandshake(reqId, node, channel, new TimeValue(30, TimeUnit.SECONDS), versionFuture); - verify(requestSender).sendRequest(node, channel, reqId, TransportHandshaker.REQUEST_HANDSHAKE_VERSION); + verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); assertFalse(versionFuture.isDone()); @@ -138,8 +140,7 @@ public class TransportHandshakerTests extends ESTestCase { public void testSendRequestThrowsException() throws IOException { PlainActionFuture versionFuture = new PlainActionFuture<>(); long reqId = randomLongBetween(1, 10); - doThrow(new IOException("boom")).when(requestSender) - .sendRequest(node, 
channel, reqId, TransportHandshaker.REQUEST_HANDSHAKE_VERSION); + doThrow(new IOException("boom")).when(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); handshaker.sendHandshake(reqId, node, channel, new TimeValue(30, TimeUnit.SECONDS), versionFuture); @@ -154,7 +155,7 @@ public class TransportHandshakerTests extends ESTestCase { long reqId = randomLongBetween(1, 10); handshaker.sendHandshake(reqId, node, channel, new TimeValue(100, TimeUnit.MILLISECONDS), versionFuture); - verify(requestSender).sendRequest(node, channel, reqId, TransportHandshaker.REQUEST_HANDSHAKE_VERSION); + verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); ConnectTransportException cte = expectThrows(ConnectTransportException.class, versionFuture::actionGet); assertThat(cte.getMessage(), containsString("handshake_timeout")); diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index ace3db377664..958132b3e407 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -194,6 +194,16 @@ public class HeapAttackIT extends ESRestTestCase { ); } + private void assertFoldCircuitBreaks(ThrowingRunnable r) throws IOException { + ResponseException e = expectThrows(ResponseException.class, r); + Map map = responseAsMap(e.getResponse()); + logger.info("expected fold circuit breaking {}", map); + assertMap( + map, + matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "fold_too_much_memory_exception")) + ); + } + private void assertParseFailure(ThrowingRunnable r) throws IOException { ResponseException e = 
expectThrows(ResponseException.class, r); Map map = responseAsMap(e.getResponse()); @@ -325,11 +335,23 @@ public class HeapAttackIT extends ESRestTestCase { assertManyStrings(resp, strings); } + /** + * Hits a circuit breaker by building many moderately long strings. + */ + public void testHugeManyConcatFromRow() throws IOException { + assertFoldCircuitBreaks( + () -> manyConcat( + "ROW a=9999999999999, b=99999999999999999, c=99999999999999999, d=99999999999999999, e=99999999999999999", + 5000 + ) + ); + } + /** * Fails to parse a huge huge query. */ public void testHugeHugeManyConcatFromRow() throws IOException { - assertParseFailure(() -> manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", 50000)); + assertParseFailure(() -> manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", 6000)); } /** @@ -387,13 +409,20 @@ public class HeapAttackIT extends ESRestTestCase { * Returns many moderately long strings. */ public void testManyRepeatFromRow() throws IOException { - int strings = 10000; + int strings = 300; Response resp = manyRepeat("ROW a = 99", strings); assertManyStrings(resp, strings); } /** - * Fails to parse a huge huge query. + * Hits a circuit breaker by building many moderately long strings. + */ + public void testHugeManyRepeatFromRow() throws IOException { + assertFoldCircuitBreaks(() -> manyRepeat("ROW a = 99", 400)); + } + + /** + * Fails to parse a huge, huge query. 
*/ public void testHugeHugeManyRepeatFromRow() throws IOException { assertParseFailure(() -> manyRepeat("ROW a = 99", 100000)); diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java index 3ee18d71a5a7..56e702d2f76a 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java @@ -15,7 +15,7 @@ import org.testcontainers.images.builder.ImageFromDockerfile; public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer { private static final int servicePort = 9000; - public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2021-03-01T04-20-55Z"; + public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2024-12-18T13-15-44Z"; private final boolean enabled; public MinioTestContainer(boolean enabled, String accessKey, String secretKey, String bucketName) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index fe1b08d5e738..37352410811a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -29,18 +30,28 @@ import 
org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.IOException; import java.nio.file.Path; import java.util.Collections; +import java.util.List; import static org.elasticsearch.test.ESTestCase.createTestAnalysis; public class MapperTestUtils { - public static MapperService newMapperService( NamedXContentRegistry xContentRegistry, Path tempDir, Settings indexSettings, String indexName ) throws IOException { - IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); + return newMapperService(List.of(), xContentRegistry, tempDir, indexSettings, indexName); + } + + public static MapperService newMapperService( + List extraMappers, + NamedXContentRegistry xContentRegistry, + Path tempDir, + Settings indexSettings, + String indexName + ) throws IOException { + IndicesModule indicesModule = new IndicesModule(extraMappers); return newMapperService(xContentRegistry, tempDir, indexSettings, indicesModule, indexName); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 9a160fffb965..7a2f37500187 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -99,6 +99,7 @@ import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogDeletionPolicy; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; @@ -220,6 +221,10 @@ public abstract class EngineTestCase extends ESTestCase { """; } + protected List extraMappers() { + return List.of(); + } + @Override @Before public 
void setUp() throws Exception { @@ -241,7 +246,7 @@ public abstract class EngineTestCase extends ESTestCase { Lucene.cleanLuceneIndex(store.directory()); Lucene.cleanLuceneIndex(storeReplica.directory()); primaryTranslogDir = createTempDir("translog-primary"); - mapperService = createMapperService(defaultSettings.getSettings(), defaultMapping()); + mapperService = createMapperService(defaultSettings.getSettings(), defaultMapping(), extraMappers()); translogHandler = createTranslogHandler(mapperService); engine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy()); LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig(); @@ -1440,15 +1445,21 @@ public abstract class EngineTestCase extends ESTestCase { } public static MapperService createMapperService() throws IOException { - return createMapperService(Settings.EMPTY, "{}"); + return createMapperService(Settings.EMPTY, "{}", List.of()); } public static MapperService createMapperService(Settings settings, String mappings) throws IOException { + return createMapperService(settings, mappings, List.of()); + } + + public static MapperService createMapperService(Settings settings, String mappings, List extraMappers) + throws IOException { IndexMetadata indexMetadata = IndexMetadata.builder("index") .settings(indexSettings(1, 1).put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).put(settings)) .putMapping(mappings) .build(); MapperService mapperService = MapperTestUtils.newMapperService( + extraMappers, new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), createTempDir(), indexMetadata.getSettings(), diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index bce88a2a8d2f..bb48b0031483 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1180,7 +1180,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase { var firstExample = support.example(1); int maxDocs = randomIntBetween(20, 50); var settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) .put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true) .build(); var mapperService = createMapperService(getVersion(), settings, () -> true, mapping(b -> { @@ -1213,7 +1213,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase { try (var indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { int start = randomBoolean() ? 0 : randomIntBetween(1, maxDocs - 10); var snapshot = new LuceneSyntheticSourceChangesSnapshot( - mapperService.mappingLookup(), + mapperService, new Engine.Searcher( "recovery", indexReader, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 8d0c9b58a767..e3b364ad3d14 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1638,7 +1638,7 @@ public abstract class ESIntegTestCase extends ESTestCase { public static boolean indexExists(String index, Client client) { GetIndexResponse getIndexResponse = client.admin() .indices() - .prepareGetIndex() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndices(index) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED) .get(); @@ -2589,14 +2589,14 @@ public abstract class ESIntegTestCase extends ESTestCase { } public static Index resolveIndex(String index) { - GetIndexResponse getIndexResponse = 
indicesAdmin().prepareGetIndex().setIndices(index).get(); + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(index).get(); assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); String uuid = getIndexResponse.getSettings().get(index).get(IndexMetadata.SETTING_INDEX_UUID); return new Index(index, uuid); } public static String resolveCustomDataPath(String index) { - GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex().setIndices(index).get(); + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(index).get(); assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); return getIndexResponse.getSettings().get(index).get(IndexMetadata.SETTING_DATA_PATH); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 63334bd70306..9bee617d90db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -171,7 +171,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { metadata.transientSettings().size(), equalTo(0) ); - GetIndexResponse indices = indicesAdmin().prepareGetIndex() + GetIndexResponse indices = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .addIndices("*") .get(); @@ -401,7 +401,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { } public Index resolveIndex(String index) { - GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex().setIndices(index).get(); + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(index).get(); assertTrue("index " + index + " not found", 
getIndexResponse.getSettings().containsKey(index)); String uuid = getIndexResponse.getSettings().get(index).get(IndexMetadata.SETTING_INDEX_UUID); return new Index(index, uuid); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 1fef01e624fb..cc541008bcdc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1239,7 +1239,7 @@ public abstract class ESTestCase extends LuceneTestCase { return randomBoolean() ? null : randomLong(); } - public static Long randomPositiveLongOrNull() { + public static Long randomNonNegativeLongOrNull() { return randomBoolean() ? null : randomNonNegativeLong(); } @@ -1247,7 +1247,7 @@ public abstract class ESTestCase extends LuceneTestCase { return randomBoolean() ? null : randomInt(); } - public static Integer randomPositiveIntOrNull() { + public static Integer randomNonNegativeIntOrNull() { return randomBoolean() ? 
null : randomNonNegativeInt(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index 9054dc6f9418..e2a24eac6f8a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -91,6 +91,12 @@ class ESRestTestFeatureService implements TestFeatureService { } if (hasFeatureMetadata()) { + if (isRestApiCompatibilityTest()) { + // assume this is a feature that has been assumed, then removed in this version + // the feature is therefore logically present, but not specified by the cluster + return true; + } + throw new IllegalArgumentException( Strings.format( "Unknown feature %s: check the respective FeatureSpecification is provided both in module-info.java " @@ -102,6 +108,10 @@ class ESRestTestFeatureService implements TestFeatureService { return false; } + private static boolean isRestApiCompatibilityTest() { + return Boolean.parseBoolean(System.getProperty("tests.restCompat", "false")); + } + public static boolean hasFeatureMetadata() { return MetadataHolder.FEATURE_NAMES.isEmpty() == false; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java index 3461073b19eb..c5200c816fb6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java @@ -138,11 +138,6 @@ public class StubbableTransport implements Transport { return delegate; } - @Override - public TransportVersion getVersion() { - return delegate.getVersion(); - } - @Override public void setMessageListener(TransportMessageListener listener) { delegate.setMessageListener(listener); 
diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 357c0fe74b5c..031f8f908847 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -21,7 +21,6 @@ import org.elasticsearch.client.NodeSelector; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.test.rest.Stash; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; @@ -300,41 +299,24 @@ public class ClientYamlTestExecutionContext { params.put("capabilities", capabilitiesString); } params.put("error_trace", "false"); // disable error trace + params.put("local_only", "true"); // we're calling each node individually - if (clusterHasFeature(RestNodesCapabilitiesAction.LOCAL_ONLY_CAPABILITIES.id(), false) == false) { - // can only check the whole cluster - if (any) { - logger.warn( - "Cluster does not support checking individual nodes for capabilities," - + "check for [{} {}?{} {}] may be incorrect in mixed-version clusters", - method, - path, - parametersString, - capabilitiesString - ); + // individually call each node, so we can control whether we do an 'any' or 'all' check + List nodes = clientYamlTestClient.getRestClient(NodeSelector.ANY).getNodes(); + + for (Node n : nodes) { + Optional nodeResult = checkCapability(new SpecificNodeSelector(n), params); + if (nodeResult.isEmpty()) { + return Optional.empty(); + } else if (any == nodeResult.get()) { + // either any == true and node has cap, + // or any == 
false (ie all) and this node does not have cap + return nodeResult; } - return checkCapability(NodeSelector.ANY, params); - } else { - // check each node individually - we can actually check any here - params.put("local_only", "true"); // we're calling each node individually - - // individually call each node, so we can control whether we do an 'any' or 'all' check - List nodes = clientYamlTestClient.getRestClient(NodeSelector.ANY).getNodes(); - - for (Node n : nodes) { - Optional nodeResult = checkCapability(new SpecificNodeSelector(n), params); - if (nodeResult.isEmpty()) { - return Optional.empty(); - } else if (any == nodeResult.get()) { - // either any == true and node has cap, - // or any == false (ie all) and this node does not have cap - return nodeResult; - } - } - - // if we got here, either any is true and no node has it, or any == false and all nodes have it - return Optional.of(any == false); } + + // if we got here, either any is true and no node has it, or any == false and all nodes have it + return Optional.of(any == false); } private Optional checkCapability(NodeSelector nodeSelector, Map params) { diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderIT.java index df1022e45c5a..7132d9ffac30 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderIT.java +++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderIT.java @@ -123,7 +123,7 @@ public class FrozenExistenceDeciderIT extends AbstractFrozenAutoscalingIntegTest assertBusy(() -> { ExplainLifecycleResponse response = client().execute( ExplainLifecycleAction.INSTANCE, - new ExplainLifecycleRequest().indices(INDEX_NAME) + new 
ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(INDEX_NAME) ).actionGet(); IndexLifecycleExplainResponse indexResponse = response.getIndexResponses().get(INDEX_NAME); assertNotNull(indexResponse); @@ -150,7 +150,7 @@ public class FrozenExistenceDeciderIT extends AbstractFrozenAutoscalingIntegTest } private String[] indices() { - return indicesAdmin().prepareGetIndex().addIndices("index").get().indices(); + return indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("index").get().indices(); } private void assertMinimumCapacity(AutoscalingCapacity.AutoscalingResources resources) { diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 0bb4afe51b85..e8e19bad2a7e 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -368,7 +368,7 @@ public class FollowIndexIT extends ESCCRRestTestCase { if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) .build(); createIndex(adminClient(), leaderIndexName, settings, """ "properties": {"kwd": {"type": "keyword"}}}""", null); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index c065454922c4..7c47237d35bd 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ 
b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -488,7 +488,7 @@ public class CcrRepositoryIT extends CcrIntegTestCase { clusterStateRequest.indices(followerIndex); MappingMetadata mappingMetadata = followerClient().admin() .indices() - .prepareGetMappings("index2") + .prepareGetMappings(TEST_REQUEST_TIMEOUT, "index2") .get() .getMappings() .get("index2"); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index 510f9d6fa4ec..7f30c8db06bf 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -331,7 +331,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertFalse(response.isIndexFollowingStarted()); // Check that the index exists, would throw index not found exception if the index is missing - followerClient().admin().indices().prepareGetIndex().addIndices("index2").get(); + followerClient().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices("index2").get(); ensureFollowerGreen(true, "index2"); final Map firstBatchNumDocsPerShard = new HashMap<>(); @@ -371,7 +371,12 @@ public class IndexFollowingIT extends CcrIntegTestCase { } assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), firstBatchNumDocs)); - MappingMetadata mappingMetadata = followerClient().admin().indices().prepareGetMappings("index2").get().getMappings().get("index2"); + MappingMetadata mappingMetadata = followerClient().admin() + .indices() + .prepareGetMappings(TEST_REQUEST_TIMEOUT, "index2") + .get() + .getMappings() + .get("index2"); assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetadata.sourceAsMap()), equalTo("integer")); 
assertThat(XContentMapValues.extractValue("properties.k", mappingMetadata.sourceAsMap()), nullValue()); @@ -382,7 +387,12 @@ public class IndexFollowingIT extends CcrIntegTestCase { } assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), firstBatchNumDocs + secondBatchNumDocs)); - mappingMetadata = followerClient().admin().indices().prepareGetMappings("index2").get().getMappings().get("index2"); + mappingMetadata = followerClient().admin() + .indices() + .prepareGetMappings(TEST_REQUEST_TIMEOUT, "index2") + .get() + .getMappings() + .get("index2"); assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetadata.sourceAsMap()), equalTo("integer")); assertThat(XContentMapValues.extractValue("properties.k.type", mappingMetadata.sourceAsMap()), equalTo("long")); pauseFollow("index2"); @@ -410,7 +420,12 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); pauseFollow("index2"); - MappingMetadata mappingMetadata = followerClient().admin().indices().prepareGetMappings("index2").get().getMappings().get("index2"); + MappingMetadata mappingMetadata = followerClient().admin() + .indices() + .prepareGetMappings(TEST_REQUEST_TIMEOUT, "index2") + .get() + .getMappings() + .get("index2"); assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetadata.sourceAsMap()), equalTo("long")); assertThat(XContentMapValues.extractValue("properties.k", mappingMetadata.sourceAsMap()), nullValue()); } @@ -1188,7 +1203,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(getSettingsResponse.getSetting("follower", "index.analysis.analyzer.my_analyzer.type"), equalTo("custom")); assertThat(getSettingsResponse.getSetting("follower", "index.analysis.analyzer.my_analyzer.tokenizer"), equalTo("keyword")); - GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); + GetMappingsRequest getMappingsRequest = new 
GetMappingsRequest(TEST_REQUEST_TIMEOUT); getMappingsRequest.indices("follower"); GetMappingsResponse getMappingsResponse = followerClient().admin().indices().getMappings(getMappingsRequest).actionGet(); MappingMetadata mappingMetadata = getMappingsResponse.getMappings().get("follower"); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java index e5298df35bf0..e3fb5781bed2 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestActionListener; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -105,7 +106,12 @@ public class RestShardChangesAction extends BaseRestHandler { client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME) ); final CompletableFuture indexUUIDCompletableFuture = indexNameCompletableFuture.thenCompose( - concreteIndexName -> asyncGetIndexUUID(client, concreteIndexName, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)) + concreteIndexName -> asyncGetIndexUUID( + client, + concreteIndexName, + client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME), + RestUtils.getMasterNodeTimeout(restRequest) + ) ); final CompletableFuture shardStatsCompletableFuture = indexNameCompletableFuture.thenCompose( concreteIndexName -> asyncShardStats(client, concreteIndexName, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)) @@ -336,18 +342,20 @@ public class RestShardChangesAction extends BaseRestHandler { * @param client The NodeClient for 
executing the asynchronous request. * @param concreteIndexName The name of the index for which to retrieve the index UUID. * @param executorService The executorService service for executing the asynchronous task. + * @param masterTimeout The timeout for waiting until the cluster is unblocked. * @return A CompletableFuture that completes with the retrieved index UUID. * @throws ElasticsearchException If an error occurs while retrieving the index UUID. */ private static CompletableFuture asyncGetIndexUUID( final NodeClient client, final String concreteIndexName, - final ExecutorService executorService + final ExecutorService executorService, + TimeValue masterTimeout ) { return supplyAsyncTask( () -> client.admin() .indices() - .prepareGetIndex() + .prepareGetIndex(masterTimeout) .setIndices(concreteIndexName) .get(GET_INDEX_UUID_TIMEOUT) .getSetting(concreteIndexName, IndexMetadata.SETTING_INDEX_UUID), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index f998bcc9bb64..697efcca0f15 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -449,14 +449,18 @@ public abstract class CcrIntegTestCase extends ESTestCase { } protected final Index resolveLeaderIndex(String index) { - GetIndexResponse getIndexResponse = leaderClient().admin().indices().prepareGetIndex().setIndices(index).get(); + GetIndexResponse getIndexResponse = leaderClient().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(index).get(); assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); String uuid = getIndexResponse.getSettings().get(index).get(IndexMetadata.SETTING_INDEX_UUID); return new Index(index, uuid); } protected final Index resolveFollowerIndex(String index) { - GetIndexResponse 
getIndexResponse = followerClient().admin().indices().prepareGetIndex().setIndices(index).get(); + GetIndexResponse getIndexResponse = followerClient().admin() + .indices() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setIndices(index) + .get(); assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); String uuid = getIndexResponse.getSettings().get(index).get(IndexMetadata.SETTING_INDEX_UUID); return new Index(index, uuid); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java index 357e1bca38e8..6bc25215855e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrSettings; @@ -335,7 +334,7 @@ public class TransportResumeFollowActionTests extends ESTestCase { replicatedSettings.add(IndexSettings.PREFER_ILM_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); - replicatedSettings.add(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); + replicatedSettings.add(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING); for (Setting setting : IndexScopedSettings.BUILT_IN_INDEX_SETTINGS) { // removed settings have no effect, they are only there for BWC diff --git 
a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index c39e947341d7..6074b81d6860 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -197,7 +197,7 @@ public class SourceOnlySnapshotIT extends AbstractSnapshotIntegTestCase { } private static void assertMappings(String sourceIdx, boolean requireRouting, boolean useNested) throws IOException { - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(sourceIdx).get(); + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, sourceIdx).get(); MappingMetadata mapping = getMappingsResponse.getMappings().get(sourceIdx); String nested = useNested ? 
""" ,"incorrect":{"type":"object"},"nested":{"type":"nested","properties":{"value":{"type":"long"}}}""" : ""; diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java index ab3234f6d3d7..041be4ea40e6 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java @@ -72,7 +72,7 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { indicesAdmin().prepareCreate(index).setWaitForActiveShards(0).get(); - Settings idxSettings = indicesAdmin().prepareGetIndex().addIndices(index).get().getSettings().get(index); + Settings idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(index).get().getSettings().get(index); assertThat(DataTier.TIER_PREFERENCE_SETTING.get(idxSettings), equalTo(DataTier.DATA_CONTENT)); // index should be red @@ -248,7 +248,7 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { ) .get(); - var replicas = indicesAdmin().prepareGetIndex() + var replicas = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndices(index) .get() .getSetting(index, INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()); @@ -261,7 +261,7 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { updateDesiredNodes(desiredNodesWithoutColdTier); assertBusy(() -> { - var newReplicaCount = indicesAdmin().prepareGetIndex() + var newReplicaCount = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndices(index) .get() .getSetting(index, INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()); @@ -280,7 +280,7 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { 
.setSettings(Settings.builder().put(DataTier.TIER_PREFERENCE, DataTier.DATA_WARM)) .get(); - Settings idxSettings = indicesAdmin().prepareGetIndex().addIndices(index).get().getSettings().get(index); + Settings idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(index).get().getSettings().get(index); assertThat(idxSettings.get(DataTier.TIER_PREFERENCE), equalTo(DataTier.DATA_WARM)); // index should be yellow @@ -297,7 +297,7 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { .setSettings(Settings.builder().putNull(DataTier.TIER_PREFERENCE)) // will be overridden to data_content .get(); - Settings idxSettings = indicesAdmin().prepareGetIndex().addIndices(index).get().getSettings().get(index); + Settings idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(index).get().getSettings().get(index); assertThat(DataTier.TIER_PREFERENCE_SETTING.get(idxSettings), equalTo("data_content")); // index should be yellow @@ -333,7 +333,11 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { ensureGreen(index + "-shrunk"); - Settings idxSettings = indicesAdmin().prepareGetIndex().addIndices(index + "-shrunk").get().getSettings().get(index + "-shrunk"); + Settings idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(index + "-shrunk") + .get() + .getSettings() + .get(index + "-shrunk"); // It should inherit the setting of its originator assertThat(DataTier.TIER_PREFERENCE_SETTING.get(idxSettings), equalTo(DataTier.DATA_WARM)); @@ -353,7 +357,7 @@ public class DataTierAllocationDeciderIT extends ESIntegTestCase { indicesAdmin().prepareCreate(index).setWaitForActiveShards(0).get(); - Settings idxSettings = indicesAdmin().prepareGetIndex().addIndices(index).get().getSettings().get(index); + Settings idxSettings = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(index).get().getSettings().get(index); assertThat(DataTier.TIER_PREFERENCE_SETTING.get(idxSettings), 
equalTo("data_content")); // index should be yellow diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java index a38170d87f9a..873a5b4587e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.license.internal.MutableLicenseService; import org.elasticsearch.license.internal.TrialLicenseVersion; @@ -65,7 +64,6 @@ public class ClusterStateLicenseService extends AbstractLifecycleComponent private final Settings settings; private final ClusterService clusterService; - private final FeatureService featureService; /** * The xpack feature state to update when license changes are made. 
@@ -104,12 +102,10 @@ public class ClusterStateLicenseService extends AbstractLifecycleComponent ThreadPool threadPool, ClusterService clusterService, Clock clock, - XPackLicenseState xPacklicenseState, - FeatureService featureService + XPackLicenseState xPacklicenseState ) { this.settings = settings; this.clusterService = clusterService; - this.featureService = featureService; this.startTrialTaskQueue = clusterService.createTaskQueue( "license-service-start-trial", Priority.NORMAL, @@ -344,7 +340,7 @@ public class ClusterStateLicenseService extends AbstractLifecycleComponent } startTrialTaskQueue.submitTask( StartTrialClusterTask.TASK_SOURCE, - new StartTrialClusterTask(logger, clusterService.getClusterName().value(), clock, featureService, request, listener), + new StartTrialClusterTask(logger, clusterService.getClusterName().value(), clock, request, listener), null // TODO should pass in request.masterNodeTimeout() here ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index 1d6f53c9aa23..d3347f3432a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -39,8 +38,6 @@ import java.util.stream.Stream; */ public class License implements ToXContentObject { - public static final NodeFeature INDEPENDENT_TRIAL_VERSION_FEATURE = new NodeFeature("license-trial-independent-version", true); - public enum LicenseType 
{ BASIC, STANDARD, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java index 22f4de105cb2..bec2ee98b175 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.xpack.core.XPackPlugin; @@ -41,13 +40,11 @@ public class StartTrialClusterTask implements ClusterStateTaskListener { private final PostStartTrialRequest request; private final ActionListener listener; private final Clock clock; - private final FeatureService featureService; StartTrialClusterTask( Logger logger, String clusterName, Clock clock, - FeatureService featureService, PostStartTrialRequest request, ActionListener listener ) { @@ -56,7 +53,6 @@ public class StartTrialClusterTask implements ClusterStateTaskListener { this.request = request; this.listener = listener; this.clock = clock; - this.featureService = featureService; } private LicensesMetadata execute( @@ -65,9 +61,6 @@ public class StartTrialClusterTask implements ClusterStateTaskListener { ClusterStateTaskExecutor.TaskContext taskContext ) { assert taskContext.getTask() == this; - if (featureService.clusterHasFeature(state, License.INDEPENDENT_TRIAL_VERSION_FEATURE) == false) { - throw new IllegalStateException("Please ensure all nodes are up to date before starting your trial"); - } final var listener = ActionListener.runBefore(this.listener, () -> { logger.debug("started self generated trial license: {}", 
currentLicensesMetadata); }); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java index 4fa8332df9fb..5ddfe365a32d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java @@ -27,11 +27,8 @@ public class TrialLicenseVersion implements ToXContentFragment, Writeable { // generic Elasticsearch version. While it's derived from the Elasticsearch version formula for BWC, it is independent of it going // forward. When we want users to be able to start a new trial, increment this number. // Pkg-private for testing only. - static final int TRIAL_VERSION_CUTOVER = 8_12_00_99; - public static final TrialLicenseVersion CURRENT = new TrialLicenseVersion(TRIAL_VERSION_CUTOVER); - - // The most recently released major version when we cut over. Here for maintaining BWC behavior. 
- static final int TRIAL_VERSION_CUTOVER_MAJOR = 8; + static final int CURRENT_TRIAL_VERSION = 9_00_00_00; + public static final TrialLicenseVersion CURRENT = new TrialLicenseVersion(CURRENT_TRIAL_VERSION); private final int trialVersion; @@ -84,10 +81,6 @@ public class TrialLicenseVersion implements ToXContentFragment, Writeable { public boolean ableToStartNewTrial() { assert trialVersion <= CURRENT.trialVersion : "trial version [" + trialVersion + "] cannot be greater than CURRENT [" + CURRENT.trialVersion + "]"; - if (trialVersion < TRIAL_VERSION_CUTOVER) { - int sinceMajorVersion = trialVersion / 1_000_000; // integer division is intentional - return sinceMajorVersion < TRIAL_VERSION_CUTOVER_MAJOR; - } return trialVersion != CURRENT.trialVersion; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java index 42f66c6ca6ee..42824a553d2b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -9,8 +9,6 @@ package org.elasticsearch.xpack.core; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.license.License; -import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; import java.util.Set; @@ -18,16 +16,8 @@ import java.util.Set; * Provides the XPack features that this version of the code supports */ public class XPackFeatures implements FeatureSpecification { - public static final NodeFeature LOGSDB_TELEMETRY = new NodeFeature("logsdb_telemetry", true); - public static final NodeFeature LOGSDB_TELMETRY_STATS = new NodeFeature("logsdb_telemetry_stats", true); - @Override public Set getFeatures() { - return Set.of( - NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE, // Added in 8.12 - 
License.INDEPENDENT_TRIAL_VERSION_FEATURE, // 8.14.0 - LOGSDB_TELEMETRY, - LOGSDB_TELMETRY_STATS - ); + return Set.of(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index e735aac6e1d6..58ff9c65dcb8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -331,8 +331,7 @@ public class XPackPlugin extends XPackClientPlugin services.threadPool(), services.clusterService(), getClock(), - getLicenseState(), - services.featureService() + getLicenseState() ); setLicenseService(licenseService); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java index b5e5741cc8bf..b141f308fd98 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.core.datatiers; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; @@ -22,7 +21,6 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; import 
org.elasticsearch.indices.NodeIndicesStats; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -46,7 +44,6 @@ import java.util.stream.StreamSupport; public class DataTiersUsageTransportAction extends XPackUsageFeatureTransportAction { private final Client client; - private final FeatureService featureService; @Inject public DataTiersUsageTransportAction( @@ -55,8 +52,7 @@ public class DataTiersUsageTransportAction extends XPackUsageFeatureTransportAct ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client, - FeatureService featureService + Client client ) { super( XPackUsageFeatureAction.DATA_TIERS.name(), @@ -67,7 +63,6 @@ public class DataTiersUsageTransportAction extends XPackUsageFeatureTransportAct indexNameExpressionResolver ); this.client = client; - this.featureService = featureService; } @Override @@ -77,42 +72,22 @@ public class DataTiersUsageTransportAction extends XPackUsageFeatureTransportAct ClusterState state, ActionListener listener ) { - if (featureService.clusterHasFeature(state, NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE)) { - new ParentTaskAssigningClient(client, clusterService.localNode(), task).admin() - .cluster() - .execute( - NodesDataTiersUsageTransportAction.TYPE, - new NodesDataTiersUsageTransportAction.NodesRequest(), - listener.delegateFailureAndWrap((delegate, response) -> { - // Generate tier specific stats for the nodes and indices - delegate.onResponse( - new XPackUsageFeatureResponse( - new DataTiersFeatureSetUsage( - aggregateStats(response.getNodes(), getIndicesGroupedByTier(state, response.getNodes())) - ) - ) - ); - }) - ); - } else { - new ParentTaskAssigningClient(client, clusterService.localNode(), task).admin() - .cluster() - .prepareNodesStats() - .setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store)) - 
.execute(listener.delegateFailureAndWrap((delegate, nodesStatsResponse) -> { - List response = nodesStatsResponse.getNodes() - .stream() - .map( - nodeStats -> new NodeDataTiersUsage(nodeStats.getNode(), precalculateLocalStatsFromNodeStats(nodeStats, state)) - ) - .toList(); + new ParentTaskAssigningClient(client, clusterService.localNode(), task).admin() + .cluster() + .execute( + NodesDataTiersUsageTransportAction.TYPE, + new NodesDataTiersUsageTransportAction.NodesRequest(), + listener.delegateFailureAndWrap((delegate, response) -> { + // Generate tier specific stats for the nodes and indices delegate.onResponse( new XPackUsageFeatureResponse( - new DataTiersFeatureSetUsage(aggregateStats(response, getIndicesGroupedByTier(state, response))) + new DataTiersFeatureSetUsage( + aggregateStats(response.getNodes(), getIndicesGroupedByTier(state, response.getNodes())) + ) ) ); - })); - } + }) + ); } // Visible for testing diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java index 90f7c0a25fc8..29ceee29127f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.indices.IndicesService; @@ -58,7 +57,6 @@ public class NodesDataTiersUsageTransportAction extends TransportNodesAction< Void> { public 
static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/data_tier_usage"); - public static final NodeFeature LOCALLY_PRECALCULATED_STATS_FEATURE = new NodeFeature("usage.data_tiers.precalculate_stats", true); private static final CommonStatsFlags STATS_FLAGS = new CommonStatsFlags().clear() .set(CommonStatsFlags.Flag.Docs, true) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequest.java index 5c607335bff3..beff1e6ab39d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequest.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.info.ClusterInfoRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import java.io.IOException; import java.util.Arrays; @@ -27,8 +28,8 @@ public class ExplainLifecycleRequest extends ClusterInfoRequest List filterPlugins(Class type) { + protected List filterPlugins(Class type) { return plugins.stream().filter(x -> type.isAssignableFrom(x.getClass())).map(p -> ((T) p)).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java index f1529fafaaff..18f9ad508eef 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java @@ -131,7 +131,10 @@ public class AsyncTaskServiceTests extends ESSingleNodeTestCase { } private void 
assertSettings() { - GetIndexResponse getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest().indices(index)).actionGet(); + GetIndexResponse getIndexResponse = client().admin() + .indices() + .getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)) + .actionGet(); Settings settings = getIndexResponse.getSettings().get(index); Settings expected = AsyncTaskIndexService.settings(); assertThat(expected, is(settings.filter(expected::hasValue))); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequestTests.java index 25de084a34b0..6cea7100a9da 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleRequestTests.java @@ -17,7 +17,7 @@ public class ExplainLifecycleRequestTests extends AbstractWireSerializingTestCas @Override protected ExplainLifecycleRequest createTestInstance() { - ExplainLifecycleRequest request = new ExplainLifecycleRequest(); + ExplainLifecycleRequest request = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT); if (randomBoolean()) { request.indices(generateRandomStringArray(20, 20, false, false)); } @@ -71,7 +71,7 @@ public class ExplainLifecycleRequestTests extends AbstractWireSerializingTestCas case 3 -> onlyManaged = onlyManaged == false; default -> throw new AssertionError("Illegal randomisation branch"); } - ExplainLifecycleRequest newRequest = new ExplainLifecycleRequest(); + ExplainLifecycleRequest newRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT); newRequest.indices(indices); newRequest.indicesOptions(indicesOptions); newRequest.onlyErrors(onlyErrors); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionRequestTests.java index 120c2a6dbc5e..c85d9a056141 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionRequestTests.java @@ -183,7 +183,7 @@ public class UnifiedCompletionRequestTests extends AbstractBWCWireSerializationT return new UnifiedCompletionRequest( randomList(5, UnifiedCompletionRequestTests::randomMessage), randomAlphaOfLengthOrNull(10), - randomPositiveLongOrNull(), + randomNonNegativeLongOrNull(), randomStopOrNull(), randomFloatOrNull(), randomToolChoiceOrNull(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index 9663e41a647a..bfac286bc3c3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -614,7 +614,8 @@ public class SSLServiceTests extends ESTestCase { "xpack.security.authc.realms.ldap.realm1.ssl", "xpack.security.authc.realms.saml.realm2.ssl", "xpack.monitoring.exporters.mon1.ssl", - "xpack.monitoring.exporters.mon2.ssl" }; + "xpack.monitoring.exporters.mon2.ssl", + "xpack.inference.elastic.http.ssl" }; assumeTrue("Not enough cipher suites are available to support this test", getCipherSuites.length >= contextNames.length); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json index 50f3ab6bf9a0..b0c99bf8a0ce 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json @@ -55,6 +55,13 @@ } } }, + "os": { + "properties": { + "type": { + "type": "keyword" + } + } + }, "profiling": { "properties": { "project.id": { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodesDeprecationCheckAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodesDeprecationCheckAction.java index 2e40481f7c56..48009f810542 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodesDeprecationCheckAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodesDeprecationCheckAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -32,7 +31,6 @@ public class NodesDeprecationCheckAction extends ActionType client(nodeName).admin() .indices() - .prepareGetIndex() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) .addIndices(indexName) .addFeatures(GetIndexRequest.Feature.values()) .get() diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java index 5fba98b765a6..aa9f6f804dab 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java +++ 
b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java @@ -203,7 +203,7 @@ public class ILMDownsampleDisruptionIT extends ESIntegTestCase { final GetIndexResponse getIndexResponse = cluster.client() .admin() .indices() - .getIndex(new GetIndexRequest().indices(targetIndex)) + .getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(targetIndex)) .actionGet(); assertEquals(1, getIndexResponse.indices().length); assertResponse( diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 79023177706a..6cb0b5e8df43 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -287,7 +287,7 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc // At any point if there is an issue, delete the downsample index // 1. 
Extract source index mappings - final GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(sourceIndexName); + final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(request.masterNodeTimeout()).indices(sourceIndexName); getMappingsRequest.setParentTask(parentTask); client.admin().indices().getMappings(getMappingsRequest, listener.delegateFailureAndWrap((delegate, getMappingsResponse) -> { final Map sourceIndexMappings = getMappingsResponse.mappings() diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 461cb4491fac..f5e5811205ae 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -449,7 +449,9 @@ public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase { prepareSourceIndex(sourceIndex, true); downsample(sourceIndex, downsampleIndex, config); - GetIndexResponse indexSettingsResp = indicesAdmin().prepareGetIndex().addIndices(sourceIndex, downsampleIndex).get(); + GetIndexResponse indexSettingsResp = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(sourceIndex, downsampleIndex) + .get(); assertDownsampleIndexSettings(sourceIndex, downsampleIndex, indexSettingsResp); for (String key : settings.keySet()) { if (LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey().equals(key)) { @@ -605,7 +607,7 @@ public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase { ); assertBusy(() -> { try { - assertEquals(indicesAdmin().prepareGetIndex().addIndices(downsampleIndex).get().getIndices().length, 1); + assertEquals(indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(downsampleIndex).get().getIndices().length, 1); } 
catch (IndexNotFoundException e) { fail("downsample index has not been created"); } @@ -1179,7 +1181,7 @@ public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase { @SuppressWarnings("unchecked") private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, DownsampleConfig config) throws Exception { // Retrieve field information for the metric fields - final GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(sourceIndex).get(); + final GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, sourceIndex).get(); final Map sourceIndexMappings = getMappingsResponse.mappings() .entrySet() .stream() @@ -1212,7 +1214,9 @@ public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase { assertDownsampleIndexAggregations(sourceIndex, downsampleIndex, config, metricFields, labelFields); - GetIndexResponse indexSettingsResp = indicesAdmin().prepareGetIndex().addIndices(sourceIndex, downsampleIndex).get(); + GetIndexResponse indexSettingsResp = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(sourceIndex, downsampleIndex) + .get(); assertDownsampleIndexSettings(sourceIndex, downsampleIndex, indexSettingsResp); Map> mappings = (Map>) indexSettingsResp.getMappings() @@ -1222,8 +1226,9 @@ public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase { assertFieldMappings(config, metricFields, mappings); - GetMappingsResponse indexMappings = indicesAdmin().getMappings(new GetMappingsRequest().indices(downsampleIndex, sourceIndex)) - .actionGet(); + GetMappingsResponse indexMappings = indicesAdmin().getMappings( + new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(downsampleIndex, sourceIndex) + ).actionGet(); Map downsampleIndexProperties = (Map) indexMappings.mappings() .get(downsampleIndex) .sourceAsMap() diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java 
b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java index 169195414aea..5ba66dd99079 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -147,7 +147,7 @@ public class EnrichPolicyRunner { // Collect the source index information final String[] sourceIndices = policy.getIndices().toArray(new String[0]); logger.debug("Policy [{}]: Checking source indices [{}]", policyName, sourceIndices); - GetIndexRequest getIndexRequest = new GetIndexRequest().indices(sourceIndices); + GetIndexRequest getIndexRequest = new GetIndexRequest(ENRICH_MASTER_REQUEST_TIMEOUT).indices(sourceIndices); // This call does not set the origin to ensure that the user executing the policy has permission to access the source index client.admin().indices().getIndex(getIndexRequest, l); }) diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java index 7e8894deee26..87f34b62cffc 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java @@ -125,8 +125,9 @@ public class TransportDeleteEnrichPolicyAction extends AcknowledgedTransportMast } try { - final GetIndexRequest indices = new GetIndexRequest().indices(EnrichPolicy.getBaseName(policyName) + "-*") - .indicesOptions(IndicesOptions.lenientExpand()); + final GetIndexRequest indices = new GetIndexRequest(request.masterNodeTimeout()).indices( + EnrichPolicy.getBaseName(policyName) + "-*" + ).indicesOptions(IndicesOptions.lenientExpand()); String[] concreteIndices = 
indexNameExpressionResolver.concreteIndexNamesWithSystemIndexAccess(state, indices); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java index 0c19c2867a48..4b0aa73b9561 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java @@ -137,7 +137,10 @@ public class EnrichPolicyMaintenanceServiceTests extends ESSingleNodeTestCase { } private void assertEnrichIndicesExist(Set activeIndices) { - GetIndexResponse indices = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-*")).actionGet(); + GetIndexResponse indices = client().admin() + .indices() + .getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(".enrich-*")) + .actionGet(); assertThat(indices.indices().length, is(equalTo(activeIndices.size()))); for (String index : indices.indices()) { assertThat(activeIndices.contains(index), is(true)); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 65d53d3adabe..3a27d1a3fcb1 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -346,7 +346,8 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { } private GetIndexResponse getGetIndexResponseAndCheck(String createdEnrichIndex) { - GetIndexResponse enrichIndex = indicesAdmin().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + GetIndexResponse enrichIndex = indicesAdmin().getIndex(new 
GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(".enrich-test1")) + .actionGet(); assertThat(enrichIndex.getIndices().length, equalTo(1)); assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); @@ -363,7 +364,8 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { .actionGet(); assertEquals(RestStatus.CREATED, indexRequest.status()); - GetIndexResponse sourceIndex = indicesAdmin().getIndex(new GetIndexRequest().indices(sourceIndexName)).actionGet(); + GetIndexResponse sourceIndex = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndexName)) + .actionGet(); // Validate Mapping Map sourceIndexMapping = sourceIndex.getMappings().get(sourceIndexName).sourceAsMap(); Map sourceIndexProperties = (Map) sourceIndexMapping.get("properties"); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java index 568f1074d1a5..fd23b121810e 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java @@ -142,7 +142,9 @@ public class TransportDeleteEnrichPolicyActionTests extends AbstractEnrichTestCa createIndex(EnrichPolicy.getIndexName(name, 1001)); createIndex(EnrichPolicy.getIndexName(name, 1002)); - indicesAdmin().prepareGetIndex().setIndices(EnrichPolicy.getIndexName(name, 1001), EnrichPolicy.getIndexName(name, 1002)).get(); + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setIndices(EnrichPolicy.getIndexName(name, 1001), EnrichPolicy.getIndexName(name, 1002)) + .get(); final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new 
AtomicReference<>(); @@ -169,7 +171,8 @@ public class TransportDeleteEnrichPolicyActionTests extends AbstractEnrichTestCa expectThrows( IndexNotFoundException.class, - indicesAdmin().prepareGetIndex().setIndices(EnrichPolicy.getIndexName(name, 1001), EnrichPolicy.getIndexName(name, 1001)) + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setIndices(EnrichPolicy.getIndexName(name, 1001), EnrichPolicy.getIndexName(name, 1001)) ); if (destructiveRequiresName) { @@ -307,7 +310,7 @@ public class TransportDeleteEnrichPolicyActionTests extends AbstractEnrichTestCa assertNotNull(EnrichStore.getPolicy(otherName, clusterService.state())); // and the index associated with the other index should be unaffected - indicesAdmin().prepareGetIndex().setIndices(EnrichPolicy.getIndexName(otherName, 1001)).get(); + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(EnrichPolicy.getIndexName(otherName, 1001)).get(); } } } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml index c54d764fc476..3a84bda7042a 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml @@ -280,9 +280,7 @@ setup: --- "List Connectors - Soft deleted connectors / no deleted": - - requires: - cluster_features: ["connector_soft_deletes"] - reason: Soft deletes were introduced in 9.0 release + - do: connector.list: @@ -293,9 +291,7 @@ setup: --- "List Connectors - Single soft deleted connector": - - requires: - cluster_features: ["connector_soft_deletes"] - reason: Soft deletes were introduced in 9.0 release + - do: connector.delete: @@ -312,11 +308,91 @@ setup: - match: { count: 3 } + +--- 
+"List Connectors - Single hard deleted connector": + + + - do: + connector.delete: + connector_id: connector-a + hard: true + + - do: + connector.list: {} + + - match: { count: 2 } + + - do: + connector.list: + include_deleted: true + + - match: { count: 2 } + + +--- +"List Connectors - All hard deleted connectors": + + + - do: + connector.delete: + connector_id: connector-a + hard: true + + - do: + connector.delete: + connector_id: connector-b + hard: true + + - do: + connector.delete: + connector_id: connector-c + hard: true + + - do: + connector.list: {} + + - match: { count: 0 } + + - do: + connector.list: + include_deleted: true + + - match: { count: 0 } + +--- +"List Connectors - 2 hard deleted connectors, 1 soft deleted": + + + - do: + connector.delete: + connector_id: connector-a + hard: false + + - do: + connector.delete: + connector_id: connector-b + hard: true + + - do: + connector.delete: + connector_id: connector-c + hard: true + + - do: + connector.list: {} + + - match: { count: 0 } + + - do: + connector.list: + include_deleted: true + + - match: { count: 1 } + --- "List Connectors - Soft deleted connectors": - - requires: - cluster_features: ["connector_soft_deletes"] - reason: Soft deletes were introduced in 9.0 release + - do: connector.delete: @@ -353,9 +429,7 @@ setup: --- "List Connectors - Soft deleted with from": - - requires: - cluster_features: ["connector_soft_deletes"] - reason: Soft deletes were introduced in 9.0 release + - do: connector.delete: @@ -387,9 +461,7 @@ setup: --- "List Connector - Soft deleted with size": - - requires: - cluster_features: ["connector_soft_deletes"] - reason: Soft deletes were introduced in 9.0 release + - do: connector.delete: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/30_connector_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/30_connector_delete.yml index 
aa820732be2e..6cb0be2a737e 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/30_connector_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/30_connector_delete.yml @@ -27,6 +27,31 @@ setup: connector_id: test-connector-to-delete +--- +"Delete Connector - Hard Delete": + - do: + connector.put: + connector_id: test-connector-hard-delete + body: + index_name: search-2-test + name: my-hard-delete-connector + language: en + is_native: false + service_type: super-connector + + - do: + connector.delete: + connector_id: test-connector-hard-delete + hard: true + + - match: { acknowledged: true } + + - do: + catch: "missing" + connector.get: + connector_id: test-connector-hard-delete + include_deleted: true + --- "Delete Connector - deletes associated sync jobs": @@ -107,12 +132,9 @@ setup: connector.delete: connector_id: test-nonexistent-connector - --- "Delete Connector - Supports soft deletes": - - requires: - cluster_features: ["connector_soft_deletes"] - reason: Soft deletes were introduced in 9.0 release + - do: connector.delete: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml index 0b98182b3960..f29deb943d95 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml @@ -306,10 +306,6 @@ teardown: --- 'List query rulesets - include rule types': - - requires: - cluster_features: [ "query_rule_list_types" ] - reason: 'List responses updated in 8.15.5 and 8.16.1' - - do: query_rules.put_ruleset: ruleset_id: 
a-test-query-ruleset-with-lots-of-criteria @@ -389,4 +385,4 @@ teardown: suffix: 1 always: 1 - match: { results.0.rule_type_counts: { pinned: 4, exclude: 1 } } - + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml index 016d9f10fe77..433a0f6705c7 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/70_query_rule_test.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: [ "query_rules.test" ] - reason: Introduced in 8.16.0 - - do: query_rules.put_ruleset: ruleset_id: test-ruleset diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml index 7967516c6ad5..089a078c6220 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: 'query_rule_retriever_supported' - reason: 'test requires query rule retriever implementation' - - do: indices.create: index: test-index1 diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java index ba121f2cf865..0f5c38e6f75e 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java @@ -9,21 +9,13 @@ package org.elasticsearch.xpack.application; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.application.rules.action.ListQueryRulesetsAction; -import org.elasticsearch.xpack.application.rules.retriever.QueryRuleRetrieverBuilder; import java.util.Set; -import static org.elasticsearch.xpack.application.rules.action.TestQueryRulesetAction.QUERY_RULES_TEST_API; - public class EnterpriseSearchFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( - QUERY_RULES_TEST_API, - QueryRuleRetrieverBuilder.QUERY_RULE_RETRIEVERS_SUPPORTED, - ListQueryRulesetsAction.QUERY_RULE_LIST_TYPES - ); + return Set.of(); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index bb80f5fee4ec..3120124c1752 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -13,6 +13,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -232,40 +234,71 @@ 
public class ConnectorIndexService { } /** - * Soft deletes the {@link Connector} and optionally removes the related instances of {@link ConnectorSyncJob} in the underlying index. + * Deletes the {@link Connector} and optionally removes the related instances of {@link ConnectorSyncJob} in the underlying index. * * @param connectorId The id of the {@link Connector}. + * @param hardDelete If set to true, the {@link Connector} is permanently deleted; otherwise, it is soft-deleted. * @param shouldDeleteSyncJobs The flag indicating if {@link ConnectorSyncJob} should also be deleted. * @param listener The action listener to invoke on response/failure. */ - public void deleteConnector(String connectorId, boolean shouldDeleteSyncJobs, ActionListener listener) { + public void deleteConnector( + String connectorId, + boolean hardDelete, + boolean shouldDeleteSyncJobs, + ActionListener listener + ) { try { - // ensure that if connector is soft-deleted, deleting it again results in 404 - getConnector(connectorId, false, listener.delegateFailure((l, connector) -> { - final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .doc( - new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connectorId) - .source(Map.of(Connector.IS_DELETED_FIELD.getPreferredName(), true)) - ); - clientWithOrigin.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, l, (ll, updateResponse) -> { - if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); - return; - } - if (shouldDeleteSyncJobs) { - new ConnectorSyncJobIndexService(clientWithOrigin).deleteAllSyncJobsByConnectorId( - connectorId, - ll.map(r -> updateResponse) + if (hardDelete) { + final DeleteRequest deleteRequest = new DeleteRequest(CONNECTOR_INDEX_NAME).id(connectorId) + 
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + clientWithOrigin.delete( + deleteRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, deleteResponse) -> { + if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + return; + } + if (shouldDeleteSyncJobs) { + new ConnectorSyncJobIndexService(clientWithOrigin).deleteAllSyncJobsByConnectorId( + connectorId, + l.map(r -> deleteResponse) + ); + } else { + l.onResponse(deleteResponse); + } + }) + ); + } else { + getConnector(connectorId, false, listener.delegateFailure((l, connector) -> { + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .source(Map.of(Connector.IS_DELETED_FIELD.getPreferredName(), true)) ); - } else { - ll.onResponse(updateResponse); - } + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, l, (ll, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + return; + } + if (shouldDeleteSyncJobs) { + new ConnectorSyncJobIndexService(clientWithOrigin).deleteAllSyncJobsByConnectorId( + connectorId, + ll.map(r -> updateResponse) + ); + } else { + ll.onResponse(updateResponse); + } + }) + ); })); - })); + } } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java index 5d98f9703ece..57d940537d17 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java @@ -9,9 +9,9 @@ package org.elasticsearch.xpack.application.connector.action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -35,19 +35,16 @@ public class DeleteConnectorAction { public static class Request extends ConnectorActionRequest implements ToXContentObject { private final String connectorId; + private final boolean hardDelete; private final boolean deleteSyncJobs; private static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + private static final ParseField HARD_DELETE_FIELD = new ParseField("hard"); private static final ParseField DELETE_SYNC_JOB_FIELD = new ParseField("delete_sync_jobs"); - public Request(StreamInput in) throws IOException { - super(in); - this.connectorId = in.readString(); - this.deleteSyncJobs = in.readBoolean(); - } - - public Request(String connectorId, boolean deleteSyncJobs) { + public Request(String connectorId, boolean hardDelete, boolean deleteSyncJobs) { this.connectorId = connectorId; + this.hardDelete = hardDelete; this.deleteSyncJobs = deleteSyncJobs; } @@ -66,15 +63,17 @@ public class DeleteConnectorAction { return connectorId; } + public boolean isHardDelete() { + return hardDelete; + } + public boolean shouldDeleteSyncJobs() { return deleteSyncJobs; } @Override public void writeTo(StreamOutput out) throws 
IOException { - super.writeTo(out); - out.writeString(connectorId); - out.writeBoolean(deleteSyncJobs); + TransportAction.localOnly(); } @Override @@ -82,18 +81,21 @@ public class DeleteConnectorAction { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return deleteSyncJobs == request.deleteSyncJobs && Objects.equals(connectorId, request.connectorId); + return hardDelete == request.hardDelete + && deleteSyncJobs == request.deleteSyncJobs + && Objects.equals(connectorId, request.connectorId); } @Override public int hashCode() { - return Objects.hash(connectorId, deleteSyncJobs); + return Objects.hash(connectorId, hardDelete, deleteSyncJobs); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + builder.field(HARD_DELETE_FIELD.getPreferredName(), hardDelete); builder.field(DELETE_SYNC_JOB_FIELD.getPreferredName(), deleteSyncJobs); builder.endObject(); return builder; @@ -102,10 +104,11 @@ public class DeleteConnectorAction { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "delete_connector_request", false, - (p) -> new Request((String) p[0], (boolean) p[1]) + (p) -> new Request((String) p[0], (boolean) p[1], (boolean) p[2]) ); static { PARSER.declareString(constructorArg(), CONNECTOR_ID_FIELD); + PARSER.declareBoolean(constructorArg(), HARD_DELETE_FIELD); PARSER.declareBoolean(constructorArg(), DELETE_SYNC_JOB_FIELD); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java index 8d4a6dccd95f..2adcc1ccafbc 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java @@ -40,8 +40,9 @@ public class RestDeleteConnectorAction extends BaseRestHandler { String connectorId = restRequest.param(CONNECTOR_ID_PARAM); boolean shouldDeleteSyncJobs = restRequest.paramAsBoolean("delete_sync_jobs", false); + boolean hardDelete = restRequest.paramAsBoolean("hard", false); - DeleteConnectorAction.Request request = new DeleteConnectorAction.Request(connectorId, shouldDeleteSyncJobs); + DeleteConnectorAction.Request request = new DeleteConnectorAction.Request(connectorId, hardDelete, shouldDeleteSyncJobs); return channel -> client.execute(DeleteConnectorAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java index e534d969fdaa..5e0e94ece2ae 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java @@ -9,7 +9,7 @@ package org.elasticsearch.xpack.application.connector.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -18,26 +18,21 @@ import 
org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; -public class TransportDeleteConnectorAction extends HandledTransportAction { +public class TransportDeleteConnectorAction extends TransportAction { protected final ConnectorIndexService connectorIndexService; @Inject public TransportDeleteConnectorAction(TransportService transportService, ActionFilters actionFilters, Client client) { - super( - DeleteConnectorAction.NAME, - transportService, - actionFilters, - DeleteConnectorAction.Request::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(DeleteConnectorAction.NAME, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.connectorIndexService = new ConnectorIndexService(client); } @Override protected void doExecute(Task task, DeleteConnectorAction.Request request, ActionListener listener) { String connectorId = request.getConnectorId(); + boolean hardDelete = request.isHardDelete(); boolean shouldDeleteSyncJobs = request.shouldDeleteSyncJobs(); - connectorIndexService.deleteConnector(connectorId, shouldDeleteSyncJobs, listener.map(v -> AcknowledgedResponse.TRUE)); + connectorIndexService.deleteConnector(connectorId, hardDelete, shouldDeleteSyncJobs, listener.map(v -> AcknowledgedResponse.TRUE)); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java index c85416c5f08c..11397583ce5b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionResponse; import 
org.elasticsearch.action.ActionType; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -34,8 +33,6 @@ public class ListQueryRulesetsAction { public static final String NAME = "cluster:admin/xpack/query_rules/list"; public static final ActionType INSTANCE = new ActionType<>(NAME); - public static final NodeFeature QUERY_RULE_LIST_TYPES = new NodeFeature("query_rule_list_types", true); - private ListQueryRulesetsAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java index b8293ca64cf0..c661bc9467e6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -35,8 +34,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr public class TestQueryRulesetAction { - public static final NodeFeature QUERY_RULES_TEST_API = new NodeFeature("query_rules.test", true); - // 
TODO - We'd like to transition this to require less stringent permissions public static final ActionType TYPE = new ActionType<>("cluster:admin/xpack/query_rules/test"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java index d63166b01b4d..528204f4132e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.application.rules.retriever; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RankDocsQueryBuilder; import org.elasticsearch.license.LicenseUtils; @@ -45,7 +44,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr public final class QueryRuleRetrieverBuilder extends CompoundRetrieverBuilder { public static final String NAME = "rule"; - public static final NodeFeature QUERY_RULE_RETRIEVERS_SUPPORTED = new NodeFeature("query_rule_retriever_supported", true); public static final ParseField RULESET_IDS_FIELD = new ParseField("ruleset_ids"); public static final ParseField MATCH_CRITERIA_FIELD = new ParseField("match_criteria"); @@ -76,9 +74,6 @@ public final class QueryRuleRetrieverBuilder extends CompoundRetrieverBuilder connectorIds = new ArrayList<>(); for (int i = 0; i < numConnectors; i++) { @@ -111,11 +111,10 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { } String connectorIdToDelete = connectorIds.get(0); - UpdateResponse resp = awaitDeleteConnector(connectorIdToDelete, 
false); + DocWriteResponse resp = awaitDeleteConnector(connectorIdToDelete, false, false); assertThat(resp.status(), equalTo(RestStatus.OK)); expectThrows(ResourceNotFoundException.class, () -> awaitGetConnector(connectorIdToDelete)); - - expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete, false)); + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete, false, false)); } public void testDeleteConnector_expectSoftDeletion() throws Exception { @@ -130,13 +129,11 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { } String connectorIdToDelete = connectorIds.get(0); - UpdateResponse resp = awaitDeleteConnector(connectorIdToDelete, false); + DocWriteResponse resp = awaitDeleteConnector(connectorIdToDelete, false, false); assertThat(resp.status(), equalTo(RestStatus.OK)); expectThrows(ResourceNotFoundException.class, () -> awaitGetConnector(connectorIdToDelete)); - - expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete, false)); - - Connector softDeletedConnector = awaitGetSoftDeletedConnector(connectorIdToDelete); + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete, false, false)); + Connector softDeletedConnector = awaitGetConnectorIncludeDeleted(connectorIdToDelete); assertThat(softDeletedConnector.getConnectorId(), equalTo(connectorIdToDelete)); assertThat(softDeletedConnector.getServiceType(), equalTo(connectors.get(0).getServiceType())); } @@ -150,27 +147,65 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { connectorIds.add(resp.getId()); } - // Delete all of them for (int i = 0; i < numConnectors; i++) { String connectorIdToDelete = connectorIds.get(i); - UpdateResponse resp = awaitDeleteConnector(connectorIdToDelete, false); + DocWriteResponse resp = awaitDeleteConnector(connectorIdToDelete, false, false); assertThat(resp.status(), equalTo(RestStatus.OK)); 
} - // Connectors were deleted from main index for (int i = 0; i < numConnectors; i++) { String connectorId = connectorIds.get(i); expectThrows(ResourceNotFoundException.class, () -> awaitGetConnector(connectorId)); } - // Soft deleted connectors available in system index for (int i = 0; i < numConnectors; i++) { String connectorId = connectorIds.get(i); - Connector softDeletedConnector = awaitGetSoftDeletedConnector(connectorId); + Connector softDeletedConnector = awaitGetConnectorIncludeDeleted(connectorId); assertThat(softDeletedConnector.getConnectorId(), equalTo(connectorId)); } } + public void testDeleteConnector_expectHardDeletionSingle() throws Exception { + int numConnectors = 3; + List connectorIds = new ArrayList<>(); + for (int i = 0; i < numConnectors; i++) { + Connector connector = ConnectorTestUtils.getRandomConnector(); + ConnectorCreateActionResponse resp = awaitCreateConnector(null, connector); + connectorIds.add(resp.getId()); + } + + String connectorIdToDelete = connectorIds.get(0); + DocWriteResponse resp = awaitDeleteConnector(connectorIdToDelete, true, false); + assertThat(resp.status(), equalTo(RestStatus.OK)); + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnector(connectorIdToDelete)); + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnectorIncludeDeleted(connectorIdToDelete)); + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete, true, false)); + } + + public void testDeleteConnector_expectHardDeletionMultipleConnectors() throws Exception { + int numConnectors = 5; + List connectorIds = new ArrayList<>(); + for (int i = 0; i < numConnectors; i++) { + Connector connector = ConnectorTestUtils.getRandomConnector(); + ConnectorCreateActionResponse resp = awaitCreateConnector(null, connector); + connectorIds.add(resp.getId()); + } + + for (int i = 0; i < numConnectors; i++) { + String connectorIdToDelete = connectorIds.get(i); + DocWriteResponse resp = 
awaitDeleteConnector(connectorIdToDelete, true, false); + assertThat(resp.status(), equalTo(RestStatus.OK)); + } + + for (String connectorId : connectorIds) { + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnector(connectorId)); + } + + for (String connectorId : connectorIds) { + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnectorIncludeDeleted(connectorId)); + } + } + public void testUpdateConnectorConfiguration_FullConfiguration() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); @@ -949,13 +984,14 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { assertThat(updateApiKeyIdRequest.getApiKeySecretId(), equalTo(indexedConnector.getApiKeySecretId())); } - private UpdateResponse awaitDeleteConnector(String connectorId, boolean deleteConnectorSyncJobs) throws Exception { + private DocWriteResponse awaitDeleteConnector(String connectorId, boolean hardDelete, boolean deleteConnectorSyncJobs) + throws Exception { CountDownLatch latch = new CountDownLatch(1); - final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.deleteConnector(connectorId, deleteConnectorSyncJobs, new ActionListener<>() { + connectorIndexService.deleteConnector(connectorId, hardDelete, deleteConnectorSyncJobs, new ActionListener<>() { @Override - public void onResponse(UpdateResponse deleteResponse) { + public void onResponse(DocWriteResponse deleteResponse) { resp.set(deleteResponse); latch.countDown(); } @@ -1008,7 +1044,7 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { return resp.get(); } - private Connector awaitGetSoftDeletedConnector(String connectorId) throws Exception { + private Connector awaitGetConnectorIncludeDeleted(String connectorId) throws Exception { return awaitGetConnector(connectorId, true); 
} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java deleted file mode 100644 index 5ad7109a6b7c..000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractBWCSerializationTestCase; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -public class DeleteConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { - - @Override - protected Writeable.Reader instanceReader() { - return DeleteConnectorAction.Request::new; - } - - @Override - protected DeleteConnectorAction.Request createTestInstance() { - return new DeleteConnectorAction.Request(randomAlphaOfLengthBetween(1, 10), false); - } - - @Override - protected DeleteConnectorAction.Request mutateInstance(DeleteConnectorAction.Request instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected DeleteConnectorAction.Request doParseInstance(XContentParser parser) throws IOException { - return DeleteConnectorAction.Request.parse(parser); - } - - @Override - protected DeleteConnectorAction.Request mutateInstanceForVersion(DeleteConnectorAction.Request 
instance, TransportVersion version) { - return new DeleteConnectorAction.Request(instance.getConnectorId(), instance.shouldDeleteSyncJobs()); - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilderTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilderTests.java index 3081dcf11d95..3c169d9a7677 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilderTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilderTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.application.rules.retriever; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; @@ -53,10 +54,7 @@ public class QueryRuleRetrieverBuilderTests extends AbstractXContentTestCase nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == QueryRuleRetrieverBuilder.QUERY_RULE_RETRIEVERS_SUPPORTED - ) + new RetrieverParserContext(new SearchUsage(), Predicates.never()) ); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java index 00765a8c0528..b254612a700d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java @@ -78,12 +78,20 @@ public abstract class Expression extends Node implements Resolvable super(source, children); } - // whether the expression can be 
evaluated statically (folded) or not + /** + * Whether the expression can be evaluated statically, aka "folded", or not. + */ public boolean foldable() { return false; } - public Object fold() { + /** + * Evaluate this expression statically to a constant. It is an error to call + * this if {@link #foldable} returns false. + */ + public Object fold(FoldContext ctx) { + // TODO After removing FoldContext.unbounded from non-test code examine all calls + // for places we should use instanceof Literal instead throw new QlIllegalArgumentException("Should not fold expression"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java index 4e4338aad370..739333ded0fd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java @@ -107,10 +107,10 @@ public final class Expressions { return true; } - public static List fold(List exps) { + public static List fold(FoldContext ctx, List exps) { List folded = new ArrayList<>(exps.size()); for (Expression exp : exps) { - folded.add(exp.fold()); + folded.add(exp.fold(ctx)); } return folded; @@ -135,7 +135,7 @@ public final class Expressions { /** * Is this {@linkplain Expression} guaranteed to have * only the {@code null} value. {@linkplain Expression}s that - * {@link Expression#fold()} to {@code null} may + * {@link Expression#fold} to {@code null} may * return {@code false} here, but should eventually be folded * into a {@link Literal} containing {@code null} which will return * {@code true} from here. 
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FoldContext.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FoldContext.java new file mode 100644 index 000000000000..25da44c5fd22 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FoldContext.java @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.xpack.esql.core.QlClientException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +import java.util.Objects; + +/** + * Context passed to {@link Expression#fold}. This is not thread safe. + */ +public class FoldContext { + private static final long SMALL = MemorySizeValue.parseBytesSizeValueOrHeapRatio("5%", "small").getBytes(); + + /** + * {@link Expression#fold} using less than 5% of heap. Fine in tests but otherwise + * calling this is a signal that you either, shouldn't be calling {@link Expression#fold} + * at all, or should pass in a shared {@link FoldContext} made by {@code Configuration}. + */ + public static FoldContext small() { + return new FoldContext(SMALL); + } + + private final long initialAllowedBytes; + private long allowedBytes; + + public FoldContext(long allowedBytes) { + this.initialAllowedBytes = allowedBytes; + this.allowedBytes = allowedBytes; + } + + /** + * The maximum allowed bytes. 
{@link #allowedBytes()} will be the same as this + * for an unused context. + */ + public long initialAllowedBytes() { + return initialAllowedBytes; + } + + /** + * The remaining allowed bytes. + */ + long allowedBytes() { + return allowedBytes; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + FoldContext that = (FoldContext) o; + return initialAllowedBytes == that.initialAllowedBytes && allowedBytes == that.allowedBytes; + } + + @Override + public int hashCode() { + return Objects.hash(initialAllowedBytes, allowedBytes); + } + + @Override + public String toString() { + return "FoldContext[" + allowedBytes + '/' + initialAllowedBytes + ']'; + } + + /** + * Track an allocation. Best to call this before allocating + * if possible, but after is ok if the allocation is small. + *

+ * Note that, unlike {@link CircuitBreaker}, you don't have + * to free this allocation later. This is important because the query plan + * doesn't implement {@link Releasable} so it can't free + * consistently. But when you have to allocate big chunks of memory during + * folding and know that you are returning the memory it is kindest to + * call this with a negative number, effectively giving those bytes back. + *

+ */ + public void trackAllocation(Source source, long bytes) { + allowedBytes -= bytes; + assert allowedBytes <= initialAllowedBytes : "returned more bytes than it used"; + if (allowedBytes < 0) { + throw new FoldTooMuchMemoryException(source, bytes, initialAllowedBytes); + } + } + + /** + * Adapt this into a {@link CircuitBreaker} suitable for building bounded local + * DriverContext. This is absolutely an abuse of the {@link CircuitBreaker} contract + * and only methods used by BlockFactory are implemented. And this'll throw a + * {@link FoldTooMuchMemoryException} instead of the standard {@link CircuitBreakingException}. + * This works for the common folding implementation though. + */ + public CircuitBreaker circuitBreakerView(Source source) { + return new CircuitBreaker() { + @Override + public void circuitBreak(String fieldName, long bytesNeeded) { + throw new UnsupportedOperationException(); + } + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + trackAllocation(source, bytes); + } + + @Override + public void addWithoutBreaking(long bytes) { + assert bytes <= 0 : "we only expect this to be used for deallocation"; + allowedBytes -= bytes; + assert allowedBytes <= initialAllowedBytes : "returned more bytes than it used"; + } + + @Override + public long getUsed() { + /* + * This isn't expected to be used by we can implement it so we may as + * well. Maybe it'll be useful for debugging one day. + */ + return initialAllowedBytes - allowedBytes; + } + + @Override + public long getLimit() { + /* + * This isn't expected to be used by we can implement it so we may as + * well. Maybe it'll be useful for debugging one day. 
+ */ + return initialAllowedBytes; + } + + @Override + public double getOverhead() { + return 1.0; + } + + @Override + public long getTrippedCount() { + return 0; + } + + @Override + public String getName() { + return REQUEST; + } + + @Override + public Durability getDurability() { + throw new UnsupportedOperationException(); + } + + @Override + public void setLimitAndOverhead(long limit, double overhead) { + throw new UnsupportedOperationException(); + } + }; + } + + public static class FoldTooMuchMemoryException extends QlClientException { + protected FoldTooMuchMemoryException(Source source, long bytesForExpression, long initialAllowedBytes) { + super( + "line {}:{}: Folding query used more than {}. The expression that pushed past the limit is [{}] which needed {}.", + source.source().getLineNumber(), + source.source().getColumnNumber(), + ByteSizeValue.ofBytes(initialAllowedBytes), + source.text(), + ByteSizeValue.ofBytes(bytesForExpression) + ); + } + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Foldables.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Foldables.java index 601758bca591..233113c3fe1b 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Foldables.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Foldables.java @@ -10,9 +10,9 @@ import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; public abstract class Foldables { - public static Object valueOf(Expression e) { + public static Object valueOf(FoldContext ctx, Expression e) { if (e.foldable()) { - return e.fold(); + return e.fold(ctx); } throw new QlIllegalArgumentException("Cannot determine value for {}", e); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java index 53f559c5c82f..afe616489d81 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java @@ -98,7 +98,7 @@ public class Literal extends LeafExpression { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { return value; } @@ -138,7 +138,7 @@ public class Literal extends LeafExpression { * Utility method for creating a literal out of a foldable expression. * Throws an exception if the expression is not foldable. */ - public static Literal of(Expression foldable) { + public static Literal of(FoldContext ctx, Expression foldable) { if (foldable.foldable() == false) { throw new QlIllegalArgumentException("Foldable expression required for Literal creation; received unfoldable " + foldable); } @@ -147,7 +147,7 @@ public class Literal extends LeafExpression { return (Literal) foldable; } - return new Literal(foldable.source(), foldable.fold(), foldable.dataType()); + return new Literal(foldable.source(), foldable.fold(ctx), foldable.dataType()); } public static Literal of(Expression source, Object value) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java index b817ec17c7bd..842f3c0ddadd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java @@ -133,6 +133,14 @@ public final class TypeResolutions { return TypeResolution.TYPE_RESOLVED; } + /** + * Is this {@link Expression#foldable()} and not {@code null}. 
+ * + * @deprecated instead of calling this, check for a {@link Literal} containing + * {@code null}. Foldable expressions will be folded by other rules, + * eventually, to a {@link Literal}. + */ + @Deprecated public static TypeResolution isNotNullAndFoldable(Expression e, String operationName, ParamOrdinal paramOrd) { TypeResolution resolution = isFoldable(e, operationName, paramOrd); @@ -140,7 +148,7 @@ public final class TypeResolutions { return resolution; } - if (e.dataType() == DataType.NULL || e.fold() == null) { + if (e.dataType() == DataType.NULL || e.fold(FoldContext.small()) == null) { resolution = new TypeResolution( format( null, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java index 8704a42ed33e..36517b1be9ce 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.core.expression.function.scalar; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; @@ -53,5 +54,5 @@ public abstract class UnaryScalarFunction extends ScalarFunction { } @Override - public abstract Object fold(); + public abstract Object fold(FoldContext ctx); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/BinaryPredicate.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/BinaryPredicate.java index be5caedacd50..bf5549b31e5f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/BinaryPredicate.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/BinaryPredicate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -29,8 +30,8 @@ public abstract class BinaryPredicate { } @Override - public Object fold() { - return apply(field().fold()); + public Object fold(FoldContext ctx) { + return apply(field().fold(ctx)); } private static Boolean apply(Object input) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java index 9879a1f5ffc2..f5542ff7c3de 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.nulls; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Nullability; import 
org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; @@ -49,8 +50,8 @@ public class IsNotNull extends UnaryScalarFunction implements Negatable extends UnaryScalarFun } @Override - public Boolean fold() { + public Boolean fold(FoldContext ctx) { throw new UnsupportedOperationException(); } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FoldContextTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FoldContextTests.java new file mode 100644 index 000000000000..2080f4007777 --- /dev/null +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/FoldContextTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.xpack.esql.core.tree.Source; + +import static org.hamcrest.Matchers.equalTo; + +public class FoldContextTests extends ESTestCase { + public void testEq() { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(randomFoldContext(), this::copy, this::mutate); + } + + private FoldContext randomFoldContext() { + FoldContext ctx = new FoldContext(randomNonNegativeLong()); + if (randomBoolean()) { + ctx.trackAllocation(Source.EMPTY, randomLongBetween(0, ctx.initialAllowedBytes())); + } + return ctx; + } + + private FoldContext copy(FoldContext ctx) { + FoldContext copy = new FoldContext(ctx.initialAllowedBytes()); + copy.trackAllocation(Source.EMPTY, ctx.initialAllowedBytes() - ctx.allowedBytes()); + return copy; + } + + private FoldContext mutate(FoldContext ctx) { + if (randomBoolean()) { + FoldContext differentInitial = new FoldContext(ctx.initialAllowedBytes() + 1); + differentInitial.trackAllocation(Source.EMPTY, differentInitial.initialAllowedBytes() - ctx.allowedBytes()); + assertThat(differentInitial.allowedBytes(), equalTo(ctx.allowedBytes())); + return differentInitial; + } else { + FoldContext differentAllowed = new FoldContext(ctx.initialAllowedBytes()); + long allowed = randomValueOtherThan(ctx.allowedBytes(), () -> randomLongBetween(0, ctx.initialAllowedBytes())); + differentAllowed.trackAllocation(Source.EMPTY, ctx.initialAllowedBytes() - allowed); + assertThat(differentAllowed.allowedBytes(), equalTo(allowed)); + return differentAllowed; + } + } + + public void testTrackAllocation() { + FoldContext ctx = new FoldContext(10); + ctx.trackAllocation(Source.synthetic("shouldn't break"), 10); + Exception e = expectThrows( + FoldContext.FoldTooMuchMemoryException.class, + () -> ctx.trackAllocation(Source.synthetic("should 
break"), 1) + ); + assertThat( + e.getMessage(), + equalTo( + "line -1:-1: Folding query used more than 10b. " + + "The expression that pushed past the limit is [should break] which needed 1b." + ) + ); + } + + public void testCircuitBreakerViewBreaking() { + FoldContext ctx = new FoldContext(10); + ctx.circuitBreakerView(Source.synthetic("shouldn't break")).addEstimateBytesAndMaybeBreak(10, "test"); + Exception e = expectThrows( + FoldContext.FoldTooMuchMemoryException.class, + () -> ctx.circuitBreakerView(Source.synthetic("should break")).addEstimateBytesAndMaybeBreak(1, "test") + ); + assertThat( + e.getMessage(), + equalTo( + "line -1:-1: Folding query used more than 10b. " + + "The expression that pushed past the limit is [should break] which needed 1b." + ) + ); + } + + public void testCircuitBreakerViewWithoutBreaking() { + FoldContext ctx = new FoldContext(10); + CircuitBreaker view = ctx.circuitBreakerView(Source.synthetic("shouldn't break")); + view.addEstimateBytesAndMaybeBreak(10, "test"); + view.addWithoutBreaking(-1); + assertThat(view.getUsed(), equalTo(9L)); + } + + public void testToString() { + // Random looking numbers are indeed random. Just so we have consistent numbers to assert on in toString. 
+ FoldContext ctx = new FoldContext(123); + ctx.trackAllocation(Source.EMPTY, 22); + assertThat(ctx.toString(), equalTo("FoldContext[101/123]")); + } +} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java index ed4c6282368c..cd15ed5a94cf 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.DateUtils; @@ -211,7 +212,11 @@ public class RangeTests extends ESTestCase { (Boolean) test[7], ZoneId.systemDefault() ); - assertEquals("failed on test " + i + ": " + Arrays.toString(test), test[8], range.areBoundariesInvalid()); + assertEquals( + "failed on test " + i + ": " + Arrays.toString(test), + test[8], + range.areBoundariesInvalid(range.lower().fold(FoldContext.small()), range.upper().fold(FoldContext.small())) + ); } } @@ -226,5 +231,4 @@ public class RangeTests extends ESTestCase { private static DataType randomTextType() { return randomFrom(KEYWORD, TEXT); } - } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java index d323174d2d3d..91b0564a5b40 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java +++ 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; @@ -104,7 +105,7 @@ public class OptimizerRulesTests extends ESTestCase { Range r = rangeOf(fa, SIX, false, FIVE, true); assertTrue(r.foldable()); - assertEquals(Boolean.FALSE, r.fold()); + assertEquals(Boolean.FALSE, r.fold(FoldContext.small())); } // 6 < a <= 5.5 -> FALSE @@ -113,7 +114,7 @@ public class OptimizerRulesTests extends ESTestCase { Range r = rangeOf(fa, SIX, false, L(5.5d), true); assertTrue(r.foldable()); - assertEquals(Boolean.FALSE, r.fold()); + assertEquals(Boolean.FALSE, r.fold(FoldContext.small())); } // Conjunction diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 9cbcd8d49b58..d7fa786a24e7 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -283,8 +283,9 @@ tasks.named('checkstyleMain').configure { exclude { normalize(it.file.toString()).contains("src/main/generated") } } -def prop(Type, type, TYPE, BYTES, Array) { +def prop(Name, Type, type, TYPE, BYTES, Array) { return [ + "Name" : Name, "Type" : Type, "type" : type, "TYPE" : TYPE, @@ -296,15 +297,19 @@ def prop(Type, type, TYPE, BYTES, Array) { "double" : type == "double" ? "true" : "", "BytesRef" : type == "BytesRef" ? "true" : "", "boolean" : type == "boolean" ? "true" : "", + "nanosMillis" : Name == "NanosMillis" ? "true" : "", + "millisNanos" : Name == "MillisNanos" ? 
"true" : "", ] } tasks.named('stringTemplates').configure { - var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray") - var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray") - var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") - var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") - var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") + var intProperties = prop("Int", "Int", "int", "INT", "Integer.BYTES", "IntArray") + var longProperties = prop("Long", "Long", "long", "LONG", "Long.BYTES", "LongArray") + var nanosMillisProperties = prop("NanosMillis", "Long", "long", "LONG", "Long.BYTES", "LongArray") + var millisNanosProperties = prop("MillisNanos", "Long", "long", "LONG", "Long.BYTES", "LongArray") + var doubleProperties = prop("Double", "Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") + var bytesRefProperties = prop("BytesRef", "BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") + var booleanProperties = prop("Boolean", "Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") File inInputFile = file("src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st") template { @@ -322,6 +327,16 @@ tasks.named('stringTemplates').configure { it.inputFile = inInputFile it.outputFile = "org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InLongEvaluator.java" } + template { + it.properties = nanosMillisProperties + it.inputFile = inInputFile + it.outputFile = "org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java" + } + template { + it.properties = millisNanosProperties + it.inputFile = inInputFile + it.outputFile = 
"org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java" + } template { it.properties = doubleProperties it.inputFile = inInputFile diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java index 863f89827207..87b33b3b0893 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java @@ -246,4 +246,9 @@ public final class OrdinalBytesRefBlock extends AbstractNonThreadSafeRefCounted public long ramBytesUsed() { return ordinals.ramBytesUsed() + bytes.ramBytesUsed(); } + + @Override + public String toString() { + return getClass().getSimpleName() + "[ordinals=" + ordinals + ", bytes=" + bytes + "]"; + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index 9c35b5a44d5d..34c27a5c1fdf 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -138,7 +138,6 @@ public class LuceneCountOperator extends LuceneOperator { Page page = null; // emit only one page if (remainingDocs <= 0 && pagesEmitted == 0) { - pagesEmitted++; LongBlock count = null; BooleanBlock seen = null; try { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinMaxOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinMaxOperator.java index c41c31345df4..e9f540c654a2 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinMaxOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinMaxOperator.java @@ -151,7 +151,6 @@ final class LuceneMinMaxOperator extends LuceneOperator { Page page = null; // emit only one page if (remainingDocs <= 0 && pagesEmitted == 0) { - pagesEmitted++; Block result = null; BooleanBlock seen = null; try { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index bbc3ace3716b..2f72c309b5f2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -68,6 +68,10 @@ public abstract class LuceneOperator extends SourceOperator { long processingNanos; int pagesEmitted; boolean doneCollecting; + /** + * Count of rows this operator has emitted. 
+ */ + private long rowsEmitted; protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) { this.blockFactory = blockFactory; @@ -115,7 +119,12 @@ public abstract class LuceneOperator extends SourceOperator { @Override public final Page getOutput() { try { - return getCheckedOutput(); + Page page = getCheckedOutput(); + if (page != null) { + pagesEmitted++; + rowsEmitted += page.getPositionCount(); + } + return page; } catch (IOException ioe) { throw new UncheckedIOException(ioe); } @@ -252,6 +261,7 @@ public abstract class LuceneOperator extends SourceOperator { private final int sliceMin; private final int sliceMax; private final int current; + private final long rowsEmitted; private Status(LuceneOperator operator) { processedSlices = operator.processedSlices; @@ -276,6 +286,7 @@ public abstract class LuceneOperator extends SourceOperator { current = scorer.position; } pagesEmitted = operator.pagesEmitted; + rowsEmitted = operator.rowsEmitted; } Status( @@ -288,7 +299,8 @@ public abstract class LuceneOperator extends SourceOperator { int pagesEmitted, int sliceMin, int sliceMax, - int current + int current, + long rowsEmitted ) { this.processedSlices = processedSlices; this.processedQueries = processedQueries; @@ -300,6 +312,7 @@ public abstract class LuceneOperator extends SourceOperator { this.sliceMin = sliceMin; this.sliceMax = sliceMax; this.current = current; + this.rowsEmitted = rowsEmitted; } Status(StreamInput in) throws IOException { @@ -318,6 +331,11 @@ public abstract class LuceneOperator extends SourceOperator { sliceMin = in.readVInt(); sliceMax = in.readVInt(); current = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsEmitted = in.readVLong(); + } else { + rowsEmitted = 0; + } } @Override @@ -336,6 +354,9 @@ public abstract class LuceneOperator extends SourceOperator { out.writeVInt(sliceMin); out.writeVInt(sliceMax); out.writeVInt(current); + 
if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsEmitted); + } } @Override @@ -383,6 +404,10 @@ public abstract class LuceneOperator extends SourceOperator { return current; } + public long rowsEmitted() { + return rowsEmitted; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -399,6 +424,7 @@ public abstract class LuceneOperator extends SourceOperator { builder.field("slice_min", sliceMin); builder.field("slice_max", sliceMax); builder.field("current", current); + builder.field("rows_emitted", rowsEmitted); return builder.endObject(); } @@ -416,12 +442,13 @@ public abstract class LuceneOperator extends SourceOperator { && pagesEmitted == status.pagesEmitted && sliceMin == status.sliceMin && sliceMax == status.sliceMax - && current == status.current; + && current == status.current + && rowsEmitted == status.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current); + return Objects.hash(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 4afabcadf60c..3d34067e1a83 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -175,7 +175,6 @@ public class LuceneSourceOperator extends LuceneOperator { } Page page = null; if (currentPagePos >= minPageSize || remainingDocs <= 0 || scorer.isDone()) { - pagesEmitted++; IntBlock shard = null; IntBlock leaf = null; IntVector docs = null; diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 8da62963ffb6..d25cb3a870da 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -240,7 +240,6 @@ public final class LuceneTopNSourceOperator extends LuceneOperator { Releasables.closeExpectNoException(shard, segments, docs, docBlock, scores); } } - pagesEmitted++; return page; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index 74affb10eaf2..8fbb94658747 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -546,8 +546,8 @@ public class ValuesSourceReaderOperator extends AbstractPageMappingOperator { } @Override - protected Status status(long processNanos, int pagesProcessed) { - return new Status(new TreeMap<>(readersBuilt), processNanos, pagesProcessed); + protected Status status(long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) { + return new Status(new TreeMap<>(readersBuilt), processNanos, pagesProcessed, rowsReceived, rowsEmitted); } public static class Status extends AbstractPageMappingOperator.Status { @@ -559,8 +559,8 @@ public class ValuesSourceReaderOperator extends AbstractPageMappingOperator { private final Map readersBuilt; - Status(Map readersBuilt, long processNanos, int pagesProcessed) { - super(processNanos, pagesProcessed); + Status(Map readersBuilt, long processNanos, int 
pagesProcessed, long rowsReceived, long rowsEmitted) { + super(processNanos, pagesProcessed, rowsReceived, rowsEmitted); this.readersBuilt = readersBuilt; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java index 05913b7dd5f6..09d04d36f831 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java @@ -37,6 +37,14 @@ public abstract class AbstractPageMappingOperator implements Operator { * Count of pages that have been processed by this operator. */ private int pagesProcessed; + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + /** + * Count of rows this operator has emitted. + */ + private long rowsEmitted; protected abstract Page process(Page page); @@ -52,6 +60,7 @@ public abstract class AbstractPageMappingOperator implements Operator { public final void addInput(Page page) { assert prev == null : "has pending input page"; prev = page; + rowsReceived += page.getPositionCount(); } @Override @@ -75,6 +84,9 @@ public abstract class AbstractPageMappingOperator implements Operator { long start = System.nanoTime(); Page p = process(prev); pagesProcessed++; + if (p != null) { + rowsEmitted += p.getPositionCount(); + } processNanos += System.nanoTime() - start; prev = null; return p; @@ -82,11 +94,11 @@ public abstract class AbstractPageMappingOperator implements Operator { @Override public final Status status() { - return status(processNanos, pagesProcessed); + return status(processNanos, pagesProcessed, rowsReceived, rowsEmitted); } - protected Status status(long processNanos, int pagesProcessed) { - return new Status(processNanos, pagesProcessed); + protected Status status(long 
processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) { + return new Status(processNanos, pagesProcessed, rowsReceived, rowsEmitted); } @Override @@ -105,15 +117,26 @@ public abstract class AbstractPageMappingOperator implements Operator { private final long processNanos; private final int pagesProcessed; + private final long rowsReceived; + private final long rowsEmitted; - public Status(long processNanos, int pagesProcessed) { + public Status(long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) { this.processNanos = processNanos; this.pagesProcessed = pagesProcessed; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } protected Status(StreamInput in) throws IOException { processNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0; pagesProcessed = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + rowsEmitted = in.readVLong(); + } else { + rowsReceived = 0; + rowsEmitted = 0; + } } @Override @@ -122,6 +145,10 @@ public abstract class AbstractPageMappingOperator implements Operator { out.writeVLong(processNanos); } out.writeVInt(pagesProcessed); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } @Override @@ -133,6 +160,14 @@ public abstract class AbstractPageMappingOperator implements Operator { return pagesProcessed; } + public long rowsReceived() { + return rowsReceived; + } + + public long rowsEmitted() { + return rowsEmitted; + } + public long processNanos() { return processNanos; } @@ -153,7 +188,7 @@ public abstract class AbstractPageMappingOperator implements Operator { if (builder.humanReadable()) { builder.field("process_time", TimeValue.timeValueNanos(processNanos)); } - return builder.field("pages_processed", pagesProcessed); + return 
builder.field("pages_processed", pagesProcessed).field("rows_received", rowsReceived).field("rows_emitted", rowsEmitted); } @Override @@ -161,12 +196,15 @@ public abstract class AbstractPageMappingOperator implements Operator { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; - return processNanos == status.processNanos && pagesProcessed == status.pagesProcessed; + return processNanos == status.processNanos + && pagesProcessed == status.pagesProcessed + && rowsReceived == status.rowsReceived + && rowsEmitted == status.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(processNanos, pagesProcessed); + return Objects.hash(processNanos, pagesProcessed, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java index 32492af157fe..6a165fdfa055 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java @@ -49,6 +49,16 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator */ private int pagesEmitted; + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + + /** + * Count of rows this operator has emitted. + */ + private long rowsEmitted; + /** * Build and Iterator of results for a new page. 
*/ @@ -82,6 +92,7 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator } next = new RuntimeTrackingIterator(receive(page)); pagesReceived++; + rowsReceived += page.getPositionCount(); } @Override @@ -101,16 +112,23 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator } Page ret = next.next(); pagesEmitted++; + rowsEmitted += ret.getPositionCount(); return ret; } @Override public final AbstractPageMappingToIteratorOperator.Status status() { - return status(processNanos, pagesReceived, pagesEmitted); + return status(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } - protected AbstractPageMappingToIteratorOperator.Status status(long processNanos, int pagesReceived, int pagesEmitted) { - return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted); + protected AbstractPageMappingToIteratorOperator.Status status( + long processNanos, + int pagesReceived, + int pagesEmitted, + long rowsReceived, + long rowsEmitted + ) { + return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } @Override @@ -154,17 +172,28 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator private final long processNanos; private final int pagesReceived; private final int pagesEmitted; + private final long rowsReceived; + private final long rowsEmitted; - public Status(long processNanos, int pagesProcessed, int pagesEmitted) { + public Status(long processNanos, int pagesProcessed, int pagesEmitted, long rowsReceived, long rowsEmitted) { this.processNanos = processNanos; this.pagesReceived = pagesProcessed; this.pagesEmitted = pagesEmitted; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } protected Status(StreamInput in) throws IOException { processNanos = in.readVLong(); pagesReceived = in.readVInt(); pagesEmitted = in.readVInt(); + if 
(in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + rowsEmitted = in.readVLong(); + } else { + rowsReceived = 0; + rowsEmitted = 0; + } } @Override @@ -172,6 +201,10 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator out.writeVLong(processNanos); out.writeVInt(pagesReceived); out.writeVInt(pagesEmitted); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } @Override @@ -187,6 +220,14 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator return pagesEmitted; } + public long rowsReceived() { + return rowsReceived; + } + + public long rowsEmitted() { + return rowsEmitted; + } + public long processNanos() { return processNanos; } @@ -207,8 +248,10 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator if (builder.humanReadable()) { builder.field("process_time", TimeValue.timeValueNanos(processNanos)); } - builder.field("pages_received", pagesReceived); - return builder.field("pages_emitted", pagesEmitted); + return builder.field("pages_received", pagesReceived) + .field("pages_emitted", pagesEmitted) + .field("rows_received", rowsReceived) + .field("rows_emitted", rowsEmitted); } @Override @@ -216,12 +259,16 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AbstractPageMappingToIteratorOperator.Status status = (AbstractPageMappingToIteratorOperator.Status) o; - return processNanos == status.processNanos && pagesReceived == status.pagesReceived && pagesEmitted == status.pagesEmitted; + return processNanos == status.processNanos + && pagesReceived == status.pagesReceived + && pagesEmitted == status.pagesEmitted + && rowsReceived == status.rowsReceived + && rowsEmitted == status.rowsEmitted; } 
@Override public int hashCode() { - return Objects.hash(processNanos, pagesReceived, pagesEmitted); + return Objects.hash(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java index f57f450c7ee3..ab086a7fbe48 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java @@ -58,6 +58,14 @@ public class AggregationOperator implements Operator { * Count of pages this operator has processed. */ private int pagesProcessed; + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + /** + * Count of rows this operator has emitted. + */ + private long rowsEmitted; public record AggregationOperatorFactory(List aggregators, AggregatorMode mode) implements OperatorFactory { @@ -106,12 +114,16 @@ public class AggregationOperator implements Operator { page.releaseBlocks(); aggregationNanos += System.nanoTime() - start; pagesProcessed++; + rowsReceived += page.getPositionCount(); } } @Override public Page getOutput() { Page p = output; + if (p != null) { + rowsEmitted += p.getPositionCount(); + } this.output = null; return p; } @@ -181,7 +193,7 @@ public class AggregationOperator implements Operator { @Override public Operator.Status status() { - return new Status(aggregationNanos, aggregationFinishNanos, pagesProcessed); + return new Status(aggregationNanos, aggregationFinishNanos, pagesProcessed, rowsReceived, rowsEmitted); } public static class Status implements Operator.Status { @@ -204,6 +216,14 @@ public class AggregationOperator implements Operator { * Count of pages this operator has processed. 
*/ private final int pagesProcessed; + /** + * Count of rows this operator has received. + */ + private final long rowsReceived; + /** + * Count of rows this operator has emitted. + */ + private final long rowsEmitted; /** * Build. @@ -211,10 +231,12 @@ public class AggregationOperator implements Operator { * @param aggregationFinishNanos Nanoseconds this operator has spent running the aggregations. * @param pagesProcessed Count of pages this operator has processed. */ - public Status(long aggregationNanos, long aggregationFinishNanos, int pagesProcessed) { + public Status(long aggregationNanos, long aggregationFinishNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) { this.aggregationNanos = aggregationNanos; this.aggregationFinishNanos = aggregationFinishNanos; this.pagesProcessed = pagesProcessed; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } protected Status(StreamInput in) throws IOException { @@ -225,6 +247,13 @@ public class AggregationOperator implements Operator { aggregationFinishNanos = null; } pagesProcessed = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + rowsEmitted = in.readVLong(); + } else { + rowsReceived = 0; + rowsEmitted = 0; + } } @Override @@ -234,6 +263,10 @@ public class AggregationOperator implements Operator { out.writeOptionalVLong(aggregationFinishNanos); } out.writeVInt(pagesProcessed); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } @Override @@ -262,6 +295,20 @@ public class AggregationOperator implements Operator { return pagesProcessed; } + /** + * Count of rows this operator has received. + */ + public long rowsReceived() { + return rowsReceived; + } + + /** + * Count of rows this operator has emitted. 
+ */ + public long rowsEmitted() { + return rowsEmitted; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -277,6 +324,8 @@ public class AggregationOperator implements Operator { ); } builder.field("pages_processed", pagesProcessed); + builder.field("rows_received", rowsReceived); + builder.field("rows_emitted", rowsEmitted); return builder.endObject(); } @@ -287,13 +336,15 @@ public class AggregationOperator implements Operator { if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; return aggregationNanos == status.aggregationNanos + && Objects.equals(aggregationFinishNanos, status.aggregationFinishNanos) && pagesProcessed == status.pagesProcessed - && Objects.equals(aggregationFinishNanos, status.aggregationFinishNanos); + && rowsReceived == status.rowsReceived + && rowsEmitted == status.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(aggregationNanos, aggregationFinishNanos, pagesProcessed); + return Objects.hash(aggregationNanos, aggregationFinishNanos, pagesProcessed, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index 06b890603e48..df522e931ca0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -29,15 +29,18 @@ import java.util.Objects; import java.util.concurrent.atomic.LongAdder; /** - * {@link AsyncOperator} performs an external computation specified in {@link #performAsync(Page, ActionListener)}. - * This operator acts as a client and operates on a per-page basis to reduce communication overhead. 
+ * {@link AsyncOperator} performs an external computation specified in + * {@link #performAsync(Page, ActionListener)}. This operator acts as a client + * to reduce communication overhead and fetches a {@code Fetched} at a time. + * It's the responsibility of subclasses to transform that {@code Fetched} into + * output. * @see #performAsync(Page, ActionListener) */ -public abstract class AsyncOperator implements Operator { +public abstract class AsyncOperator implements Operator { private volatile SubscribableListener blockedFuture; - private final Map buffers = ConcurrentCollections.newConcurrentMap(); + private final Map buffers = ConcurrentCollections.newConcurrentMap(); private final FailureCollector failureCollector = new FailureCollector(); private final DriverContext driverContext; @@ -84,7 +87,7 @@ public abstract class AsyncOperator implements Operator { driverContext.addAsyncAction(); boolean success = false; try { - final ActionListener listener = ActionListener.wrap(output -> { + final ActionListener listener = ActionListener.wrap(output -> { buffers.put(seqNo, output); onSeqNoCompleted(seqNo); }, e -> { @@ -105,18 +108,20 @@ public abstract class AsyncOperator implements Operator { } } - private void releasePageOnAnyThread(Page page) { + protected static void releasePageOnAnyThread(Page page) { page.allowPassingToDifferentDriver(); page.releaseBlocks(); } + protected abstract void releaseFetchedOnAnyThread(Fetched result); + /** * Performs an external computation and notify the listener when the result is ready. 
* * @param inputPage the input page * @param listener the listener */ - protected abstract void performAsync(Page inputPage, ActionListener listener); + protected abstract void performAsync(Page inputPage, ActionListener listener); protected abstract void doClose(); @@ -126,7 +131,7 @@ public abstract class AsyncOperator implements Operator { notifyIfBlocked(); } if (closed || failureCollector.hasFailure()) { - discardPages(); + discardResults(); } } @@ -146,18 +151,18 @@ public abstract class AsyncOperator implements Operator { private void checkFailure() { Exception e = failureCollector.getFailure(); if (e != null) { - discardPages(); + discardResults(); throw ExceptionsHelper.convertToRuntime(e); } } - private void discardPages() { + private void discardResults() { long nextCheckpoint; while ((nextCheckpoint = checkpoint.getPersistedCheckpoint() + 1) <= checkpoint.getProcessedCheckpoint()) { - Page page = buffers.remove(nextCheckpoint); + Fetched result = buffers.remove(nextCheckpoint); checkpoint.markSeqNoAsPersisted(nextCheckpoint); - if (page != null) { - releasePageOnAnyThread(page); + if (result != null) { + releaseFetchedOnAnyThread(result); } } } @@ -166,7 +171,7 @@ public abstract class AsyncOperator implements Operator { public final void close() { finish(); closed = true; - discardPages(); + discardResults(); doClose(); } @@ -185,15 +190,18 @@ public abstract class AsyncOperator implements Operator { } } - @Override - public Page getOutput() { + /** + * Get a {@link Fetched} from the buffer. + * @return a result if one is ready or {@code null} if none are available. 
+ */ + public final Fetched fetchFromBuffer() { checkFailure(); long persistedCheckpoint = checkpoint.getPersistedCheckpoint(); if (persistedCheckpoint < checkpoint.getProcessedCheckpoint()) { persistedCheckpoint++; - Page page = buffers.remove(persistedCheckpoint); + Fetched result = buffers.remove(persistedCheckpoint); checkpoint.markSeqNoAsPersisted(persistedCheckpoint); - return page; + return result; } else { return null; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index acbf8a17b31f..78572f55cd5e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -74,10 +75,9 @@ public class Driver implements Releasable, Describable { private final long statusNanos; private final AtomicReference cancelReason = new AtomicReference<>(); - private final AtomicReference> blocked = new AtomicReference<>(); - private final AtomicBoolean started = new AtomicBoolean(); private final SubscribableListener completionListener = new SubscribableListener<>(); + private final DriverScheduler scheduler = new DriverScheduler(); /** * Status reported to the tasks API. 
We write the status at most once every @@ -186,7 +186,13 @@ public class Driver implements Releasable, Describable { long nextStatus = startTime + statusNanos; int iter = 0; while (true) { - IsBlockedResult isBlocked = runSingleLoopIteration(); + IsBlockedResult isBlocked = Operator.NOT_BLOCKED; + try { + isBlocked = runSingleLoopIteration(); + } catch (DriverEarlyTerminationException unused) { + closeEarlyFinishedOperators(); + assert isFinished() : "not finished after early termination"; + } iter++; if (isBlocked.listener().isDone() == false) { updateStatus(nowSupplier.getAsLong() - startTime, iter, DriverStatus.Status.ASYNC, isBlocked.reason()); @@ -242,7 +248,7 @@ public class Driver implements Releasable, Describable { } private IsBlockedResult runSingleLoopIteration() { - ensureNotCancelled(); + driverContext.checkForEarlyTermination(); boolean movedPage = false; for (int i = 0; i < activeOperators.size() - 1; i++) { @@ -255,6 +261,7 @@ public class Driver implements Releasable, Describable { } if (op.isFinished() == false && nextOp.needsInput()) { + driverContext.checkForEarlyTermination(); Page page = op.getOutput(); if (page == null) { // No result, just move to the next iteration @@ -263,16 +270,37 @@ public class Driver implements Releasable, Describable { page.releaseBlocks(); } else { // Non-empty result from the previous operation, move it to the next operation + try { + driverContext.checkForEarlyTermination(); + } catch (DriverEarlyTerminationException | TaskCancelledException e) { + page.releaseBlocks(); + throw e; + } nextOp.addInput(page); movedPage = true; } } if (op.isFinished()) { + driverContext.checkForEarlyTermination(); nextOp.finish(); } } + closeEarlyFinishedOperators(); + + if (movedPage == false) { + return oneOf( + activeOperators.stream() + .map(Operator::isBlocked) + .filter(laf -> laf.listener().isDone() == false) + .collect(Collectors.toList()) + ); + } + return Operator.NOT_BLOCKED; + } + + private void 
closeEarlyFinishedOperators() { for (int index = activeOperators.size() - 1; index >= 0; index--) { if (activeOperators.get(index).isFinished()) { /* @@ -298,37 +326,11 @@ public class Driver implements Releasable, Describable { break; } } - - if (movedPage == false) { - return oneOf( - activeOperators.stream() - .map(Operator::isBlocked) - .filter(laf -> laf.listener().isDone() == false) - .collect(Collectors.toList()) - ); - } - return Operator.NOT_BLOCKED; } public void cancel(String reason) { if (cancelReason.compareAndSet(null, reason)) { - synchronized (this) { - SubscribableListener fut = this.blocked.get(); - if (fut != null) { - fut.onFailure(new TaskCancelledException(reason)); - } - } - } - } - - private boolean isCancelled() { - return cancelReason.get() != null; - } - - private void ensureNotCancelled() { - String reason = cancelReason.get(); - if (reason != null) { - throw new TaskCancelledException(reason); + scheduler.runPendingTasks(); } } @@ -342,10 +344,36 @@ public class Driver implements Releasable, Describable { driver.completionListener.addListener(listener); if (driver.started.compareAndSet(false, true)) { driver.updateStatus(0, 0, DriverStatus.Status.STARTING, "driver starting"); + initializeEarlyTerminationChecker(driver); schedule(DEFAULT_TIME_BEFORE_YIELDING, maxIterations, threadContext, executor, driver, driver.completionListener); } } + private static void initializeEarlyTerminationChecker(Driver driver) { + // Register a listener to an exchange sink to handle early completion scenarios: + // 1. When the query accumulates sufficient data (e.g., reaching the LIMIT). + // 2. When users abort the query but want to retain the current result. + // This allows the Driver to finish early without waiting for the scheduled task. 
+ final AtomicBoolean earlyFinished = new AtomicBoolean(); + driver.driverContext.initializeEarlyTerminationChecker(() -> { + final String reason = driver.cancelReason.get(); + if (reason != null) { + throw new TaskCancelledException(reason); + } + if (earlyFinished.get()) { + throw new DriverEarlyTerminationException("Exchange sink is closed"); + } + }); + if (driver.activeOperators.isEmpty() == false) { + if (driver.activeOperators.getLast() instanceof ExchangeSinkOperator sinkOperator) { + sinkOperator.addCompletionListener(ActionListener.running(() -> { + earlyFinished.set(true); + driver.scheduler.runPendingTasks(); + })); + } + } + } + // Drains all active operators and closes them. private void drainAndCloseOperators(@Nullable Exception e) { Iterator itr = activeOperators.iterator(); @@ -371,7 +399,7 @@ public class Driver implements Releasable, Describable { Driver driver, ActionListener listener ) { - executor.execute(new AbstractRunnable() { + final var task = new AbstractRunnable() { @Override protected void doRun() { @@ -383,16 +411,12 @@ public class Driver implements Releasable, Describable { if (fut.isDone()) { schedule(maxTime, maxIterations, threadContext, executor, driver, listener); } else { - synchronized (driver) { - if (driver.isCancelled() == false) { - driver.blocked.set(fut); - } - } ActionListener readyListener = ActionListener.wrap( ignored -> schedule(maxTime, maxIterations, threadContext, executor, driver, listener), this::onFailure ); fut.addListener(ContextPreservingActionListener.wrapPreservingContext(readyListener, threadContext)); + driver.scheduler.addOrRunDelayedTask(() -> fut.onResponse(null)); } } @@ -405,7 +429,8 @@ public class Driver implements Releasable, Describable { void onComplete(ActionListener listener) { driver.driverContext.waitForAsyncActions(ContextPreservingActionListener.wrapPreservingContext(listener, threadContext)); } - }); + }; + driver.scheduler.scheduleOrRunTask(executor, task); } private static 
IsBlockedResult oneOf(List results) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index 843aa4aaaa88..1877f564677b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -60,6 +60,8 @@ public class DriverContext { private final WarningsMode warningsMode; + private Runnable earlyTerminationChecker = () -> {}; + public DriverContext(BigArrays bigArrays, BlockFactory blockFactory) { this(bigArrays, blockFactory, WarningsMode.COLLECT); } @@ -175,6 +177,21 @@ public class DriverContext { asyncActions.removeInstance(); } + /** + * Checks if the Driver associated with this DriverContext has been cancelled or early terminated. + */ + public void checkForEarlyTermination() { + earlyTerminationChecker.run(); + } + + /** + * Initializes the early termination or cancellation checker for this DriverContext. + * This method should be called when associating this DriverContext with a driver. + */ + public void initializeEarlyTerminationChecker(Runnable checker) { + this.earlyTerminationChecker = checker; + } + /** * Evaluators should use this function to decide their warning behavior. * @return an appropriate {@link WarningsMode} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverEarlyTerminationException.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverEarlyTerminationException.java new file mode 100644 index 000000000000..6f79a6341df7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverEarlyTerminationException.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.ElasticsearchException; + +/** + * An exception indicates that a compute should be terminated early as the downstream pipeline has enough or no long requires more data. + */ +public final class DriverEarlyTerminationException extends ElasticsearchException { + public DriverEarlyTerminationException(String message) { + super(message); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java new file mode 100644 index 000000000000..05fe38007a92 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.util.concurrent.EsExecutors; + +import java.util.List; +import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +/** + * A Driver be put to sleep while its sink is full or its source is empty or be rescheduled after running several iterations. + * This scheduler tracks the delayed and scheduled tasks, allowing them to run without waking up the driver or waiting for + * the thread pool to pick up the task. This enables fast cancellation or early finishing without discarding the current result. 
+ */ +final class DriverScheduler { + private final AtomicReference delayedTask = new AtomicReference<>(); + private final AtomicReference scheduledTask = new AtomicReference<>(); + private final AtomicBoolean completing = new AtomicBoolean(); + + void addOrRunDelayedTask(Runnable task) { + delayedTask.set(task); + if (completing.get()) { + final Runnable toRun = delayedTask.getAndSet(null); + if (toRun != null) { + assert task == toRun; + toRun.run(); + } + } + } + + void scheduleOrRunTask(Executor executor, Runnable task) { + final Runnable existing = scheduledTask.getAndSet(task); + assert existing == null : existing; + final Executor executorToUse = completing.get() ? EsExecutors.DIRECT_EXECUTOR_SERVICE : executor; + executorToUse.execute(() -> { + final Runnable next = scheduledTask.getAndSet(null); + if (next != null) { + assert next == task; + next.run(); + } + }); + } + + void runPendingTasks() { + completing.set(true); + for (var taskHolder : List.of(delayedTask, scheduledTask)) { + final Runnable task = taskHolder.getAndSet(null); + if (task != null) { + task.run(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index ccddfdf5cc74..c47b6cebdadd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -99,6 +99,14 @@ public class HashAggregationOperator implements Operator { * Count of pages this operator has processed. */ private int pagesProcessed; + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + /** + * Count of rows this operator has emitted. 
+ */ + private long rowsEmitted; @SuppressWarnings("this-escape") public HashAggregationOperator( @@ -187,12 +195,16 @@ public class HashAggregationOperator implements Operator { } finally { page.releaseBlocks(); pagesProcessed++; + rowsReceived += page.getPositionCount(); } } @Override public Page getOutput() { Page p = output; + if (p != null) { + rowsEmitted += p.getPositionCount(); + } output = null; return p; } @@ -246,7 +258,7 @@ public class HashAggregationOperator implements Operator { @Override public Operator.Status status() { - return new Status(hashNanos, aggregationNanos, pagesProcessed); + return new Status(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted); } protected static void checkState(boolean condition, String msg) { @@ -288,23 +300,43 @@ public class HashAggregationOperator implements Operator { * Count of pages this operator has processed. */ private final int pagesProcessed; + /** + * Count of rows this operator has received. + */ + private final long rowsReceived; + /** + * Count of rows this operator has emitted. + */ + private final long rowsEmitted; /** * Build. * @param hashNanos Nanoseconds this operator has spent hashing grouping keys. * @param aggregationNanos Nanoseconds this operator has spent running the aggregations. * @param pagesProcessed Count of pages this operator has processed. + * @param rowsReceived Count of rows this operator has received. + * @param rowsEmitted Count of rows this operator has emitted. 
*/ - public Status(long hashNanos, long aggregationNanos, int pagesProcessed) { + public Status(long hashNanos, long aggregationNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) { this.hashNanos = hashNanos; this.aggregationNanos = aggregationNanos; this.pagesProcessed = pagesProcessed; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } protected Status(StreamInput in) throws IOException { hashNanos = in.readVLong(); aggregationNanos = in.readVLong(); pagesProcessed = in.readVInt(); + + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + rowsEmitted = in.readVLong(); + } else { + rowsReceived = 0; + rowsEmitted = 0; + } } @Override @@ -312,6 +344,11 @@ public class HashAggregationOperator implements Operator { out.writeVLong(hashNanos); out.writeVLong(aggregationNanos); out.writeVInt(pagesProcessed); + + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } @Override @@ -340,6 +377,20 @@ public class HashAggregationOperator implements Operator { return pagesProcessed; } + /** + * Count of rows this operator has received. + */ + public long rowsReceived() { + return rowsReceived; + } + + /** + * Count of rows this operator has emitted. 
+ */ + public long rowsEmitted() { + return rowsEmitted; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -352,6 +403,8 @@ public class HashAggregationOperator implements Operator { builder.field("aggregation_time", TimeValue.timeValueNanos(aggregationNanos)); } builder.field("pages_processed", pagesProcessed); + builder.field("rows_received", rowsReceived); + builder.field("rows_emitted", rowsEmitted); return builder.endObject(); } @@ -361,12 +414,16 @@ public class HashAggregationOperator implements Operator { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; - return hashNanos == status.hashNanos && aggregationNanos == status.aggregationNanos && pagesProcessed == status.pagesProcessed; + return hashNanos == status.hashNanos + && aggregationNanos == status.aggregationNanos + && pagesProcessed == status.pagesProcessed + && rowsReceived == status.rowsReceived + && rowsEmitted == status.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(hashNanos, aggregationNanos, pagesProcessed); + return Objects.hash(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java index 34e37031e6f1..b669be9192d0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java @@ -37,6 +37,16 @@ public class LimitOperator implements Operator { */ private int pagesProcessed; + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + + /** + * Count of rows this operator has emitted. 
+ */ + private long rowsEmitted; + private Page lastInput; private boolean finished; @@ -67,6 +77,7 @@ public class LimitOperator implements Operator { public void addInput(Page page) { assert lastInput == null : "has pending input page"; lastInput = page; + rowsReceived += page.getPositionCount(); } @Override @@ -117,13 +128,14 @@ public class LimitOperator implements Operator { } lastInput = null; pagesProcessed++; + rowsEmitted += result.getPositionCount(); return result; } @Override public Status status() { - return new Status(limit, limitRemaining, pagesProcessed); + return new Status(limit, limitRemaining, pagesProcessed, rowsReceived, rowsEmitted); } @Override @@ -160,16 +172,35 @@ public class LimitOperator implements Operator { */ private final int pagesProcessed; - protected Status(int limit, int limitRemaining, int pagesProcessed) { + /** + * Count of rows this operator has received. + */ + private final long rowsReceived; + + /** + * Count of rows this operator has emitted. + */ + private final long rowsEmitted; + + protected Status(int limit, int limitRemaining, int pagesProcessed, long rowsReceived, long rowsEmitted) { this.limit = limit; this.limitRemaining = limitRemaining; this.pagesProcessed = pagesProcessed; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } protected Status(StreamInput in) throws IOException { limit = in.readVInt(); limitRemaining = in.readVInt(); pagesProcessed = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + rowsEmitted = in.readVLong(); + } else { + rowsReceived = 0; + rowsEmitted = 0; + } } @Override @@ -177,6 +208,10 @@ public class LimitOperator implements Operator { out.writeVInt(limit); out.writeVInt(limitRemaining); out.writeVInt(pagesProcessed); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } 
@Override @@ -205,12 +240,28 @@ public class LimitOperator implements Operator { return pagesProcessed; } + /** + * Count of rows this operator has received. + */ + public long rowsReceived() { + return rowsReceived; + } + + /** + * Count of rows this operator has emitted. + */ + public long rowsEmitted() { + return rowsEmitted; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("limit", limit); builder.field("limit_remaining", limitRemaining); builder.field("pages_processed", pagesProcessed); + builder.field("rows_received", rowsReceived); + builder.field("rows_emitted", rowsEmitted); return builder.endObject(); } @@ -219,12 +270,16 @@ public class LimitOperator implements Operator { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; - return limit == status.limit && limitRemaining == status.limitRemaining && pagesProcessed == status.pagesProcessed; + return limit == status.limit + && limitRemaining == status.limitRemaining + && pagesProcessed == status.pagesProcessed + && rowsReceived == status.rowsReceived + && rowsEmitted == status.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(limit, limitRemaining, pagesProcessed); + return Objects.hash(limit, limitRemaining, pagesProcessed, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java index e87329a90705..1659a88a84cd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java @@ -69,10 +69,21 @@ public class MvExpandOperator implements Operator { private int nextItemOnExpanded = 0; /** - * 
Count of pages that have been processed by this operator. + * Count of pages that this operator has received. */ - private int pagesIn; - private int pagesOut; + private int pagesReceived; + /** + * Count of pages this operator has emitted. + */ + private int pagesEmitted; + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + /** + * Count of rows this operator has emitted. + */ + private long rowsEmitted; public MvExpandOperator(int channel, int pageSize) { this.channel = channel; @@ -82,10 +93,18 @@ public class MvExpandOperator implements Operator { @Override public final Page getOutput() { + Page result = getOutputInternal(); + if (result != null) { + pagesEmitted++; + rowsEmitted += result.getPositionCount(); + } + return result; + } + + private Page getOutputInternal() { if (prev == null) { return null; } - pagesOut++; if (expandedBlock == null) { /* @@ -214,7 +233,8 @@ public class MvExpandOperator implements Operator { assert prev == null : "has pending input page"; prev = page; this.expandingBlock = prev.getBlock(channel); - pagesIn++; + pagesReceived++; + rowsReceived += page.getPositionCount(); } @Override @@ -229,7 +249,7 @@ public class MvExpandOperator implements Operator { @Override public final Status status() { - return new Status(pagesIn, pagesOut, noops); + return new Status(pagesReceived, pagesEmitted, noops, rowsReceived, rowsEmitted); } @Override @@ -248,9 +268,11 @@ public class MvExpandOperator implements Operator { public static final class Status implements Operator.Status { - private final int pagesIn; - private final int pagesOut; + private final int pagesReceived; + private final int pagesEmitted; private final int noops; + private final long rowsReceived; + private final long rowsEmitted; public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Operator.Status.class, @@ -258,31 +280,46 @@ public class MvExpandOperator implements Operator { Status::new ); - Status(int 
pagesIn, int pagesOut, int noops) { - this.pagesIn = pagesIn; - this.pagesOut = pagesOut; + Status(int pagesReceived, int pagesEmitted, int noops, long rowsReceived, long rowsEmitted) { + this.pagesReceived = pagesReceived; + this.pagesEmitted = pagesEmitted; this.noops = noops; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } Status(StreamInput in) throws IOException { - pagesIn = in.readVInt(); - pagesOut = in.readVInt(); + pagesReceived = in.readVInt(); + pagesEmitted = in.readVInt(); noops = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + rowsEmitted = in.readVLong(); + } else { + rowsReceived = 0; + rowsEmitted = 0; + } } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(pagesIn); - out.writeVInt(pagesOut); + out.writeVInt(pagesReceived); + out.writeVInt(pagesEmitted); out.writeVInt(noops); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("pages_in", pagesIn); - builder.field("pages_out", pagesOut); + builder.field("pages_received", pagesReceived); + builder.field("pages_emitted", pagesEmitted); builder.field("noops", noops); + builder.field("rows_received", rowsReceived); + builder.field("rows_emitted", rowsEmitted); return builder.endObject(); } @@ -304,20 +341,32 @@ public class MvExpandOperator implements Operator { return false; } Status status = (Status) o; - return noops == status.noops && pagesIn == status.pagesIn && pagesOut == status.pagesOut; + return noops == status.noops + && pagesReceived == status.pagesReceived + && pagesEmitted == status.pagesEmitted + && rowsReceived == status.rowsReceived + && rowsEmitted == status.rowsEmitted; } - public int 
pagesIn() { - return pagesIn; + public int pagesReceived() { + return pagesReceived; } - public int pagesOut() { - return pagesOut; + public int pagesEmitted() { + return pagesEmitted; + } + + public long rowsReceived() { + return rowsReceived; + } + + public long rowsEmitted() { + return rowsEmitted; } @Override public int hashCode() { - return Objects.hash(noops, pagesIn, pagesOut); + return Objects.hash(noops, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSink.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSink.java index e96ca9e39b7e..ee939c9a07e0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSink.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSink.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator.exchange; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.IsBlockedResult; @@ -30,6 +31,11 @@ public interface ExchangeSink { */ boolean isFinished(); + /** + * Adds a listener that will be notified when this exchange sink is finished. 
+ */ + void addCompletionListener(ActionListener listener); + /** * Whether the sink is blocked on adding more pages */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java index 614c3fe0ecc5..21eb2ed56561 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java @@ -52,9 +52,11 @@ public final class ExchangeSinkHandler { private class ExchangeSinkImpl implements ExchangeSink { boolean finished; + private final SubscribableListener onFinished = new SubscribableListener<>(); ExchangeSinkImpl() { onChanged(); + buffer.addCompletionListener(onFinished); outstandingSinks.incrementAndGet(); } @@ -68,6 +70,7 @@ public final class ExchangeSinkHandler { public void finish() { if (finished == false) { finished = true; + onFinished.onResponse(null); onChanged(); if (outstandingSinks.decrementAndGet() == 0) { buffer.finish(false); @@ -78,7 +81,12 @@ public final class ExchangeSinkHandler { @Override public boolean isFinished() { - return finished || buffer.isFinished(); + return onFinished.isDone(); + } + + @Override + public void addCompletionListener(ActionListener listener) { + onFinished.addListener(listener); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java index dd89dfe480c3..f87edd1a3e16 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java @@ -9,6 +9,7 @@ package org.elasticsearch.compute.operator.exchange; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -32,7 +33,8 @@ public class ExchangeSinkOperator extends SinkOperator { private final ExchangeSink sink; private final Function transformer; - private int pagesAccepted; + private int pagesReceived; + private long rowsReceived; public record ExchangeSinkOperatorFactory(Supplier exchangeSinks, Function transformer) implements @@ -59,6 +61,10 @@ public class ExchangeSinkOperator extends SinkOperator { return sink.isFinished(); } + public void addCompletionListener(ActionListener listener) { + sink.addCompletionListener(listener); + } + @Override public void finish() { sink.finish(); @@ -76,7 +82,8 @@ public class ExchangeSinkOperator extends SinkOperator { @Override protected void doAddInput(Page page) { - pagesAccepted++; + pagesReceived++; + rowsReceived += page.getPositionCount(); sink.addPage(transformer.apply(page)); } @@ -92,7 +99,7 @@ public class ExchangeSinkOperator extends SinkOperator { @Override public Status status() { - return new Status(pagesAccepted); + return new Status(pagesReceived, rowsReceived); } public static class Status implements Operator.Status { @@ -102,19 +109,31 @@ public class ExchangeSinkOperator extends SinkOperator { Status::new ); - private final int pagesAccepted; + private final int pagesReceived; + private final long rowsReceived; - Status(int pagesAccepted) { - this.pagesAccepted = pagesAccepted; + Status(int pagesReceived, long rowsReceived) { + this.pagesReceived = pagesReceived; + this.rowsReceived = rowsReceived; } Status(StreamInput in) throws 
IOException { - pagesAccepted = in.readVInt(); + pagesReceived = in.readVInt(); + + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsReceived = in.readVLong(); + } else { + rowsReceived = 0; + } } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(pagesAccepted); + out.writeVInt(pagesReceived); + + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsReceived); + } } @Override @@ -122,14 +141,19 @@ public class ExchangeSinkOperator extends SinkOperator { return ENTRY.name; } - public int pagesAccepted() { - return pagesAccepted; + public int pagesReceived() { + return pagesReceived; + } + + public long rowsReceived() { + return rowsReceived; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("pages_accepted", pagesAccepted); + builder.field("pages_received", pagesReceived); + builder.field("rows_received", rowsReceived); return builder.endObject(); } @@ -138,12 +162,12 @@ public class ExchangeSinkOperator extends SinkOperator { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; - return pagesAccepted == status.pagesAccepted; + return pagesReceived == status.pagesReceived && rowsReceived == status.rowsReceived; } @Override public int hashCode() { - return Objects.hash(pagesAccepted); + return Objects.hash(pagesReceived, rowsReceived); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperator.java index 2d0ce228e81d..3a96f1bb1d36 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperator.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperator.java @@ -32,6 +32,7 @@ public class ExchangeSourceOperator extends SourceOperator { private final ExchangeSource source; private IsBlockedResult isBlocked = NOT_BLOCKED; private int pagesEmitted; + private long rowsEmitted; public record ExchangeSourceOperatorFactory(Supplier exchangeSources) implements SourceOperatorFactory { @@ -55,6 +56,7 @@ public class ExchangeSourceOperator extends SourceOperator { final var page = source.pollPage(); if (page != null) { pagesEmitted++; + rowsEmitted += page.getPositionCount(); } return page; } @@ -92,7 +94,7 @@ public class ExchangeSourceOperator extends SourceOperator { @Override public Status status() { - return new Status(source.bufferSize(), pagesEmitted); + return new Status(source.bufferSize(), pagesEmitted, rowsEmitted); } public static class Status implements Operator.Status { @@ -104,21 +106,33 @@ public class ExchangeSourceOperator extends SourceOperator { private final int pagesWaiting; private final int pagesEmitted; + private final long rowsEmitted; - Status(int pagesWaiting, int pagesEmitted) { + Status(int pagesWaiting, int pagesEmitted, long rowsEmitted) { this.pagesWaiting = pagesWaiting; this.pagesEmitted = pagesEmitted; + this.rowsEmitted = rowsEmitted; } Status(StreamInput in) throws IOException { pagesWaiting = in.readVInt(); pagesEmitted = in.readVInt(); + + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + rowsEmitted = in.readVLong(); + } else { + rowsEmitted = 0; + } } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(pagesWaiting); out.writeVInt(pagesEmitted); + + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVLong(rowsEmitted); + } } @Override @@ -134,11 +148,16 @@ public class ExchangeSourceOperator extends SourceOperator { return pagesEmitted; } + public long 
rowsEmitted() { + return rowsEmitted; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("pages_waiting", pagesWaiting); builder.field("pages_emitted", pagesEmitted); + builder.field("rows_emitted", rowsEmitted); return builder.endObject(); } @@ -147,12 +166,12 @@ public class ExchangeSourceOperator extends SourceOperator { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; - return pagesWaiting == status.pagesWaiting && pagesEmitted == status.pagesEmitted; + return pagesWaiting == status.pagesWaiting && pagesEmitted == status.pagesEmitted && rowsEmitted == status.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(pagesWaiting, pagesEmitted); + return Objects.hash(pagesWaiting, pagesEmitted, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java index f9895ff346b5..2e2a0d383e6b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java @@ -128,7 +128,7 @@ import java.util.stream.IntStream; * | l99 | null | null | * } */ -class RightChunkedLeftJoin implements Releasable { +public class RightChunkedLeftJoin implements Releasable { private final Page leftHand; private final int mergedElementCount; /** @@ -138,12 +138,12 @@ class RightChunkedLeftJoin implements Releasable { */ private int next = 0; - RightChunkedLeftJoin(Page leftHand, int mergedElementCounts) { + public RightChunkedLeftJoin(Page leftHand, int mergedElementCounts) { this.leftHand = leftHand; this.mergedElementCount = mergedElementCounts; } - Page 
join(Page rightHand) { + public Page join(Page rightHand) { IntVector positions = rightHand.getBlock(0).asVector(); if (positions.getInt(0) < next - 1) { throw new IllegalArgumentException("maximum overlap is one position"); @@ -209,7 +209,7 @@ class RightChunkedLeftJoin implements Releasable { } } - Optional noMoreRightHandPages() { + public Optional noMoreRightHandPages() { if (next == leftHand.getPositionCount()) { return Optional.empty(); } @@ -237,6 +237,14 @@ class RightChunkedLeftJoin implements Releasable { } } + /** + * Release this on any thread, rather than just the thread that built it. + */ + public void releaseOnAnyThread() { + leftHand.allowPassingToDifferentDriver(); + leftHand.releaseBlocks(); + } + @Override public void close() { Releasables.close(leftHand::releaseBlocks); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 682a5d6050c2..0489be58fade 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -278,6 +278,26 @@ public class TopNOperator implements Operator, Accountable { private Iterator output; + /** + * Count of pages that have been received by this operator. + */ + private int pagesReceived; + + /** + * Count of pages that have been emitted by this operator. + */ + private int pagesEmitted; + + /** + * Count of rows this operator has received. + */ + private long rowsReceived; + + /** + * Count of rows this operator has emitted. 
+ */ + private long rowsEmitted; + public TopNOperator( BlockFactory blockFactory, CircuitBreaker breaker, @@ -368,7 +388,9 @@ public class TopNOperator implements Operator, Accountable { spare = inputQueue.insertWithOverflow(spare); } } finally { - Releasables.close(() -> page.releaseBlocks()); + page.releaseBlocks(); + pagesReceived++; + rowsReceived += page.getPositionCount(); } } @@ -491,10 +513,13 @@ public class TopNOperator implements Operator, Accountable { @Override public Page getOutput() { - if (output != null && output.hasNext()) { - return output.next(); + if (output == null || output.hasNext() == false) { + return null; } - return null; + Page ret = output.next(); + pagesEmitted++; + rowsEmitted += ret.getPositionCount(); + return ret; } @Override @@ -531,7 +556,7 @@ public class TopNOperator implements Operator, Accountable { @Override public Status status() { - return new TopNOperatorStatus(inputQueue.size(), ramBytesUsed()); + return new TopNOperatorStatus(inputQueue.size(), ramBytesUsed(), pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java index 1617a546be2c..ceccdce529ce 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java @@ -27,21 +27,55 @@ public class TopNOperatorStatus implements Operator.Status { ); private final int occupiedRows; private final long ramBytesUsed; + private final int pagesReceived; + private final int pagesEmitted; + private final long rowsReceived; + private final long rowsEmitted; - public TopNOperatorStatus(int occupiedRows, long ramBytesUsed) { + public TopNOperatorStatus( + int occupiedRows, + long 
ramBytesUsed, + int pagesReceived, + int pagesEmitted, + long rowsReceived, + long rowsEmitted + ) { this.occupiedRows = occupiedRows; this.ramBytesUsed = ramBytesUsed; + this.pagesReceived = pagesReceived; + this.pagesEmitted = pagesEmitted; + this.rowsReceived = rowsReceived; + this.rowsEmitted = rowsEmitted; } TopNOperatorStatus(StreamInput in) throws IOException { this.occupiedRows = in.readVInt(); this.ramBytesUsed = in.readVLong(); + + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + this.pagesReceived = in.readVInt(); + this.pagesEmitted = in.readVInt(); + this.rowsReceived = in.readVLong(); + this.rowsEmitted = in.readVLong(); + } else { + this.pagesReceived = 0; + this.pagesEmitted = 0; + this.rowsReceived = 0; + this.rowsEmitted = 0; + } } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(occupiedRows); out.writeVLong(ramBytesUsed); + + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { + out.writeVInt(pagesReceived); + out.writeVInt(pagesEmitted); + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); + } } @Override @@ -57,12 +91,32 @@ public class TopNOperatorStatus implements Operator.Status { return ramBytesUsed; } + public int pagesReceived() { + return pagesReceived; + } + + public int pagesEmitted() { + return pagesEmitted; + } + + public long rowsReceived() { + return rowsReceived; + } + + public long rowsEmitted() { + return rowsEmitted; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("occupied_rows", occupiedRows); builder.field("ram_bytes_used", ramBytesUsed); builder.field("ram_used", ByteSizeValue.ofBytes(ramBytesUsed)); + builder.field("pages_received", pagesReceived); + builder.field("pages_emitted", pagesEmitted); + builder.field("rows_received", rowsReceived); + builder.field("rows_emitted", rowsEmitted); return 
builder.endObject(); } @@ -72,12 +126,17 @@ public class TopNOperatorStatus implements Operator.Status { return false; } TopNOperatorStatus that = (TopNOperatorStatus) o; - return occupiedRows == that.occupiedRows && ramBytesUsed == that.ramBytesUsed; + return occupiedRows == that.occupiedRows + && ramBytesUsed == that.ramBytesUsed + && pagesReceived == that.pagesReceived + && pagesEmitted == that.pagesEmitted + && rowsReceived == that.rowsReceived + && rowsEmitted == that.rowsEmitted; } @Override public int hashCode() { - return Objects.hash(occupiedRows, ramBytesUsed); + return Objects.hash(occupiedRows, ramBytesUsed, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java index def0710644d2..28aa9e7976c7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java @@ -20,7 +20,19 @@ import static org.hamcrest.Matchers.equalTo; public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTestCase { public static LuceneSourceOperator.Status simple() { - return new LuceneSourceOperator.Status(2, Set.of("*:*"), new TreeSet<>(List.of("a:0", "a:1")), 1002, 0, 1, 5, 123, 99990, 8000); + return new LuceneSourceOperator.Status( + 2, + Set.of("*:*"), + new TreeSet<>(List.of("a:0", "a:1")), + 1002, + 0, + 1, + 5, + 123, + 99990, + 8000, + 222 + ); } public static String simpleToJson() { @@ -41,7 +53,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest "pages_emitted" : 5, "slice_min" : 123, "slice_max" : 99990, - "current" : 8000 + "current" : 8000, + "rows_emitted" : 222 }"""; } @@ -66,7 +79,8 @@ public 
class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeInt(), - randomNonNegativeInt() + randomNonNegativeInt(), + randomNonNegativeLong() ); } @@ -100,7 +114,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest int sliceMin = instance.sliceMin(); int sliceMax = instance.sliceMax(); int current = instance.current(); - switch (between(0, 9)) { + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 10)) { case 0 -> processedSlices = randomValueOtherThan(processedSlices, ESTestCase::randomNonNegativeInt); case 1 -> processedQueries = randomValueOtherThan(processedQueries, LuceneSourceOperatorStatusTests::randomProcessedQueries); case 2 -> processedShards = randomValueOtherThan(processedShards, LuceneSourceOperatorStatusTests::randomProcessedShards); @@ -111,6 +126,7 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest case 7 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt); case 8 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt); case 9 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt); + case 10 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } return new LuceneSourceOperator.Status( @@ -123,7 +139,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest pagesEmitted, sliceMin, sliceMax, - current + current, + rowsEmitted ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java index 5887da0bc466..4303137f74bb 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java @@ -20,7 +20,7 @@ import static org.hamcrest.Matchers.equalTo; public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializingTestCase { public static ValuesSourceReaderOperator.Status simple() { - return new ValuesSourceReaderOperator.Status(Map.of("ReaderType", 3), 1022323, 123); + return new ValuesSourceReaderOperator.Status(Map.of("ReaderType", 3), 1022323, 123, 111, 222); } public static String simpleToJson() { @@ -31,7 +31,9 @@ public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializi }, "process_nanos" : 1022323, "process_time" : "1ms", - "pages_processed" : 123 + "pages_processed" : 123, + "rows_received" : 111, + "rows_emitted" : 222 }"""; } @@ -46,7 +48,13 @@ public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializi @Override public ValuesSourceReaderOperator.Status createTestInstance() { - return new ValuesSourceReaderOperator.Status(randomReadersBuilt(), randomNonNegativeLong(), randomNonNegativeInt()); + return new ValuesSourceReaderOperator.Status( + randomReadersBuilt(), + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); } private Map randomReadersBuilt() { @@ -63,12 +71,16 @@ public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializi Map readersBuilt = instance.readersBuilt(); long processNanos = instance.processNanos(); int pagesProcessed = instance.pagesProcessed(); - switch (between(0, 2)) { + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 4)) { case 0 -> readersBuilt = randomValueOtherThan(readersBuilt, this::randomReadersBuilt); case 1 -> processNanos = randomValueOtherThan(processNanos, 
ESTestCase::randomNonNegativeLong); case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); + case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } - return new ValuesSourceReaderOperator.Status(readersBuilt, processNanos, pagesProcessed); + return new ValuesSourceReaderOperator.Status(readersBuilt, processNanos, pagesProcessed, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingOperatorStatusTests.java index 3c04e6e5a9f5..3e8aaf4f6b0b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingOperatorStatusTests.java @@ -16,7 +16,7 @@ import static org.hamcrest.Matchers.equalTo; public class AbstractPageMappingOperatorStatusTests extends AbstractWireSerializingTestCase { public static AbstractPageMappingOperator.Status simple() { - return new AbstractPageMappingOperator.Status(200012, 123); + return new AbstractPageMappingOperator.Status(200012, 123, 111, 222); } public static String simpleToJson() { @@ -24,7 +24,9 @@ public class AbstractPageMappingOperatorStatusTests extends AbstractWireSerializ { "process_nanos" : 200012, "process_time" : "200micros", - "pages_processed" : 123 + "pages_processed" : 123, + "rows_received" : 111, + "rows_emitted" : 222 }"""; } @@ -39,18 +41,27 @@ public class AbstractPageMappingOperatorStatusTests extends AbstractWireSerializ @Override public AbstractPageMappingOperator.Status createTestInstance() { - return new 
AbstractPageMappingOperator.Status(randomNonNegativeLong(), randomNonNegativeInt()); + return new AbstractPageMappingOperator.Status( + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); } @Override protected AbstractPageMappingOperator.Status mutateInstance(AbstractPageMappingOperator.Status instance) { long processNanos = instance.processNanos(); int pagesProcessed = instance.pagesProcessed(); - switch (between(0, 1)) { + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 3)) { case 0 -> processNanos = randomValueOtherThan(processNanos, ESTestCase::randomNonNegativeLong); case 1 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); + case 2 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 3 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } - return new AbstractPageMappingOperator.Status(processNanos, pagesProcessed); + return new AbstractPageMappingOperator.Status(processNanos, pagesProcessed, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperatorStatusTests.java index 41db82b9b4c8..b131c43ad648 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperatorStatusTests.java @@ -17,7 +17,7 @@ import static org.hamcrest.Matchers.equalTo; public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWireSerializingTestCase< 
AbstractPageMappingToIteratorOperator.Status> { public static AbstractPageMappingToIteratorOperator.Status simple() { - return new AbstractPageMappingToIteratorOperator.Status(200012, 123, 204); + return new AbstractPageMappingToIteratorOperator.Status(200012, 123, 204, 111, 222); } public static String simpleToJson() { @@ -26,7 +26,9 @@ public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWi "process_nanos" : 200012, "process_time" : "200micros", "pages_received" : 123, - "pages_emitted" : 204 + "pages_emitted" : 204, + "rows_received" : 111, + "rows_emitted" : 222 }"""; } @@ -41,7 +43,13 @@ public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWi @Override public AbstractPageMappingToIteratorOperator.Status createTestInstance() { - return new AbstractPageMappingToIteratorOperator.Status(randomNonNegativeLong(), randomNonNegativeInt(), randomNonNegativeInt()); + return new AbstractPageMappingToIteratorOperator.Status( + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); } @Override @@ -49,12 +57,16 @@ public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWi long processNanos = instance.processNanos(); int pagesReceived = instance.pagesReceived(); int pagesEmitted = instance.pagesEmitted(); - switch (between(0, 2)) { + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 4)) { case 0 -> processNanos = randomValueOtherThan(processNanos, ESTestCase::randomNonNegativeLong); case 1 -> pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt); case 2 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); + case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); 
default -> throw new UnsupportedOperationException(); } - return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted); + return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorStatusTests.java index f9d806b72cb4..ba6c3ea0c153 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorStatusTests.java @@ -16,7 +16,7 @@ import static org.hamcrest.Matchers.equalTo; public class AggregationOperatorStatusTests extends AbstractWireSerializingTestCase { public static AggregationOperator.Status simple() { - return new AggregationOperator.Status(200012, 400036, 123); + return new AggregationOperator.Status(200012, 400036, 123, 111, 222); } public static String simpleToJson() { @@ -26,7 +26,9 @@ public class AggregationOperatorStatusTests extends AbstractWireSerializingTestC "aggregation_time" : "200micros", "aggregation_finish_nanos" : 400036, "aggregation_finish_time" : "400micros", - "pages_processed" : 123 + "pages_processed" : 123, + "rows_received" : 111, + "rows_emitted" : 222 }"""; } @@ -41,7 +43,13 @@ public class AggregationOperatorStatusTests extends AbstractWireSerializingTestC @Override public AggregationOperator.Status createTestInstance() { - return new AggregationOperator.Status(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeInt()); + return new AggregationOperator.Status( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); } @Override @@ -49,12 +57,16 @@ 
public class AggregationOperatorStatusTests extends AbstractWireSerializingTestC long aggregationNanos = instance.aggregationNanos(); long aggregationFinishNanos = instance.aggregationFinishNanos(); int pagesProcessed = instance.pagesProcessed(); - switch (between(0, 2)) { + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 4)) { case 0 -> aggregationNanos = randomValueOtherThan(aggregationNanos, ESTestCase::randomNonNegativeLong); case 1 -> aggregationFinishNanos = randomValueOtherThan(aggregationFinishNanos, ESTestCase::randomNonNegativeLong); case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); + case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } - return new AggregationOperator.Status(aggregationNanos, aggregationFinishNanos, pagesProcessed); + return new AggregationOperator.Status(aggregationNanos, aggregationFinishNanos, pagesProcessed, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index ba3e7d816e42..3d4c8b8ed226 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -8,10 +8,20 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import 
org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matcher; +import java.io.IOException; + +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.matchesPattern; /** @@ -74,6 +84,38 @@ public abstract class AnyOperatorTestCase extends ComputeTestCase { } } + /** + * Ensures that the Operator.Status of this operator has the standard fields. + */ + public void testOperatorStatus() throws IOException { + DriverContext driverContext = driverContext(); + try (var operator = simple().get(driverContext)) { + Operator.Status status = operator.status(); + + assumeTrue("Operator does not provide a status", status != null); + + var xContent = XContentType.JSON.xContent(); + try (var xContentBuilder = XContentBuilder.builder(xContent)) { + status.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + + var bytesReference = BytesReference.bytes(xContentBuilder); + var map = XContentHelper.convertToMap(bytesReference, false, xContentBuilder.contentType()).v2(); + + if (operator instanceof SourceOperator) { + assertThat(map, hasKey("pages_emitted")); + assertThat(map, hasKey("rows_emitted")); + } else if (operator instanceof SinkOperator) { + assertThat(map, hasKey("pages_received")); + assertThat(map, hasKey("rows_received")); + } else { + assertThat(map, either(hasKey("pages_processed")).or(both(hasKey("pages_received")).and(hasKey("pages_emitted")))); + assertThat(map, hasKey("rows_received")); + assertThat(map, hasKey("rows_emitted")); + } + } + } + } + /** * A {@link DriverContext} with a nonBreakingBigArrays. 
*/ diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index fbcf11cd948c..38f25244cd91 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -110,7 +110,7 @@ public class AsyncOperatorTests extends ESTestCase { } }; int maxConcurrentRequests = randomIntBetween(1, 10); - AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { + AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { final LookupService lookupService = new LookupService(threadPool, globalBlockFactory, dict, maxConcurrentRequests); @Override @@ -118,6 +118,16 @@ public class AsyncOperatorTests extends ESTestCase { lookupService.lookupAsync(inputPage, listener); } + @Override + public Page getOutput() { + return fetchFromBuffer(); + } + + @Override + protected void releaseFetchedOnAnyThread(Page page) { + releasePageOnAnyThread(page); + } + @Override public void doClose() { @@ -159,7 +169,7 @@ public class AsyncOperatorTests extends ESTestCase { Releasables.close(localBreaker); } - class TestOp extends AsyncOperator { + class TestOp extends AsyncOperator { Map> handlers = new HashMap<>(); TestOp(DriverContext driverContext, int maxOutstandingRequests) { @@ -171,6 +181,16 @@ public class AsyncOperatorTests extends ESTestCase { handlers.put(inputPage, listener); } + @Override + public Page getOutput() { + return fetchFromBuffer(); + } + + @Override + protected void releaseFetchedOnAnyThread(Page page) { + releasePageOnAnyThread(page); + } + @Override protected void doClose() { @@ -233,7 +253,7 @@ public class AsyncOperatorTests extends ESTestCase { ); int maxConcurrentRequests = randomIntBetween(1, 10); 
AtomicBoolean failed = new AtomicBoolean(); - AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { + AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { @Override protected void performAsync(Page inputPage, ActionListener listener) { ActionRunnable command = new ActionRunnable<>(listener) { @@ -256,6 +276,16 @@ public class AsyncOperatorTests extends ESTestCase { } } + @Override + public Page getOutput() { + return fetchFromBuffer(); + } + + @Override + protected void releaseFetchedOnAnyThread(Page page) { + releasePageOnAnyThread(page); + } + @Override protected void doClose() { @@ -285,7 +315,7 @@ public class AsyncOperatorTests extends ESTestCase { for (int i = 0; i < iters; i++) { DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); CyclicBarrier barrier = new CyclicBarrier(2); - AsyncOperator asyncOperator = new AsyncOperator(driverContext, between(1, 10)) { + AsyncOperator asyncOperator = new AsyncOperator(driverContext, between(1, 10)) { @Override protected void performAsync(Page inputPage, ActionListener listener) { ActionRunnable command = new ActionRunnable<>(listener) { @@ -302,6 +332,16 @@ public class AsyncOperatorTests extends ESTestCase { threadPool.executor(ESQL_TEST_EXECUTOR).execute(command); } + @Override + public Page getOutput() { + return fetchFromBuffer(); + } + + @Override + protected void releaseFetchedOnAnyThread(Page page) { + releasePageOnAnyThread(page); + } + @Override protected void doClose() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index 694aaba4bd85..b067c44a289b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ 
-9,6 +9,7 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -18,9 +19,14 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.compute.data.BasicBlockTests; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; +import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -35,8 +41,11 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; import java.util.function.LongSupplier; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; public class DriverTests extends ESTestCase { @@ -273,6 +282,76 @@ public class DriverTests extends ESTestCase { } } + public void testEarlyTermination() { + DriverContext driverContext = driverContext(); + ThreadPool threadPool = threadPool(); + try { + int positions = between(1000, 5000); + List inPages = randomList(1, 100, () -> { + var block = 
driverContext.blockFactory().newConstantIntBlockWith(randomInt(), positions); + return new Page(block); + }); + final var sourceOperator = new CannedSourceOperator(inPages.iterator()); + final int maxAllowedRows = between(1, 100); + final AtomicInteger processedRows = new AtomicInteger(0); + var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), positions, System::currentTimeMillis); + var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(), Function.identity()); + final var delayOperator = new EvalOperator(driverContext.blockFactory(), new EvalOperator.ExpressionEvaluator() { + @Override + public Block eval(Page page) { + for (int i = 0; i < page.getPositionCount(); i++) { + driverContext.checkForEarlyTermination(); + if (processedRows.incrementAndGet() >= maxAllowedRows) { + sinkHandler.fetchPageAsync(true, ActionListener.noop()); + } + } + return driverContext.blockFactory().newConstantBooleanBlockWith(true, page.getPositionCount()); + } + + @Override + public void close() { + + } + }); + Driver driver = new Driver(driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {}); + ThreadContext threadContext = threadPool.getThreadContext(); + PlainActionFuture future = new PlainActionFuture<>(); + + Driver.start(threadContext, threadPool.executor("esql"), driver, between(1, 1000), future); + future.actionGet(30, TimeUnit.SECONDS); + assertThat(processedRows.get(), equalTo(maxAllowedRows)); + } finally { + terminate(threadPool); + } + } + + public void testResumeOnEarlyFinish() throws Exception { + DriverContext driverContext = driverContext(); + ThreadPool threadPool = threadPool(); + try { + PlainActionFuture sourceFuture = new PlainActionFuture<>(); + var sourceHandler = new ExchangeSourceHandler(between(1, 5), threadPool.executor("esql"), sourceFuture); + var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), between(1, 5), System::currentTimeMillis); + var sourceOperator = new 
ExchangeSourceOperator(sourceHandler.createExchangeSource()); + var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(), Function.identity()); + Driver driver = new Driver(driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); + PlainActionFuture future = new PlainActionFuture<>(); + Driver.start(threadPool.getThreadContext(), threadPool.executor("esql"), driver, between(1, 1000), future); + assertBusy( + () -> assertThat( + driver.status().status(), + either(equalTo(DriverStatus.Status.ASYNC)).or(equalTo(DriverStatus.Status.STARTING)) + ) + ); + sinkHandler.fetchPageAsync(true, ActionListener.noop()); + future.actionGet(5, TimeUnit.SECONDS); + assertThat(driver.status().status(), equalTo(DriverStatus.Status.DONE)); + sourceFuture.actionGet(5, TimeUnit.SECONDS); + } finally { + terminate(threadPool); + } + } + private static void assertRunningWithRegularUser(ThreadPool threadPool) { String user = threadPool.getThreadContext().getHeader("user"); assertThat(user, equalTo("user1")); @@ -291,7 +370,7 @@ public class DriverTests extends ESTestCase { return new Page(block.block()); } - static class SwitchContextOperator extends AsyncOperator { + static class SwitchContextOperator extends AsyncOperator { private final ThreadPool threadPool; SwitchContextOperator(DriverContext driverContext, ThreadPool threadPool) { @@ -314,6 +393,16 @@ public class DriverTests extends ESTestCase { }), TimeValue.timeValueNanos(between(1, 1_000_000)), threadPool.executor("esql")); } + @Override + public Page getOutput() { + return fetchFromBuffer(); + } + + @Override + protected void releaseFetchedOnAnyThread(Page page) { + releasePageOnAnyThread(page); + } + @Override protected void doClose() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorStatusTests.java index 
245ae171c630..93e1a9f8d221 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorStatusTests.java @@ -16,7 +16,7 @@ import static org.hamcrest.Matchers.equalTo; public class HashAggregationOperatorStatusTests extends AbstractWireSerializingTestCase { public static HashAggregationOperator.Status simple() { - return new HashAggregationOperator.Status(500012, 200012, 123); + return new HashAggregationOperator.Status(500012, 200012, 123, 111, 222); } public static String simpleToJson() { @@ -26,7 +26,9 @@ public class HashAggregationOperatorStatusTests extends AbstractWireSerializingT "hash_time" : "500micros", "aggregation_nanos" : 200012, "aggregation_time" : "200micros", - "pages_processed" : 123 + "pages_processed" : 123, + "rows_received" : 111, + "rows_emitted" : 222 }"""; } @@ -41,7 +43,13 @@ public class HashAggregationOperatorStatusTests extends AbstractWireSerializingT @Override public HashAggregationOperator.Status createTestInstance() { - return new HashAggregationOperator.Status(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeInt()); + return new HashAggregationOperator.Status( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); } @Override @@ -49,12 +57,16 @@ public class HashAggregationOperatorStatusTests extends AbstractWireSerializingT long hashNanos = instance.hashNanos(); long aggregationNanos = instance.aggregationNanos(); int pagesProcessed = instance.pagesProcessed(); - switch (between(0, 2)) { + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 4)) { case 0 -> hashNanos = randomValueOtherThan(hashNanos, ESTestCase::randomNonNegativeLong); case 1 -> aggregationNanos = randomValueOtherThan(aggregationNanos, 
ESTestCase::randomNonNegativeLong); case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); + case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } - return new HashAggregationOperator.Status(hashNanos, aggregationNanos, pagesProcessed); + return new HashAggregationOperator.Status(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitStatusTests.java index fd2b75f6bd81..016c8a85f94c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitStatusTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -17,8 +18,8 @@ import static org.hamcrest.Matchers.equalTo; public class LimitStatusTests extends AbstractWireSerializingTestCase { public void testToXContent() { - assertThat(Strings.toString(new LimitOperator.Status(10, 1, 1)), equalTo(""" - {"limit":10,"limit_remaining":1,"pages_processed":1}""")); + assertThat(Strings.toString(new LimitOperator.Status(10, 1, 1, 111, 222)), equalTo(""" + {"limit":10,"limit_remaining":1,"pages_processed":1,"rows_received":111,"rows_emitted":222}""")); } @Override @@ -28,7 +29,13 @@ public class LimitStatusTests extends AbstractWireSerializingTestCase between(0, Integer.MAX_VALUE)); + limit = 
randomValueOtherThan(limit, ESTestCase::randomNonNegativeInt); break; case 1: - limitRemaining = randomValueOtherThan(limitRemaining, () -> between(0, Integer.MAX_VALUE)); + limitRemaining = randomValueOtherThan(limitRemaining, ESTestCase::randomNonNegativeInt); break; case 2: - pagesProcessed = randomValueOtherThan(pagesProcessed, () -> between(0, Integer.MAX_VALUE)); + pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); + break; + case 3: + rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + break; + case 4: + rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); break; default: throw new IllegalArgumentException(); } - return new LimitOperator.Status(limit, limitRemaining, pagesProcessed); + return new LimitOperator.Status(limit, limitRemaining, pagesProcessed, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorStatusTests.java index 9527388a0d3c..a421ba360e4a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorStatusTests.java @@ -16,12 +16,12 @@ import static org.hamcrest.Matchers.equalTo; public class MvExpandOperatorStatusTests extends AbstractWireSerializingTestCase { public static MvExpandOperator.Status simple() { - return new MvExpandOperator.Status(10, 15, 9); + return new MvExpandOperator.Status(10, 15, 9, 111, 222); } public static String simpleToJson() { return """ - {"pages_in":10,"pages_out":15,"noops":9}"""; + {"pages_received":10,"pages_emitted":15,"noops":9,"rows_received":111,"rows_emitted":222}"""; } public void testToXContent() { @@ -35,32 +35,30 @@ public class 
MvExpandOperatorStatusTests extends AbstractWireSerializingTestCase @Override public MvExpandOperator.Status createTestInstance() { - return new MvExpandOperator.Status(randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeInt()); + return new MvExpandOperator.Status( + randomNonNegativeInt(), + randomNonNegativeInt(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); } @Override protected MvExpandOperator.Status mutateInstance(MvExpandOperator.Status instance) { - switch (between(0, 2)) { - case 0: - return new MvExpandOperator.Status( - randomValueOtherThan(instance.pagesIn(), ESTestCase::randomNonNegativeInt), - instance.pagesOut(), - instance.noops() - ); - case 1: - return new MvExpandOperator.Status( - instance.pagesIn(), - randomValueOtherThan(instance.pagesOut(), ESTestCase::randomNonNegativeInt), - instance.noops() - ); - case 2: - return new MvExpandOperator.Status( - instance.pagesIn(), - instance.pagesOut(), - randomValueOtherThan(instance.noops(), ESTestCase::randomNonNegativeInt) - ); - default: - throw new UnsupportedOperationException(); + int pagesReceived = instance.pagesReceived(); + int pagesEmitted = instance.pagesEmitted(); + int noops = instance.noops(); + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 4)) { + case 0 -> pagesReceived = randomValueOtherThan(instance.pagesReceived(), ESTestCase::randomNonNegativeInt); + case 1 -> pagesEmitted = randomValueOtherThan(instance.pagesEmitted(), ESTestCase::randomNonNegativeInt); + case 2 -> noops = randomValueOtherThan(instance.noops(), ESTestCase::randomNonNegativeInt); + case 3 -> rowsReceived = randomValueOtherThan(instance.rowsReceived(), ESTestCase::randomNonNegativeLong); + case 4 -> rowsEmitted = randomValueOtherThan(instance.rowsEmitted(), ESTestCase::randomNonNegativeLong); + default -> throw new UnsupportedOperationException(); } + return new MvExpandOperator.Status(pagesReceived, 
pagesEmitted, noops, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 9442fb05761d..b07ff8b0da57 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -210,8 +210,8 @@ public class MvExpandOperatorTests extends OperatorTestCase { assertThat(result, hasSize(1)); assertThat(valuesAtPositions(result.get(0).getBlock(0), 0, 2), equalTo(List.of(List.of(1), List.of(2)))); MvExpandOperator.Status status = op.status(); - assertThat(status.pagesIn(), equalTo(1)); - assertThat(status.pagesOut(), equalTo(1)); + assertThat(status.pagesReceived(), equalTo(1)); + assertThat(status.pagesEmitted(), equalTo(1)); assertThat(status.noops(), equalTo(1)); } @@ -223,8 +223,8 @@ public class MvExpandOperatorTests extends OperatorTestCase { assertThat(result, hasSize(1)); assertThat(valuesAtPositions(result.get(0).getBlock(0), 0, 2), equalTo(List.of(List.of(1), List.of(2)))); MvExpandOperator.Status status = op.status(); - assertThat(status.pagesIn(), equalTo(1)); - assertThat(status.pagesOut(), equalTo(1)); + assertThat(status.pagesReceived(), equalTo(1)); + assertThat(status.pagesEmitted(), equalTo(1)); assertThat(status.noops(), equalTo(0)); result.forEach(Page::releaseBlocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 54db0453530b..28b7095aa1bd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -212,7 +212,8 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion. List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); - List results = drive(simple().get(context), input.iterator(), context); + var operator = simple().get(context); + List results = drive(operator, input.iterator(), context); assertSimpleOutput(origInput, results); assertThat(context.breaker().getUsed(), equalTo(0L)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java index 369913c7d152..aa2ca2faebbd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.operator.exchange; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -21,13 +22,14 @@ public class ExchangeSinkOperatorStatusTests extends AbstractWireSerializingTest } public static ExchangeSinkOperator.Status simple() { - return new ExchangeSinkOperator.Status(10); + return new ExchangeSinkOperator.Status(10, 111); } public static String simpleToJson() { return """ { - "pages_accepted" : 10 + "pages_received" : 10, + "rows_received" : 111 }"""; } @@ -38,11 +40,18 @@ public class ExchangeSinkOperatorStatusTests extends 
AbstractWireSerializingTest @Override public ExchangeSinkOperator.Status createTestInstance() { - return new ExchangeSinkOperator.Status(between(0, Integer.MAX_VALUE)); + return new ExchangeSinkOperator.Status(randomNonNegativeInt(), randomNonNegativeLong()); } @Override protected ExchangeSinkOperator.Status mutateInstance(ExchangeSinkOperator.Status instance) throws IOException { - return new ExchangeSinkOperator.Status(randomValueOtherThan(instance.pagesAccepted(), () -> between(0, Integer.MAX_VALUE))); + int pagesReceived = instance.pagesReceived(); + long rowsReceived = instance.rowsReceived(); + switch (between(0, 1)) { + case 0 -> pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt); + case 1 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + default -> throw new UnsupportedOperationException(); + } + return new ExchangeSinkOperator.Status(pagesReceived, rowsReceived); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperatorStatusTests.java index 2c5f7eebbaf3..e99ea69af54d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceOperatorStatusTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.operator.exchange; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -17,8 +18,8 @@ import static org.hamcrest.Matchers.equalTo; public class ExchangeSourceOperatorStatusTests extends AbstractWireSerializingTestCase { public void 
testToXContent() { - assertThat(Strings.toString(new ExchangeSourceOperator.Status(0, 10)), equalTo(""" - {"pages_waiting":0,"pages_emitted":10}""")); + assertThat(Strings.toString(new ExchangeSourceOperator.Status(0, 10, 111)), equalTo(""" + {"pages_waiting":0,"pages_emitted":10,"rows_emitted":111}""")); } @Override @@ -28,24 +29,20 @@ public class ExchangeSourceOperatorStatusTests extends AbstractWireSerializingTe @Override protected ExchangeSourceOperator.Status createTestInstance() { - return new ExchangeSourceOperator.Status(between(0, Integer.MAX_VALUE), between(0, Integer.MAX_VALUE)); + return new ExchangeSourceOperator.Status(randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeLong()); } @Override protected ExchangeSourceOperator.Status mutateInstance(ExchangeSourceOperator.Status instance) throws IOException { - switch (between(0, 1)) { - case 0: - return new ExchangeSourceOperator.Status( - randomValueOtherThan(instance.pagesWaiting(), () -> between(0, Integer.MAX_VALUE)), - instance.pagesEmitted() - ); - case 1: - return new ExchangeSourceOperator.Status( - instance.pagesWaiting(), - randomValueOtherThan(instance.pagesEmitted(), () -> between(0, Integer.MAX_VALUE)) - ); - default: - throw new UnsupportedOperationException(); + int pagesWaiting = instance.pagesWaiting(); + int pagesEmitted = instance.pagesEmitted(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 2)) { + case 0 -> pagesWaiting = randomValueOtherThan(pagesWaiting, ESTestCase::randomNonNegativeInt); + case 1 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); + case 2 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); + default -> throw new UnsupportedOperationException(); } + return new ExchangeSourceOperator.Status(pagesWaiting, pagesEmitted, rowsEmitted); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java index f52274b68bdf..5faf5159a546 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java @@ -10,13 +10,30 @@ package org.elasticsearch.compute.operator.topn; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; public class TopNOperatorStatusTests extends AbstractWireSerializingTestCase { + public static TopNOperatorStatus simple() { + return new TopNOperatorStatus(10, 2000, 123, 123, 111, 222); + } + + public static String simpleToJson() { + return """ + { + "occupied_rows" : 10, + "ram_bytes_used" : 2000, + "ram_used" : "1.9kb", + "pages_received" : 123, + "pages_emitted" : 123, + "rows_received" : 111, + "rows_emitted" : 222 + }"""; + } + public void testToXContent() { - assertThat(Strings.toString(new TopNOperatorStatus(10, 2000)), equalTo(""" - {"occupied_rows":10,"ram_bytes_used":2000,"ram_used":"1.9kb"}""")); + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); } @Override @@ -26,23 +43,46 @@ public class TopNOperatorStatusTests extends AbstractWireSerializingTestCase randomNonNegativeInt()); + occupiedRows = randomValueOtherThan(occupiedRows, ESTestCase::randomNonNegativeInt); break; case 1: - ramBytesUsed = randomValueOtherThan(ramBytesUsed, () -> randomNonNegativeLong()); + ramBytesUsed = randomValueOtherThan(ramBytesUsed, ESTestCase::randomNonNegativeLong); + break; + case 2: + pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt); + break; + case 3: + pagesEmitted = randomValueOtherThan(pagesEmitted, 
ESTestCase::randomNonNegativeInt); + break; + case 4: + rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + break; + case 5: + rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); break; default: throw new IllegalArgumentException(); } - return new TopNOperatorStatus(occupiedRows, ramBytesUsed); + return new TopNOperatorStatus(occupiedRows, ramBytesUsed, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); } } diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index ce4aa8582929..98e5799c8d3f 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -548,7 +548,7 @@ public class EsqlSecurityIT extends ESRestTestCase { public void testLookupJoinIndexAllowed() throws Exception { assumeTrue( "Requires LOOKUP JOIN capability", - EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V10.capabilityName())) + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V11.capabilityName())) ); Response resp = runESQLCommand( @@ -587,7 +587,7 @@ public class EsqlSecurityIT extends ESRestTestCase { public void testLookupJoinIndexForbidden() throws Exception { assumeTrue( "Requires LOOKUP JOIN capability", - EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V10.capabilityName())) + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V11.capabilityName())) ); var resp = expectThrows( diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java 
b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index b22925b44eba..3b5377c2768f 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.qa.mixed; import org.elasticsearch.Version; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase; @@ -21,8 +20,7 @@ import java.io.IOException; import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V10; -import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V11; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @ClassRule @@ -49,10 +47,6 @@ public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { return oldClusterTestFeatureService.clusterHasFeature(featureId); } - protected static boolean oldClusterHasFeature(NodeFeature feature) { - return oldClusterHasFeature(feature.id()); - } - @AfterClass public static void cleanUp() { oldClusterTestFeatureService = null; @@ -74,14 +68,6 @@ public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, instructions, bwcVersion)); - if (mode == ASYNC) { - assumeTrue("Async is not supported on " + bwcVersion, supportsAsync()); - } - } - - 
@Override - protected boolean supportsAsync() { - return oldClusterHasFeature(ASYNC_QUERY_FEATURE_ID); } @Override @@ -96,7 +82,7 @@ public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @Override protected boolean supportsIndexModeLookup() throws IOException { - return hasCapabilities(List.of(JOIN_LOOKUP_V10.capabilityName())); + return hasCapabilities(List.of(JOIN_LOOKUP_V11.capabilityName())); } @Override diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 987a5334f903..f8b921f23992 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_SOURCE_INDI import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V10; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V11; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,7 +124,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS 
not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V10.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V11.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 050259bbb5b5..cae9e1ba8eb6 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -312,22 +312,23 @@ public class RestEsqlIT extends RestEsqlTestCase { } signatures.add(sig); } + var readProfile = matchesList().item("LuceneSourceOperator") + .item("ValuesSourceReaderOperator") + .item("AggregationOperator") + .item("ExchangeSinkOperator"); + var mergeProfile = matchesList().item("ExchangeSourceOperator") + .item("AggregationOperator") + .item("ProjectOperator") + .item("LimitOperator") + .item("EvalOperator") + .item("ProjectOperator") + .item("OutputOperator"); + var emptyReduction = matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator"); + var reduction = matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator"); assertThat( signatures, - containsInAnyOrder( - matchesList().item("LuceneSourceOperator") - .item("ValuesSourceReaderOperator") - .item("AggregationOperator") - 
.item("ExchangeSinkOperator"), - matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator"), - matchesList().item("ExchangeSourceOperator") - .item("AggregationOperator") - .item("ProjectOperator") - .item("LimitOperator") - .item("EvalOperator") - .item("ProjectOperator") - .item("OutputOperator") - ) + Matchers.either(containsInAnyOrder(readProfile, reduction, mergeProfile)) + .or(containsInAnyOrder(readProfile, emptyReduction, mergeProfile)) ); } @@ -574,23 +575,35 @@ public class RestEsqlIT extends RestEsqlTestCase { .entry("slice_min", 0) .entry("current", DocIdSetIterator.NO_MORE_DOCS) .entry("pages_emitted", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)) .entry("processing_nanos", greaterThan(0)) .entry("processed_queries", List.of("*:*")); case "ValuesSourceReaderOperator" -> basicProfile().entry("readers_built", matchesMap().extraOk()); case "AggregationOperator" -> matchesMap().entry("pages_processed", greaterThan(0)) + .entry("rows_received", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)) .entry("aggregation_nanos", greaterThan(0)) .entry("aggregation_finish_nanos", greaterThan(0)); - case "ExchangeSinkOperator" -> matchesMap().entry("pages_accepted", greaterThan(0)); - case "ExchangeSourceOperator" -> matchesMap().entry("pages_emitted", greaterThan(0)).entry("pages_waiting", 0); + case "ExchangeSinkOperator" -> matchesMap().entry("pages_received", greaterThan(0)).entry("rows_received", greaterThan(0)); + case "ExchangeSourceOperator" -> matchesMap().entry("pages_waiting", 0) + .entry("pages_emitted", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)); case "ProjectOperator", "EvalOperator" -> basicProfile(); case "LimitOperator" -> matchesMap().entry("pages_processed", greaterThan(0)) .entry("limit", 1000) - .entry("limit_remaining", 999); + .entry("limit_remaining", 999) + .entry("rows_received", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)); case "OutputOperator" -> null; case "TopNOperator" -> 
matchesMap().entry("occupied_rows", 0) + .entry("pages_received", greaterThan(0)) + .entry("pages_emitted", greaterThan(0)) + .entry("rows_received", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)) .entry("ram_used", instanceOf(String.class)) .entry("ram_bytes_used", greaterThan(0)); case "LuceneTopNSourceOperator" -> matchesMap().entry("pages_emitted", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)) .entry("current", greaterThan(0)) .entry("processed_slices", greaterThan(0)) .entry("processed_shards", List.of("rest-esql-test:0")) @@ -611,7 +624,10 @@ public class RestEsqlIT extends RestEsqlTestCase { } private MapMatcher basicProfile() { - return matchesMap().entry("pages_processed", greaterThan(0)).entry("process_nanos", greaterThan(0)); + return matchesMap().entry("pages_processed", greaterThan(0)) + .entry("process_nanos", greaterThan(0)) + .entry("rows_received", greaterThan(0)) + .entry("rows_emitted", greaterThan(0)); } private void assertException(String query, String... 
errorMessages) throws IOException { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 49f5b01f247c..18bfb6b8676c 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -75,9 +75,6 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public abstract class EsqlSpecTestCase extends ESRestTestCase { - // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here - protected static final String ASYNC_QUERY_FEATURE_ID = "esql.async_query"; - private static final Logger LOGGER = LogManager.getLogger(EsqlSpecTestCase.class); private final String fileName; private final String groupName; @@ -140,10 +137,6 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { } } - protected boolean supportsAsync() { - return clusterHasFeature(ASYNC_QUERY_FEATURE_ID); // the Async API was introduced in 8.13.0 - } - @AfterClass public static void wipeTestData() throws IOException { try { @@ -281,7 +274,6 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { private Map runEsql(RequestObjectBuilder requestObject, AssertWarnings assertWarnings) throws IOException { if (mode == Mode.ASYNC) { - assert supportsAsync(); return RestEsqlTestCase.runEsqlAsync(requestObject, assertWarnings); } else { return RestEsqlTestCase.runEsqlSync(requestObject, assertWarnings); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index ac5a3d4be27f..5e0aeb5b3535 
100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -227,7 +227,7 @@ public abstract class RequestIndexFilteringTestCase extends ESRestTestCase { assertThat(e.getMessage(), containsString("index_not_found_exception")); assertThat(e.getMessage(), anyOf(containsString("no such index [foo]"), containsString("no such index [remote_cluster:foo]"))); - if (EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()) { + if (EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()) { e = expectThrows( ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query(from("test1") + " | LOOKUP JOIN foo ON id1")) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 66fd7d3ee5eb..7e25fb29fdb7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; @@ -61,6 +62,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Les import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.index.EsIndex; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; @@ -350,6 +352,10 @@ public final class EsqlTestUtils { public static final Configuration TEST_CFG = configuration(new QueryPragmas(Settings.EMPTY)); + public static LogicalOptimizerContext unboundLogicalOptimizerContext() { + return new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small()); + } + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); public static final QueryBuilderResolver MOCK_QUERY_BUILDER_RESOLVER = new MockQueryBuilderResolver(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 57c4fca6223e..bb46c9d31f74 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -1450,3 +1450,47 @@ FROM employees cnt:long 19 ; + +implicit casting strings to dates for IN operator +FROM employees +| WHERE birth_date IN ("1953-04-20", "1958-10-31") +| KEEP emp_no, first_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword +10006 | Anneke +10025 | Prasadram +; + +IN operator with null in list, finds match + +FROM employees +| EVAL x = NULL +| WHERE birth_date IN (TO_DATETIME("1958-02-19T00:00:00Z"), x) +| KEEP birth_date, first_name; + +birth_date:datetime | first_name:keyword +1958-02-19T00:00:00.000Z | Saniya +; + +IN operator with null in list, doesn't find match + +FROM employees +| EVAL x = NULL +| WHERE birth_date IN (TO_DATETIME("1900-02-19T00:00:00Z"), x) +| KEEP birth_date, 
first_name; + +birth_date:datetime | first_name:keyword +; + +IN operator with null in list, doesn't find match, EVAL to check value + +FROM employees +| EVAL x = NULL +| EVAL result = birth_date IN (TO_DATETIME("1900-02-19T00:00:00Z"), x) +| LIMIT 1 +| KEEP result; + +result:boolean +null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index 4206d6b48699..f44653171a4f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -1161,3 +1161,90 @@ row dt = to_date_nanos(0::long) plus:date_nanos 1970-01-01T00:00:01.000Z ; + +Date Nanos IN constant date nanos +required_capability: date_nanos_in_operator +required_capability: to_date_nanos + +FROM date_nanos +| WHERE MV_FIRST(nanos) IN (TO_DATE_NANOS("2023-10-23T13:55:01.543123456Z"), TO_DATE_NANOS("2023-10-23T12:27:28.948Z"), TO_DATE_NANOS("2017-10-23T13:53:55.832987654Z")); +ignoreOrder:true + +millis:date | nanos:date_nanos | num:long +2023-10-23T13:55:01.543Z | 2023-10-23T13:55:01.543123456Z | 1698069301543123456 +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +; + +Date Nanos IN constant date nanos, implicit casting +required_capability: date_nanos_in_operator +required_capability: to_date_nanos +required_capability: date_nanos_implicit_casting + +FROM date_nanos +| WHERE MV_FIRST(nanos) IN ("2023-10-23T13:55:01.543123456Z", "2023-10-23T12:27:28.948Z", "2017-10-23T13:53:55.832987654Z"); +ignoreOrder:true + +millis:date | nanos:date_nanos | num:long +2023-10-23T13:55:01.543Z | 2023-10-23T13:55:01.543123456Z | 1698069301543123456 +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +; + +Date Nanos IN date nanos field, implicit casting +required_capability: date_nanos_in_operator +required_capability: to_date_nanos 
+required_capability: date_nanos_implicit_casting + +FROM date_nanos +| WHERE "2023-10-23T13:55:01.543123456Z" IN (MV_FIRST(nanos)); + +millis:date | nanos:date_nanos | num:long +2023-10-23T13:55:01.543Z | 2023-10-23T13:55:01.543123456Z | 1698069301543123456 +; + +Date nanos IN millisecond date field +required_capability: date_nanos_in_operator +required_capability: to_date_nanos +required_capability: date_nanos_implicit_casting + +# Note: It is important to have two dates in the IN so the optimizer doesn't turn this into an == +FROM date_nanos +| WHERE MV_FIRST(nanos) IN (TO_DATETIME("2023-10-23T12:27:28.948"), TO_DATETIME("2020-02-02")); + +millis:date | nanos:date_nanos | num:long +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +; + +Millisecond field IN date nanos constants +required_capability: date_nanos_in_operator +required_capability: to_date_nanos +required_capability: date_nanos_implicit_casting + +# Note: It is important to have two dates in the IN so the optimizer doesn't turn this into an == +FROM date_nanos +| WHERE MV_FIRST(millis) IN (TO_DATE_NANOS("2023-10-23T12:27:28.948"), TO_DATE_NANOS("2020-02-02")); + +millis:date | nanos:date_nanos | num:long +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +; + +Date Nanos Format +required_capability: date_nanos_date_format + +FROM date_nanos +| EVAL sv_nanos = MV_MAX(nanos) +| EVAL a = DATE_FORMAT(sv_nanos), b = DATE_FORMAT("yyyy-MM-dd", sv_nanos), c = DATE_FORMAT("strict_date_optional_time_nanos", sv_nanos) +| KEEP sv_nanos, a, b, c; +ignoreOrder:true + +sv_nanos:date_nanos | a:keyword | b:keyword | c:keyword +2023-10-23T13:55:01.543123456Z | 2023-10-23T13:55:01.543Z | 2023-10-23 | 2023-10-23T13:55:01.543123456Z +2023-10-23T13:53:55.832987654Z | 2023-10-23T13:53:55.832Z | 2023-10-23 | 2023-10-23T13:53:55.832987654Z +2023-10-23T13:52:55.015787878Z | 2023-10-23T13:52:55.015Z | 2023-10-23 | 2023-10-23T13:52:55.015787878Z 
+2023-10-23T13:51:54.732102837Z | 2023-10-23T13:51:54.732Z | 2023-10-23 | 2023-10-23T13:51:54.732102837Z +2023-10-23T13:33:34.937193000Z | 2023-10-23T13:33:34.937Z | 2023-10-23 | 2023-10-23T13:33:34.937193Z +2023-10-23T12:27:28.948000000Z | 2023-10-23T12:27:28.948Z | 2023-10-23 | 2023-10-23T12:27:28.948Z +2023-10-23T12:15:03.360103847Z | 2023-10-23T12:15:03.360Z | 2023-10-23 | 2023-10-23T12:15:03.360103847Z +2023-10-23T12:15:03.360103847Z | 2023-10-23T12:15:03.360Z | 2023-10-23 | 2023-10-23T12:15:03.360103847Z +2023-03-23T12:15:03.360103847Z | 2023-03-23T12:15:03.360Z | 2023-03-23 | 2023-03-23T12:15:03.360103847Z +2023-03-23T12:15:03.360103847Z | 2023-03-23T12:15:03.360Z | 2023-03-23 | 2023-03-23T12:15:03.360103847Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 95119cae9559..0bad68cd9b28 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -8,7 +8,7 @@ ############################################### basicOnTheDataNode -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = languages @@ -25,7 +25,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -36,7 +36,7 @@ language_code:integer | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -53,7 +53,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; subsequentEvalOnTheDataNode -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = languages @@ -71,7 +71,7 @@ 
emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -89,7 +89,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; sortEvalBeforeLookup -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -106,7 +106,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; nonUniqueLeftKeyOnTheDataNode -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | WHERE emp_no <= 10030 @@ -130,60 +130,69 @@ emp_no:integer | language_code:integer | language_name:keyword ; nonUniqueRightKeyOnTheDataNode -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = emp_no % 10 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code | WHERE emp_no > 10090 AND emp_no < 10096 -| SORT emp_no -| EVAL country = MV_SORT(country) +| SORT emp_no, country | KEEP emp_no, language_code, language_name, country ; -emp_no:integer | language_code:integer | language_name:keyword | country:keyword -10091 | 1 | [English, English, English] | [Canada, United Kingdom, United States of America] -10092 | 2 | [German, German, German] | [Austria, Germany, Switzerland] -10093 | 3 | null | null -10094 | 4 | Quenya | null -10095 | 5 | null | Atlantis +emp_no:integer | language_code:integer | language_name:keyword | country:text + 10091 | 1 | English | Canada + 10091 | 1 | null | United Kingdom + 10091 | 1 | English | United States of America + 10091 | 1 | English | null + 10092 | 2 | German | [Germany, Austria] + 10092 | 2 | German | Switzerland + 10092 | 2 | German | null + 10093 | 3 | null | null + 10094 | 4 | Quenya | null + 10095 | 5 | null | Atlantis ; nonUniqueRightKeyOnTheCoordinator -required_capability: join_lookup_v10 +required_capability: 
join_lookup_v11 FROM employees | SORT emp_no | LIMIT 5 | EVAL language_code = emp_no % 10 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code -| EVAL country = MV_SORT(country) | KEEP emp_no, language_code, language_name, country ; -emp_no:integer | language_code:integer | language_name:keyword | country:keyword -10001 | 1 | [English, English, English] | [Canada, United Kingdom, United States of America] -10002 | 2 | [German, German, German] | [Austria, Germany, Switzerland] -10003 | 3 | null | null -10004 | 4 | Quenya | null -10005 | 5 | null | Atlantis +emp_no:integer | language_code:integer | language_name:keyword | country:text +10001 | 1 | English | Canada +10001 | 1 | English | null +10001 | 1 | null | United Kingdom +10001 | 1 | English | United States of America +10002 | 2 | German | [Germany, Austria] +10002 | 2 | German | Switzerland +10002 | 2 | German | null +10003 | 3 | null | null +10004 | 4 | Quenya | null +10005 | 5 | null | Atlantis ; nonUniqueRightKeyFromRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW language_code = 2 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code | DROP country.keyword -| EVAL country = MV_SORT(country) ; -language_code:integer | language_name:keyword | country:keyword -2 | [German, German, German] | [Austria, Germany, Switzerland] +language_code:integer | country:text | language_name:keyword +2 | [Germany, Austria] | German +2 | Switzerland | German +2 | null | German ; repeatedIndexOnFrom -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM languages_lookup | LOOKUP JOIN languages_lookup ON language_code @@ -201,7 +210,7 @@ dropAllLookedUpFieldsOnTheDataNode-Ignore // Depends on // https://github.com/elastic/elasticsearch/issues/118778 // https://github.com/elastic/elasticsearch/issues/118781 -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = emp_no % 10 @@ -222,7 +231,7 @@ 
dropAllLookedUpFieldsOnTheCoordinator-Ignore // Depends on // https://github.com/elastic/elasticsearch/issues/118778 // https://github.com/elastic/elasticsearch/issues/118781 -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -247,7 +256,7 @@ emp_no:integer ############################################### filterOnLeftSide -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = languages @@ -264,7 +273,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSide -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -280,7 +289,7 @@ FROM sample_data ; filterOnRightSideAfterStats -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -293,7 +302,7 @@ count:long | type:keyword ; filterOnJoinKey -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = languages @@ -308,7 +317,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSide -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | WHERE emp_no < 10006 @@ -325,7 +334,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSideOnTheCoordinator -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -341,7 +350,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyOnTheCoordinator -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -357,7 +366,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSideOnTheCoordinator -required_capability: join_lookup_v10 
+required_capability: join_lookup_v11 FROM employees | SORT emp_no @@ -374,7 +383,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnTheDataNodeThenFilterOnTheCoordinator -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | EVAL language_code = languages @@ -395,31 +404,35 @@ emp_no:integer | language_code:integer | language_name:keyword ########################################################################### nullJoinKeyOnTheDataNode -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | WHERE emp_no < 10004 | EVAL language_code = emp_no % 10, language_code = CASE(language_code == 3, null, language_code) | LOOKUP JOIN languages_lookup_non_unique_key ON language_code -| SORT emp_no +| SORT emp_no, language_code, language_name | KEEP emp_no, language_code, language_name ; emp_no:integer | language_code:integer | language_name:keyword -10001 | 1 | [English, English, English] -10002 | 2 | [German, German, German] +10001 | 1 | English +10001 | 1 | English +10001 | 1 | English +10001 | 1 | null +10002 | 2 | German +10002 | 2 | German +10002 | 2 | German 10003 | null | null ; mvJoinKeyOnTheDataNode -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | WHERE 10003 < emp_no AND emp_no < 10008 | EVAL language_code = emp_no % 10 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code -| SORT emp_no -| EVAL language_name = MV_SORT(language_name) +| SORT emp_no, language_name | KEEP emp_no, language_code, language_name ; @@ -427,38 +440,42 @@ emp_no:integer | language_code:integer | language_name:keyword 10004 | 4 | Quenya 10005 | 5 | null 10006 | 6 | Mv-Lang -10007 | 7 | [Mv-Lang, Mv-Lang2] +10007 | 7 | Mv-Lang +10007 | 7 | Mv-Lang2 ; mvJoinKeyFromRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW language_code = [4, 5, 6, 7] | LOOKUP JOIN languages_lookup_non_unique_key ON language_code -| 
EVAL language_name = MV_SORT(language_name), country = MV_SORT(country) | KEEP language_code, language_name, country ; -language_code:integer | language_name:keyword | country:keyword -[4, 5, 6, 7] | [Mv-Lang, Mv-Lang2, Quenya] | [Atlantis, Mv-Land, Mv-Land2] +language_code:integer | language_name:keyword | country:text +[4, 5, 6, 7] | Quenya | null +[4, 5, 6, 7] | null | Atlantis +[4, 5, 6, 7] | Mv-Lang | Mv-Land +[4, 5, 6, 7] | Mv-Lang2 | Mv-Land2 ; mvJoinKeyFromRowExpanded -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW language_code = [4, 5, 6, 7, 8] | MV_EXPAND language_code | LOOKUP JOIN languages_lookup_non_unique_key ON language_code -| EVAL language_name = MV_SORT(language_name), country = MV_SORT(country) | KEEP language_code, language_name, country +| SORT language_code, language_name, country ; -language_code:integer | language_name:keyword | country:keyword -4 | Quenya | null -5 | null | Atlantis -6 | Mv-Lang | Mv-Land -7 | [Mv-Lang, Mv-Lang2] | [Mv-Land, Mv-Land2] -8 | Mv-Lang2 | Mv-Land2 +language_code:integer | language_name:keyword | country:text +4 | Quenya | null +5 | null | Atlantis +6 | Mv-Lang | Mv-Land +7 | Mv-Lang | Mv-Land +7 | Mv-Lang2 | Mv-Land2 +8 | Mv-Lang2 | Mv-Land2 ; ########################################################################### @@ -466,7 +483,7 @@ language_code:integer | language_name:keyword | country:keyword ########################################################################### joinOnNestedField -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM employees | WHERE 10000 < emp_no AND emp_no < 10006 @@ -486,7 +503,7 @@ emp_no:integer | language.id:integer | language.name:text joinOnNestedFieldRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW language.code = "EN" | LOOKUP JOIN languages_nested_fields ON language.code @@ -499,7 +516,7 @@ language.id:integer | language.code:keyword | language.name.keyword:keyword 
joinOnNestedNestedFieldRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW language.name.keyword = "English" | LOOKUP JOIN languages_nested_fields ON language.name.keyword @@ -515,7 +532,7 @@ language.id:integer | language.name:text | language.name.keyword:keyword ############################################### lookupIPFromRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -526,7 +543,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromKeepRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", right = "right" | KEEP left, client_ip, right @@ -538,7 +555,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowing -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -549,7 +566,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -562,7 +579,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeepReordered -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -575,7 +592,7 @@ right | Development | 172.21.0.5 ; lookupIPFromIndex -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -594,7 +611,7 @@ ignoreOrder:true ; lookupIPFromIndexKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM 
sample_data | EVAL client_ip = client_ip::keyword @@ -614,7 +631,7 @@ ignoreOrder:true ; lookupIPFromIndexKeepKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -636,7 +653,7 @@ timestamp:date | client_ip:keyword | event_duration:long | msg:keyword ; lookupIPFromIndexStats -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -652,7 +669,7 @@ count:long | env:keyword ; lookupIPFromIndexStatsKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -669,7 +686,7 @@ count:long | env:keyword ; statsAndLookupIPFromIndex -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -690,7 +707,7 @@ count:long | client_ip:keyword | env:keyword ############################################### lookupMessageFromRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -701,7 +718,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromKeepRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | KEEP left, message, right @@ -713,7 +730,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowing -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -724,7 +741,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowingKeep -required_capability: join_lookup_v10 +required_capability: 
join_lookup_v11 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -736,7 +753,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromIndex -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -754,7 +771,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -773,7 +790,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -793,7 +810,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepReordered -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -812,7 +829,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; lookupMessageFromIndexStats -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -827,7 +844,7 @@ count:long | type:keyword ; lookupMessageFromIndexStatsKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -843,7 +860,7 @@ count:long | type:keyword ; statsAndLookupMessageFromIndex -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | STATS count = count(message) BY message @@ -861,7 +878,7 @@ count:long | type:keyword | message:keyword ; lookupMessageFromIndexTwice -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -883,7 +900,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceKeep 
-required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -906,7 +923,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceFullyShadowing -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -930,7 +947,7 @@ ignoreOrder:true ############################################### lookupIPAndMessageFromRow -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -942,7 +959,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBefore -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | KEEP left, client_ip, message, right @@ -955,7 +972,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBetween -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -968,7 +985,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepAfter -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -981,7 +998,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowing -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", type = "type", right = "right" | 
LOOKUP JOIN clientips_lookup ON client_ip @@ -993,7 +1010,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1007,7 +1024,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1022,7 +1039,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeepKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1038,7 +1055,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepReordered -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1052,7 +1069,7 @@ right | Development | Success | 172.21.0.5 ; lookupIPAndMessageFromIndex -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1072,7 +1089,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1093,7 +1110,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexStats -required_capability: join_lookup_v10 
+required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1111,7 +1128,7 @@ count:long | env:keyword | type:keyword ; lookupIPAndMessageFromIndexStatsKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1130,7 +1147,7 @@ count:long | env:keyword | type:keyword ; statsAndLookupIPAndMessageFromIndex -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1149,7 +1166,7 @@ count:long | client_ip:keyword | message:keyword | env:keyword | type:keyw ; lookupIPAndMessageFromIndexChainedEvalKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1171,7 +1188,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexChainedRenameKeep -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1193,7 +1210,7 @@ ignoreOrder:true ; lookupIndexInFromRepeatedRowBug -required_capability: join_lookup_v10 +required_capability: join_lookup_v11 FROM languages_lookup_non_unique_key | WHERE language_code == 1 | LOOKUP JOIN languages_lookup ON language_code diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 34991c3958f2..8e27cfceb28e 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -160,7 +160,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { } if (o.operator().equals("ExchangeSinkOperator")) { ExchangeSinkOperator.Status oStatus = 
(ExchangeSinkOperator.Status) o.status(); - assertThat(oStatus.pagesAccepted(), greaterThanOrEqualTo(0)); + assertThat(oStatus.pagesReceived(), greaterThanOrEqualTo(0)); exchangeSinks++; } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index f31eabea9d61..060a50684b39 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -60,6 +60,7 @@ import org.elasticsearch.xpack.esql.plugin.TransportEsqlQueryAction; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; @@ -80,9 +81,8 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { /** * Tests when multiple results match. 
*/ - @AwaitsFix(bugUrl = "fixing real soon now") public void testLookupIndexMultiResults() throws IOException { - runLookup(new UsingSingleLookupTable(new Object[] { "aa", new String[] { "bb", "ff" }, "cc", "dd" })); + runLookup(new UsingSingleLookupTable(new String[] { "aa", "bb", "bb", "dd" })); } interface PopulateIndices { @@ -90,24 +90,24 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { } class UsingSingleLookupTable implements PopulateIndices { - private final Object[] lookupData; + private final Map> matches = new HashMap<>(); + private final String[] lookupData; - UsingSingleLookupTable(Object[] lookupData) { + UsingSingleLookupTable(String[] lookupData) { this.lookupData = lookupData; + for (int i = 0; i < lookupData.length; i++) { + matches.computeIfAbsent(lookupData[i], k -> new ArrayList<>()).add(i); + } } @Override - public void populate(int docCount, List expected) throws IOException { + public void populate(int docCount, List expected) { List docs = new ArrayList<>(); for (int i = 0; i < docCount; i++) { - docs.add(client().prepareIndex("source").setSource(Map.of("data", lookupData[i % lookupData.length]))); - Object d = lookupData[i % lookupData.length]; - if (d instanceof String s) { - expected.add(s + ":" + (i % lookupData.length)); - } else if (d instanceof String[] ss) { - for (String s : ss) { - expected.add(s + ":" + (i % lookupData.length)); - } + String data = lookupData[i % lookupData.length]; + docs.add(client().prepareIndex("source").setSource(Map.of("data", data))); + for (Integer match : matches.get(data)) { + expected.add(data + ":" + match); } } for (int i = 0; i < lookupData.length; i++) { diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index efc2e3660990..4d4cb7d2caac 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -143,18 +143,9 @@ indexString ; metadata - : 
metadataOption - | deprecated_metadata - ; - -metadataOption : METADATA UNQUOTED_SOURCE (COMMA UNQUOTED_SOURCE)* ; -deprecated_metadata - : OPENING_BRACKET metadataOption CLOSING_BRACKET - ; - metricsCommand : DEV_METRICS indexPattern (COMMA indexPattern)* aggregates=aggFields? (BY grouping=fields)? ; diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java new file mode 100644 index 000000000000..14ca1e9a768f --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +import java.util.Arrays; +import java.util.BitSet; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link In}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
+ */ +public class InMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator lhs; + + private final EvalOperator.ExpressionEvaluator[] rhs; + + private final DriverContext driverContext; + + private Warnings warnings; + + public InMillisNanosEvaluator( + Source source, + EvalOperator.ExpressionEvaluator lhs, + EvalOperator.ExpressionEvaluator[] rhs, + DriverContext driverContext + ) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock lhsBlock = (LongBlock) lhs.eval(page)) { + LongBlock[] rhsBlocks = new LongBlock[rhs.length]; + try (Releasable rhsRelease = Releasables.wrap(rhsBlocks)) { + for (int i = 0; i < rhsBlocks.length; i++) { + rhsBlocks[i] = (LongBlock) rhs[i].eval(page); + } + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return eval(page.getPositionCount(), lhsBlock, rhsBlocks); + } + LongVector[] rhsVectors = new LongVector[rhs.length]; + for (int i = 0; i < rhsBlocks.length; i++) { + rhsVectors[i] = rhsBlocks[i].asVector(); + if (rhsVectors[i] == null) { + return eval(page.getPositionCount(), lhsBlock, rhsBlocks); + } + } + return eval(page.getPositionCount(), lhsVector, rhsVectors); + } + } + } + + private BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock[] rhsBlocks) { + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + long[] rhsValues = new long[rhs.length]; + BitSet nulls = new BitSet(rhs.length); + BitSet mvs = new BitSet(rhs.length); + boolean foundMatch; + for (int p = 0; p < positionCount; p++) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered 
multi-value")); + } + result.appendNull(); + continue; + } + // unpack rhsBlocks into rhsValues + nulls.clear(); + mvs.clear(); + for (int i = 0; i < rhsBlocks.length; i++) { + if (rhsBlocks[i].isNull(p)) { + nulls.set(i); + continue; + } + if (rhsBlocks[i].getValueCount(p) > 1) { + mvs.set(i); + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + continue; + } + int o = rhsBlocks[i].getFirstValueIndex(p); + rhsValues[i] = rhsBlocks[i].getLong(o); + } + if (nulls.cardinality() == rhsBlocks.length || mvs.cardinality() == rhsBlocks.length) { + result.appendNull(); + continue; + } + foundMatch = In.process(nulls, mvs, lhsBlock.getLong(lhsBlock.getFirstValueIndex(p)), rhsValues); + if (foundMatch) { + result.appendBoolean(true); + } else { + if (nulls.cardinality() > 0) { + result.appendNull(); + } else { + result.appendBoolean(false); + } + } + } + return result.build(); + } + } + + private BooleanBlock eval(int positionCount, LongVector lhsVector, LongVector[] rhsVectors) { + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + long[] rhsValues = new long[rhs.length]; + for (int p = 0; p < positionCount; p++) { + // unpack rhsVectors into rhsValues + for (int i = 0; i < rhsVectors.length; i++) { + rhsValues[i] = rhsVectors[i].getLong(p); + } + result.appendBoolean(In.processMillisNanos(null, null, lhsVector.getLong(p), rhsValues)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "InMillisNanosEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return 
warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; + private final EvalOperator.ExpressionEvaluator.Factory[] rhs; + + Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory[] rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public InMillisNanosEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] rhs = Arrays.stream(this.rhs) + .map(a -> a.get(context)) + .toArray(EvalOperator.ExpressionEvaluator[]::new); + return new InMillisNanosEvaluator(source, lhs.get(context), rhs, context); + } + + @Override + public String toString() { + return "InMillisNanosEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java new file mode 100644 index 000000000000..278ecdba7d9e --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +import java.util.Arrays; +import java.util.BitSet; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link In}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + */ +public class InNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator lhs; + + private final EvalOperator.ExpressionEvaluator[] rhs; + + private final DriverContext driverContext; + + private Warnings warnings; + + public InNanosMillisEvaluator( + Source source, + EvalOperator.ExpressionEvaluator lhs, + EvalOperator.ExpressionEvaluator[] rhs, + DriverContext driverContext + ) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock lhsBlock = (LongBlock) lhs.eval(page)) { + LongBlock[] rhsBlocks = new LongBlock[rhs.length]; + try (Releasable rhsRelease = Releasables.wrap(rhsBlocks)) { + for (int i = 0; i < rhsBlocks.length; i++) { + rhsBlocks[i] = (LongBlock) rhs[i].eval(page); + } + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return eval(page.getPositionCount(), lhsBlock, rhsBlocks); + } + LongVector[] rhsVectors = new LongVector[rhs.length]; + for (int i = 0; i < rhsBlocks.length; i++) { + 
rhsVectors[i] = rhsBlocks[i].asVector(); + if (rhsVectors[i] == null) { + return eval(page.getPositionCount(), lhsBlock, rhsBlocks); + } + } + return eval(page.getPositionCount(), lhsVector, rhsVectors); + } + } + } + + private BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock[] rhsBlocks) { + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + long[] rhsValues = new long[rhs.length]; + BitSet nulls = new BitSet(rhs.length); + BitSet mvs = new BitSet(rhs.length); + boolean foundMatch; + for (int p = 0; p < positionCount; p++) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue; + } + // unpack rhsBlocks into rhsValues + nulls.clear(); + mvs.clear(); + for (int i = 0; i < rhsBlocks.length; i++) { + if (rhsBlocks[i].isNull(p)) { + nulls.set(i); + continue; + } + if (rhsBlocks[i].getValueCount(p) > 1) { + mvs.set(i); + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + continue; + } + int o = rhsBlocks[i].getFirstValueIndex(p); + rhsValues[i] = rhsBlocks[i].getLong(o); + } + if (nulls.cardinality() == rhsBlocks.length || mvs.cardinality() == rhsBlocks.length) { + result.appendNull(); + continue; + } + foundMatch = In.process(nulls, mvs, lhsBlock.getLong(lhsBlock.getFirstValueIndex(p)), rhsValues); + if (foundMatch) { + result.appendBoolean(true); + } else { + if (nulls.cardinality() > 0) { + result.appendNull(); + } else { + result.appendBoolean(false); + } + } + } + return result.build(); + } + } + + private BooleanBlock eval(int positionCount, LongVector lhsVector, LongVector[] rhsVectors) { + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { 
+ long[] rhsValues = new long[rhs.length]; + for (int p = 0; p < positionCount; p++) { + // unpack rhsVectors into rhsValues + for (int i = 0; i < rhsVectors.length; i++) { + rhsValues[i] = rhsVectors[i].getLong(p); + } + result.appendBoolean(In.processNanosMillis(null, null, lhsVector.getLong(p), rhsValues)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "InNanosMillisEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; + private final EvalOperator.ExpressionEvaluator.Factory[] rhs; + + Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory[] rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public InNanosMillisEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] rhs = Arrays.stream(this.rhs) + .map(a -> a.get(context)) + .toArray(EvalOperator.ExpressionEvaluator[]::new); + return new InNanosMillisEvaluator(source, lhs.get(context), rhs, context); + } + + @Override + public String toString() { + return "InNanosMillisEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java new file mode 100644 index 000000000000..2f41a7440bb0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. + * This class is generated. Do not edit it. 
+ */ +public final class DateFormatMillisConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final DateFormatter formatter; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateFormatMillisConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DateFormatter formatter, DriverContext driverContext) { + this.source = source; + this.val = val; + this.formatter = formatter; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(DateFormat.processMillis(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.formatter)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, LongVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(DateFormat.processMillis(valVector.getLong(p), this.formatter)); + } + return result.build(); + } + } + + @Override + public String toString() { + 
return "DateFormatMillisConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final DateFormatter formatter; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, + DateFormatter formatter) { + this.source = source; + this.val = val; + this.formatter = formatter; + } + + @Override + public DateFormatMillisConstantEvaluator get(DriverContext context) { + return new DateFormatMillisConstantEvaluator(source, val.get(context), formatter, context); + } + + @Override + public String toString() { + return "DateFormatMillisConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java new file mode 100644 index 000000000000..29da191dbe78 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java @@ -0,0 +1,159 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.Locale; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. + * This class is generated. Do not edit it. + */ +public final class DateFormatMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final EvalOperator.ExpressionEvaluator formatter; + + private final Locale locale; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateFormatMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + EvalOperator.ExpressionEvaluator formatter, Locale locale, DriverContext driverContext) { + this.source = source; + this.val = val; + this.formatter = formatter; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + try (BytesRefBlock formatterBlock = (BytesRefBlock) formatter.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock, formatterBlock); + } + BytesRefVector formatterVector = 
formatterBlock.asVector(); + if (formatterVector == null) { + return eval(page.getPositionCount(), valBlock, formatterBlock); + } + return eval(page.getPositionCount(), valVector, formatterVector).asBlock(); + } + } + } + + public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock formatterBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef formatterScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (formatterBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(DateFormat.processMillis(valBlock.getLong(valBlock.getFirstValueIndex(p)), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), this.locale)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, LongVector valVector, + BytesRefVector formatterVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef formatterScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(DateFormat.processMillis(valVector.getLong(p), formatterVector.getBytesRef(p, formatterScratch), this.locale)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateFormatMillisEvaluator[" + "val=" 
+ val + ", formatter=" + formatter + ", locale=" + locale + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val, formatter); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final EvalOperator.ExpressionEvaluator.Factory formatter; + + private final Locale locale; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, + EvalOperator.ExpressionEvaluator.Factory formatter, Locale locale) { + this.source = source; + this.val = val; + this.formatter = formatter; + this.locale = locale; + } + + @Override + public DateFormatMillisEvaluator get(DriverContext context) { + return new DateFormatMillisEvaluator(source, val.get(context), formatter.get(context), locale, context); + } + + @Override + public String toString() { + return "DateFormatMillisEvaluator[" + "val=" + val + ", formatter=" + formatter + ", locale=" + locale + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java similarity index 83% rename from x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java rename to x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java index 25afa6bec360..1488833227dc 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. * This class is generated. Do not edit it. */ -public final class DateFormatConstantEvaluator implements EvalOperator.ExpressionEvaluator { +public final class DateFormatNanosConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -35,7 +35,7 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio private Warnings warnings; - public DateFormatConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + public DateFormatNanosConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DateFormatter formatter, DriverContext driverContext) { this.source = source; this.val = val; @@ -68,7 +68,7 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio result.appendNull(); continue position; } - result.appendBytesRef(DateFormat.process(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.formatter)); + result.appendBytesRef(DateFormat.processNanos(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.formatter)); } return result.build(); } @@ -77,7 +77,7 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio public BytesRefVector eval(int positionCount, LongVector valVector) { try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(DateFormat.process(valVector.getLong(p), this.formatter)); + 
result.appendBytesRef(DateFormat.processNanos(valVector.getLong(p), this.formatter)); } return result.build(); } @@ -85,7 +85,7 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio @Override public String toString() { - return "DateFormatConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]"; + return "DateFormatNanosConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]"; } @Override @@ -120,13 +120,13 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio } @Override - public DateFormatConstantEvaluator get(DriverContext context) { - return new DateFormatConstantEvaluator(source, val.get(context), formatter, context); + public DateFormatNanosConstantEvaluator get(DriverContext context) { + return new DateFormatNanosConstantEvaluator(source, val.get(context), formatter, context); } @Override public String toString() { - return "DateFormatConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]"; + return "DateFormatNanosConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java similarity index 84% rename from x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java rename to x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java index 318ffa5af8f7..a94d52201481 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java @@ -25,7 +25,7 @@ 
import org.elasticsearch.xpack.esql.core.tree.Source; * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. * This class is generated. Do not edit it. */ -public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluator { +public final class DateFormatNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -38,7 +38,7 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat private Warnings warnings; - public DateFormatEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + public DateFormatNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator formatter, Locale locale, DriverContext driverContext) { this.source = source; this.val = val; @@ -90,7 +90,7 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat result.appendNull(); continue position; } - result.appendBytesRef(DateFormat.process(valBlock.getLong(valBlock.getFirstValueIndex(p)), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), this.locale)); + result.appendBytesRef(DateFormat.processNanos(valBlock.getLong(valBlock.getFirstValueIndex(p)), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), this.locale)); } return result.build(); } @@ -101,7 +101,7 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(DateFormat.process(valVector.getLong(p), formatterVector.getBytesRef(p, formatterScratch), this.locale)); + result.appendBytesRef(DateFormat.processNanos(valVector.getLong(p), formatterVector.getBytesRef(p, formatterScratch), this.locale)); } return 
result.build(); } @@ -109,7 +109,7 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat @Override public String toString() { - return "DateFormatEvaluator[" + "val=" + val + ", formatter=" + formatter + ", locale=" + locale + "]"; + return "DateFormatNanosEvaluator[" + "val=" + val + ", formatter=" + formatter + ", locale=" + locale + "]"; } @Override @@ -147,13 +147,13 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat } @Override - public DateFormatEvaluator get(DriverContext context) { - return new DateFormatEvaluator(source, val.get(context), formatter.get(context), locale, context); + public DateFormatNanosEvaluator get(DriverContext context) { + return new DateFormatNanosEvaluator(source, val.get(context), formatter.get(context), locale, context); } @Override public String toString() { - return "DateFormatEvaluator[" + "val=" + val + ", formatter=" + formatter + ", locale=" + locale + "]"; + return "DateFormatNanosEvaluator[" + "val=" + val + ", formatter=" + formatter + ", locale=" + locale + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/util/DelayEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/util/DelayEvaluator.java deleted file mode 100644 index 0db714eceb28..000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/util/DelayEvaluator.java +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.xpack.esql.expression.function.scalar.util; - -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Delay}. - * This class is generated. Do not edit it. - */ -public final class DelayEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final long ms; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DelayEvaluator(Source source, long ms, DriverContext driverContext) { - this.source = source; - this.ms = ms; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - return eval(page.getPositionCount()).asBlock(); - } - - public BooleanVector eval(int positionCount) { - try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(p, Delay.process(this.ms)); - } - return result.build(); - } - } - - @Override - public String toString() { - return "DelayEvaluator[" + "ms=" + ms + "]"; - } - - @Override - public void close() { - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final long ms; - - public Factory(Source source, long ms) { - 
this.source = source; - this.ms = ms; - } - - @Override - public DelayEvaluator get(DriverContext context) { - return new DelayEvaluator(source, ms, context); - } - - @Override - public String toString() { - return "DelayEvaluator[" + "ms=" + ms + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 999ada6feba0..5468d57392c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -27,6 +27,113 @@ import java.util.Set; */ public class EsqlCapabilities { public enum Cap { + /** + * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. + * Added in #106095. + */ + MV_SORT, + + /** + * When we disabled some broken optimizations around {@code nullable}. + * Fixed in #105691. + */ + DISABLE_NULLABLE_OPTS, + + /** + * Introduction of {@code ST_X} and {@code ST_Y}. Added in #105768. + */ + ST_X_Y, + + /** + * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. + */ + SPATIAL_POINTS_FROM_SOURCE, + + /** + * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. + */ + SPATIAL_SHAPES, + + /** + * Support for spatial aggregation {@code ST_CENTROID}. Done in #104269. + */ + ST_CENTROID_AGG, + + /** + * Support for spatial aggregation {@code ST_INTERSECTS}. Done in #104907. + */ + ST_INTERSECTS, + + /** + * Support for spatial aggregation {@code ST_CONTAINS} and {@code ST_WITHIN}. Done in #106503. + */ + ST_CONTAINS_WITHIN, + + /** + * Support for spatial aggregation {@code ST_DISJOINT}. Done in #107007. + */ + ST_DISJOINT, + + /** + * The introduction of the {@code VALUES} agg. + */ + AGG_VALUES, + + /** + * Does ESQL support async queries. 
+ */ + ASYNC_QUERY, + + /** + * Does ESQL support FROM OPTIONS? + */ + @Deprecated + FROM_OPTIONS, + + /** + * Cast string literals to a desired data type. + */ + STRING_LITERAL_AUTO_CASTING, + + /** + * Base64 encoding and decoding functions. + */ + BASE64_DECODE_ENCODE, + + /** + * Support for the :: casting operator + */ + CASTING_OPERATOR, + + /** + * Blocks can be labelled with {@link org.elasticsearch.compute.data.Block.MvOrdering#SORTED_ASCENDING} for optimizations. + */ + MV_ORDERING_SORTED_ASCENDING, + + /** + * Support for metrics counter fields + */ + METRICS_COUNTER_FIELDS, + + /** + * Cast string literals to a desired data type for IN predicate and more types for BinaryComparison. + */ + STRING_LITERAL_AUTO_CASTING_EXTENDED, + + /** + * Support for metadata fields. + */ + METADATA_FIELDS, + + /** + * Support for timespan units abbreviations + */ + TIMESPAN_ABBREVIATIONS, + + /** + * Support metrics counter types + */ + COUNTER_TYPES, /** * Support for function {@code BIT_LENGTH}. 
Done in #115792 @@ -380,6 +487,15 @@ public class EsqlCapabilities { */ DATE_NANOS_AGGREGATIONS(), + /** + * Support the {@link org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In} operator for date nanos + */ + DATE_NANOS_IN_OPERATOR(), + /** + * Support running date format function on nanosecond dates + */ + DATE_NANOS_DATE_FORMAT(), + /** * DATE_PARSE supports reading timezones */ @@ -564,7 +680,7 @@ public class EsqlCapabilities { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V10(Build.current().isSnapshot()), + JOIN_LOOKUP_V11(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 3d1bfdfd0ef4..a11b511cb83b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -325,7 +326,7 @@ public class Analyzer extends ParameterizedRuleExecutor + * Example: the {@link Bucket} function, which produces buckets over a numerical or date field, based on a number of literal + * arguments needs to check if its arguments are all indeed literals. This is how this verification is performed: + *
+     *     {@code
+     *
+     *      @Override
+     *      public void postLogicalOptimizationVerification(Failures failures) {
+     *          String operation = sourceText();
+     *
+     *          failures.add(isFoldable(buckets, operation, SECOND))
+     *              .add(from != null ? isFoldable(from, operation, THIRD) : null)
+     *              .add(to != null ? isFoldable(to, operation, FOURTH) : null);
+     *      }
+     *     }
+     *     
+ * + */ + void postLogicalOptimizationVerification(Failures failures); +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/Validatable.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/Validatable.java deleted file mode 100644 index f6733fa3f175..000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/Validatable.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.capabilities; - -import org.elasticsearch.xpack.esql.common.Failures; - -/** - * Interface implemented by expressions that require validation post logical optimization, - * when the plan and references have been not just resolved but also replaced. - */ -public interface Validatable { - - /** - * Validates the implementing expression - discovered failures are reported to the given - * {@link Failures} class. 
- */ - default void validate(Failures failures) {} -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 762018c0a9b6..a5ee11c4e84b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -22,13 +22,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; @@ -41,6 +39,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; +import org.elasticsearch.compute.operator.ProjectOperator; import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.compute.operator.lookup.EnrichQuerySourceOperator; import org.elasticsearch.compute.operator.lookup.MergePositionsOperator; @@ -87,19 +86,19 @@ import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; 
import java.util.concurrent.Executor; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; /** - * {@link AbstractLookupService} performs a single valued {@code LEFT JOIN} for a - * given input page against another index. This is quite similar to a nested loop - * join. It is restricted to indices with only a single shard. + * {@link AbstractLookupService} performs a {@code LEFT JOIN} for a given input + * page against another index that must have only a single + * shard. *

* This registers a {@link TransportRequestHandler} so we can handle requests * to join data that isn't local to the node, but it is much faster if the @@ -107,7 +106,7 @@ import java.util.stream.IntStream; *

*

* The join process spawns a {@link Driver} per incoming page which runs in - * three stages: + * two or three stages: *

*

* Stage 1: Finding matching document IDs for the input page. This stage is done @@ -120,9 +119,9 @@ import java.util.stream.IntStream; * {@code [DocVector, IntBlock: positions, Block: field1, Block: field2,...]}. *

*

- * Stage 3: Combining the extracted values based on positions and filling nulls for - * positions without matches. This is done by {@link MergePositionsOperator}. The output - * page is represented as {@code [Block: field1, Block: field2,...]}. + * Stage 3: Optionally this combines the extracted values based on positions and filling + * nulls for positions without matches. This is done by {@link MergePositionsOperator}. + * The output page is represented as {@code [Block: field1, Block: field2,...]}. *

*

* The {@link Page#getPositionCount()} of the output {@link Page} is equal to the @@ -139,6 +138,15 @@ abstract class AbstractLookupService readRequest ) { this.actionName = actionName; @@ -157,6 +166,7 @@ abstract class AbstractLookupService resultPages, BlockFactory blockFactory) throws IOException; + + /** + * Read the response from a {@link StreamInput}. + */ + protected abstract LookupResponse readLookupResponse(StreamInput in, BlockFactory blockFactory) throws IOException; + protected static QueryList termQueryList( MappedFieldType field, SearchExecutionContext searchExecutionContext, @@ -196,9 +216,9 @@ abstract class AbstractLookupService outListener) { + public final void lookupAsync(R request, CancellableTask parentTask, ActionListener> outListener) { ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); - ActionListener listener = ContextPreservingActionListener.wrapPreservingContext(outListener, threadContext); + ActionListener> listener = ContextPreservingActionListener.wrapPreservingContext(outListener, threadContext); hasPrivilege(listener.delegateFailureAndWrap((delegate, ignored) -> { ClusterState clusterState = clusterService.state(); GroupShardsIterator shardIterators = clusterService.operationRouting() @@ -225,8 +245,8 @@ abstract class AbstractLookupService( - delegate.map(LookupResponse::takePage), - in -> new LookupResponse(in, blockFactory), + delegate.map(LookupResponse::takePages), + in -> readLookupResponse(in, blockFactory), executor ) ); @@ -291,10 +311,13 @@ abstract class AbstractLookupService listener) { + private void doLookup(T request, CancellableTask task, ActionListener> listener) { Block inputBlock = request.inputPage.getBlock(0); if (inputBlock.areAllValuesNull()) { - listener.onResponse(createNullResponse(request.inputPage.getPositionCount(), request.extractFields)); + List nullResponse = mergePages + ? 
List.of(createNullResponse(request.inputPage.getPositionCount(), request.extractFields)) + : List.of(); + listener.onResponse(nullResponse); return; } final List releasables = new ArrayList<>(6); @@ -315,31 +338,31 @@ abstract class AbstractLookupService i + 2).toArray(); - final MergePositionsOperator mergePositionsOperator; + final Operator finishPages; final OrdinalBytesRefBlock ordinalsBytesRefBlock; - if (inputBlock instanceof BytesRefBlock bytesRefBlock && (ordinalsBytesRefBlock = bytesRefBlock.asOrdinals()) != null) { + if (mergePages // TODO fix this optimization for Lookup. + && inputBlock instanceof BytesRefBlock bytesRefBlock + && (ordinalsBytesRefBlock = bytesRefBlock.asOrdinals()) != null) { + inputBlock = ordinalsBytesRefBlock.getDictionaryVector().asBlock(); var selectedPositions = ordinalsBytesRefBlock.getOrdinalsBlock(); - mergePositionsOperator = new MergePositionsOperator( - 1, - mergingChannels, - mergingTypes, - selectedPositions, - driverContext.blockFactory() - ); - + finishPages = new MergePositionsOperator(1, mergingChannels, mergingTypes, selectedPositions, driverContext.blockFactory()); } else { - try (var selectedPositions = IntVector.range(0, inputBlock.getPositionCount(), blockFactory).asBlock()) { - mergePositionsOperator = new MergePositionsOperator( - 1, - mergingChannels, - mergingTypes, - selectedPositions, - driverContext.blockFactory() - ); + if (mergePages) { + try (var selectedPositions = IntVector.range(0, inputBlock.getPositionCount(), blockFactory).asBlock()) { + finishPages = new MergePositionsOperator( + 1, + mergingChannels, + mergingTypes, + selectedPositions, + driverContext.blockFactory() + ); + } + } else { + finishPages = dropDocBlockOperator(request.extractFields); } } - releasables.add(mergePositionsOperator); + releasables.add(finishPages); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); QueryList queryList = queryList(request, searchExecutionContext, inputBlock, 
request.inputDataType); var warnings = Warnings.createWarnings( @@ -359,8 +382,15 @@ abstract class AbstractLookupService result = new AtomicReference<>(); - OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); + /* + * Collect all result Pages in a synchronizedList mostly out of paranoia. We'll + * be collecting these results in the Driver thread and reading them in its + * completion listener which absolutely happens-after the insertions. So, + * technically, we don't need synchronization here. But we're doing it anyway + * because the list will never grow mega large. + */ + List collectedPages = Collections.synchronizedList(new ArrayList<>()); + OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), collectedPages::add); releasables.add(outputOperator); Driver driver = new Driver( "enrich-lookup:" + request.sessionId, @@ -369,7 +399,7 @@ abstract class AbstractLookupService { - Page out = result.get(); - if (out == null) { - out = createNullResponse(request.inputPage.getPositionCount(), request.extractFields); + List out = collectedPages; + if (mergePages && out.isEmpty()) { + out = List.of(createNullResponse(request.inputPage.getPositionCount(), request.extractFields)); } return out; })); @@ -434,6 +464,18 @@ abstract class AbstractLookupService extractFields) { + int end = extractFields.size() + 1; + List projection = new ArrayList<>(end); + for (int i = 1; i <= end; i++) { + projection.add(i); + } + return new ProjectOperator(projection); + } + private Page createNullResponse(int positionCount, List extractFields) { final Block[] blocks = new Block[extractFields.size()]; try { @@ -457,7 +499,7 @@ abstract class AbstractLookupService ActionListener.respondAndRelease(l, new LookupResponse(outPage, blockFactory)) + (l, resultPages) -> ActionListener.respondAndRelease(l, createLookupResponse(resultPages, blockFactory)) ) ); } @@ -587,45 +629,24 @@ abstract class AbstractLookupService 
takePages(); - @Override - public void writeTo(StreamOutput out) throws IOException { - long bytes = page.ramBytesUsedByBlocks(); - blockFactory.breaker().addEstimateBytesAndMaybeBreak(bytes, "serialize enrich lookup response"); - reservedBytes += bytes; - page.writeTo(out); - } - - Page takePage() { - var p = page; - page = null; - return p; - } - - private void releasePage() { + private void release() { blockFactory.breaker().addWithoutBreaking(-reservedBytes); - if (page != null) { - Releasables.closeExpectNoException(page::releaseBlocks); - } + innerRelease(); } + protected abstract void innerRelease(); + @Override public void incRef() { refs.incRef(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java index df608a04632a..8083d67e5a19 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.operator.AsyncOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.ResponseHeadersCollector; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -27,7 +28,7 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -public final class EnrichLookupOperator extends AsyncOperator { +public final class EnrichLookupOperator extends AsyncOperator { private final EnrichLookupService enrichLookupService; private final String sessionId; private final CancellableTask parentTask; @@ -128,13 +129,29 @@ public final class EnrichLookupOperator 
extends AsyncOperator { enrichFields, source ); + CheckedFunction, Page, Exception> handleResponse = pages -> { + if (pages.size() != 1) { + throw new UnsupportedOperationException("ENRICH should only return a single page"); + } + return inputPage.appendPage(pages.getFirst()); + }; enrichLookupService.lookupAsync( request, parentTask, - ActionListener.runBefore(listener.map(inputPage::appendPage), responseHeadersCollector::collect) + ActionListener.runBefore(listener.map(handleResponse), responseHeadersCollector::collect) ); } + @Override + public Page getOutput() { + return fetchFromBuffer(); + } + + @Override + protected void releaseFetchedOnAnyThread(Page page) { + releasePageOnAnyThread(page); + } + @Override public String toString() { return "EnrichOperator[index=" diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 7057b586871e..e3d962fa9231 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.lookup.QueryList; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; @@ -52,7 +53,16 @@ public class EnrichLookupService extends AbstractLookupService pages, BlockFactory blockFactory) throws IOException { + if (pages.size() != 1) { + throw new UnsupportedOperationException("ENRICH always makes a single page of output"); + } + return new LookupResponse(pages.getFirst(), blockFactory); + } + + @Override + 
protected LookupResponse readLookupResponse(StreamInput in, BlockFactory blockFactory) throws IOException { + return new LookupResponse(in, blockFactory); + } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) { // For range policy types, the ENRICH index field type will be one of a list of supported range types, @@ -210,4 +233,42 @@ public class EnrichLookupService extends AbstractLookupService takePages() { + var p = List.of(page); + page = null; + return p; + } + + @Override + protected void innerRelease() { + if (page != null) { + Releasables.closeExpectNoException(page::releaseBlocks); + } + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java index f09f7d0e23e7..73dfcf8d4362 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.enrich; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -15,7 +16,11 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AsyncOperator; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.IsBlockedResult; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.lookup.RightChunkedLeftJoin; +import org.elasticsearch.core.Releasable; +import 
org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -23,11 +28,13 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; +import java.util.Iterator; import java.util.List; import java.util.Objects; +import java.util.Optional; // TODO rename package -public final class LookupFromIndexOperator extends AsyncOperator { +public final class LookupFromIndexOperator extends AsyncOperator { public record Factory( String sessionId, CancellableTask parentTask, @@ -81,6 +88,14 @@ public final class LookupFromIndexOperator extends AsyncOperator { private final List loadFields; private final Source source; private long totalTerms = 0L; + /** + * Total number of pages emitted by this {@link Operator}. + */ + private long emittedPages = 0L; + /** + * The ongoing join or {@code null} none is ongoing at the moment. 
+ */ + private OngoingJoin ongoing = null; public LookupFromIndexOperator( String sessionId, @@ -108,7 +123,7 @@ public final class LookupFromIndexOperator extends AsyncOperator { } @Override - protected void performAsync(Page inputPage, ActionListener listener) { + protected void performAsync(Page inputPage, ActionListener listener) { final Block inputBlock = inputPage.getBlock(inputChannel); totalTerms += inputBlock.getTotalValueCount(); LookupFromIndexService.Request request = new LookupFromIndexService.Request( @@ -120,7 +135,47 @@ public final class LookupFromIndexOperator extends AsyncOperator { loadFields, source ); - lookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); + lookupService.lookupAsync( + request, + parentTask, + listener.map(pages -> new OngoingJoin(new RightChunkedLeftJoin(inputPage, loadFields.size()), pages.iterator())) + ); + } + + @Override + public Page getOutput() { + if (ongoing == null) { + // No ongoing join, start a new one if we can. + ongoing = fetchFromBuffer(); + if (ongoing == null) { + // Buffer empty, wait for the next time we're called. + return null; + } + } + if (ongoing.itr.hasNext()) { + // There's more to do in the ongoing join. + Page right = ongoing.itr.next(); + emittedPages++; + try { + return ongoing.join.join(right); + } finally { + right.releaseBlocks(); + } + } + // Current join is all done. Emit any trailing unmatched rows. 
+ Optional remaining = ongoing.join.noMoreRightHandPages(); + ongoing.close(); + ongoing = null; + if (remaining.isEmpty()) { + return null; + } + emittedPages++; + return remaining.get(); + } + + @Override + protected void releaseFetchedOnAnyThread(OngoingJoin ongoingJoin) { + ongoingJoin.releaseOnAnyThread(); } @Override @@ -138,15 +193,29 @@ public final class LookupFromIndexOperator extends AsyncOperator { + "]"; } + @Override + public boolean isFinished() { + return ongoing == null && super.isFinished(); + } + + @Override + public IsBlockedResult isBlocked() { + if (ongoing != null) { + return NOT_BLOCKED; + } + return super.isBlocked(); + } + @Override protected void doClose() { // TODO: Maybe create a sub-task as the parent task of all the lookup tasks // then cancel it when this operator terminates early (e.g., have enough result). + Releasables.close(ongoing); } @Override protected Operator.Status status(long receivedPages, long completedPages, long totalTimeInMillis) { - return new LookupFromIndexOperator.Status(receivedPages, completedPages, totalTimeInMillis, totalTerms); + return new LookupFromIndexOperator.Status(receivedPages, completedPages, totalTimeInMillis, totalTerms, emittedPages); } public static class Status extends AsyncOperator.Status { @@ -156,22 +225,29 @@ public final class LookupFromIndexOperator extends AsyncOperator { Status::new ); - final long totalTerms; + private final long totalTerms; + /** + * Total number of pages emitted by this {@link Operator}. 
+ */ + private final long emittedPages; - Status(long receivedPages, long completedPages, long totalTimeInMillis, long totalTerms) { + Status(long receivedPages, long completedPages, long totalTimeInMillis, long totalTerms, long emittedPages) { super(receivedPages, completedPages, totalTimeInMillis); this.totalTerms = totalTerms; + this.emittedPages = emittedPages; } Status(StreamInput in) throws IOException { super(in); this.totalTerms = in.readVLong(); + this.emittedPages = in.readVLong(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVLong(totalTerms); + out.writeVLong(emittedPages); } @Override @@ -179,11 +255,20 @@ public final class LookupFromIndexOperator extends AsyncOperator { return ENTRY.name; } + public long emittedPages() { + return emittedPages; + } + + public long totalTerms() { + return totalTerms; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - innerToXContent(builder); - builder.field("total_terms", totalTerms); + super.innerToXContent(builder); + builder.field("emitted_pages", emittedPages()); + builder.field("total_terms", totalTerms()); return builder.endObject(); } @@ -196,12 +281,26 @@ public final class LookupFromIndexOperator extends AsyncOperator { return false; } Status status = (Status) o; - return totalTerms == status.totalTerms; + return totalTerms == status.totalTerms && emittedPages == status.emittedPages; } @Override public int hashCode() { - return Objects.hash(super.hashCode(), totalTerms); + return Objects.hash(super.hashCode(), totalTerms, emittedPages); + } + } + + protected record OngoingJoin(RightChunkedLeftJoin join, Iterator itr) implements Releasable { + @Override + public void close() { + Releasables.close(join, Releasables.wrap(() -> Iterators.map(itr, page -> page::releaseBlocks))); + } + + public void releaseOnAnyThread() { + Releasables.close( + join::releaseOnAnyThread, + 
Releasables.wrap(() -> Iterators.map(itr, page -> () -> releasePageOnAnyThread(page))) + ); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 0bbfc6dd0ce9..ad65394fdfbd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.enrich; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; @@ -17,6 +18,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.lookup.QueryList; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; @@ -33,6 +35,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; +import java.util.Objects; /** * {@link LookupFromIndexService} performs lookup against a Lookup index for @@ -49,7 +52,16 @@ public class LookupFromIndexService extends AbstractLookupService pages, BlockFactory blockFactory) throws IOException { + return new LookupResponse(pages, blockFactory); + } + + @Override + protected AbstractLookupService.LookupResponse readLookupResponse(StreamInput in, BlockFactory blockFactory) throws IOException { + return new LookupResponse(in, blockFactory); + } + 
@Override protected String getRequiredPrivilege() { return null; @@ -171,4 +193,65 @@ public class LookupFromIndexService extends AbstractLookupService takePages() { + var p = pages; + pages = null; + return p; + } + + List pages() { + return pages; + } + + @Override + protected void innerRelease() { + if (pages != null) { + Releasables.closeExpectNoException(Releasables.wrap(Iterators.map(pages.iterator(), page -> page::releaseBlocks))); + } + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + LookupResponse that = (LookupResponse) o; + return Objects.equals(pages, that.pages); + } + + @Override + public int hashCode() { + return Objects.hashCode(pages); + } + + @Override + public String toString() { + return "LookupResponse{pages=" + pages + '}'; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 9a2e9398f52f..b9c2b92ea72d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -23,6 +23,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; @@ -50,13 +51,23 @@ public final class EvalMapper { private EvalMapper() {} @SuppressWarnings({ "rawtypes", "unchecked" }) - public static ExpressionEvaluator.Factory toEvaluator(Expression exp, 
Layout layout) { + public static ExpressionEvaluator.Factory toEvaluator(FoldContext foldCtx, Expression exp, Layout layout) { if (exp instanceof EvaluatorMapper m) { - return m.toEvaluator(e -> toEvaluator(e, layout)); + return m.toEvaluator(new EvaluatorMapper.ToEvaluator() { + @Override + public ExpressionEvaluator.Factory apply(Expression expression) { + return toEvaluator(foldCtx, expression, layout); + } + + @Override + public FoldContext foldCtx() { + return foldCtx; + } + }); } for (ExpressionMapper em : MAPPERS) { if (em.typeToken.isInstance(exp)) { - return em.map(exp, layout); + return em.map(foldCtx, exp, layout); } } throw new QlIllegalArgumentException("Unsupported expression [{}]", exp); @@ -64,9 +75,9 @@ public final class EvalMapper { static class BooleanLogic extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(BinaryLogic bc, Layout layout) { - var leftEval = toEvaluator(bc.left(), layout); - var rightEval = toEvaluator(bc.right(), layout); + public ExpressionEvaluator.Factory map(FoldContext foldCtx, BinaryLogic bc, Layout layout) { + var leftEval = toEvaluator(foldCtx, bc.left(), layout); + var rightEval = toEvaluator(foldCtx, bc.right(), layout); /** * Evaluator for the three-valued boolean expressions. 
* We can't generate these with the {@link Evaluator} annotation because that @@ -142,8 +153,8 @@ public final class EvalMapper { static class Nots extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(Not not, Layout layout) { - var expEval = toEvaluator(not.field(), layout); + public ExpressionEvaluator.Factory map(FoldContext foldCtx, Not not, Layout layout) { + var expEval = toEvaluator(foldCtx, not.field(), layout); return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical.NotEvaluator( not.source(), expEval.get(dvrCtx), @@ -154,7 +165,7 @@ public final class EvalMapper { static class Attributes extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(Attribute attr, Layout layout) { + public ExpressionEvaluator.Factory map(FoldContext foldCtx, Attribute attr, Layout layout) { record Attribute(int channel) implements ExpressionEvaluator { @Override public Block eval(Page page) { @@ -189,7 +200,7 @@ public final class EvalMapper { static class Literals extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(Literal lit, Layout layout) { + public ExpressionEvaluator.Factory map(FoldContext foldCtx, Literal lit, Layout layout) { record LiteralsEvaluator(DriverContext context, Literal lit) implements ExpressionEvaluator { @Override public Block eval(Page page) { @@ -246,8 +257,8 @@ public final class EvalMapper { static class IsNulls extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(IsNull isNull, Layout layout) { - var field = toEvaluator(isNull.field(), layout); + public ExpressionEvaluator.Factory map(FoldContext foldCtx, IsNull isNull, Layout layout) { + var field = toEvaluator(foldCtx, isNull.field(), layout); return new IsNullEvaluatorFactory(field); } @@ -294,8 +305,8 @@ public final class EvalMapper { static class IsNotNulls extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(IsNotNull isNotNull, Layout 
layout) { - return new IsNotNullEvaluatorFactory(toEvaluator(isNotNull.field(), layout)); + public ExpressionEvaluator.Factory map(FoldContext foldCtx, IsNotNull isNotNull, Layout layout) { + return new IsNotNullEvaluatorFactory(toEvaluator(foldCtx, isNotNull.field(), layout)); } record IsNotNullEvaluatorFactory(EvalOperator.ExpressionEvaluator.Factory field) implements ExpressionEvaluator.Factory { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index d8692faef529..5a8b3d32e7db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -7,11 +7,20 @@ package org.elasticsearch.xpack.esql.evaluator.mapper; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.indices.breaker.AllCircuitBreakerStats; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.CircuitBreakerStats; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.planner.Layout; import static org.elasticsearch.compute.data.BlockUtils.fromArrayRow; @@ -23,9 +32,12 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; public interface 
EvaluatorMapper { interface ToEvaluator { ExpressionEvaluator.Factory apply(Expression expression); + + FoldContext foldCtx(); } /** + * Convert this into an {@link ExpressionEvaluator}. *

* Note for implementors: * If you are implementing this function, you should call the passed-in @@ -35,8 +47,8 @@ public interface EvaluatorMapper { *

* Note for Callers: * If you are attempting to call this method, and you have an - * {@link Expression} and a {@link org.elasticsearch.xpack.esql.planner.Layout}, - * you likely want to call {@link org.elasticsearch.xpack.esql.evaluator.EvalMapper#toEvaluator(Expression, Layout)} + * {@link Expression} and a {@link Layout}, + * you likely want to call {@link EvalMapper#toEvaluator} * instead. On the other hand, if you already have something that * looks like the parameter for this method, you should call this method * with that function. @@ -56,19 +68,89 @@ public interface EvaluatorMapper { /** * Fold using {@link #toEvaluator} so you don't need a "by hand" - * implementation of fold. The evaluator that it makes is "funny" - * in that it'll always call {@link Expression#fold}, but that's - * good enough. + * implementation of {@link Expression#fold}. */ - default Object fold() { - return toJavaObject(toEvaluator(e -> driverContext -> new ExpressionEvaluator() { + default Object fold(Source source, FoldContext ctx) { + /* + * OK! So! We're going to build a bunch of *stuff* that so that we can + * call toEvaluator and use it without standing up an entire compute + * engine. + * + * Step 1 is creation of a `toEvaluator` which we'll soon use to turn + * the *children* of this Expression into ExpressionEvaluators. They + * have to be foldable or else we wouldn't have ended up here. So! + * We just call `fold` on them and turn the result of that into a + * Block. + * + * If the tree of expressions is pretty deep that `fold` call will + * likely end up being implemented by calling this method for the + * child. That's fine. Recursion is how you process trees. 
+ */ + ToEvaluator foldChildren = new ToEvaluator() { @Override - public Block eval(Page page) { - return fromArrayRow(driverContext.blockFactory(), e.fold())[0]; + public ExpressionEvaluator.Factory apply(Expression expression) { + return driverContext -> new ExpressionEvaluator() { + @Override + public Block eval(Page page) { + return fromArrayRow(driverContext.blockFactory(), expression.fold(ctx))[0]; + } + + @Override + public void close() {} + }; } @Override - public void close() {} - }).get(DriverContext.getLocalDriver()).eval(new Page(1)), 0); + public FoldContext foldCtx() { + return ctx; + } + }; + + /* + * Step 2 is to create a DriverContext that we can pass to the above. + * This DriverContext is mostly about delegating to the FoldContext. + * That'll cause us to break if we attempt to allocate a huge amount + * of memory. Neat. + * + * Specifically, we make a CircuitBreaker view of the FoldContext, then + * we wrap it in a CircuitBreakerService so we can feed it to a BigArray + * so we can feed *that* into a DriverContext. It's a bit hacky, but + * that's what's going on here. + */ + CircuitBreaker breaker = ctx.circuitBreakerView(source); + BigArrays bigArrays = new BigArrays(null, new CircuitBreakerService() { + @Override + public CircuitBreaker getBreaker(String name) { + if (name.equals(CircuitBreaker.REQUEST) == false) { + throw new UnsupportedOperationException(); + } + return breaker; + } + + @Override + public AllCircuitBreakerStats stats() { + throw new UnsupportedOperationException(); + } + + @Override + public CircuitBreakerStats stats(String name) { + throw new UnsupportedOperationException(); + } + }, CircuitBreaker.REQUEST).withCircuitBreaking(); + DriverContext driverCtx = new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + + /* + * Finally we can call toEvaluator on ourselves! It'll fold our children, + * convert the result into Blocks, and then we'll run that with the memory + * breaking DriverContext. 
+ * + * Then, finally finally, we turn the result into a java object to be compatible + * with the signature of `fold`. + */ + Block block = toEvaluator(foldChildren).get(driverCtx).eval(new Page(1)); + if (block.getPositionCount() != 1) { + throw new IllegalStateException("generated odd block from fold [" + block + "]"); + } + return toJavaObject(block, 0); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java index 5cd830058573..5a76080e7995 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.evaluator.mapper; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; import org.elasticsearch.xpack.esql.planner.Layout; @@ -19,5 +20,5 @@ public abstract class ExpressionMapper { typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass()); } - public abstract ExpressionEvaluator.Factory map(E expression, Layout layout); + public abstract ExpressionEvaluator.Factory map(FoldContext foldCtx, E expression, Layout layout); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 974f029eab2e..94913581f696 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -15,6 +15,7 @@ import 
org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.analysis.PreAnalyzer; import org.elasticsearch.xpack.esql.analysis.Verifier; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; @@ -56,6 +57,7 @@ public class PlanExecutor { EsqlQueryRequest request, String sessionId, Configuration cfg, + FoldContext foldContext, EnrichPolicyResolver enrichPolicyResolver, EsqlExecutionInfo executionInfo, IndicesExpressionGrouper indicesExpressionGrouper, @@ -71,7 +73,7 @@ public class PlanExecutor { enrichPolicyResolver, preAnalyzer, functionRegistry, - new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg)), + new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg, foldContext)), mapper, verifier, planningMetrics, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index 011fcaccf7fe..8aa7f697489c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; @@ -92,7 +93,8 @@ public abstract class AggregateFunction extends Function implements PostAnalysis } public boolean hasFilter() { - return filter != null && (filter.foldable() == false || Boolean.TRUE.equals(filter.fold()) == false); + return filter != null + && (filter.foldable() == false || Boolean.TRUE.equals(filter.fold(FoldContext.small() /* TODO remove me */)) == false); } public Expression filter() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 7436db9e00dd..3170ae8f132c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -19,6 +19,7 @@ import org.elasticsearch.compute.aggregation.CountDistinctIntAggregatorFunctionS import org.elasticsearch.compute.aggregation.CountDistinctLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -210,7 +211,9 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument @Override public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - int precision = this.precision == null ? DEFAULT_PRECISION : ((Number) this.precision.fold()).intValue(); + int precision = this.precision == null + ? 
DEFAULT_PRECISION + : ((Number) this.precision.fold(FoldContext.small() /* TODO remove me */)).intValue(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 0d57267da1e2..8c943c991d50 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.aggregation.PercentileDoubleAggregatorFunctionS import org.elasticsearch.compute.aggregation.PercentileIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.PercentileLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -170,7 +171,7 @@ public class Percentile extends NumericAggregate implements SurrogateExpression } private int percentileValue() { - return ((Number) percentile.fold()).intValue(); + return ((Number) percentile.fold(FoldContext.small() /* TODO remove me */)).intValue(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 87ac9b77a682..85ae65b6c5dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -156,7 +157,7 @@ public class Rate extends AggregateFunction implements OptionalArgument, ToAggre } final Object foldValue; try { - foldValue = unit.fold(); + foldValue = unit.fold(FoldContext.small() /* TODO remove me */); } catch (Exception e) { throw new IllegalArgumentException("function [" + sourceText() + "] has invalid unit [" + unit.sourceText() + "]"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index 40777b4d78dc..9be8c94266ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -21,6 +21,7 @@ import org.elasticsearch.compute.aggregation.TopIpAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.TopLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import 
org.elasticsearch.xpack.esql.core.tree.Source; @@ -115,11 +116,11 @@ public class Top extends AggregateFunction implements ToAggregator, SurrogateExp } private int limitValue() { - return (int) limitField().fold(); + return (int) limitField().fold(FoldContext.small() /* TODO remove me */); } private String orderRawValue() { - return BytesRefs.toString(orderField().fold()); + return BytesRefs.toString(orderField().fold(FoldContext.small() /* TODO remove me */)); } private boolean orderValue() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java index 49c68d002440..bab65653ba57 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java @@ -11,8 +11,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -34,7 +34,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.Param import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -public class WeightedAvg extends AggregateFunction implements SurrogateExpression, Validatable { +public class 
WeightedAvg extends AggregateFunction implements SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "WeightedAvg", @@ -115,9 +115,15 @@ public class WeightedAvg extends AggregateFunction implements SurrogateExpressio return resolution; } - if (weight.dataType() == DataType.NULL - || (weight.foldable() && (weight.fold() == null || weight.fold().equals(0) || weight.fold().equals(0.0)))) { - return new TypeResolution(format(null, invalidWeightError, SECOND, sourceText(), weight.foldable() ? weight.fold() : null)); + if (weight.dataType() == DataType.NULL) { + return new TypeResolution(format(null, invalidWeightError, SECOND, sourceText(), null)); + } + if (weight.foldable() == false) { + return TypeResolution.TYPE_RESOLVED; + } + Object weightVal = weight.fold(FoldContext.small()/* TODO remove me*/); + if (weightVal == null || weightVal.equals(0) || weightVal.equals(0.0)) { + return new TypeResolution(format(null, invalidWeightError, SECOND, sourceText(), weightVal)); } return TypeResolution.TYPE_RESOLVED; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 07c4bb282ba7..4da7c01139c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -12,6 +12,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import 
org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -111,7 +112,7 @@ public abstract class FullTextFunction extends Function implements TranslationAw * @return query expression as an object */ public Object queryAsObject() { - Object queryAsObject = query().fold(); + Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); if (queryAsObject instanceof BytesRef bytesRef) { return bytesRef.utf8ToString(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index fb9f8cff1b8b..93f23d2f7ad0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -13,11 +13,12 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; @@ -60,7 +61,7 @@ import static 
org.elasticsearch.xpack.esql.expression.predicate.operator.compari /** * Full text function that performs a {@link QueryStringQuery} . */ -public class Match extends FullTextFunction implements Validatable { +public class Match extends FullTextFunction implements PostOptimizationVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::readFrom); @@ -102,7 +103,8 @@ public class Match extends FullTextFunction implements Validatable { Use `MATCH` to perform a <> on the specified field. Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. - Match can be used on text fields, as well as other field types like boolean, dates, and numeric types. + Match can be used on fields from the text family like <> and <>, + as well as other field types like keyword, boolean, dates, and numeric types. For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. @@ -201,7 +203,7 @@ public class Match extends FullTextFunction implements Validatable { } @Override - public void validate(Failures failures) { + public void postLogicalOptimizationVerification(Failures failures) { Expression fieldExpression = field(); // Field may be converted to other data type (field_name :: data_type), so we need to check the original field if (fieldExpression instanceof AbstractConvertFunction convertFunction) { @@ -222,7 +224,7 @@ public class Match extends FullTextFunction implements Validatable { @Override public Object queryAsObject() { - Object queryAsObject = query().fold(); + Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); // Convert BytesRef to string for string-based values if (queryAsObject instanceof BytesRef bytesRef) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index ff8085cd1b44..e77f95073050 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -39,7 +39,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr /** * Full text function that performs a {@link TermQuery} . 
*/ -public class Term extends FullTextFunction implements Validatable { +public class Term extends FullTextFunction implements PostOptimizationVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Term", Term::readFrom); @@ -100,7 +100,7 @@ public class Term extends FullTextFunction implements Validatable { } @Override - public void validate(Failures failures) { + public void postLogicalOptimizationVerification(Failures failures) { if (field instanceof FieldAttribute == false) { failures.add( Failure.fail( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 12932ba8d6e1..7a3e080f5c83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -15,9 +15,10 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Foldables; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -56,7 +57,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeTo * from a number of desired buckets (as a hint) and a 
range (auto mode). * In the former case, two parameters will be provided, in the latter four. */ -public class Bucket extends GroupingFunction implements Validatable, TwoOptionalArguments { +public class Bucket extends GroupingFunction implements PostOptimizationVerificationAware, TwoOptionalArguments { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Bucket", Bucket::new); // TODO maybe we should just cover the whole of representable dates here - like ten years, 100 years, 1000 years, all the way up. @@ -255,25 +256,25 @@ public class Bucket extends GroupingFunction implements Validatable, TwoOptional if (field.dataType() == DataType.DATETIME || field.dataType() == DataType.DATE_NANOS) { Rounding.Prepared preparedRounding; if (buckets.dataType().isWholeNumber()) { - int b = ((Number) buckets.fold()).intValue(); - long f = foldToLong(from); - long t = foldToLong(to); + int b = ((Number) buckets.fold(toEvaluator.foldCtx())).intValue(); + long f = foldToLong(toEvaluator.foldCtx(), from); + long t = foldToLong(toEvaluator.foldCtx(), to); preparedRounding = new DateRoundingPicker(b, f, t).pickRounding().prepareForUnknown(); } else { assert DataType.isTemporalAmount(buckets.dataType()) : "Unexpected span data type [" + buckets.dataType() + "]"; - preparedRounding = DateTrunc.createRounding(buckets.fold(), DEFAULT_TZ); + preparedRounding = DateTrunc.createRounding(buckets.fold(toEvaluator.foldCtx()), DEFAULT_TZ); } return DateTrunc.evaluator(field.dataType(), source(), toEvaluator.apply(field), preparedRounding); } if (field.dataType().isNumeric()) { double roundTo; if (from != null) { - int b = ((Number) buckets.fold()).intValue(); - double f = ((Number) from.fold()).doubleValue(); - double t = ((Number) to.fold()).doubleValue(); + int b = ((Number) buckets.fold(toEvaluator.foldCtx())).intValue(); + double f = ((Number) from.fold(toEvaluator.foldCtx())).doubleValue(); + double t = ((Number) 
to.fold(toEvaluator.foldCtx())).doubleValue(); roundTo = pickRounding(b, f, t); } else { - roundTo = ((Number) buckets.fold()).doubleValue(); + roundTo = ((Number) buckets.fold(toEvaluator.foldCtx())).doubleValue(); } Literal rounding = new Literal(source(), roundTo, DataType.DOUBLE); @@ -408,7 +409,7 @@ public class Bucket extends GroupingFunction implements Validatable, TwoOptional } @Override - public void validate(Failures failures) { + public void postLogicalOptimizationVerification(Failures failures) { String operation = sourceText(); failures.add(isFoldable(buckets, operation, SECOND)) @@ -416,8 +417,8 @@ public class Bucket extends GroupingFunction implements Validatable, TwoOptional .add(to != null ? isFoldable(to, operation, FOURTH) : null); } - private long foldToLong(Expression e) { - Object value = Foldables.valueOf(e); + private long foldToLong(FoldContext ctx, Expression e) { + Object value = Foldables.valueOf(ctx, e); return DataType.isDateTime(e.dataType()) ? ((Number) value).longValue() : dateTimeToLong(((BytesRef) value).utf8ToString()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index a100dd64915f..570ce7a96dd6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.core.expression.Expression; import 
org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -37,7 +36,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr * For the implementation, see {@link org.elasticsearch.compute.aggregation.blockhash.CategorizeBlockHash} *

*/ -public class Categorize extends GroupingFunction implements Validatable { +public class Categorize extends GroupingFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "Categorize", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java index 0fee65d32ca9..fd025e5e67a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.esql.expression.function.grouping; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -28,8 +29,8 @@ public abstract class GroupingFunction extends Function implements EvaluatorMapp } @Override - public Object fold() { - return EvaluatorMapper.super.fold(); + public Object fold(FoldContext ctx) { + return EvaluatorMapper.super.fold(source(), ctx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index 404ce7e3900c..85d15f82f458 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java 
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -34,7 +35,7 @@ public abstract class EsqlScalarFunction extends ScalarFunction implements Evalu } @Override - public Object fold() { - return EvaluatorMapper.super.fold(); + public Object fold(FoldContext ctx) { + return EvaluatorMapper.super.fold(source(), ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 824f02ca7ccb..236e625f7abe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -23,6 +23,7 @@ import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -227,7 +228,7 @@ public final class Case extends EsqlScalarFunction { if (condition.condition.foldable() == false) { return false; } - if (Boolean.TRUE.equals(condition.condition.fold())) { + if 
(Boolean.TRUE.equals(condition.condition.fold(FoldContext.small() /* TODO remove me - use literal true?*/))) { /* * `fold` can make four things here: * 1. `TRUE` @@ -264,7 +265,8 @@ public final class Case extends EsqlScalarFunction { * And those two combine so {@code EVAL c=CASE(false, foo, b, bar, true, bort, el)} becomes * {@code EVAL c=CASE(b, bar, bort)}. */ - public Expression partiallyFold() { + public Expression partiallyFold(FoldContext ctx) { + // TODO don't throw away the results of any `fold`. That might mean looking for literal TRUE on the conditions. List newChildren = new ArrayList<>(children().size()); boolean modified = false; for (Condition condition : conditions) { @@ -274,7 +276,7 @@ public final class Case extends EsqlScalarFunction { continue; } modified = true; - if (Boolean.TRUE.equals(condition.condition.fold())) { + if (Boolean.TRUE.equals(condition.condition.fold(ctx))) { /* * `fold` can make four things here: * 1. `TRUE` diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FoldablesConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FoldablesConvertFunction.java index 8f43a6481db0..57f362f86ff4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FoldablesConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FoldablesConvertFunction.java @@ -8,9 +8,10 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; +import 
org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -26,7 +27,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.foldToTemp * Base class for functions that converts a constant into an interval type - DATE_PERIOD or TIME_DURATION. * The functions will be folded at the end of LogicalPlanOptimizer by the coordinator, it does not reach data node. */ -public abstract class FoldablesConvertFunction extends AbstractConvertFunction implements Validatable { +public abstract class FoldablesConvertFunction extends AbstractConvertFunction implements PostOptimizationVerificationAware { protected FoldablesConvertFunction(Source source, Expression field) { super(source, field); @@ -65,12 +66,12 @@ public abstract class FoldablesConvertFunction extends AbstractConvertFunction i } @Override - public final Object fold() { - return foldToTemporalAmount(field(), sourceText(), dataType()); + public final Object fold(FoldContext ctx) { + return foldToTemporalAmount(ctx, field(), sourceText(), dataType()); } @Override - public final void validate(Failures failures) { + public final void postLogicalOptimizationVerification(Failures failures) { failures.add(isFoldable(field(), sourceText(), null)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java index 8c4375b424cd..837475fca7e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java @@ -63,9 +63,9 @@ public class ToDateNanos extends AbstractConvertFunction { @FunctionInfo( returnType = "date_nanos", description = 
"Converts an input to a nanosecond-resolution date value (aka date_nanos).", - note = "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers " - + "cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", - preview = true + note = "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attepting to convert" + + "values outside of that range will result in null with a warning.. Additionally, integers cannot be converted into date " + + "nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch." ) public ToDateNanos( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index f6a23a5d5962..b588832aba4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -232,7 +232,7 @@ public class DateDiff extends EsqlScalarFunction { if (unit.foldable()) { try { - Part datePartField = Part.resolve(((BytesRef) unit.fold()).utf8ToString()); + Part datePartField = Part.resolve(((BytesRef) unit.fold(toEvaluator.foldCtx())).utf8ToString()); return new DateDiffConstantEvaluator.Factory(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); } catch (IllegalArgumentException e) { throw new InvalidArgumentException("invalid unit format for [{}]: {}", sourceText(), e.getMessage()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java 
index 501dfd431f10..7fc5d8244180 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -110,9 +111,9 @@ public class DateExtract extends EsqlConfigurationFunction { public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = toEvaluator.apply(children().get(1)); if (children().get(0).foldable()) { - ChronoField chrono = chronoField(); + ChronoField chrono = chronoField(toEvaluator.foldCtx()); if (chrono == null) { - BytesRef field = (BytesRef) children().get(0).fold(); + BytesRef field = (BytesRef) children().get(0).fold(toEvaluator.foldCtx()); throw new InvalidArgumentException("invalid date field for [{}]: {}", sourceText(), field.utf8ToString()); } return new DateExtractConstantEvaluator.Factory(source(), fieldEvaluator, chrono, configuration().zoneId()); @@ -121,14 +122,14 @@ public class DateExtract extends EsqlConfigurationFunction { return new DateExtractEvaluator.Factory(source(), fieldEvaluator, chronoEvaluator, configuration().zoneId()); } - private ChronoField chronoField() { + private ChronoField chronoField(FoldContext ctx) { // chronoField's never checked (the return is). The foldability test is done twice and type is checked in resolveType() already. // TODO: move the slimmed down code here to toEvaluator? 
if (chronoField == null) { Expression field = children().get(0); try { if (field.foldable() && DataType.isString(field.dataType())) { - chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold()); + chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold(ctx)); } } catch (Exception e) { return null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 60bc014ccbee..d30e99794a44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -14,8 +14,10 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -33,10 +35,11 @@ import java.util.Locale; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isDate; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.nanoTimeToString; public class DateFormat extends EsqlConfigurationFunction implements OptionalArgument { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -55,10 +58,14 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg ) public DateFormat( Source source, - @Param(optional = true, name = "dateFormat", type = { "keyword", "text" }, description = """ + @Param(optional = true, name = "dateFormat", type = { "keyword", "text", "date", "date_nanos" }, description = """ Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`, the function returns `null`.""") Expression format, - @Param(name = "date", type = { "date" }, description = "Date expression. If `null`, the function returns `null`.") Expression date, + @Param( + name = "date", + type = { "date", "date_nanos" }, + description = "Date expression. If `null`, the function returns `null`." + ) Expression date, Configuration configuration ) { super(source, date != null ? List.of(format, date) : List.of(format), configuration); @@ -114,7 +121,9 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg } } - resolution = isDate(field, sourceText(), format == null ? FIRST : SECOND); + String operationName = sourceText(); + TypeResolutions.ParamOrdinal paramOrd = format == null ? 
FIRST : SECOND; + resolution = TypeResolutions.isType(field, DataType::isDate, operationName, paramOrd, "datetime or date_nanos"); if (resolution.unresolved()) { return resolution; } @@ -127,31 +136,63 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg return field.foldable() && (format == null || format.foldable()); } - @Evaluator(extraName = "Constant") - static BytesRef process(long val, @Fixed DateFormatter formatter) { + @Evaluator(extraName = "MillisConstant") + static BytesRef processMillis(long val, @Fixed DateFormatter formatter) { return new BytesRef(dateTimeToString(val, formatter)); } - @Evaluator - static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { + @Evaluator(extraName = "Millis") + static BytesRef processMillis(long val, BytesRef formatter, @Fixed Locale locale) { return new BytesRef(dateTimeToString(val, toFormatter(formatter, locale))); } + @Evaluator(extraName = "NanosConstant") + static BytesRef processNanos(long val, @Fixed DateFormatter formatter) { + return new BytesRef(nanoTimeToString(val, formatter)); + } + + @Evaluator(extraName = "Nanos") + static BytesRef processNanos(long val, BytesRef formatter, @Fixed Locale locale) { + return new BytesRef(nanoTimeToString(val, toFormatter(formatter, locale))); + } + + private ExpressionEvaluator.Factory getConstantEvaluator( + DataType dateType, + EvalOperator.ExpressionEvaluator.Factory fieldEvaluator, + DateFormatter formatter + ) { + if (dateType == DATE_NANOS) { + return new DateFormatNanosConstantEvaluator.Factory(source(), fieldEvaluator, formatter); + } + return new DateFormatMillisConstantEvaluator.Factory(source(), fieldEvaluator, formatter); + } + + private ExpressionEvaluator.Factory getEvaluator( + DataType dateType, + EvalOperator.ExpressionEvaluator.Factory fieldEvaluator, + EvalOperator.ExpressionEvaluator.Factory formatEvaluator + ) { + if (dateType == DATE_NANOS) { + return new DateFormatNanosEvaluator.Factory(source(), 
fieldEvaluator, formatEvaluator, configuration().locale()); + } + return new DateFormatMillisEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, configuration().locale()); + } + @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); + return getConstantEvaluator(field().dataType(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); } if (DataType.isString(format.dataType()) == false) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { - DateFormatter formatter = toFormatter(format.fold(), configuration().locale()); - return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, formatter); + DateFormatter formatter = toFormatter(format.fold(toEvaluator.foldCtx()), configuration().locale()); + return getConstantEvaluator(field.dataType(), fieldEvaluator, formatter); } var formatEvaluator = toEvaluator.apply(format); - return new DateFormatEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, configuration().locale()); + return getEvaluator(field().dataType(), fieldEvaluator, formatEvaluator); } private static DateFormatter toFormatter(Object format, Locale locale) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index e09fabab98d0..7c38b54ed232 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -143,7 +143,7 @@ public class DateParse extends EsqlScalarFunction implements OptionalArgument { } if 
(format.foldable()) { try { - DateFormatter formatter = toFormatter(format.fold()); + DateFormatter formatter = toFormatter(format.fold(toEvaluator.foldCtx())); return new DateParseConstantEvaluator.Factory(source(), fieldEvaluator, formatter); } catch (IllegalArgumentException e) { throw new InvalidArgumentException(e, "invalid date pattern for [{}]: {}", sourceText(), e.getMessage()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index a35b67d7ac3f..7983c38cc428 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -225,7 +225,7 @@ public class DateTrunc extends EsqlScalarFunction { } Object foldedInterval; try { - foldedInterval = interval.fold(); + foldedInterval = interval.fold(toEvaluator.foldCtx()); if (foldedInterval == null) { throw new IllegalArgumentException("Interval cannot not be null"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java index d259fc6ae57c..74c2da450995 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import 
org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -59,7 +60,7 @@ public class Now extends EsqlConfigurationFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { return now; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java index 757b67b47ce7..e1eceef7ed1f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/E.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -49,7 +50,7 @@ public class E extends DoubleConstantFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { return Math.E; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java index 90a4f1f091e9..32b7a0ab88b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pi.java @@ -11,6 +11,7 @@ import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -49,7 +50,7 @@ public class Pi extends DoubleConstantFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { return Math.PI; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java index 17e5b027270d..1a7669b7391e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tau.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -51,7 +52,7 @@ public class Tau extends DoubleConstantFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { return TAU; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index 1996744a7656..26211258e6ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -91,8 +92,8 @@ public class MvConcat extends BinaryScalarFunction implements EvaluatorMapper { } @Override - public Object fold() { - return EvaluatorMapper.super.fold(); + public Object fold(FoldContext ctx) { + return EvaluatorMapper.super.fold(source(), ctx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java index 4dd447f93888..d5093964145b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java @@ -115,7 +115,7 @@ public class MvPSeriesWeightedSum extends EsqlScalarFunction implements Evaluato source(), toEvaluator.apply(field), ctx -> new CompensatedSum(), - (Double) p.fold() + (Double) 
p.fold(toEvaluator.foldCtx()) ); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index f4f9679dc370..4a04524d1b23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -187,8 +187,8 @@ public class MvSlice extends EsqlScalarFunction implements OptionalArgument, Eva @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (start.foldable() && end.foldable()) { - int startOffset = stringToInt(String.valueOf(start.fold())); - int endOffset = stringToInt(String.valueOf(end.fold())); + int startOffset = stringToInt(String.valueOf(start.fold(toEvaluator.foldCtx()))); + int endOffset = stringToInt(String.valueOf(end.fold(toEvaluator.foldCtx()))); checkStartEnd(startOffset, endOffset); } return switch (PlannerUtils.toElementType(field.dataType())) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index 2286a1357ced..b68718acfcd0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -29,10 +29,11 @@ import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeBytesRef; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeDouble; 
import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeLong; -import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -58,7 +59,7 @@ import static org.elasticsearch.xpack.esql.expression.Validations.isFoldable; /** * Sorts a multivalued field in lexicographical order. */ -public class MvSort extends EsqlScalarFunction implements OptionalArgument, Validatable { +public class MvSort extends EsqlScalarFunction implements OptionalArgument, PostOptimizationVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "MvSort", MvSort::new); private final Expression field, order; @@ -155,12 +156,12 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali sourceText(), ASC.value(), DESC.value(), - ((BytesRef) order.fold()).utf8ToString() + ((BytesRef) order.fold(toEvaluator.foldCtx())).utf8ToString() ) ); } if (order != null && order.foldable()) { - ordering = ((BytesRef) order.fold()).utf8ToString().equalsIgnoreCase((String) ASC.value()); + ordering = ((BytesRef) order.fold(toEvaluator.foldCtx())).utf8ToString().equalsIgnoreCase((String) ASC.value()); } return switch (PlannerUtils.toElementType(field.dataType())) { @@ -230,7 +231,7 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali } @Override - public void validate(Failures failures) { + public void 
postLogicalOptimizationVerification(Failures failures) { if (order == null) { return; } @@ -238,7 +239,14 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali failures.add(isFoldable(order, operation, SECOND)); if (isValidOrder() == false) { failures.add( - Failure.fail(order, INVALID_ORDER_ERROR, sourceText(), ASC.value(), DESC.value(), ((BytesRef) order.fold()).utf8ToString()) + Failure.fail( + order, + INVALID_ORDER_ERROR, + sourceText(), + ASC.value(), + DESC.value(), + ((BytesRef) order.fold(FoldContext.small() /* TODO remove me */)).utf8ToString() + ) ); } } @@ -246,7 +254,7 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali private boolean isValidOrder() { boolean isValidOrder = true; if (order != null && order.foldable()) { - Object obj = order.fold(); + Object obj = order.fold(FoldContext.small() /* TODO remove me */); String o = null; if (obj instanceof BytesRef ob) { o = ob.utf8ToString(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 9189c6a7b8f7..b15d04aa792d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -26,6 +26,7 @@ import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; @@ -214,10 +215,10 @@ public class SpatialContains extends SpatialRelatesFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { try { - GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType(), left()); - Geometry rightGeom = makeGeometryFromLiteral(right()); + GeometryDocValueReader docValueReader = asGeometryDocValueReader(ctx, crsType(), left()); + Geometry rightGeom = makeGeometryFromLiteral(ctx, right()); Component2D[] components = asLuceneComponent2Ds(crsType(), rightGeom); return (crsType() == SpatialCrsType.GEO) ? GEO.geometryRelatesGeometries(docValueReader, components) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java index ee78f50c4d6b..3e16fa163fcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -23,6 +23,7 @@ import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -129,10 +130,10 @@ public class SpatialDisjoint extends SpatialRelatesFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { try { - GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType(), left()); - Component2D 
component2D = asLuceneComponent2D(crsType(), right()); + GeometryDocValueReader docValueReader = asGeometryDocValueReader(ctx, crsType(), left()); + Component2D component2D = asLuceneComponent2D(ctx, crsType(), right()); return (crsType() == SpatialCrsType.GEO) ? GEO.geometryRelatesGeometry(docValueReader, component2D) : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java index 1a51af8dfeeb..dcd53075cf69 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java @@ -171,7 +171,11 @@ abstract class SpatialEvaluatorFactory { @Override public EvalOperator.ExpressionEvaluator.Factory get(SpatialSourceSupplier s, EvaluatorMapper.ToEvaluator toEvaluator) { - return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2D(s.crsType(), s.right())); + return factoryCreator.apply( + s.source(), + toEvaluator.apply(s.left()), + asLuceneComponent2D(toEvaluator.foldCtx(), s.crsType(), s.right()) + ); } } @@ -197,7 +201,11 @@ abstract class SpatialEvaluatorFactory { @Override public EvalOperator.ExpressionEvaluator.Factory get(SpatialSourceSupplier s, EvaluatorMapper.ToEvaluator toEvaluator) { - return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2Ds(s.crsType(), s.right())); + return factoryCreator.apply( + s.source(), + toEvaluator.apply(s.left()), + asLuceneComponent2Ds(toEvaluator.foldCtx(), s.crsType(), s.right()) + ); } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 8d54e5ee443c..601550cd173b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -23,6 +23,7 @@ import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -127,10 +128,10 @@ public class SpatialIntersects extends SpatialRelatesFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { try { - GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType(), left()); - Component2D component2D = asLuceneComponent2D(crsType(), right()); + GeometryDocValueReader docValueReader = asGeometryDocValueReader(ctx, crsType(), left()); + Component2D component2D = asLuceneComponent2D(ctx, crsType(), right()); return (crsType() == SpatialCrsType.GEO) ? 
GEO.geometryRelatesGeometry(docValueReader, component2D) : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java index 6ae99ea8165c..1b06c6dfd3dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java @@ -29,6 +29,7 @@ import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; @@ -42,8 +43,8 @@ import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; public class SpatialRelatesUtils { /** Converts a {@link Expression} into a {@link Component2D}. */ - static Component2D asLuceneComponent2D(BinarySpatialFunction.SpatialCrsType crsType, Expression expression) { - return asLuceneComponent2D(crsType, makeGeometryFromLiteral(expression)); + static Component2D asLuceneComponent2D(FoldContext ctx, BinarySpatialFunction.SpatialCrsType crsType, Expression expression) { + return asLuceneComponent2D(crsType, makeGeometryFromLiteral(ctx, expression)); } /** Converts a {@link Geometry} into a {@link Component2D}. */ @@ -66,8 +67,8 @@ public class SpatialRelatesUtils { * Converts a {@link Expression} at a given {@code position} into a {@link Component2D} array. 
* The reason for generating an array instead of a single component is for multi-shape support with ST_CONTAINS. */ - static Component2D[] asLuceneComponent2Ds(BinarySpatialFunction.SpatialCrsType crsType, Expression expression) { - return asLuceneComponent2Ds(crsType, makeGeometryFromLiteral(expression)); + static Component2D[] asLuceneComponent2Ds(FoldContext ctx, BinarySpatialFunction.SpatialCrsType crsType, Expression expression) { + return asLuceneComponent2Ds(crsType, makeGeometryFromLiteral(ctx, expression)); } /** @@ -90,9 +91,12 @@ public class SpatialRelatesUtils { } /** Converts a {@link Expression} into a {@link GeometryDocValueReader} */ - static GeometryDocValueReader asGeometryDocValueReader(BinarySpatialFunction.SpatialCrsType crsType, Expression expression) - throws IOException { - Geometry geometry = makeGeometryFromLiteral(expression); + static GeometryDocValueReader asGeometryDocValueReader( + FoldContext ctx, + BinarySpatialFunction.SpatialCrsType crsType, + Expression expression + ) throws IOException { + Geometry geometry = makeGeometryFromLiteral(ctx, expression); if (crsType == BinarySpatialFunction.SpatialCrsType.GEO) { return asGeometryDocValueReader( CoordinateEncoder.GEO, @@ -167,8 +171,8 @@ public class SpatialRelatesUtils { * This function is used in two places, when evaluating a spatial constant in the SpatialRelatesFunction, as well as when * we do lucene-pushdown of spatial functions. 
*/ - public static Geometry makeGeometryFromLiteral(Expression expr) { - return makeGeometryFromLiteralValue(valueOf(expr), expr.dataType()); + public static Geometry makeGeometryFromLiteral(FoldContext ctx, Expression expr) { + return makeGeometryFromLiteralValue(valueOf(ctx, expr), expr.dataType()); } private static Geometry makeGeometryFromLiteralValue(Object value, DataType dataType) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index 2005709cd37e..9fcece1ce65b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -23,6 +23,7 @@ import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -129,10 +130,10 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx } @Override - public Object fold() { + public Object fold(FoldContext ctx) { try { - GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType(), left()); - Component2D component2D = asLuceneComponent2D(crsType(), right()); + GeometryDocValueReader docValueReader = asGeometryDocValueReader(ctx, crsType(), left()); + Component2D component2D = asLuceneComponent2D(ctx, crsType(), right()); return (crsType() == SpatialCrsType.GEO) ? 
GEO.geometryRelatesGeometry(docValueReader, component2D) : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java index 3cf042a2db82..f0c25e3289cc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java @@ -23,6 +23,7 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -280,18 +281,18 @@ public class StDistance extends BinarySpatialFunction implements EvaluatorMapper } @Override - public Object fold() { - var leftGeom = makeGeometryFromLiteral(left()); - var rightGeom = makeGeometryFromLiteral(right()); + public Object fold(FoldContext ctx) { + var leftGeom = makeGeometryFromLiteral(ctx, left()); + var rightGeom = makeGeometryFromLiteral(ctx, right()); return (crsType() == SpatialCrsType.GEO) ? 
GEO.distance(leftGeom, rightGeom) : CARTESIAN.distance(leftGeom, rightGeom); } @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (right().foldable()) { - return toEvaluator(toEvaluator, left(), makeGeometryFromLiteral(right()), leftDocValues); + return toEvaluator(toEvaluator, left(), makeGeometryFromLiteral(toEvaluator.foldCtx(), right()), leftDocValues); } else if (left().foldable()) { - return toEvaluator(toEvaluator, right(), makeGeometryFromLiteral(left()), rightDocValues); + return toEvaluator(toEvaluator, right(), makeGeometryFromLiteral(toEvaluator.foldCtx(), left()), rightDocValues); } else { EvalOperator.ExpressionEvaluator.Factory leftE = toEvaluator.apply(left()); EvalOperator.ExpressionEvaluator.Factory rightE = toEvaluator.apply(right()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java index 52d33c0fc9d3..be0a7b2fe27b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java @@ -146,7 +146,7 @@ public class Hash extends EsqlScalarFunction { if (algorithm.foldable()) { try { // hash function is created here in order to validate the algorithm is valid before evaluator is created - var hf = HashFunction.create((BytesRef) algorithm.fold()); + var hf = HashFunction.create((BytesRef) algorithm.fold(toEvaluator.foldCtx())); return new HashConstantEvaluator.Factory( source(), context -> new BreakingBytesRefBuilder(context.breaker(), "hash"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index 996c90a8e40b..fb0aac0c85b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -94,8 +95,8 @@ public class RLike extends org.elasticsearch.xpack.esql.core.expression.predicat } @Override - public Boolean fold() { - return (Boolean) EvaluatorMapper.super.fold(); + public Boolean fold(FoldContext ctx) { + return (Boolean) EvaluatorMapper.super.fold(source(), ctx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java index e91f03de3dd7..363991d1556f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java @@ -151,7 +151,7 @@ public class Repeat extends EsqlScalarFunction implements OptionalArgument { ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); if (number.foldable()) { - int num = (int) number.fold(); + int num = (int) number.fold(toEvaluator.foldCtx()); if (num < 0) { throw new 
IllegalArgumentException("Number parameter cannot be negative, found [" + number + "]"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index 4fa191244cb4..4b963b794aef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -152,7 +152,7 @@ public class Replace extends EsqlScalarFunction { if (regex.foldable() && regex.dataType() == DataType.KEYWORD) { Pattern regexPattern; try { - regexPattern = Pattern.compile(((BytesRef) regex.fold()).utf8ToString()); + regexPattern = Pattern.compile(((BytesRef) regex.fold(toEvaluator.foldCtx())).utf8ToString()); } catch (PatternSyntaxException pse) { // TODO this is not right (inconsistent). 
See also https://github.com/elastic/elasticsearch/issues/100038 // this should generate a header warning and return null (as do the rest of this functionality in evaluators), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java index 3b9a46696691..e46c0a730431 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java @@ -113,7 +113,7 @@ public class Space extends UnaryScalarFunction { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (field.foldable()) { - Object folded = field.fold(); + Object folded = field.fold(toEvaluator.foldCtx()); if (folded instanceof Integer num) { checkNumber(num); return toEvaluator.apply(new Literal(source(), " ".repeat(num), KEYWORD)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 24762122f755..d0c1035978ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import 
org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -102,8 +103,8 @@ public class Split extends BinaryScalarFunction implements EvaluatorMapper { } @Override - public Object fold() { - return EvaluatorMapper.super.fold(); + public Object fold(FoldContext ctx) { + return EvaluatorMapper.super.fold(source(), ctx); } @Evaluator(extraName = "SingleByte") @@ -163,7 +164,7 @@ public class Split extends BinaryScalarFunction implements EvaluatorMapper { if (right().foldable() == false) { return new SplitVariableEvaluator.Factory(source(), str, toEvaluator.apply(right()), context -> new BytesRef()); } - BytesRef delim = (BytesRef) right().fold(); + BytesRef delim = (BytesRef) right().fold(toEvaluator.foldCtx()); checkDelimiter(delim); return new SplitSingleByteEvaluator.Factory(source(), str, delim.bytes[delim.offset], context -> new BytesRef()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index d2edb0f92e8f..65455c708cc9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -100,8 +101,8 @@ public class WildcardLike extends 
org.elasticsearch.xpack.esql.core.expression.p } @Override - public Boolean fold() { - return (Boolean) EvaluatorMapper.super.fold(); + public Boolean fold(FoldContext ctx) { + return (Boolean) EvaluatorMapper.super.fold(source(), ctx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java index 1d03f09c8640..712dff3024de 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java @@ -10,10 +10,12 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.util; import org.elasticsearch.Build; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -84,15 +86,15 @@ public class Delay extends UnaryScalarFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { return null; } - private long msValue() { + private long msValue(FoldContext ctx) { if (field().foldable() == false) { throw new IllegalArgumentException("function [" + sourceText() + "] has invalid argument [" + field().sourceText() + "]"); } - var 
ms = field().fold(); + var ms = field().fold(ctx); if (ms instanceof Duration duration) { return duration.toMillis(); } @@ -101,21 +103,42 @@ public class Delay extends UnaryScalarFunction { @Override public ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator) { - return new DelayEvaluator.Factory(source(), msValue()); + return context -> new DelayEvaluator(context, msValue(toEvaluator.foldCtx())); } - @Evaluator - static boolean process(@Fixed long ms) { - // Only activate in snapshot builds - if (Build.current().isSnapshot()) { + static final class DelayEvaluator implements ExpressionEvaluator { + private final DriverContext driverContext; + private final long ms; + + DelayEvaluator(DriverContext driverContext, long ms) { + if (Build.current().isSnapshot() == false) { + throw new IllegalArgumentException("Delay function is only available in snapshot builds"); + } + this.driverContext = driverContext; + this.ms = ms; + } + + @Override + public Block eval(Page page) { + int positionCount = page.getPositionCount(); + for (int p = 0; p < positionCount; p++) { + delay(ms); + } + return driverContext.blockFactory().newConstantBooleanBlockWith(true, positionCount); + } + + private void delay(long ms) { try { + driverContext.checkForEarlyTermination(); Thread.sleep(ms); } catch (InterruptedException e) { - return true; + Thread.currentThread().interrupt(); } - } else { - throw new IllegalArgumentException("Delay function is only available in snapshot builds"); } - return true; + + @Override + public void close() { + + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 8bb166fac60b..424c080c905e 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.ExceptionUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -111,7 +112,7 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio /** * Override this to allow processing literals of type {@link DataType#DATE_PERIOD} when folding constants. - * Used in {@link DateTimeArithmeticOperation#fold()}. + * Used in {@link DateTimeArithmeticOperation#fold}. * @param left the left period * @param right the right period * @return the result of the evaluation @@ -120,7 +121,7 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio /** * Override this to allow processing literals of type {@link DataType#TIME_DURATION} when folding constants. - * Used in {@link DateTimeArithmeticOperation#fold()}. + * Used in {@link DateTimeArithmeticOperation#fold}. 
* @param left the left duration * @param right the right duration * @return the result of the evaluation @@ -128,13 +129,13 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio abstract Duration fold(Duration left, Duration right); @Override - public final Object fold() { + public final Object fold(FoldContext ctx) { DataType leftDataType = left().dataType(); DataType rightDataType = right().dataType(); if (leftDataType == DATE_PERIOD && rightDataType == DATE_PERIOD) { // Both left and right expressions are temporal amounts; we can assume they are both foldable. - var l = left().fold(); - var r = right().fold(); + var l = left().fold(ctx); + var r = right().fold(ctx); if (l instanceof Collection || r instanceof Collection) { return null; } @@ -148,8 +149,8 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio } if (leftDataType == TIME_DURATION && rightDataType == TIME_DURATION) { // Both left and right expressions are temporal amounts; we can assume they are both foldable. 
- Duration l = (Duration) left().fold(); - Duration r = (Duration) right().fold(); + Duration l = (Duration) left().fold(ctx); + Duration r = (Duration) right().fold(ctx); try { return fold(l, r); } catch (ArithmeticException e) { @@ -161,7 +162,7 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio if (isNull(leftDataType) || isNull(rightDataType)) { return null; } - return super.fold(); + return super.fold(ctx); } @Override @@ -178,7 +179,11 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio temporalAmountArgument = left(); } - return millisEvaluator.apply(source(), toEvaluator.apply(datetimeArgument), (TemporalAmount) temporalAmountArgument.fold()); + return millisEvaluator.apply( + source(), + toEvaluator.apply(datetimeArgument), + (TemporalAmount) temporalAmountArgument.fold(toEvaluator.foldCtx()) + ); } else if (dataType() == DATE_NANOS) { // One of the arguments has to be a date_nanos and the other a temporal amount. 
Expression dateNanosArgument; @@ -191,7 +196,11 @@ public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperatio temporalAmountArgument = left(); } - return nanosEvaluator.apply(source(), toEvaluator.apply(dateNanosArgument), (TemporalAmount) temporalAmountArgument.fold()); + return nanosEvaluator.apply( + source(), + toEvaluator.apply(dateNanosArgument), + (TemporalAmount) temporalAmountArgument.fold(toEvaluator.foldCtx()) + ); } else { return super.toEvaluator(toEvaluator); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 74394d796855..e3248665ad48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryArithmeticOperation; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -120,8 +121,8 @@ public abstract class EsqlArithmeticOperation extends ArithmeticOperation implem } @Override - public Object fold() { - return EvaluatorMapper.super.fold(); + public Object fold(FoldContext ctx) { + return EvaluatorMapper.super.fold(source(), ctx); } public DataType dataType() { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index fb32282005f0..6663ccf0ef7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.ExceptionUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -87,12 +88,12 @@ public class Neg extends UnaryScalarFunction { } @Override - public final Object fold() { + public final Object fold(FoldContext ctx) { DataType dataType = field().dataType(); // For date periods and time durations, we need to treat folding differently. These types are unrepresentable, so there is no // evaluator for them - but the default folding requires an evaluator. 
if (dataType == DATE_PERIOD) { - Period fieldValue = (Period) field().fold(); + Period fieldValue = (Period) field().fold(ctx); try { return fieldValue.negated(); } catch (ArithmeticException e) { @@ -102,7 +103,7 @@ public class Neg extends UnaryScalarFunction { } } if (dataType == TIME_DURATION) { - Duration fieldValue = (Duration) field().fold(); + Duration fieldValue = (Duration) field().fold(ctx); try { return fieldValue.negated(); } catch (ArithmeticException e) { @@ -111,7 +112,7 @@ public class Neg extends UnaryScalarFunction { throw ExceptionUtils.math(source(), e); } } - return super.fold(); + return super.fold(ctx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index 3e2a21664aa7..e56c19b26a90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -204,8 +205,8 @@ public abstract class EsqlBinaryComparison extends BinaryComparison implements E } @Override - public Boolean fold() { - return (Boolean) EvaluatorMapper.super.fold(); + public Boolean fold(FoldContext 
ctx) { + return (Boolean) EvaluatorMapper.super.fold(source(), ctx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index f596d589cdde..6a35fa8e8b95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -11,12 +11,14 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Comparisons; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -40,6 +42,7 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static 
org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.IP; @@ -205,11 +208,11 @@ public class In extends EsqlScalarFunction { } @Override - public Object fold() { + public Object fold(FoldContext ctx) { if (Expressions.isGuaranteedNull(value) || list.stream().allMatch(Expressions::isGuaranteedNull)) { return null; } - return super.fold(); + return super.fold(ctx); } protected boolean areCompatible(DataType left, DataType right) { @@ -231,20 +234,47 @@ public class In extends EsqlScalarFunction { } DataType dt = value.dataType(); - for (int i = 0; i < list.size(); i++) { - Expression listValue = list.get(i); - if (areCompatible(dt, listValue.dataType()) == false) { - return new TypeResolution( - format( - null, - "{} argument of [{}] must be [{}], found value [{}] type [{}]", - ordinal(i + 1), - sourceText(), - dt.typeName(), - Expressions.name(listValue), - listValue.dataType().typeName() - ) - ); + if (dt.isDate()) { + // If value is a date (nanos or millis), list cannot contain both nanos and millis + DataType seenDateType = null; + for (int i = 0; i < list.size(); i++) { + Expression listValue = list.get(i); + if (seenDateType == null && listValue.dataType().isDate()) { + seenDateType = listValue.dataType(); + } + // The areCompatible test is still necessary to account for nulls. 
+ if ((listValue.dataType().isDate() && listValue.dataType() != seenDateType) + || (listValue.dataType().isDate() == false && areCompatible(dt, listValue.dataType()) == false)) { + return new TypeResolution( + format( + null, + "{} argument of [{}] must be [{}], found value [{}] type [{}]", + ordinal(i + 1), + sourceText(), + dt.typeName(), + Expressions.name(listValue), + listValue.dataType().typeName() + ) + ); + } + } + + } else { + for (int i = 0; i < list.size(); i++) { + Expression listValue = list.get(i); + if (areCompatible(dt, listValue.dataType()) == false) { + return new TypeResolution( + format( + null, + "{} argument of [{}] must be [{}], found value [{}] type [{}]", + ordinal(i + 1), + sourceText(), + dt.typeName(), + Expressions.name(listValue), + listValue.dataType().typeName() + ) + ); + } } } @@ -261,9 +291,19 @@ public class In extends EsqlScalarFunction { @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - var commonType = commonType(); EvalOperator.ExpressionEvaluator.Factory lhs; EvalOperator.ExpressionEvaluator.Factory[] factories; + if (value.dataType() == DATE_NANOS && list.getFirst().dataType() == DATETIME) { + lhs = toEvaluator.apply(value); + factories = list.stream().map(toEvaluator::apply).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new); + return new InNanosMillisEvaluator.Factory(source(), lhs, factories); + } + if (value.dataType() == DATETIME && list.getFirst().dataType() == DATE_NANOS) { + lhs = toEvaluator.apply(value); + factories = list.stream().map(toEvaluator::apply).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new); + return new InMillisNanosEvaluator.Factory(source(), lhs, factories); + } + var commonType = commonType(); if (commonType.isNumeric()) { lhs = Cast.cast(source(), value.dataType(), commonType, toEvaluator.apply(value)); factories = list.stream() @@ -283,7 +323,7 @@ public class In extends EsqlScalarFunction { if (commonType == INTEGER) { return new 
InIntEvaluator.Factory(source(), lhs, factories); } - if (commonType == LONG || commonType == DATETIME || commonType == UNSIGNED_LONG) { + if (commonType == LONG || commonType == DATETIME || commonType == DATE_NANOS || commonType == UNSIGNED_LONG) { return new InLongEvaluator.Factory(source(), lhs, factories); } if (commonType == KEYWORD @@ -345,6 +385,39 @@ return false; } + /** + * Processor for mixed millisecond and nanosecond dates, where the "value" (aka lhs) is in nanoseconds + * and the "list" (aka rhs) is in milliseconds + */ + static boolean processNanosMillis(BitSet nulls, BitSet mvs, long lhs, long[] rhs) { + for (int i = 0; i < rhs.length; i++) { + if ((nulls != null && nulls.get(i)) || (mvs != null && mvs.get(i))) { + continue; + } + if (DateUtils.compareNanosToMillis(lhs, rhs[i]) == 0) { + return true; + } + } + return false; + } + + /** + * Processor for mixed millisecond and nanosecond dates, where the "value" (aka lhs) is in milliseconds + * and the "list" (aka rhs) is in nanoseconds + */ + static boolean processMillisNanos(BitSet nulls, BitSet mvs, long lhs, long[] rhs) { + for (int i = 0; i < rhs.length; i++) { + if ((nulls != null && nulls.get(i)) || (mvs != null && mvs.get(i))) { + continue; + } + Boolean compResult = DateUtils.compareNanosToMillis(rhs[i], lhs) == 0; + if (compResult == Boolean.TRUE) { + return true; + } + } + return false; + } + static boolean process(BitSet nulls, BitSet mvs, double lhs, double[] rhs) { for (int i = 0; i < rhs.length; i++) { if ((nulls != null && nulls.get(i)) || (mvs != null && mvs.get(i))) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java index c731e44197f2..01564644bf5c 100644 ---
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -77,9 +78,9 @@ public class InsensitiveEquals extends InsensitiveBinaryComparison { } @Override - public Boolean fold() { - BytesRef leftVal = BytesRefs.toBytesRef(left().fold()); - BytesRef rightVal = BytesRefs.toBytesRef(right().fold()); + public Boolean fold(FoldContext ctx) { + BytesRef leftVal = BytesRefs.toBytesRef(left().fold(ctx)); + BytesRef rightVal = BytesRefs.toBytesRef(right().fold(ctx)); if (leftVal == null || rightVal == null) { return null; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java index f5704239993f..7ea95c764f36 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import 
org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; @@ -28,15 +29,15 @@ public class InsensitiveEqualsMapper extends ExpressionMapper InsensitiveEqualsEvaluator.Factory::new; @Override - public final ExpressionEvaluator.Factory map(InsensitiveEquals bc, Layout layout) { + public final ExpressionEvaluator.Factory map(FoldContext foldCtx, InsensitiveEquals bc, Layout layout) { DataType leftType = bc.left().dataType(); DataType rightType = bc.right().dataType(); - var leftEval = toEvaluator(bc.left(), layout); - var rightEval = toEvaluator(bc.right(), layout); + var leftEval = toEvaluator(foldCtx, bc.left(), layout); + var rightEval = toEvaluator(foldCtx, bc.right(), layout); if (DataType.isString(leftType)) { if (bc.right().foldable() && DataType.isString(rightType)) { - BytesRef rightVal = BytesRefs.toBytesRef(bc.right().fold()); + BytesRef rightVal = BytesRefs.toBytesRef(bc.right().fold(FoldContext.small() /* TODO remove me */)); Automaton automaton = InsensitiveEquals.automaton(rightVal); return dvrCtx -> new InsensitiveEqualsConstantEvaluator( bc.source(), @@ -51,13 +52,14 @@ public class InsensitiveEqualsMapper extends ExpressionMapper } public static ExpressionEvaluator.Factory castToEvaluator( + FoldContext foldCtx, InsensitiveEquals op, Layout layout, DataType required, TriFunction factory ) { - var lhs = Cast.cast(op.source(), op.left().dataType(), required, toEvaluator(op.left(), layout)); - var rhs = Cast.cast(op.source(), op.right().dataType(), required, toEvaluator(op.right(), layout)); + var lhs = Cast.cast(op.source(), op.left().dataType(), required, toEvaluator(foldCtx, op.left(), layout)); + var rhs = Cast.cast(op.source(), op.right().dataType(), required, toEvaluator(foldCtx, op.right(), layout)); 
return factory.apply(op.source(), lhs, rhs); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st index f63815f475d8..0cdea82119ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st @@ -42,7 +42,7 @@ import java.util.BitSet; * {@link EvalOperator.ExpressionEvaluator} implementation for {@link In}. * This class is generated. Edit {@code X-InEvaluator.java.st} instead. */ -public class In$Type$Evaluator implements EvalOperator.ExpressionEvaluator { +public class In$Name$Evaluator implements EvalOperator.ExpressionEvaluator { private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -53,7 +53,7 @@ public class In$Type$Evaluator implements EvalOperator.ExpressionEvaluator { private Warnings warnings; - public In$Type$Evaluator( + public In$Name$Evaluator( Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator[] rhs, @@ -166,6 +166,9 @@ $if(boolean)$ foundMatch = lhsBlock.getBoolean(lhsBlock.getFirstValueIndex(p)) ? 
hasTrue : hasFalse; $elseif(BytesRef)$ foundMatch = In.process(nulls, mvs, lhsBlock.getBytesRef(lhsBlock.getFirstValueIndex(p), lhsScratch), rhsValues); +$elseif(nanosMillis)$ + // Even though these are longs, they're in different units and need special handling + foundMatch = In.processNanosMillis(nulls, mvs, lhsBlock.get$Type$(lhsBlock.getFirstValueIndex(p)), rhsValues); $else$ foundMatch = In.process(nulls, mvs, lhsBlock.get$Type$(lhsBlock.getFirstValueIndex(p)), rhsValues); $endif$ @@ -230,6 +233,10 @@ $if(BytesRef)$ result.appendBoolean(In.process(null, null, lhsVector.getBytesRef(p, lhsScratch), rhsValues)); $elseif(boolean)$ result.appendBoolean(lhsVector.getBoolean(p) ? hasTrue : hasFalse); +$elseif(nanosMillis)$ + result.appendBoolean(In.processNanosMillis(null, null, lhsVector.get$Type$(p), rhsValues)); +$elseif(millisNanos)$ + result.appendBoolean(In.processMillisNanos(null, null, lhsVector.get$Type$(p), rhsValues)); $else$ result.appendBoolean(In.process(null, null, lhsVector.get$Type$(p), rhsValues)); $endif$ @@ -240,7 +247,7 @@ $endif$ @Override public String toString() { - return "In$Type$Evaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; + return "In$Name$Evaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } @Override @@ -272,16 +279,16 @@ $endif$ } @Override - public In$Type$Evaluator get(DriverContext context) { + public In$Name$Evaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] rhs = Arrays.stream(this.rhs) .map(a -> a.get(context)) .toArray(EvalOperator.ExpressionEvaluator[]::new); - return new In$Type$Evaluator(source, lhs.get(context), rhs, context); + return new In$Name$Evaluator(source, lhs.get(context), rhs, context); } @Override public String toString() { - return "In$Type$Evaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; + return "In$Name$Evaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java index ef5cf50c7654..183008f900c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -15,8 +16,8 @@ import java.util.Objects; public final class LocalLogicalOptimizerContext extends LogicalOptimizerContext { private final SearchStats searchStats; - public LocalLogicalOptimizerContext(Configuration configuration, SearchStats searchStats) { - super(configuration); + public LocalLogicalOptimizerContext(Configuration configuration, FoldContext foldCtx, SearchStats searchStats) { + super(configuration, foldCtx); this.searchStats = searchStats; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java index c11e1a4ec49e..22e07b45310f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.stats.SearchStats; -public record LocalPhysicalOptimizerContext(Configuration 
configuration, SearchStats searchStats) {} +public record LocalPhysicalOptimizerContext(Configuration configuration, FoldContext foldCtx, SearchStats searchStats) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java index 67148e67cbc1..da2d583674a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java @@ -7,37 +7,44 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.session.Configuration; import java.util.Objects; public class LogicalOptimizerContext { private final Configuration configuration; + private final FoldContext foldCtx; - public LogicalOptimizerContext(Configuration configuration) { + public LogicalOptimizerContext(Configuration configuration, FoldContext foldCtx) { this.configuration = configuration; + this.foldCtx = foldCtx; } public Configuration configuration() { return configuration; } + public FoldContext foldCtx() { + return foldCtx; + } + @Override public boolean equals(Object obj) { if (obj == this) return true; if (obj == null || obj.getClass() != this.getClass()) return false; var that = (LogicalOptimizerContext) obj; - return Objects.equals(this.configuration, that.configuration); + return this.configuration.equals(that.configuration) && this.foldCtx.equals(that.foldCtx); } @Override public int hashCode() { - return Objects.hash(configuration); + return Objects.hash(configuration, foldCtx); } @Override public String toString() { - return "LogicalOptimizerContext[" + "configuration=" + configuration + ']'; + return "LogicalOptimizerContext[configuration=" + configuration + ", foldCtx=" + foldCtx + ']'; } } diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index dce828dbf192..7e6eddd3ef04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.optimizer; -import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.optimizer.rules.PlanConsistencyChecker; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; @@ -28,8 +28,8 @@ public final class LogicalVerifier { if (failures.hasFailures() == false) { p.forEachExpression(ex -> { - if (ex instanceof Validatable v) { - v.validate(failures); + if (ex instanceof PostOptimizationVerificationAware va) { + va.postLogicalOptimizationVerification(failures); } }); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java index 3152f9b57476..5f463f2aa4c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; @@ -28,7 +29,7 @@ public final class BooleanFunctionEqualsElimination extends OptimizerRules.Optim } @Override - public Expression rule(BinaryComparison bc) { + public Expression rule(BinaryComparison bc, LogicalOptimizerContext ctx) { if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java index 73d1ea1fb6e8..e1803872fd60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.List; @@ -35,7 +36,7 @@ public final class BooleanSimplification extends OptimizerRules.OptimizerExpress } @Override - public Expression rule(ScalarFunction e) { + public Expression rule(ScalarFunction e, LogicalOptimizerContext ctx) { if (e instanceof And || e instanceof Or) { return simplifyAndOr((BinaryPredicate) e); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java index 3f47c74aaf81..1c290a7c4c4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; @@ -20,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Gre import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.ArrayList; import java.util.List; @@ -31,17 +33,17 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr } @Override - public Expression rule(BinaryLogic e) { + public Expression rule(BinaryLogic e, LogicalOptimizerContext ctx) { if (e instanceof And and) { - return combine(and); + return combine(ctx.foldCtx(), and); } else if (e instanceof Or or) { - return combine(or); + return combine(ctx.foldCtx(), or); } return e; } // combine conjunction - private static Expression combine(And and) { + private static Expression combine(FoldContext ctx, And 
and) { List bcs = new ArrayList<>(); List exps = new ArrayList<>(); boolean changed = false; @@ -58,13 +60,13 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr }); for (Expression ex : andExps) { if (ex instanceof BinaryComparison bc && (ex instanceof Equals || ex instanceof NotEquals) == false) { - if (bc.right().foldable() && (findExistingComparison(bc, bcs, true))) { + if (bc.right().foldable() && (findExistingComparison(ctx, bc, bcs, true))) { changed = true; } else { bcs.add(bc); } } else if (ex instanceof NotEquals neq) { - if (neq.right().foldable() && notEqualsIsRemovableFromConjunction(neq, bcs)) { + if (neq.right().foldable() && notEqualsIsRemovableFromConjunction(ctx, neq, bcs)) { // the non-equality can simply be dropped: either superfluous or has been merged with an updated range/inequality changed = true; } else { // not foldable OR not overlapping @@ -78,13 +80,13 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr } // combine disjunction - private static Expression combine(Or or) { + private static Expression combine(FoldContext ctx, Or or) { List bcs = new ArrayList<>(); List exps = new ArrayList<>(); boolean changed = false; for (Expression ex : Predicates.splitOr(or)) { if (ex instanceof BinaryComparison bc) { - if (bc.right().foldable() && findExistingComparison(bc, bcs, false)) { + if (bc.right().foldable() && findExistingComparison(ctx, bc, bcs, false)) { changed = true; } else { bcs.add(bc); @@ -100,8 +102,8 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr * Find commonalities between the given comparison in the given list. * The method can be applied both for conjunctive (AND) or disjunctive purposes (OR). 
*/ - private static boolean findExistingComparison(BinaryComparison main, List bcs, boolean conjunctive) { - Object value = main.right().fold(); + private static boolean findExistingComparison(FoldContext ctx, BinaryComparison main, List bcs, boolean conjunctive) { + Object value = main.right().fold(ctx); // NB: the loop modifies the list (hence why the int is used) for (int i = 0; i < bcs.size(); i++) { BinaryComparison other = bcs.get(i); @@ -113,7 +115,7 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual) && (main instanceof GreaterThan || main instanceof GreaterThanOrEqual)) { if (main.left().semanticEquals(other.left())) { - Integer compare = BinaryComparison.compare(value, other.right().fold()); + Integer compare = BinaryComparison.compare(value, other.right().fold(ctx)); if (compare != null) { // AND if ((conjunctive && @@ -140,7 +142,7 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr else if ((other instanceof LessThan || other instanceof LessThanOrEqual) && (main instanceof LessThan || main instanceof LessThanOrEqual)) { if (main.left().semanticEquals(other.left())) { - Integer compare = BinaryComparison.compare(value, other.right().fold()); + Integer compare = BinaryComparison.compare(value, other.right().fold(ctx)); if (compare != null) { // AND if ((conjunctive && @@ -167,8 +169,8 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr return false; } - private static boolean notEqualsIsRemovableFromConjunction(NotEquals notEquals, List bcs) { - Object neqVal = notEquals.right().fold(); + private static boolean notEqualsIsRemovableFromConjunction(FoldContext ctx, NotEquals notEquals, List bcs) { + Object neqVal = notEquals.right().fold(ctx); Integer comp; // check on "condition-overlapping" inequalities: @@ -183,7 +185,7 @@ public final class CombineBinaryComparisons extends 
OptimizerRules.OptimizerExpr BinaryComparison bc = bcs.get(i); if (notEquals.left().semanticEquals(bc.left())) { if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { - comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold()) : null; + comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold(ctx)) : null; if (comp != null) { if (comp >= 0) { if (comp == 0 && bc instanceof LessThanOrEqual) { // a != 2 AND a <= 2 -> a < 2 @@ -193,7 +195,7 @@ public final class CombineBinaryComparisons extends OptimizerRules.OptimizerExpr } // else: comp < 0 : a != 2 AND a nop } // else: non-comparable, nop } else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { - comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold()) : null; + comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold(ctx)) : null; if (comp != null) { if (comp <= 0) { if (comp == 0 && bc instanceof GreaterThanOrEqual) { // a != 2 AND a >= 2 -> a > 2 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java index 5cb377de47ef..e1cda9cb149d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.time.ZoneId; import java.util.ArrayList; @@ -61,7 
+62,7 @@ public final class CombineDisjunctions extends OptimizerRules.OptimizerExpressio } @Override - public Expression rule(Or or) { + public Expression rule(Or or, LogicalOptimizerContext ctx) { Expression e = or; // look only at equals, In and CIDRMatch List exps = splitOr(e); @@ -78,7 +79,7 @@ public final class CombineDisjunctions extends OptimizerRules.OptimizerExpressio if (eq.right().foldable()) { ins.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); if (eq.left().dataType() == DataType.IP) { - Object value = eq.right().fold(); + Object value = eq.right().fold(ctx.foldCtx()); // ImplicitCasting and ConstantFolding(includes explicit casting) are applied before CombineDisjunctions. // They fold the input IP string to an internal IP format. These happen to Equals and IN, but not for CIDRMatch, // as CIDRMatch takes strings as input, ImplicitCasting does not apply to it, and the first input to CIDRMatch is a @@ -101,7 +102,7 @@ public final class CombineDisjunctions extends OptimizerRules.OptimizerExpressio if (in.value().dataType() == DataType.IP) { List values = new ArrayList<>(in.list().size()); for (Expression i : in.list()) { - Object value = i.fold(); + Object value = i.fold(ctx.foldCtx()); // Same as Equals. 
if (value instanceof BytesRef bytesRef) { value = ipToString(bytesRef); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFolding.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFolding.java index 82fe2c6bddf5..27eec8de5902 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFolding.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFolding.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; public final class ConstantFolding extends OptimizerRules.OptimizerExpressionRule { @@ -17,7 +18,7 @@ public final class ConstantFolding extends OptimizerRules.OptimizerExpressionRul } @Override - public Expression rule(Expression e) { - return e.foldable() ? Literal.of(e) : e; + public Expression rule(Expression e, LogicalOptimizerContext ctx) { + return e.foldable() ? 
Literal.of(ctx.foldCtx(), e) : e; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConvertStringToByteRef.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConvertStringToByteRef.java index 0604750883f1..b716d8f012d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConvertStringToByteRef.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConvertStringToByteRef.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.ArrayList; import java.util.List; @@ -21,7 +22,8 @@ public final class ConvertStringToByteRef extends OptimizerRules.OptimizerExpres } @Override - protected Expression rule(Literal lit) { + protected Expression rule(Literal lit, LogicalOptimizerContext ctx) { + // TODO we shouldn't be emitting String into Literals at all Object value = lit.value(); if (value == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java index 747864625e65..cf4c7f19baaf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; public class FoldNull extends OptimizerRules.OptimizerExpressionRule { @@ -23,7 +24,7 @@ public class FoldNull extends OptimizerRules.OptimizerExpressionRule } @Override - public Expression rule(Expression e) { + public Expression rule(Expression e, LogicalOptimizerContext ctx) { Expression result = tryReplaceIsNullIsNotNull(e); // convert an aggregate null filter into a false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRight.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRight.java index d96c73d5ee4f..6504e6042c33 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRight.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRight.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; public final class LiteralsOnTheRight extends OptimizerRules.OptimizerExpressionRule> { @@ -17,7 +18,7 @@ public final class LiteralsOnTheRight extends OptimizerRules.OptimizerExpression } @Override - public BinaryOperator rule(BinaryOperator be) { + public BinaryOperator rule(BinaryOperator be, LogicalOptimizerContext ctx) { return be.left() instanceof Literal && (be.right() instanceof Literal) == false ? 
be.swapLeftAndRight() : be; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java index 2a0b2a6af36a..169ac2ac8c0f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.rule.Rule; @@ -36,7 +37,10 @@ public final class OptimizerRules { protected abstract LogicalPlan rule(SubPlan plan); } - public abstract static class OptimizerExpressionRule extends Rule { + public abstract static class OptimizerExpressionRule extends ParameterizedRule< + LogicalPlan, + LogicalPlan, + LogicalOptimizerContext> { private final TransformDirection direction; // overriding type token which returns the correct class but does an uncheck cast to LogicalPlan due to its generic bound @@ -49,17 +53,13 @@ public final class OptimizerRules { } @Override - public final LogicalPlan apply(LogicalPlan plan) { + public final LogicalPlan apply(LogicalPlan plan, LogicalOptimizerContext ctx) { return direction == TransformDirection.DOWN - ? plan.transformExpressionsDown(expressionTypeToken, this::rule) - : plan.transformExpressionsUp(expressionTypeToken, this::rule); + ? 
plan.transformExpressionsDown(expressionTypeToken, e -> rule(e, ctx)) + : plan.transformExpressionsUp(expressionTypeToken, e -> rule(e, ctx)); } - protected LogicalPlan rule(LogicalPlan plan) { - return plan; - } - - protected abstract Expression rule(E e); + protected abstract Expression rule(E e, LogicalOptimizerContext ctx); public Class expressionToken() { return expressionTypeToken; @@ -82,6 +82,7 @@ public final class OptimizerRules { this.direction = direction; } + @Override public final LogicalPlan apply(LogicalPlan plan, P context) { return direction == TransformDirection.DOWN ? plan.transformDown(typeToken(), t -> rule(t, context)) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PartiallyFoldCase.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PartiallyFoldCase.java index 118e4fc17052..0111c7cdd806 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PartiallyFoldCase.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PartiallyFoldCase.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import static org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection.DOWN; @@ -28,7 +29,7 @@ public final class PartiallyFoldCase extends OptimizerRules.OptimizerExpressionR } @Override - protected Expression rule(Case c) { - return c.partiallyFold(); + protected Expression rule(Case c, LogicalOptimizerContext ctx) { + return c.partiallyFold(ctx.foldCtx()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEmptyRelation.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEmptyRelation.java index 8437b7945488..b6f185c85669 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEmptyRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEmptyRelation.java @@ -12,9 +12,11 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; @@ -26,15 +28,18 @@ import java.util.ArrayList; import java.util.List; @SuppressWarnings("removal") -public class PropagateEmptyRelation extends OptimizerRules.OptimizerRule { +public class PropagateEmptyRelation extends OptimizerRules.ParameterizedOptimizerRule { + public PropagateEmptyRelation() { + super(OptimizerRules.TransformDirection.DOWN); + } @Override - protected LogicalPlan rule(UnaryPlan plan) { + protected LogicalPlan rule(UnaryPlan plan, LogicalOptimizerContext ctx) { LogicalPlan p = plan; if (plan.child() instanceof LocalRelation local && local.supplier() == LocalSupplier.EMPTY) { // only care about non-grouped aggs might return something (count) if (plan instanceof Aggregate agg && agg.groupings().isEmpty()) { - List emptyBlocks = aggsFromEmpty(agg.aggregates()); + List emptyBlocks = 
aggsFromEmpty(ctx.foldCtx(), agg.aggregates()); p = replacePlanByRelation(plan, LocalSupplier.of(emptyBlocks.toArray(Block[]::new))); } else { p = PruneEmptyPlans.skipPlan(plan); @@ -43,14 +48,14 @@ public class PropagateEmptyRelation extends OptimizerRules.OptimizerRule aggsFromEmpty(List aggs) { + private List aggsFromEmpty(FoldContext foldCtx, List aggs) { List blocks = new ArrayList<>(); var blockFactory = PlannerUtils.NON_BREAKING_BLOCK_FACTORY; int i = 0; for (var agg : aggs) { // there needs to be an alias if (Alias.unwrap(agg) instanceof AggregateFunction aggFunc) { - aggOutput(agg, aggFunc, blockFactory, blocks); + aggOutput(foldCtx, agg, aggFunc, blockFactory, blocks); } else { throw new EsqlIllegalArgumentException("Did not expect a non-aliased aggregation {}", agg); } @@ -61,9 +66,15 @@ public class PropagateEmptyRelation extends OptimizerRules.OptimizerRule blocks) { + protected void aggOutput( + FoldContext foldCtx, + NamedExpression agg, + AggregateFunction aggFunc, + BlockFactory blockFactory, + List blocks + ) { // look for count(literal) with literal != null - Object value = aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 0L : null; + Object value = aggFunc instanceof Count count && (count.foldable() == false || count.fold(foldCtx) != null) ? 
0L : null; var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(aggFunc.dataType()), 1); wrapper.accept(value); blocks.add(wrapper.builder().build()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java index 0bd98db1e1d7..5a1677f2759e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Gre import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.ArrayList; import java.util.Iterator; @@ -41,17 +42,18 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul super(OptimizerRules.TransformDirection.DOWN); } - public Expression rule(BinaryLogic e) { + @Override + public Expression rule(BinaryLogic e, LogicalOptimizerContext ctx) { if (e instanceof And) { - return propagate((And) e); + return propagate((And) e, ctx); } else if (e instanceof Or) { - return propagate((Or) e); + return propagate((Or) e, ctx); } return e; } // combine conjunction - private static Expression propagate(And and) { + private static Expression propagate(And and, LogicalOptimizerContext ctx) { List ranges = new ArrayList<>(); // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; // the others go into the general 'exps'. 
@@ -72,7 +74,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul if (otherEq.right().foldable() && DataType.isDateTime(otherEq.left().dataType()) == false) { for (BinaryComparison eq : equals) { if (otherEq.left().semanticEquals(eq.left())) { - Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); + Integer comp = BinaryComparison.compare(eq.right().fold(ctx.foldCtx()), otherEq.right().fold(ctx.foldCtx())); if (comp != null) { // var cannot be equal to two different values at the same time if (comp != 0) { @@ -108,7 +110,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul // check for (BinaryComparison eq : equals) { - Object eqValue = eq.right().fold(); + Object eqValue = eq.right().fold(ctx.foldCtx()); for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { Range range = iterator.next(); @@ -116,7 +118,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul if (range.value().semanticEquals(eq.left())) { // if equals is outside the interval, evaluate the whole expression to FALSE if (range.lower().foldable()) { - Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); + Integer compare = BinaryComparison.compare(range.lower().fold(ctx.foldCtx()), eqValue); if (compare != null && ( // eq outside the lower boundary compare > 0 || @@ -126,7 +128,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul } } if (range.upper().foldable()) { - Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); + Integer compare = BinaryComparison.compare(range.upper().fold(ctx.foldCtx()), eqValue); if (compare != null && ( // eq outside the upper boundary compare < 0 || @@ -146,7 +148,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul for (Iterator iter = notEquals.iterator(); iter.hasNext();) { NotEquals neq = iter.next(); if 
(eq.left().semanticEquals(neq.left())) { - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold(ctx.foldCtx())); if (comp != null) { if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); @@ -162,7 +164,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul for (Iterator iter = inequalities.iterator(); iter.hasNext();) { BinaryComparison bc = iter.next(); if (eq.left().semanticEquals(bc.left())) { - Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); + Integer compare = BinaryComparison.compare(eqValue, bc.right().fold(ctx.foldCtx())); if (compare != null) { if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a a < 3; a = 2 OR a < 1 -> nop // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 - private static Expression propagate(Or or) { + private static Expression propagate(Or or, LogicalOptimizerContext ctx) { List exps = new ArrayList<>(); List equals = new ArrayList<>(); // foldable right term Equals List notEquals = new ArrayList<>(); // foldable right term NotEquals @@ -230,13 +232,13 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul // evaluate the impact of each Equal over the different types of Expressions for (Iterator iterEq = equals.iterator(); iterEq.hasNext();) { Equals eq = iterEq.next(); - Object eqValue = eq.right().fold(); + Object eqValue = eq.right().fold(ctx.foldCtx()); boolean removeEquals = false; // Equals OR NotEquals for (NotEquals neq : notEquals) { if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... 
- Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold(ctx.foldCtx())); if (comp != null) { if (comp == 0) { // a = 2 OR a != 2 -> TRUE return TRUE; @@ -257,8 +259,12 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop Range range = ranges.get(i); if (eq.left().semanticEquals(range.value())) { - Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; - Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; + Integer lowerComp = range.lower().foldable() + ? BinaryComparison.compare(eqValue, range.lower().fold(ctx.foldCtx())) + : null; + Integer upperComp = range.upper().foldable() + ? BinaryComparison.compare(eqValue, range.upper().fold(ctx.foldCtx())) + : null; if (lowerComp != null && lowerComp == 0) { if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? 
@@ -312,7 +318,7 @@ public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRul for (int i = 0; i < inequalities.size(); i++) { BinaryComparison bc = inequalities.get(i); if (eq.left().semanticEquals(bc.left())) { - Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); + Integer comp = BinaryComparison.compare(eqValue, bc.right().fold(ctx.foldCtx())); if (comp != null) { if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { if (comp < 0) { // a = 1 OR a > 2 -> nop diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java index 73eaa9220fd8..66cdc992a91c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java @@ -12,19 +12,20 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.rule.Rule; +import org.elasticsearch.xpack.esql.rule.ParameterizedRule; /** * Replace any reference attribute with its source, if it does not affect the result. * This avoids ulterior look-ups between attributes and its source across nodes. 
*/ -public final class PropagateEvalFoldables extends Rule { +public final class PropagateEvalFoldables extends ParameterizedRule { @Override - public LogicalPlan apply(LogicalPlan plan) { + public LogicalPlan apply(LogicalPlan plan, LogicalOptimizerContext ctx) { var collectRefs = new AttributeMap(); java.util.function.Function replaceReference = r -> collectRefs.resolve(r, r); @@ -39,7 +40,7 @@ public final class PropagateEvalFoldables extends Rule shouldCollect = c.foldable(); } if (shouldCollect) { - collectRefs.put(a.toAttribute(), Literal.of(c)); + collectRefs.put(a.toAttribute(), Literal.of(ctx.foldCtx(), c)); } }); if (collectRefs.isEmpty()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java index 738ca83b47e4..e3165180e331 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.ArrayList; import java.util.LinkedHashSet; @@ -33,7 +34,7 @@ public class PropagateNullable extends OptimizerRules.OptimizerExpressionRule splits = Predicates.splitAnd(and); Set nullExpressions = new LinkedHashSet<>(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index 1cacebdf27cd..969a6bb713ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -20,14 +21,18 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; -public final class PushDownAndCombineLimits extends OptimizerRules.OptimizerRule { +public final class PushDownAndCombineLimits extends OptimizerRules.ParameterizedOptimizerRule { + + public PushDownAndCombineLimits() { + super(OptimizerRules.TransformDirection.DOWN); + } @Override - public LogicalPlan rule(Limit limit) { + public LogicalPlan rule(Limit limit, LogicalOptimizerContext ctx) { if (limit.child() instanceof Limit childLimit) { var limitSource = limit.limit(); - var l1 = (int) limitSource.fold(); - var l2 = (int) childLimit.limit().fold(); + var l1 = (int) limitSource.fold(ctx.foldCtx()); + var l2 = (int) childLimit.limit().fold(ctx.foldCtx()); return new Limit(limit.source(), Literal.of(limitSource, Math.min(l1, l2)), childLimit.child()); } else if (limit.child() instanceof UnaryPlan unary) { if (unary instanceof Eval || unary instanceof Project || unary instanceof RegexExtract || unary instanceof Enrich) { @@ -41,7 +46,7 @@ public final class PushDownAndCombineLimits extends 
OptimizerRules.OptimizerRule // we add an inner limit to MvExpand and just push down the existing limit, ie. // | MV_EXPAND | LIMIT N -> | LIMIT N | MV_EXPAND (with limit N) var limitSource = limit.limit(); - var limitVal = (int) limitSource.fold(); + var limitVal = (int) limitSource.fold(ctx.foldCtx()); Integer mvxLimit = mvx.limit(); if (mvxLimit == null || mvxLimit > limitVal) { mvx = new MvExpand(mvx.source(), mvx.child(), mvx.target(), mvx.expanded(), limitVal); @@ -54,8 +59,8 @@ public final class PushDownAndCombineLimits extends OptimizerRules.OptimizerRule else { Limit descendantLimit = descendantLimit(unary); if (descendantLimit != null) { - var l1 = (int) limit.limit().fold(); - var l2 = (int) descendantLimit.limit().fold(); + var l1 = (int) limit.limit().fold(ctx.foldCtx()); + var l2 = (int) descendantLimit.limit().fold(ctx.foldCtx()); if (l2 <= l1) { return new Limit(limit.source(), Literal.of(limit.limit(), l2), limit.child()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java index 1a8f8a164cc1..7953b2b28eaa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.StringPattern; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.parser.ParsingException; public final class ReplaceRegexMatch extends OptimizerRules.OptimizerExpressionRule> 
{ @@ -23,7 +24,7 @@ public final class ReplaceRegexMatch extends OptimizerRules.OptimizerExpressionR } @Override - public Expression rule(RegexMatch regexMatch) { + public Expression rule(RegexMatch regexMatch, LogicalOptimizerContext ctx) { Expression e = regexMatch; StringPattern pattern = regexMatch.pattern(); boolean matchesAll; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java index eebeb1dc14f4..9e7b6ce80422 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; @@ -17,13 +18,16 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.util.ArrayList; import java.util.List; -public final class ReplaceRowAsLocalRelation extends OptimizerRules.OptimizerRule { +public final class ReplaceRowAsLocalRelation extends OptimizerRules.ParameterizedOptimizerRule { + public ReplaceRowAsLocalRelation() { + super(OptimizerRules.TransformDirection.DOWN); + } @Override - protected LogicalPlan rule(Row row) { + protected LogicalPlan rule(Row row, LogicalOptimizerContext context) { var fields = row.fields(); List values = new ArrayList<>(fields.size()); - fields.forEach(f -> values.add(f.child().fold())); + fields.forEach(f -> values.add(f.child().fold(context.foldCtx()))); var blocks = 
BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); return new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsFilteredAggWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsFilteredAggWithEval.java index 2cafcc2e0705..a7e56a5f25fc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsFilteredAggWithEval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStatsFilteredAggWithEval.java @@ -49,7 +49,7 @@ public class ReplaceStatsFilteredAggWithEval extends OptimizerRules.OptimizerRul && alias.child() instanceof AggregateFunction aggFunction && aggFunction.hasFilter() && aggFunction.filter() instanceof Literal literal - && Boolean.FALSE.equals(literal.fold())) { + && Boolean.FALSE.equals(literal.value())) { Object value = aggFunction instanceof Count || aggFunction instanceof CountDistinct ? 
0L : null; Alias newAlias = alias.replaceChild(Literal.of(aggFunction, value)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java index 0fea7cf8ddc1..053441bce5e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.ChangeCase import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; public class ReplaceStringCasingWithInsensitiveEquals extends OptimizerRules.OptimizerExpressionRule { @@ -26,30 +27,35 @@ public class ReplaceStringCasingWithInsensitiveEquals extends OptimizerRules.Opt } @Override - protected Expression rule(ScalarFunction sf) { + protected Expression rule(ScalarFunction sf, LogicalOptimizerContext ctx) { Expression e = sf; if (sf instanceof BinaryComparison bc) { - e = rewriteBinaryComparison(sf, bc, false); + e = rewriteBinaryComparison(ctx, sf, bc, false); } else if (sf instanceof Not not && not.field() instanceof BinaryComparison bc) { - e = rewriteBinaryComparison(sf, bc, true); + e = rewriteBinaryComparison(ctx, sf, bc, true); } return e; } - private static Expression rewriteBinaryComparison(ScalarFunction sf, BinaryComparison bc, boolean negated) { + private static Expression rewriteBinaryComparison( + LogicalOptimizerContext 
ctx, + ScalarFunction sf, + BinaryComparison bc, + boolean negated + ) { Expression e = sf; if (bc.left() instanceof ChangeCase changeCase && bc.right().foldable()) { if (bc instanceof Equals) { - e = replaceChangeCase(bc, changeCase, negated); + e = replaceChangeCase(ctx, bc, changeCase, negated); } else if (bc instanceof NotEquals) { // not actually used currently, `!=` is built as `NOT(==)` already - e = replaceChangeCase(bc, changeCase, negated == false); + e = replaceChangeCase(ctx, bc, changeCase, negated == false); } } return e; } - private static Expression replaceChangeCase(BinaryComparison bc, ChangeCase changeCase, boolean negated) { - var foldedRight = BytesRefs.toString(bc.right().fold()); + private static Expression replaceChangeCase(LogicalOptimizerContext ctx, BinaryComparison bc, ChangeCase changeCase, boolean negated) { + var foldedRight = BytesRefs.toString(bc.right().fold(ctx.foldCtx())); var field = unwrapCase(changeCase.field()); var e = changeCase.caseType().matchesCase(foldedRight) ? 
new InsensitiveEquals(bc.source(), field, bc.right()) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SimplifyComparisonsArithmetics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SimplifyComparisonsArithmetics.java index d3a9970896c1..60ff161651f2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SimplifyComparisonsArithmetics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SimplifyComparisonsArithmetics.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -15,6 +16,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Ari import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.BinaryComparisonInversible; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.time.DateTimeException; import java.util.List; @@ -41,20 +43,20 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz } @Override - protected Expression rule(BinaryComparison bc) { + protected Expression rule(BinaryComparison bc, LogicalOptimizerContext ctx) { // optimize only once the expression has a literal on the right side of the binary comparison if (bc.right() instanceof Literal) { if (bc.left() instanceof ArithmeticOperation) { - return simplifyBinaryComparison(bc); + 
return simplifyBinaryComparison(ctx.foldCtx(), bc); } if (bc.left() instanceof Neg) { - return foldNegation(bc); + return foldNegation(ctx.foldCtx(), bc); } } return bc; } - private Expression simplifyBinaryComparison(BinaryComparison comparison) { + private Expression simplifyBinaryComparison(FoldContext foldContext, BinaryComparison comparison) { ArithmeticOperation operation = (ArithmeticOperation) comparison.left(); // Use symbol comp: SQL operations aren't available in this package (as dependencies) String opSymbol = operation.symbol(); @@ -64,9 +66,9 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz } OperationSimplifier simplification = null; if (isMulOrDiv(opSymbol)) { - simplification = new MulDivSimplifier(comparison); + simplification = new MulDivSimplifier(foldContext, comparison); } else if (opSymbol.equals(ADD.symbol()) || opSymbol.equals(SUB.symbol())) { - simplification = new AddSubSimplifier(comparison); + simplification = new AddSubSimplifier(foldContext, comparison); } return (simplification == null || simplification.isUnsafe(typesCompatible)) ? comparison : simplification.apply(); @@ -76,16 +78,16 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz return opSymbol.equals(MUL.symbol()) || opSymbol.equals(DIV.symbol()); } - private static Expression foldNegation(BinaryComparison bc) { + private static Expression foldNegation(FoldContext ctx, BinaryComparison bc) { Literal bcLiteral = (Literal) bc.right(); - Expression literalNeg = tryFolding(new Neg(bcLiteral.source(), bcLiteral)); + Expression literalNeg = tryFolding(ctx, new Neg(bcLiteral.source(), bcLiteral)); return literalNeg == null ? 
bc : bc.reverse().replaceChildren(asList(((Neg) bc.left()).field(), literalNeg)); } - private static Expression tryFolding(Expression expression) { + private static Expression tryFolding(FoldContext ctx, Expression expression) { if (expression.foldable()) { try { - expression = new Literal(expression.source(), expression.fold(), expression.dataType()); + expression = new Literal(expression.source(), expression.fold(ctx), expression.dataType()); } catch (ArithmeticException | DateTimeException e) { // null signals that folding would result in an over-/underflow (such as Long.MAX_VALUE+1); the optimisation is skipped. expression = null; @@ -95,6 +97,7 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz } private abstract static class OperationSimplifier { + final FoldContext foldContext; final BinaryComparison comparison; final Literal bcLiteral; final ArithmeticOperation operation; @@ -102,7 +105,8 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz final Expression opRight; final Literal opLiteral; - OperationSimplifier(BinaryComparison comparison) { + OperationSimplifier(FoldContext foldContext, BinaryComparison comparison) { + this.foldContext = foldContext; this.comparison = comparison; operation = (ArithmeticOperation) comparison.left(); bcLiteral = (Literal) comparison.right(); @@ -151,7 +155,7 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz Expression bcRightExpression = ((BinaryComparisonInversible) operation).binaryComparisonInverse() .create(bcl.source(), bcl, opRight); - bcRightExpression = tryFolding(bcRightExpression); + bcRightExpression = tryFolding(foldContext, bcRightExpression); return bcRightExpression != null ? 
postProcess((BinaryComparison) comparison.replaceChildren(List.of(opLeft, bcRightExpression))) : comparison; @@ -169,8 +173,8 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz private static class AddSubSimplifier extends OperationSimplifier { - AddSubSimplifier(BinaryComparison comparison) { - super(comparison); + AddSubSimplifier(FoldContext foldContext, BinaryComparison comparison) { + super(foldContext, comparison); } @Override @@ -182,7 +186,7 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz if (operation.symbol().equals(SUB.symbol()) && opRight instanceof Literal == false) { // such as: 1 - x > -MAX // if next simplification step would fail on overflow anyways, skip the optimisation already - return tryFolding(new Sub(EMPTY, opLeft, bcLiteral)) == null; + return tryFolding(foldContext, new Sub(EMPTY, opLeft, bcLiteral)) == null; } return false; @@ -194,8 +198,8 @@ public final class SimplifyComparisonsArithmetics extends OptimizerRules.Optimiz private final boolean isDiv; // and not MUL. 
private final int opRightSign; // sign of the right operand in: (left) (op) (right) (comp) (literal) - MulDivSimplifier(BinaryComparison comparison) { - super(comparison); + MulDivSimplifier(FoldContext foldContext, BinaryComparison comparison) { + super(foldContext, comparison); isDiv = operation.symbol().equals(DIV.symbol()); opRightSign = sign(opRight); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnLimitZero.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnLimitZero.java index 5d98d941bb20..c6d62dee0ba4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnLimitZero.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnLimitZero.java @@ -7,14 +7,19 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -public final class SkipQueryOnLimitZero extends OptimizerRules.OptimizerRule { +public final class SkipQueryOnLimitZero extends OptimizerRules.ParameterizedOptimizerRule { + public SkipQueryOnLimitZero() { + super(OptimizerRules.TransformDirection.DOWN); + } + @Override - protected LogicalPlan rule(Limit limit) { + protected LogicalPlan rule(Limit limit, LogicalOptimizerContext ctx) { if (limit.limit().foldable()) { - if (Integer.valueOf(0).equals((limit.limit().fold()))) { + if (Integer.valueOf(0).equals((limit.limit().fold(ctx.foldCtx())))) { return PruneEmptyPlans.skipPlan(limit); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java index 
9e9ae6a9a559..870464feb486 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.ArrayList; import java.util.List; @@ -25,7 +26,7 @@ public final class SplitInWithFoldableValue extends OptimizerRules.OptimizerExpr } @Override - public Expression rule(In in) { + public Expression rule(In in, LogicalOptimizerContext ctx) { if (in.value().foldable()) { List foldables = new ArrayList<>(in.list().size()); List nonFoldables = new ArrayList<>(in.list().size()); @@ -36,7 +37,7 @@ public final class SplitInWithFoldableValue extends OptimizerRules.OptimizerExpr nonFoldables.add(e); } }); - if (foldables.size() > 0 && nonFoldables.size() > 0) { + if (foldables.isEmpty() == false && nonFoldables.isEmpty() == false) { In withFoldables = new In(in.source(), in.value(), foldables); In withoutFoldables = new In(in.source(), in.value(), nonFoldables); return new Or(in.source(), withFoldables, withoutFoldables); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java index c8369d2b08a3..62a00b79d733 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SubstituteFilteredExpression.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.OptimizerExpressionRule; import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection; @@ -21,7 +22,7 @@ public class SubstituteFilteredExpression extends OptimizerExpressionRule blocks) { + protected void aggOutput( + FoldContext foldCtx, + NamedExpression agg, + AggregateFunction aggFunc, + BlockFactory blockFactory, + List blocks + ) { List output = AbstractPhysicalOperationProviders.intermediateAttributes(List.of(agg), List.of()); for (Attribute o : output) { DataType dataType = o.dataType(); // boolean right now is used for the internal #seen so always return true var value = dataType == DataType.BOOLEAN ? true // look for count(literal) with literal != null - : aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 0L + : aggFunc instanceof Count count && (count.foldable() == false || count.fold(foldCtx) != null) ? 
0L // otherwise nullify : null; var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java index dfb1dbc8bc8f..afeab28745c6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -76,22 +77,33 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext ctx) { PhysicalPlan plan = filterExec; if (filterExec.child() instanceof EsQueryExec esQueryExec) { - plan = rewrite(filterExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats())); + plan = rewrite(ctx.foldCtx(), filterExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats())); } else if (filterExec.child() instanceof EvalExec evalExec && evalExec.child() instanceof EsQueryExec esQueryExec) { - plan = rewriteBySplittingFilter(filterExec, evalExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats())); + plan = rewriteBySplittingFilter( + 
ctx.foldCtx(), + filterExec, + evalExec, + esQueryExec, + LucenePushdownPredicates.from(ctx.searchStats()) + ); } return plan; } - private FilterExec rewrite(FilterExec filterExec, EsQueryExec esQueryExec, LucenePushdownPredicates lucenePushdownPredicates) { + private FilterExec rewrite( + FoldContext ctx, + FilterExec filterExec, + EsQueryExec esQueryExec, + LucenePushdownPredicates lucenePushdownPredicates + ) { // Find and rewrite any binary comparisons that involve a distance function and a literal var rewritten = filterExec.condition().transformDown(EsqlBinaryComparison.class, comparison -> { ComparisonType comparisonType = ComparisonType.from(comparison.getFunctionType()); if (comparison.left() instanceof StDistance dist && comparison.right().foldable()) { - return rewriteComparison(comparison, dist, comparison.right(), comparisonType); + return rewriteComparison(ctx, comparison, dist, comparison.right(), comparisonType); } else if (comparison.right() instanceof StDistance dist && comparison.left().foldable()) { - return rewriteComparison(comparison, dist, comparison.left(), ComparisonType.invert(comparisonType)); + return rewriteComparison(ctx, comparison, dist, comparison.left(), ComparisonType.invert(comparisonType)); } return comparison; }); @@ -120,6 +132,7 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame * */ private PhysicalPlan rewriteBySplittingFilter( + FoldContext ctx, FilterExec filterExec, EvalExec evalExec, EsQueryExec esQueryExec, @@ -142,7 +155,7 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame for (Expression exp : splitAnd(filterExec.condition())) { Expression resExp = exp.transformUp(ReferenceAttribute.class, r -> aliasReplacedBy.resolve(r, r)); // Find and rewrite any binary comparisons that involve a distance function and a literal - var rewritten = rewriteDistanceFilters(resExp, distances); + var rewritten = rewriteDistanceFilters(ctx, resExp, distances); // If all 
pushable StDistance functions were found and re-written, we need to re-write the FILTER/EVAL combination if (rewritten.equals(resExp) == false && canPushToSource(rewritten, lucenePushdownPredicates)) { pushable.add(rewritten); @@ -181,40 +194,42 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame return distances; } - private Expression rewriteDistanceFilters(Expression expr, Map distances) { + private Expression rewriteDistanceFilters(FoldContext ctx, Expression expr, Map distances) { return expr.transformDown(EsqlBinaryComparison.class, comparison -> { ComparisonType comparisonType = ComparisonType.from(comparison.getFunctionType()); if (comparison.left() instanceof ReferenceAttribute r && distances.containsKey(r.id()) && comparison.right().foldable()) { StDistance dist = distances.get(r.id()); - return rewriteComparison(comparison, dist, comparison.right(), comparisonType); + return rewriteComparison(ctx, comparison, dist, comparison.right(), comparisonType); } else if (comparison.right() instanceof ReferenceAttribute r && distances.containsKey(r.id()) && comparison.left().foldable()) { StDistance dist = distances.get(r.id()); - return rewriteComparison(comparison, dist, comparison.left(), ComparisonType.invert(comparisonType)); + return rewriteComparison(ctx, comparison, dist, comparison.left(), ComparisonType.invert(comparisonType)); } return comparison; }); } private Expression rewriteComparison( + FoldContext ctx, EsqlBinaryComparison comparison, StDistance dist, Expression literal, ComparisonType comparisonType ) { - Object value = literal.fold(); + Object value = literal.fold(ctx); if (value instanceof Number number) { if (dist.right().foldable()) { - return rewriteDistanceFilter(comparison, dist.left(), dist.right(), number, comparisonType); + return rewriteDistanceFilter(ctx, comparison, dist.left(), dist.right(), number, comparisonType); } else if (dist.left().foldable()) { - return rewriteDistanceFilter(comparison, 
dist.right(), dist.left(), number, comparisonType); + return rewriteDistanceFilter(ctx, comparison, dist.right(), dist.left(), number, comparisonType); } } return comparison; } private Expression rewriteDistanceFilter( + FoldContext ctx, EsqlBinaryComparison comparison, Expression spatialExp, Expression literalExp, @@ -222,7 +237,7 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame ComparisonType comparisonType ) { DataType shapeDataType = getShapeDataType(spatialExp); - Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(literalExp); + Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(ctx, literalExp); if (geometry instanceof Point point) { double distance = number.doubleValue(); Source source = comparison.source(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java index 2b531257e594..24df3c1db234 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -61,7 +62,7 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi @Override protected PhysicalPlan 
rule(TopNExec topNExec, LocalPhysicalOptimizerContext ctx) { - Pushable pushable = evaluatePushable(topNExec, LucenePushdownPredicates.from(ctx.searchStats())); + Pushable pushable = evaluatePushable(ctx.foldCtx(), topNExec, LucenePushdownPredicates.from(ctx.searchStats())); return pushable.rewrite(topNExec); } @@ -95,18 +96,18 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi return new EsQueryExec.GeoDistanceSort(fieldAttribute.exactAttribute(), order.direction(), point.getLat(), point.getLon()); } - private static PushableGeoDistance from(StDistance distance, Order order) { + private static PushableGeoDistance from(FoldContext ctx, StDistance distance, Order order) { if (distance.left() instanceof Attribute attr && distance.right().foldable()) { - return from(attr, distance.right(), order); + return from(ctx, attr, distance.right(), order); } else if (distance.right() instanceof Attribute attr && distance.left().foldable()) { - return from(attr, distance.left(), order); + return from(ctx, attr, distance.left(), order); } return null; } - private static PushableGeoDistance from(Attribute attr, Expression foldable, Order order) { + private static PushableGeoDistance from(FoldContext ctx, Attribute attr, Expression foldable, Order order) { if (attr instanceof FieldAttribute fieldAttribute) { - Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(foldable); + Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(ctx, foldable); if (geometry instanceof Point point) { return new PushableGeoDistance(fieldAttribute, order, point); } @@ -122,7 +123,7 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi } } - private static Pushable evaluatePushable(TopNExec topNExec, LucenePushdownPredicates lucenePushdownPredicates) { + private static Pushable evaluatePushable(FoldContext ctx, TopNExec topNExec, LucenePushdownPredicates lucenePushdownPredicates) { PhysicalPlan child = topNExec.child(); 
if (child instanceof EsQueryExec queryExec && queryExec.canPushSorts() @@ -164,7 +165,7 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi if (distances.containsKey(resolvedAttribute.id())) { StDistance distance = distances.get(resolvedAttribute.id()); StDistance d = (StDistance) distance.transformDown(ReferenceAttribute.class, r -> aliasReplacedBy.resolve(r, r)); - PushableGeoDistance pushableGeoDistance = PushableGeoDistance.from(d, order); + PushableGeoDistance pushableGeoDistance = PushableGeoDistance.from(ctx, d, order); if (pushableGeoDistance != null) { pushableSorts.add(pushableGeoDistance.sort()); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index c5b37fa411f6..492df7fbc160 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -283,8 +283,6 @@ indexPattern clusterString indexString metadata -metadataOption -deprecated_metadata metricsCommand evalCommand statsCommand @@ -330,4 +328,4 @@ joinPredicate atn: -[4, 1, 128, 639, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 
52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 3, 11, 297, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 311, 8, 15, 10, 15, 12, 15, 314, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 319, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 327, 8, 17, 10, 17, 12, 17, 330, 9, 17, 1, 17, 3, 17, 333, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 338, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 348, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 354, 8, 22, 10, 22, 12, 22, 357, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 367, 8, 24, 10, 24, 12, 24, 370, 9, 24, 1, 24, 3, 24, 373, 8, 24, 1, 24, 1, 24, 3, 24, 377, 8, 24, 1, 
25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 384, 8, 26, 1, 26, 1, 26, 3, 26, 388, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 393, 8, 27, 10, 27, 12, 27, 396, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 401, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 406, 8, 29, 10, 29, 12, 29, 409, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 414, 8, 30, 10, 30, 12, 30, 417, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 422, 8, 31, 10, 31, 12, 31, 425, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 432, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 447, 8, 34, 10, 34, 12, 34, 450, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 469, 8, 34, 10, 34, 12, 34, 472, 9, 34, 1, 34, 1, 34, 3, 34, 476, 8, 34, 1, 35, 1, 35, 3, 35, 480, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 485, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 494, 8, 38, 10, 38, 12, 38, 497, 9, 38, 1, 39, 1, 39, 3, 39, 501, 8, 39, 1, 39, 1, 39, 3, 39, 505, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 517, 8, 42, 10, 42, 12, 42, 520, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 530, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 542, 8, 47, 10, 47, 12, 47, 545, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 555, 8, 50, 1, 51, 3, 51, 558, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 563, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 585, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 591, 8, 58, 10, 58, 12, 58, 594, 9, 58, 3, 58, 596, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 601, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 614, 8, 61, 1, 62, 3, 62, 617, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 626, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 
64, 632, 8, 64, 10, 64, 12, 64, 635, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 665, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 300, 1, 0, 0, 0, 26, 302, 1, 0, 0, 0, 28, 304, 1, 0, 0, 0, 30, 307, 1, 0, 0, 0, 32, 318, 1, 0, 0, 0, 34, 322, 1, 0, 0, 0, 36, 337, 1, 0, 0, 0, 38, 341, 1, 0, 0, 0, 40, 343, 1, 0, 0, 0, 42, 347, 1, 0, 0, 0, 44, 349, 1, 0, 0, 0, 46, 358, 1, 0, 0, 0, 48, 362, 1, 0, 0, 0, 50, 378, 1, 0, 0, 0, 52, 381, 1, 0, 0, 0, 54, 389, 1, 0, 0, 0, 56, 397, 1, 0, 0, 0, 58, 402, 1, 0, 0, 0, 60, 410, 1, 0, 0, 0, 62, 418, 1, 0, 0, 0, 64, 426, 1, 0, 0, 0, 66, 431, 1, 0, 0, 0, 68, 475, 1, 0, 0, 0, 70, 479, 1, 0, 0, 0, 72, 484, 1, 0, 0, 0, 74, 486, 1, 0, 0, 0, 76, 489, 1, 0, 0, 0, 78, 498, 1, 0, 0, 0, 80, 506, 1, 0, 0, 0, 82, 509, 1, 0, 0, 0, 84, 512, 1, 0, 0, 0, 86, 521, 1, 0, 0, 0, 88, 525, 1, 0, 0, 0, 90, 531, 1, 0, 0, 0, 92, 535, 1, 0, 0, 0, 94, 538, 1, 0, 0, 0, 96, 546, 1, 0, 0, 0, 98, 550, 1, 0, 0, 0, 100, 554, 1, 0, 0, 0, 102, 557, 1, 0, 0, 0, 104, 562, 1, 0, 0, 0, 106, 566, 1, 0, 0, 0, 108, 568, 1, 0, 0, 0, 110, 570, 1, 0, 0, 0, 112, 573, 1, 0, 0, 0, 114, 577, 1, 0, 0, 0, 116, 580, 1, 0, 0, 0, 118, 600, 1, 0, 0, 0, 120, 604, 1, 0, 0, 0, 122, 609, 1, 0, 0, 0, 124, 616, 1, 0, 0, 0, 126, 622, 1, 0, 0, 0, 128, 627, 1, 0, 0, 0, 130, 636, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 
0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 
201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 26, 13, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 
0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 26, 13, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 296, 5, 48, 0, 0, 287, 297, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 297, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 287, 1, 0, 0, 0, 296, 288, 1, 0, 0, 0, 296, 297, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 299, 5, 55, 0, 0, 299, 23, 1, 0, 0, 0, 300, 301, 3, 72, 36, 0, 301, 25, 1, 0, 0, 0, 302, 303, 3, 64, 32, 0, 303, 27, 1, 0, 0, 0, 304, 305, 5, 12, 0, 0, 305, 306, 3, 30, 15, 0, 306, 29, 1, 0, 0, 0, 307, 312, 3, 32, 16, 0, 308, 309, 5, 39, 0, 0, 309, 311, 3, 32, 16, 0, 310, 308, 1, 0, 0, 0, 311, 314, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 31, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 315, 316, 3, 58, 29, 0, 316, 317, 5, 36, 0, 0, 317, 319, 1, 0, 0, 0, 318, 315, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 3, 10, 5, 0, 321, 33, 1, 0, 0, 0, 322, 323, 5, 6, 0, 0, 323, 328, 3, 36, 18, 0, 324, 325, 5, 39, 0, 0, 325, 327, 3, 36, 18, 0, 326, 324, 1, 0, 0, 0, 327, 330, 1, 0, 0, 0, 328, 326, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 332, 1, 0, 0, 0, 330, 328, 1, 0, 0, 0, 331, 333, 3, 42, 21, 0, 332, 331, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 35, 1, 0, 0, 0, 334, 335, 3, 38, 19, 0, 335, 336, 5, 38, 0, 0, 336, 338, 1, 0, 0, 0, 337, 334, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 3, 40, 20, 0, 340, 37, 1, 0, 0, 0, 341, 342, 5, 81, 0, 0, 342, 39, 1, 0, 0, 0, 343, 344, 7, 2, 0, 0, 344, 41, 1, 0, 0, 0, 345, 348, 3, 44, 22, 0, 346, 348, 3, 46, 23, 0, 347, 345, 1, 0, 0, 0, 347, 346, 1, 0, 0, 0, 348, 43, 1, 0, 0, 0, 349, 350, 5, 80, 0, 0, 350, 355, 5, 81, 0, 0, 
351, 352, 5, 39, 0, 0, 352, 354, 5, 81, 0, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 45, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 359, 5, 70, 0, 0, 359, 360, 3, 44, 22, 0, 360, 361, 5, 71, 0, 0, 361, 47, 1, 0, 0, 0, 362, 363, 5, 19, 0, 0, 363, 368, 3, 36, 18, 0, 364, 365, 5, 39, 0, 0, 365, 367, 3, 36, 18, 0, 366, 364, 1, 0, 0, 0, 367, 370, 1, 0, 0, 0, 368, 366, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 33, 0, 0, 375, 377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 0, 377, 49, 1, 0, 0, 0, 378, 379, 5, 4, 0, 0, 379, 380, 3, 30, 15, 0, 380, 51, 1, 0, 0, 0, 381, 383, 5, 15, 0, 0, 382, 384, 3, 54, 27, 0, 383, 382, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 386, 5, 33, 0, 0, 386, 388, 3, 30, 15, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 53, 1, 0, 0, 0, 389, 394, 3, 56, 28, 0, 390, 391, 5, 39, 0, 0, 391, 393, 3, 56, 28, 0, 392, 390, 1, 0, 0, 0, 393, 396, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 55, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 397, 400, 3, 32, 16, 0, 398, 399, 5, 16, 0, 0, 399, 401, 3, 10, 5, 0, 400, 398, 1, 0, 0, 0, 400, 401, 1, 0, 0, 0, 401, 57, 1, 0, 0, 0, 402, 407, 3, 72, 36, 0, 403, 404, 5, 41, 0, 0, 404, 406, 3, 72, 36, 0, 405, 403, 1, 0, 0, 0, 406, 409, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 410, 415, 3, 66, 33, 0, 411, 412, 5, 41, 0, 0, 412, 414, 3, 66, 33, 0, 413, 411, 1, 0, 0, 0, 414, 417, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 61, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 418, 423, 3, 60, 30, 0, 419, 420, 5, 39, 0, 0, 420, 422, 3, 60, 30, 0, 421, 419, 1, 0, 0, 0, 422, 425, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 63, 1, 0, 0, 0, 425, 423, 1, 0, 0, 0, 426, 427, 7, 3, 0, 0, 427, 65, 1, 0, 0, 0, 428, 432, 5, 85, 0, 0, 
429, 430, 4, 33, 10, 0, 430, 432, 3, 70, 35, 0, 431, 428, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 67, 1, 0, 0, 0, 433, 476, 5, 50, 0, 0, 434, 435, 3, 104, 52, 0, 435, 436, 5, 72, 0, 0, 436, 476, 1, 0, 0, 0, 437, 476, 3, 102, 51, 0, 438, 476, 3, 104, 52, 0, 439, 476, 3, 98, 49, 0, 440, 476, 3, 70, 35, 0, 441, 476, 3, 106, 53, 0, 442, 443, 5, 70, 0, 0, 443, 448, 3, 100, 50, 0, 444, 445, 5, 39, 0, 0, 445, 447, 3, 100, 50, 0, 446, 444, 1, 0, 0, 0, 447, 450, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 448, 449, 1, 0, 0, 0, 449, 451, 1, 0, 0, 0, 450, 448, 1, 0, 0, 0, 451, 452, 5, 71, 0, 0, 452, 476, 1, 0, 0, 0, 453, 454, 5, 70, 0, 0, 454, 459, 3, 98, 49, 0, 455, 456, 5, 39, 0, 0, 456, 458, 3, 98, 49, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 71, 0, 0, 463, 476, 1, 0, 0, 0, 464, 465, 5, 70, 0, 0, 465, 470, 3, 106, 53, 0, 466, 467, 5, 39, 0, 0, 467, 469, 3, 106, 53, 0, 468, 466, 1, 0, 0, 0, 469, 472, 1, 0, 0, 0, 470, 468, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 473, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 474, 5, 71, 0, 0, 474, 476, 1, 0, 0, 0, 475, 433, 1, 0, 0, 0, 475, 434, 1, 0, 0, 0, 475, 437, 1, 0, 0, 0, 475, 438, 1, 0, 0, 0, 475, 439, 1, 0, 0, 0, 475, 440, 1, 0, 0, 0, 475, 441, 1, 0, 0, 0, 475, 442, 1, 0, 0, 0, 475, 453, 1, 0, 0, 0, 475, 464, 1, 0, 0, 0, 476, 69, 1, 0, 0, 0, 477, 480, 5, 53, 0, 0, 478, 480, 5, 69, 0, 0, 479, 477, 1, 0, 0, 0, 479, 478, 1, 0, 0, 0, 480, 71, 1, 0, 0, 0, 481, 485, 3, 64, 32, 0, 482, 483, 4, 36, 11, 0, 483, 485, 3, 70, 35, 0, 484, 481, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 73, 1, 0, 0, 0, 486, 487, 5, 9, 0, 0, 487, 488, 5, 31, 0, 0, 488, 75, 1, 0, 0, 0, 489, 490, 5, 14, 0, 0, 490, 495, 3, 78, 39, 0, 491, 492, 5, 39, 0, 0, 492, 494, 3, 78, 39, 0, 493, 491, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 77, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 498, 500, 3, 10, 5, 0, 499, 501, 7, 4, 0, 0, 500, 499, 1, 0, 0, 0, 500, 
501, 1, 0, 0, 0, 501, 504, 1, 0, 0, 0, 502, 503, 5, 51, 0, 0, 503, 505, 7, 5, 0, 0, 504, 502, 1, 0, 0, 0, 504, 505, 1, 0, 0, 0, 505, 79, 1, 0, 0, 0, 506, 507, 5, 8, 0, 0, 507, 508, 3, 62, 31, 0, 508, 81, 1, 0, 0, 0, 509, 510, 5, 2, 0, 0, 510, 511, 3, 62, 31, 0, 511, 83, 1, 0, 0, 0, 512, 513, 5, 11, 0, 0, 513, 518, 3, 86, 43, 0, 514, 515, 5, 39, 0, 0, 515, 517, 3, 86, 43, 0, 516, 514, 1, 0, 0, 0, 517, 520, 1, 0, 0, 0, 518, 516, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 85, 1, 0, 0, 0, 520, 518, 1, 0, 0, 0, 521, 522, 3, 60, 30, 0, 522, 523, 5, 89, 0, 0, 523, 524, 3, 60, 30, 0, 524, 87, 1, 0, 0, 0, 525, 526, 5, 1, 0, 0, 526, 527, 3, 20, 10, 0, 527, 529, 3, 106, 53, 0, 528, 530, 3, 94, 47, 0, 529, 528, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 89, 1, 0, 0, 0, 531, 532, 5, 7, 0, 0, 532, 533, 3, 20, 10, 0, 533, 534, 3, 106, 53, 0, 534, 91, 1, 0, 0, 0, 535, 536, 5, 10, 0, 0, 536, 537, 3, 58, 29, 0, 537, 93, 1, 0, 0, 0, 538, 543, 3, 96, 48, 0, 539, 540, 5, 39, 0, 0, 540, 542, 3, 96, 48, 0, 541, 539, 1, 0, 0, 0, 542, 545, 1, 0, 0, 0, 543, 541, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 95, 1, 0, 0, 0, 545, 543, 1, 0, 0, 0, 546, 547, 3, 64, 32, 0, 547, 548, 5, 36, 0, 0, 548, 549, 3, 68, 34, 0, 549, 97, 1, 0, 0, 0, 550, 551, 7, 6, 0, 0, 551, 99, 1, 0, 0, 0, 552, 555, 3, 102, 51, 0, 553, 555, 3, 104, 52, 0, 554, 552, 1, 0, 0, 0, 554, 553, 1, 0, 0, 0, 555, 101, 1, 0, 0, 0, 556, 558, 7, 0, 0, 0, 557, 556, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 5, 32, 0, 0, 560, 103, 1, 0, 0, 0, 561, 563, 7, 0, 0, 0, 562, 561, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 565, 5, 31, 0, 0, 565, 105, 1, 0, 0, 0, 566, 567, 5, 30, 0, 0, 567, 107, 1, 0, 0, 0, 568, 569, 7, 7, 0, 0, 569, 109, 1, 0, 0, 0, 570, 571, 5, 5, 0, 0, 571, 572, 3, 112, 56, 0, 572, 111, 1, 0, 0, 0, 573, 574, 5, 70, 0, 0, 574, 575, 3, 2, 1, 0, 575, 576, 5, 71, 0, 0, 576, 113, 1, 0, 0, 0, 577, 578, 5, 13, 0, 0, 578, 579, 5, 105, 0, 0, 579, 115, 1, 0, 0, 0, 580, 581, 5, 3, 0, 0, 581, 584, 5, 95, 0, 
0, 582, 583, 5, 93, 0, 0, 583, 585, 3, 60, 30, 0, 584, 582, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 595, 1, 0, 0, 0, 586, 587, 5, 94, 0, 0, 587, 592, 3, 118, 59, 0, 588, 589, 5, 39, 0, 0, 589, 591, 3, 118, 59, 0, 590, 588, 1, 0, 0, 0, 591, 594, 1, 0, 0, 0, 592, 590, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 596, 1, 0, 0, 0, 594, 592, 1, 0, 0, 0, 595, 586, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 117, 1, 0, 0, 0, 597, 598, 3, 60, 30, 0, 598, 599, 5, 36, 0, 0, 599, 601, 1, 0, 0, 0, 600, 597, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 603, 3, 60, 30, 0, 603, 119, 1, 0, 0, 0, 604, 605, 5, 18, 0, 0, 605, 606, 3, 36, 18, 0, 606, 607, 5, 93, 0, 0, 607, 608, 3, 62, 31, 0, 608, 121, 1, 0, 0, 0, 609, 610, 5, 17, 0, 0, 610, 613, 3, 54, 27, 0, 611, 612, 5, 33, 0, 0, 612, 614, 3, 30, 15, 0, 613, 611, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 123, 1, 0, 0, 0, 615, 617, 7, 8, 0, 0, 616, 615, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 5, 20, 0, 0, 619, 620, 3, 126, 63, 0, 620, 621, 3, 128, 64, 0, 621, 125, 1, 0, 0, 0, 622, 625, 3, 64, 32, 0, 623, 624, 5, 89, 0, 0, 624, 626, 3, 64, 32, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 127, 1, 0, 0, 0, 627, 628, 5, 93, 0, 0, 628, 633, 3, 130, 65, 0, 629, 630, 5, 39, 0, 0, 630, 632, 3, 130, 65, 0, 631, 629, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 129, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 637, 3, 16, 8, 0, 637, 131, 1, 0, 0, 0, 62, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 296, 312, 318, 328, 332, 337, 347, 355, 368, 372, 376, 383, 387, 394, 400, 407, 415, 423, 431, 448, 459, 470, 475, 479, 484, 495, 500, 504, 518, 529, 543, 554, 557, 562, 584, 592, 595, 600, 613, 616, 625, 633] \ No newline at end of file +[4, 1, 128, 627, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 
14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 138, 8, 1, 10, 1, 12, 1, 141, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 149, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 169, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 181, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 188, 8, 5, 10, 5, 12, 5, 191, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 203, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 211, 8, 5, 10, 5, 12, 5, 214, 9, 5, 1, 6, 1, 6, 3, 6, 218, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 230, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 235, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 245, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 251, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 259, 8, 9, 10, 9, 12, 9, 262, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 272, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 277, 8, 10, 10, 10, 12, 10, 280, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 288, 8, 11, 10, 11, 12, 11, 291, 9, 11, 3, 11, 293, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 307, 8, 15, 10, 15, 12, 15, 310, 9, 15, 1, 16, 
1, 16, 1, 16, 3, 16, 315, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 323, 8, 17, 10, 17, 12, 17, 326, 9, 17, 1, 17, 3, 17, 329, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 334, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 346, 8, 21, 10, 21, 12, 21, 349, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 355, 8, 22, 10, 22, 12, 22, 358, 9, 22, 1, 22, 3, 22, 361, 8, 22, 1, 22, 1, 22, 3, 22, 365, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 372, 8, 24, 1, 24, 1, 24, 3, 24, 376, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 381, 8, 25, 10, 25, 12, 25, 384, 9, 25, 1, 26, 1, 26, 1, 26, 3, 26, 389, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 394, 8, 27, 10, 27, 12, 27, 397, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 402, 8, 28, 10, 28, 12, 28, 405, 9, 28, 1, 29, 1, 29, 1, 29, 5, 29, 410, 8, 29, 10, 29, 12, 29, 413, 9, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 420, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 435, 8, 32, 10, 32, 12, 32, 438, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 446, 8, 32, 10, 32, 12, 32, 449, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 457, 8, 32, 10, 32, 12, 32, 460, 9, 32, 1, 32, 1, 32, 3, 32, 464, 8, 32, 1, 33, 1, 33, 3, 33, 468, 8, 33, 1, 34, 1, 34, 1, 34, 3, 34, 473, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 482, 8, 36, 10, 36, 12, 36, 485, 9, 36, 1, 37, 1, 37, 3, 37, 489, 8, 37, 1, 37, 1, 37, 3, 37, 493, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 505, 8, 40, 10, 40, 12, 40, 508, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 518, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 5, 45, 530, 8, 45, 10, 45, 12, 45, 533, 9, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 3, 48, 543, 8, 48, 1, 49, 3, 49, 546, 8, 49, 1, 49, 1, 49, 1, 50, 3, 50, 551, 8, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 
54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 573, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 579, 8, 56, 10, 56, 12, 56, 582, 9, 56, 3, 56, 584, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 589, 8, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 602, 8, 59, 1, 60, 3, 60, 605, 8, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 3, 61, 614, 8, 61, 1, 62, 1, 62, 1, 62, 1, 62, 5, 62, 620, 8, 62, 10, 62, 12, 62, 623, 9, 62, 1, 63, 1, 63, 1, 63, 0, 4, 2, 10, 18, 20, 64, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 654, 0, 128, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 4, 148, 1, 0, 0, 0, 6, 168, 1, 0, 0, 0, 8, 170, 1, 0, 0, 0, 10, 202, 1, 0, 0, 0, 12, 229, 1, 0, 0, 0, 14, 231, 1, 0, 0, 0, 16, 244, 1, 0, 0, 0, 18, 250, 1, 0, 0, 0, 20, 271, 1, 0, 0, 0, 22, 281, 1, 0, 0, 0, 24, 296, 1, 0, 0, 0, 26, 298, 1, 0, 0, 0, 28, 300, 1, 0, 0, 0, 30, 303, 1, 0, 0, 0, 32, 314, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 333, 1, 0, 0, 0, 38, 337, 1, 0, 0, 0, 40, 339, 1, 0, 0, 0, 42, 341, 1, 0, 0, 0, 44, 350, 1, 0, 0, 0, 46, 366, 1, 0, 0, 0, 48, 369, 1, 0, 0, 0, 50, 377, 1, 0, 0, 0, 52, 385, 1, 0, 0, 0, 54, 390, 1, 0, 0, 0, 56, 398, 1, 0, 0, 0, 58, 406, 1, 0, 0, 0, 60, 414, 1, 0, 0, 0, 62, 419, 1, 0, 0, 0, 64, 463, 1, 0, 0, 0, 66, 467, 1, 0, 0, 0, 68, 472, 1, 0, 0, 0, 70, 474, 1, 0, 0, 0, 72, 477, 1, 0, 0, 0, 74, 486, 1, 0, 0, 0, 76, 494, 1, 0, 0, 0, 78, 497, 1, 0, 0, 0, 80, 500, 1, 0, 0, 0, 82, 509, 1, 0, 0, 0, 84, 513, 1, 0, 0, 0, 86, 519, 1, 0, 0, 0, 88, 523, 1, 0, 0, 0, 90, 526, 1, 0, 0, 0, 92, 534, 1, 0, 0, 0, 94, 538, 1, 0, 0, 0, 96, 542, 1, 0, 0, 0, 
98, 545, 1, 0, 0, 0, 100, 550, 1, 0, 0, 0, 102, 554, 1, 0, 0, 0, 104, 556, 1, 0, 0, 0, 106, 558, 1, 0, 0, 0, 108, 561, 1, 0, 0, 0, 110, 565, 1, 0, 0, 0, 112, 568, 1, 0, 0, 0, 114, 588, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 597, 1, 0, 0, 0, 120, 604, 1, 0, 0, 0, 122, 610, 1, 0, 0, 0, 124, 615, 1, 0, 0, 0, 126, 624, 1, 0, 0, 0, 128, 129, 3, 2, 1, 0, 129, 130, 5, 0, 0, 1, 130, 1, 1, 0, 0, 0, 131, 132, 6, 1, -1, 0, 132, 133, 3, 4, 2, 0, 133, 139, 1, 0, 0, 0, 134, 135, 10, 1, 0, 0, 135, 136, 5, 29, 0, 0, 136, 138, 3, 6, 3, 0, 137, 134, 1, 0, 0, 0, 138, 141, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 3, 1, 0, 0, 0, 141, 139, 1, 0, 0, 0, 142, 149, 3, 106, 53, 0, 143, 149, 3, 34, 17, 0, 144, 149, 3, 28, 14, 0, 145, 149, 3, 110, 55, 0, 146, 147, 4, 2, 1, 0, 147, 149, 3, 44, 22, 0, 148, 142, 1, 0, 0, 0, 148, 143, 1, 0, 0, 0, 148, 144, 1, 0, 0, 0, 148, 145, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 149, 5, 1, 0, 0, 0, 150, 169, 3, 46, 23, 0, 151, 169, 3, 8, 4, 0, 152, 169, 3, 76, 38, 0, 153, 169, 3, 70, 35, 0, 154, 169, 3, 48, 24, 0, 155, 169, 3, 72, 36, 0, 156, 169, 3, 78, 39, 0, 157, 169, 3, 80, 40, 0, 158, 169, 3, 84, 42, 0, 159, 169, 3, 86, 43, 0, 160, 169, 3, 112, 56, 0, 161, 169, 3, 88, 44, 0, 162, 163, 4, 3, 2, 0, 163, 169, 3, 118, 59, 0, 164, 165, 4, 3, 3, 0, 165, 169, 3, 116, 58, 0, 166, 167, 4, 3, 4, 0, 167, 169, 3, 120, 60, 0, 168, 150, 1, 0, 0, 0, 168, 151, 1, 0, 0, 0, 168, 152, 1, 0, 0, 0, 168, 153, 1, 0, 0, 0, 168, 154, 1, 0, 0, 0, 168, 155, 1, 0, 0, 0, 168, 156, 1, 0, 0, 0, 168, 157, 1, 0, 0, 0, 168, 158, 1, 0, 0, 0, 168, 159, 1, 0, 0, 0, 168, 160, 1, 0, 0, 0, 168, 161, 1, 0, 0, 0, 168, 162, 1, 0, 0, 0, 168, 164, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 7, 1, 0, 0, 0, 170, 171, 5, 16, 0, 0, 171, 172, 3, 10, 5, 0, 172, 9, 1, 0, 0, 0, 173, 174, 6, 5, -1, 0, 174, 175, 5, 49, 0, 0, 175, 203, 3, 10, 5, 8, 176, 203, 3, 16, 8, 0, 177, 203, 3, 12, 6, 0, 178, 180, 3, 16, 8, 0, 179, 181, 5, 49, 0, 0, 180, 179, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 1, 0, 0, 
0, 182, 183, 5, 44, 0, 0, 183, 184, 5, 48, 0, 0, 184, 189, 3, 16, 8, 0, 185, 186, 5, 39, 0, 0, 186, 188, 3, 16, 8, 0, 187, 185, 1, 0, 0, 0, 188, 191, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 192, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 192, 193, 5, 55, 0, 0, 193, 203, 1, 0, 0, 0, 194, 195, 3, 16, 8, 0, 195, 197, 5, 45, 0, 0, 196, 198, 5, 49, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 203, 1, 0, 0, 0, 201, 203, 3, 14, 7, 0, 202, 173, 1, 0, 0, 0, 202, 176, 1, 0, 0, 0, 202, 177, 1, 0, 0, 0, 202, 178, 1, 0, 0, 0, 202, 194, 1, 0, 0, 0, 202, 201, 1, 0, 0, 0, 203, 212, 1, 0, 0, 0, 204, 205, 10, 5, 0, 0, 205, 206, 5, 34, 0, 0, 206, 211, 3, 10, 5, 6, 207, 208, 10, 4, 0, 0, 208, 209, 5, 52, 0, 0, 209, 211, 3, 10, 5, 5, 210, 204, 1, 0, 0, 0, 210, 207, 1, 0, 0, 0, 211, 214, 1, 0, 0, 0, 212, 210, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 11, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 215, 217, 3, 16, 8, 0, 216, 218, 5, 49, 0, 0, 217, 216, 1, 0, 0, 0, 217, 218, 1, 0, 0, 0, 218, 219, 1, 0, 0, 0, 219, 220, 5, 47, 0, 0, 220, 221, 3, 102, 51, 0, 221, 230, 1, 0, 0, 0, 222, 224, 3, 16, 8, 0, 223, 225, 5, 49, 0, 0, 224, 223, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 227, 5, 54, 0, 0, 227, 228, 3, 102, 51, 0, 228, 230, 1, 0, 0, 0, 229, 215, 1, 0, 0, 0, 229, 222, 1, 0, 0, 0, 230, 13, 1, 0, 0, 0, 231, 234, 3, 54, 27, 0, 232, 233, 5, 37, 0, 0, 233, 235, 3, 26, 13, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 236, 1, 0, 0, 0, 236, 237, 5, 38, 0, 0, 237, 238, 3, 64, 32, 0, 238, 15, 1, 0, 0, 0, 239, 245, 3, 18, 9, 0, 240, 241, 3, 18, 9, 0, 241, 242, 3, 104, 52, 0, 242, 243, 3, 18, 9, 0, 243, 245, 1, 0, 0, 0, 244, 239, 1, 0, 0, 0, 244, 240, 1, 0, 0, 0, 245, 17, 1, 0, 0, 0, 246, 247, 6, 9, -1, 0, 247, 251, 3, 20, 10, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 3, 250, 246, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 251, 260, 1, 0, 0, 0, 252, 253, 10, 2, 0, 0, 253, 254, 7, 1, 0, 0, 254, 259, 3, 18, 9, 3, 255, 256, 10, 
1, 0, 0, 256, 257, 7, 0, 0, 0, 257, 259, 3, 18, 9, 2, 258, 252, 1, 0, 0, 0, 258, 255, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 19, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 264, 6, 10, -1, 0, 264, 272, 3, 64, 32, 0, 265, 272, 3, 54, 27, 0, 266, 272, 3, 22, 11, 0, 267, 268, 5, 48, 0, 0, 268, 269, 3, 10, 5, 0, 269, 270, 5, 55, 0, 0, 270, 272, 1, 0, 0, 0, 271, 263, 1, 0, 0, 0, 271, 265, 1, 0, 0, 0, 271, 266, 1, 0, 0, 0, 271, 267, 1, 0, 0, 0, 272, 278, 1, 0, 0, 0, 273, 274, 10, 1, 0, 0, 274, 275, 5, 37, 0, 0, 275, 277, 3, 26, 13, 0, 276, 273, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 21, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 3, 24, 12, 0, 282, 292, 5, 48, 0, 0, 283, 293, 5, 66, 0, 0, 284, 289, 3, 10, 5, 0, 285, 286, 5, 39, 0, 0, 286, 288, 3, 10, 5, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 283, 1, 0, 0, 0, 292, 284, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 5, 55, 0, 0, 295, 23, 1, 0, 0, 0, 296, 297, 3, 68, 34, 0, 297, 25, 1, 0, 0, 0, 298, 299, 3, 60, 30, 0, 299, 27, 1, 0, 0, 0, 300, 301, 5, 12, 0, 0, 301, 302, 3, 30, 15, 0, 302, 29, 1, 0, 0, 0, 303, 308, 3, 32, 16, 0, 304, 305, 5, 39, 0, 0, 305, 307, 3, 32, 16, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 31, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 3, 54, 27, 0, 312, 313, 5, 36, 0, 0, 313, 315, 1, 0, 0, 0, 314, 311, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 3, 10, 5, 0, 317, 33, 1, 0, 0, 0, 318, 319, 5, 6, 0, 0, 319, 324, 3, 36, 18, 0, 320, 321, 5, 39, 0, 0, 321, 323, 3, 36, 18, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 42, 21, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 35, 1, 0, 0, 0, 330, 331, 3, 38, 19, 0, 331, 332, 5, 
38, 0, 0, 332, 334, 1, 0, 0, 0, 333, 330, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 3, 40, 20, 0, 336, 37, 1, 0, 0, 0, 337, 338, 5, 81, 0, 0, 338, 39, 1, 0, 0, 0, 339, 340, 7, 2, 0, 0, 340, 41, 1, 0, 0, 0, 341, 342, 5, 80, 0, 0, 342, 347, 5, 81, 0, 0, 343, 344, 5, 39, 0, 0, 344, 346, 5, 81, 0, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 43, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 351, 5, 19, 0, 0, 351, 356, 3, 36, 18, 0, 352, 353, 5, 39, 0, 0, 353, 355, 3, 36, 18, 0, 354, 352, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 361, 3, 50, 25, 0, 360, 359, 1, 0, 0, 0, 360, 361, 1, 0, 0, 0, 361, 364, 1, 0, 0, 0, 362, 363, 5, 33, 0, 0, 363, 365, 3, 30, 15, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 45, 1, 0, 0, 0, 366, 367, 5, 4, 0, 0, 367, 368, 3, 30, 15, 0, 368, 47, 1, 0, 0, 0, 369, 371, 5, 15, 0, 0, 370, 372, 3, 50, 25, 0, 371, 370, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 375, 1, 0, 0, 0, 373, 374, 5, 33, 0, 0, 374, 376, 3, 30, 15, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 49, 1, 0, 0, 0, 377, 382, 3, 52, 26, 0, 378, 379, 5, 39, 0, 0, 379, 381, 3, 52, 26, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 51, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 388, 3, 32, 16, 0, 386, 387, 5, 16, 0, 0, 387, 389, 3, 10, 5, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 53, 1, 0, 0, 0, 390, 395, 3, 68, 34, 0, 391, 392, 5, 41, 0, 0, 392, 394, 3, 68, 34, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 55, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 403, 3, 62, 31, 0, 399, 400, 5, 41, 0, 0, 400, 402, 3, 62, 31, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 57, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 411, 3, 56, 28, 0, 407, 408, 5, 39, 0, 0, 408, 410, 3, 56, 28, 0, 409, 407, 1, 
0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 59, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 7, 3, 0, 0, 415, 61, 1, 0, 0, 0, 416, 420, 5, 85, 0, 0, 417, 418, 4, 31, 10, 0, 418, 420, 3, 66, 33, 0, 419, 416, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 420, 63, 1, 0, 0, 0, 421, 464, 5, 50, 0, 0, 422, 423, 3, 100, 50, 0, 423, 424, 5, 72, 0, 0, 424, 464, 1, 0, 0, 0, 425, 464, 3, 98, 49, 0, 426, 464, 3, 100, 50, 0, 427, 464, 3, 94, 47, 0, 428, 464, 3, 66, 33, 0, 429, 464, 3, 102, 51, 0, 430, 431, 5, 70, 0, 0, 431, 436, 3, 96, 48, 0, 432, 433, 5, 39, 0, 0, 433, 435, 3, 96, 48, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 439, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 5, 71, 0, 0, 440, 464, 1, 0, 0, 0, 441, 442, 5, 70, 0, 0, 442, 447, 3, 94, 47, 0, 443, 444, 5, 39, 0, 0, 444, 446, 3, 94, 47, 0, 445, 443, 1, 0, 0, 0, 446, 449, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 450, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 450, 451, 5, 71, 0, 0, 451, 464, 1, 0, 0, 0, 452, 453, 5, 70, 0, 0, 453, 458, 3, 102, 51, 0, 454, 455, 5, 39, 0, 0, 455, 457, 3, 102, 51, 0, 456, 454, 1, 0, 0, 0, 457, 460, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 461, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 461, 462, 5, 71, 0, 0, 462, 464, 1, 0, 0, 0, 463, 421, 1, 0, 0, 0, 463, 422, 1, 0, 0, 0, 463, 425, 1, 0, 0, 0, 463, 426, 1, 0, 0, 0, 463, 427, 1, 0, 0, 0, 463, 428, 1, 0, 0, 0, 463, 429, 1, 0, 0, 0, 463, 430, 1, 0, 0, 0, 463, 441, 1, 0, 0, 0, 463, 452, 1, 0, 0, 0, 464, 65, 1, 0, 0, 0, 465, 468, 5, 53, 0, 0, 466, 468, 5, 69, 0, 0, 467, 465, 1, 0, 0, 0, 467, 466, 1, 0, 0, 0, 468, 67, 1, 0, 0, 0, 469, 473, 3, 60, 30, 0, 470, 471, 4, 34, 11, 0, 471, 473, 3, 66, 33, 0, 472, 469, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 69, 1, 0, 0, 0, 474, 475, 5, 9, 0, 0, 475, 476, 5, 31, 0, 0, 476, 71, 1, 0, 0, 0, 477, 478, 5, 14, 0, 0, 478, 483, 3, 74, 37, 0, 479, 480, 5, 39, 0, 0, 480, 482, 3, 74, 37, 0, 481, 479, 1, 0, 0, 0, 
482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 73, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 488, 3, 10, 5, 0, 487, 489, 7, 4, 0, 0, 488, 487, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 491, 5, 51, 0, 0, 491, 493, 7, 5, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 75, 1, 0, 0, 0, 494, 495, 5, 8, 0, 0, 495, 496, 3, 58, 29, 0, 496, 77, 1, 0, 0, 0, 497, 498, 5, 2, 0, 0, 498, 499, 3, 58, 29, 0, 499, 79, 1, 0, 0, 0, 500, 501, 5, 11, 0, 0, 501, 506, 3, 82, 41, 0, 502, 503, 5, 39, 0, 0, 503, 505, 3, 82, 41, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 81, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 510, 3, 56, 28, 0, 510, 511, 5, 89, 0, 0, 511, 512, 3, 56, 28, 0, 512, 83, 1, 0, 0, 0, 513, 514, 5, 1, 0, 0, 514, 515, 3, 20, 10, 0, 515, 517, 3, 102, 51, 0, 516, 518, 3, 90, 45, 0, 517, 516, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 85, 1, 0, 0, 0, 519, 520, 5, 7, 0, 0, 520, 521, 3, 20, 10, 0, 521, 522, 3, 102, 51, 0, 522, 87, 1, 0, 0, 0, 523, 524, 5, 10, 0, 0, 524, 525, 3, 54, 27, 0, 525, 89, 1, 0, 0, 0, 526, 531, 3, 92, 46, 0, 527, 528, 5, 39, 0, 0, 528, 530, 3, 92, 46, 0, 529, 527, 1, 0, 0, 0, 530, 533, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 91, 1, 0, 0, 0, 533, 531, 1, 0, 0, 0, 534, 535, 3, 60, 30, 0, 535, 536, 5, 36, 0, 0, 536, 537, 3, 64, 32, 0, 537, 93, 1, 0, 0, 0, 538, 539, 7, 6, 0, 0, 539, 95, 1, 0, 0, 0, 540, 543, 3, 98, 49, 0, 541, 543, 3, 100, 50, 0, 542, 540, 1, 0, 0, 0, 542, 541, 1, 0, 0, 0, 543, 97, 1, 0, 0, 0, 544, 546, 7, 0, 0, 0, 545, 544, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 5, 32, 0, 0, 548, 99, 1, 0, 0, 0, 549, 551, 7, 0, 0, 0, 550, 549, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 5, 31, 0, 0, 553, 101, 1, 0, 0, 0, 554, 555, 5, 30, 0, 0, 555, 103, 1, 0, 0, 0, 556, 557, 7, 7, 0, 0, 557, 105, 1, 0, 0, 0, 558, 559, 5, 5, 0, 0, 559, 560, 3, 108, 54, 0, 560, 107, 1, 0, 0, 0, 561, 562, 5, 70, 0, 0, 
562, 563, 3, 2, 1, 0, 563, 564, 5, 71, 0, 0, 564, 109, 1, 0, 0, 0, 565, 566, 5, 13, 0, 0, 566, 567, 5, 105, 0, 0, 567, 111, 1, 0, 0, 0, 568, 569, 5, 3, 0, 0, 569, 572, 5, 95, 0, 0, 570, 571, 5, 93, 0, 0, 571, 573, 3, 56, 28, 0, 572, 570, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 583, 1, 0, 0, 0, 574, 575, 5, 94, 0, 0, 575, 580, 3, 114, 57, 0, 576, 577, 5, 39, 0, 0, 577, 579, 3, 114, 57, 0, 578, 576, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 584, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 583, 574, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 113, 1, 0, 0, 0, 585, 586, 3, 56, 28, 0, 586, 587, 5, 36, 0, 0, 587, 589, 1, 0, 0, 0, 588, 585, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 3, 56, 28, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 18, 0, 0, 593, 594, 3, 36, 18, 0, 594, 595, 5, 93, 0, 0, 595, 596, 3, 58, 29, 0, 596, 117, 1, 0, 0, 0, 597, 598, 5, 17, 0, 0, 598, 601, 3, 50, 25, 0, 599, 600, 5, 33, 0, 0, 600, 602, 3, 30, 15, 0, 601, 599, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 119, 1, 0, 0, 0, 603, 605, 7, 8, 0, 0, 604, 603, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 5, 20, 0, 0, 607, 608, 3, 122, 61, 0, 608, 609, 3, 124, 62, 0, 609, 121, 1, 0, 0, 0, 610, 613, 3, 60, 30, 0, 611, 612, 5, 89, 0, 0, 612, 614, 3, 60, 30, 0, 613, 611, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 123, 1, 0, 0, 0, 615, 616, 5, 93, 0, 0, 616, 621, 3, 126, 63, 0, 617, 618, 5, 39, 0, 0, 618, 620, 3, 126, 63, 0, 619, 617, 1, 0, 0, 0, 620, 623, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 125, 1, 0, 0, 0, 623, 621, 1, 0, 0, 0, 624, 625, 3, 16, 8, 0, 625, 127, 1, 0, 0, 0, 61, 139, 148, 168, 180, 189, 197, 202, 210, 212, 217, 224, 229, 234, 244, 250, 258, 260, 271, 278, 289, 292, 308, 314, 324, 328, 333, 347, 356, 360, 364, 371, 375, 382, 388, 395, 403, 411, 419, 436, 447, 458, 463, 467, 472, 483, 488, 492, 506, 517, 531, 542, 545, 550, 572, 580, 583, 588, 601, 604, 613, 621] \ No newline at end of file diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index a56035364641..e272dc7f477a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -59,35 +59,34 @@ public class EsqlBaseParser extends ParserConfig { RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, - RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, - RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, - RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, - RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, - RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, - RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, - RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, - RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, - RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, - RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, - RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, - RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, - RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, - RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63, - RULE_joinCondition = 64, RULE_joinPredicate = 65; + RULE_indexString = 20, RULE_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, + 
RULE_statsCommand = 24, RULE_aggFields = 25, RULE_aggField = 26, RULE_qualifiedName = 27, + RULE_qualifiedNamePattern = 28, RULE_qualifiedNamePatterns = 29, RULE_identifier = 30, + RULE_identifierPattern = 31, RULE_constant = 32, RULE_parameter = 33, + RULE_identifierOrParameter = 34, RULE_limitCommand = 35, RULE_sortCommand = 36, + RULE_orderExpression = 37, RULE_keepCommand = 38, RULE_dropCommand = 39, + RULE_renameCommand = 40, RULE_renameClause = 41, RULE_dissectCommand = 42, + RULE_grokCommand = 43, RULE_mvExpandCommand = 44, RULE_commandOptions = 45, + RULE_commandOption = 46, RULE_booleanValue = 47, RULE_numericValue = 48, + RULE_decimalValue = 49, RULE_integerValue = 50, RULE_string = 51, RULE_comparisonOperator = 52, + RULE_explainCommand = 53, RULE_subqueryExpression = 54, RULE_showCommand = 55, + RULE_enrichCommand = 56, RULE_enrichWithClause = 57, RULE_lookupCommand = 58, + RULE_inlinestatsCommand = 59, RULE_joinCommand = 60, RULE_joinTarget = 61, + RULE_joinCondition = 62, RULE_joinPredicate = 63; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", - "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", - "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", - "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", - "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - 
"integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "indexPattern", "clusterString", "indexString", "metadata", "metricsCommand", + "evalCommand", "statsCommand", "aggFields", "aggField", "qualifiedName", + "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", "identifierPattern", + "constant", "parameter", "identifierOrParameter", "limitCommand", "sortCommand", + "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", + "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", + "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", + "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate" }; @@ -226,9 +225,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(132); + setState(128); query(0); - setState(133); + setState(129); match(EOF); } } @@ -324,11 +323,11 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(136); + setState(132); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(143); + setState(139); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -339,16 +338,16 @@ public class EsqlBaseParser extends ParserConfig { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(138); + setState(134); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(139); + setState(135); match(PIPE); - setState(140); + setState(136); processingCommand(); } } } - setState(145); + setState(141); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -406,43 +405,43 @@ public class EsqlBaseParser extends ParserConfig { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(152); + setState(148); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(146); + setState(142); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(147); + setState(143); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(148); + setState(144); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(149); + setState(145); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(150); + setState(146); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(151); + setState(147); metricsCommand(); } break; @@ -530,117 +529,117 @@ public class EsqlBaseParser extends ParserConfig { ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(172); + setState(168); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(154); + setState(150); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(155); + setState(151); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(156); + setState(152); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(157); + setState(153); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(158); + setState(154); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(159); + setState(155); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(160); + setState(156); dropCommand(); } 
break; case 8: enterOuterAlt(_localctx, 8); { - setState(161); + setState(157); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(162); + setState(158); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(163); + setState(159); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(164); + setState(160); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(165); + setState(161); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(166); + setState(162); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(167); + setState(163); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(168); + setState(164); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(169); + setState(165); lookupCommand(); } break; case 15: enterOuterAlt(_localctx, 15); { - setState(170); + setState(166); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(171); + setState(167); joinCommand(); } break; @@ -689,9 +688,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(174); + setState(170); match(WHERE); - setState(175); + setState(171); booleanExpression(0); } } @@ -907,7 +906,7 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(206); + setState(202); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -916,9 +915,9 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(178); + setState(174); match(NOT); - setState(179); + setState(175); booleanExpression(8); } break; @@ -927,7 +926,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new BooleanDefaultContext(_localctx); _ctx 
= _localctx; _prevctx = _localctx; - setState(180); + setState(176); valueExpression(); } break; @@ -936,7 +935,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(181); + setState(177); regexBooleanExpression(); } break; @@ -945,41 +944,41 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(178); valueExpression(); - setState(184); + setState(180); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(183); + setState(179); match(NOT); } } - setState(186); + setState(182); match(IN); - setState(187); + setState(183); match(LP); - setState(188); + setState(184); valueExpression(); - setState(193); + setState(189); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(189); + setState(185); match(COMMA); - setState(190); + setState(186); valueExpression(); } } - setState(195); + setState(191); _errHandler.sync(this); _la = _input.LA(1); } - setState(196); + setState(192); match(RP); } break; @@ -988,21 +987,21 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(194); valueExpression(); - setState(199); + setState(195); match(IS); - setState(201); + setState(197); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(200); + setState(196); match(NOT); } } - setState(203); + setState(199); match(NULL); } break; @@ -1011,13 +1010,13 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(205); + setState(201); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(216); + setState(212); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1025,7 +1024,7 @@ public class EsqlBaseParser extends ParserConfig { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(214); + setState(210); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1033,11 +1032,11 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(208); + setState(204); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(209); + setState(205); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(210); + setState(206); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1046,18 +1045,18 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(211); + setState(207); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(212); + setState(208); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(213); + setState(209); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(218); + setState(214); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1112,48 +1111,48 @@ public class EsqlBaseParser extends ParserConfig { enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(233); + setState(229); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(219); + setState(215); valueExpression(); - setState(221); + setState(217); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(220); + setState(216); match(NOT); } } - setState(223); + setState(219); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(224); + setState(220); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(226); + setState(222); valueExpression(); - setState(228); + setState(224); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(227); + setState(223); match(NOT); } } - setState(230); + setState(226); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(231); + setState(227); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1213,23 +1212,23 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(235); + setState(231); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(238); + setState(234); _errHandler.sync(this); _la = _input.LA(1); if (_la==CAST_OP) { { - setState(236); + setState(232); match(CAST_OP); - setState(237); + setState(233); ((MatchBooleanExpressionContext)_localctx).fieldType = dataType(); } } - setState(240); + setState(236); match(COLON); - setState(241); + setState(237); ((MatchBooleanExpressionContext)_localctx).matchQuery = constant(); } } @@ -1313,14 +1312,14 @@ public class EsqlBaseParser extends ParserConfig { ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(248); + setState(244); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - 
setState(243); + setState(239); operatorExpression(0); } break; @@ -1328,11 +1327,11 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(244); + setState(240); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(245); + setState(241); comparisonOperator(); - setState(246); + setState(242); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1457,7 +1456,7 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(254); + setState(250); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1466,7 +1465,7 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(247); primaryExpression(0); } break; @@ -1475,7 +1474,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(248); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1486,13 +1485,13 @@ public class EsqlBaseParser extends ParserConfig { _errHandler.reportMatch(this); consume(); } - setState(253); + setState(249); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(264); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1500,7 +1499,7 @@ public class EsqlBaseParser extends ParserConfig { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(262); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: @@ -1508,9 +1507,9 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new 
ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(256); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(257); + setState(253); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { @@ -1521,7 +1520,7 @@ public class EsqlBaseParser extends ParserConfig { _errHandler.reportMatch(this); consume(); } - setState(258); + setState(254); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1530,9 +1529,9 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(259); + setState(255); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(260); + setState(256); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1543,14 +1542,14 @@ public class EsqlBaseParser extends ParserConfig { _errHandler.reportMatch(this); consume(); } - setState(261); + setState(257); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(266); + setState(262); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -1708,7 +1707,7 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(275); + setState(271); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) { case 1: @@ -1717,7 +1716,7 @@ public class EsqlBaseParser extends ParserConfig { _ctx 
= _localctx; _prevctx = _localctx; - setState(268); + setState(264); constant(); } break; @@ -1726,7 +1725,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(269); + setState(265); qualifiedName(); } break; @@ -1735,7 +1734,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(270); + setState(266); functionExpression(); } break; @@ -1744,17 +1743,17 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(271); + setState(267); match(LP); - setState(272); + setState(268); booleanExpression(0); - setState(273); + setState(269); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(282); + setState(278); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1765,16 +1764,16 @@ public class EsqlBaseParser extends ParserConfig { { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(277); + setState(273); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(278); + setState(274); match(CAST_OP); - setState(279); + setState(275); dataType(); } } } - setState(284); + setState(280); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); } @@ -1836,37 +1835,37 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(285); + setState(281); functionName(); - setState(286); + setState(282); match(LP); - setState(296); + setState(292); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: 
{ - setState(287); + setState(283); match(ASTERISK); } break; case 2: { { - setState(288); + setState(284); booleanExpression(0); - setState(293); + setState(289); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(289); + setState(285); match(COMMA); - setState(290); + setState(286); booleanExpression(0); } } - setState(295); + setState(291); _errHandler.sync(this); _la = _input.LA(1); } @@ -1874,7 +1873,7 @@ public class EsqlBaseParser extends ParserConfig { } break; } - setState(298); + setState(294); match(RP); } } @@ -1920,7 +1919,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(300); + setState(296); identifierOrParameter(); } } @@ -1978,7 +1977,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(302); + setState(298); identifier(); } } @@ -2025,9 +2024,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(300); match(ROW); - setState(305); + setState(301); fields(); } } @@ -2081,23 +2080,23 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(307); + setState(303); field(); - setState(312); + setState(308); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(308); + setState(304); match(COMMA); - setState(309); + setState(305); field(); } } } - setState(314); + setState(310); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } @@ -2149,19 +2148,19 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(318); + setState(314); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(315); + setState(311); 
qualifiedName(); - setState(316); + setState(312); match(ASSIGN); } break; } - setState(320); + setState(316); booleanExpression(0); } } @@ -2219,34 +2218,34 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(322); + setState(318); match(FROM); - setState(323); + setState(319); indexPattern(); - setState(328); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(324); + setState(320); match(COMMA); - setState(325); + setState(321); indexPattern(); } } } - setState(330); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } - setState(332); + setState(328); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(331); + setState(327); metadata(); } break; @@ -2299,19 +2298,19 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(333); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { - setState(334); + setState(330); clusterString(); - setState(335); + setState(331); match(COLON); } break; } - setState(339); + setState(335); indexString(); } } @@ -2355,7 +2354,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(341); + setState(337); match(UNQUOTED_SOURCE); } } @@ -2401,7 +2400,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(343); + setState(339); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2426,11 +2425,14 @@ public class EsqlBaseParser extends ParserConfig { @SuppressWarnings("CheckReturnValue") public static class MetadataContext extends ParserRuleContext { - public 
MetadataOptionContext metadataOption() { - return getRuleContext(MetadataOptionContext.class,0); + public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } + public List UNQUOTED_SOURCE() { return getTokens(EsqlBaseParser.UNQUOTED_SOURCE); } + public TerminalNode UNQUOTED_SOURCE(int i) { + return getToken(EsqlBaseParser.UNQUOTED_SOURCE, i); } - public Deprecated_metadataContext deprecated_metadata() { - return getRuleContext(Deprecated_metadataContext.class,0); + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); } @SuppressWarnings("this-escape") public MetadataContext(ParserRuleContext parent, int invokingState) { @@ -2455,98 +2457,31 @@ public class EsqlBaseParser extends ParserConfig { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); - try { - setState(347); - _errHandler.sync(this); - switch (_input.LA(1)) { - case METADATA: - enterOuterAlt(_localctx, 1); - { - setState(345); - metadataOption(); - } - break; - case OPENING_BRACKET: - enterOuterAlt(_localctx, 2); - { - setState(346); - deprecated_metadata(); - } - break; - default: - throw new NoViableAltException(this); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class MetadataOptionContext extends ParserRuleContext { - public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } - public List UNQUOTED_SOURCE() { return getTokens(EsqlBaseParser.UNQUOTED_SOURCE); } - public TerminalNode UNQUOTED_SOURCE(int i) { - return getToken(EsqlBaseParser.UNQUOTED_SOURCE, i); - } - public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } - public 
TerminalNode COMMA(int i) { - return getToken(EsqlBaseParser.COMMA, i); - } - @SuppressWarnings("this-escape") - public MetadataOptionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_metadataOption; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetadataOption(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetadataOption(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetadataOption(this); - else return visitor.visitChildren(this); - } - } - - public final MetadataOptionContext metadataOption() throws RecognitionException { - MetadataOptionContext _localctx = new MetadataOptionContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_metadataOption); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(349); + setState(341); match(METADATA); - setState(350); + setState(342); match(UNQUOTED_SOURCE); - setState(355); + setState(347); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(351); + setState(343); match(COMMA); - setState(352); + setState(344); match(UNQUOTED_SOURCE); } } } - setState(357); + setState(349); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } } } @@ -2561,57 +2496,6 @@ public class EsqlBaseParser extends ParserConfig { return _localctx; } - @SuppressWarnings("CheckReturnValue") - public static class 
Deprecated_metadataContext extends ParserRuleContext { - public TerminalNode OPENING_BRACKET() { return getToken(EsqlBaseParser.OPENING_BRACKET, 0); } - public MetadataOptionContext metadataOption() { - return getRuleContext(MetadataOptionContext.class,0); - } - public TerminalNode CLOSING_BRACKET() { return getToken(EsqlBaseParser.CLOSING_BRACKET, 0); } - public Deprecated_metadataContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_deprecated_metadata; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDeprecated_metadata(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitDeprecated_metadata(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitDeprecated_metadata(this); - else return visitor.visitChildren(this); - } - } - - public final Deprecated_metadataContext deprecated_metadata() throws RecognitionException { - Deprecated_metadataContext _localctx = new Deprecated_metadataContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_deprecated_metadata); - try { - enterOuterAlt(_localctx, 1); - { - setState(358); - match(OPENING_BRACKET); - setState(359); - metadataOption(); - setState(360); - match(CLOSING_BRACKET); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - @SuppressWarnings("CheckReturnValue") public static class MetricsCommandContext extends ParserRuleContext { public AggFieldsContext aggregates; @@ -2656,51 +2540,51 @@ public class EsqlBaseParser extends ParserConfig { public final 
MetricsCommandContext metricsCommand() throws RecognitionException { MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_metricsCommand); + enterRule(_localctx, 44, RULE_metricsCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(362); + setState(350); match(DEV_METRICS); - setState(363); + setState(351); indexPattern(); - setState(368); + setState(356); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(364); + setState(352); match(COMMA); - setState(365); + setState(353); indexPattern(); } } } - setState(370); + setState(358); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(372); + setState(360); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(371); + setState(359); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(376); + setState(364); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(374); + setState(362); match(BY); - setState(375); + setState(363); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2746,13 +2630,13 @@ public class EsqlBaseParser extends ParserConfig { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_evalCommand); + enterRule(_localctx, 46, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(378); + setState(366); match(EVAL); - 
setState(379); + setState(367); fields(); } } @@ -2801,30 +2685,30 @@ public class EsqlBaseParser extends ParserConfig { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_statsCommand); + enterRule(_localctx, 48, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(381); + setState(369); match(STATS); - setState(383); + setState(371); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(382); + setState(370); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(387); + setState(375); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(385); + setState(373); match(BY); - setState(386); + setState(374); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2876,30 +2760,30 @@ public class EsqlBaseParser extends ParserConfig { public final AggFieldsContext aggFields() throws RecognitionException { AggFieldsContext _localctx = new AggFieldsContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_aggFields); + enterRule(_localctx, 50, RULE_aggFields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(389); + setState(377); aggField(); - setState(394); + setState(382); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(390); + setState(378); match(COMMA); - setState(391); + setState(379); aggField(); } } } - setState(396); + setState(384); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,32,_ctx); } } } @@ -2945,20 +2829,20 @@ public class EsqlBaseParser extends ParserConfig { public final AggFieldContext aggField() throws RecognitionException { AggFieldContext _localctx = new AggFieldContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_aggField); + enterRule(_localctx, 52, RULE_aggField); try { enterOuterAlt(_localctx, 1); { - setState(397); + setState(385); field(); - setState(400); + setState(388); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(398); + setState(386); match(WHERE); - setState(399); + setState(387); booleanExpression(0); } break; @@ -3010,30 +2894,30 @@ public class EsqlBaseParser extends ParserConfig { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_qualifiedName); + enterRule(_localctx, 54, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(402); + setState(390); identifierOrParameter(); - setState(407); + setState(395); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(403); + setState(391); match(DOT); - setState(404); + setState(392); identifierOrParameter(); } } } - setState(409); + setState(397); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -3082,30 +2966,30 @@ public class EsqlBaseParser extends ParserConfig { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, 
getState()); - enterRule(_localctx, 60, RULE_qualifiedNamePattern); + enterRule(_localctx, 56, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(410); + setState(398); identifierPattern(); - setState(415); + setState(403); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(411); + setState(399); match(DOT); - setState(412); + setState(400); identifierPattern(); } } } - setState(417); + setState(405); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -3154,30 +3038,30 @@ public class EsqlBaseParser extends ParserConfig { public final QualifiedNamePatternsContext qualifiedNamePatterns() throws RecognitionException { QualifiedNamePatternsContext _localctx = new QualifiedNamePatternsContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_qualifiedNamePatterns); + enterRule(_localctx, 58, RULE_qualifiedNamePatterns); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(418); + setState(406); qualifiedNamePattern(); - setState(423); + setState(411); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(419); + setState(407); match(COMMA); - setState(420); + setState(408); qualifiedNamePattern(); } } } - setState(425); + setState(413); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } } } @@ -3218,12 +3102,12 @@ public class EsqlBaseParser extends ParserConfig { public final IdentifierContext identifier() throws RecognitionException { 
IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_identifier); + enterRule(_localctx, 60, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(426); + setState(414); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3274,24 +3158,24 @@ public class EsqlBaseParser extends ParserConfig { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_identifierPattern); + enterRule(_localctx, 62, RULE_identifierPattern); try { - setState(431); + setState(419); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(428); + setState(416); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(429); + setState(417); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(430); + setState(418); parameter(); } break; @@ -3561,17 +3445,17 @@ public class EsqlBaseParser extends ParserConfig { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_constant); + enterRule(_localctx, 64, RULE_constant); int _la; try { - setState(475); + setState(463); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(433); + setState(421); match(NULL); } break; @@ -3579,9 +3463,9 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new QualifiedIntegerLiteralContext(_localctx); 
enterOuterAlt(_localctx, 2); { - setState(434); + setState(422); integerValue(); - setState(435); + setState(423); match(UNQUOTED_IDENTIFIER); } break; @@ -3589,7 +3473,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(437); + setState(425); decimalValue(); } break; @@ -3597,7 +3481,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(438); + setState(426); integerValue(); } break; @@ -3605,7 +3489,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(439); + setState(427); booleanValue(); } break; @@ -3613,7 +3497,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(440); + setState(428); parameter(); } break; @@ -3621,7 +3505,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(441); + setState(429); string(); } break; @@ -3629,27 +3513,27 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(442); + setState(430); match(OPENING_BRACKET); - setState(443); + setState(431); numericValue(); - setState(448); + setState(436); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(444); + setState(432); match(COMMA); - setState(445); + setState(433); numericValue(); } } - setState(450); + setState(438); _errHandler.sync(this); _la = _input.LA(1); } - setState(451); + setState(439); match(CLOSING_BRACKET); } break; @@ -3657,27 +3541,27 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - 
setState(453); + setState(441); match(OPENING_BRACKET); - setState(454); + setState(442); booleanValue(); - setState(459); + setState(447); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(455); + setState(443); match(COMMA); - setState(456); + setState(444); booleanValue(); } } - setState(461); + setState(449); _errHandler.sync(this); _la = _input.LA(1); } - setState(462); + setState(450); match(CLOSING_BRACKET); } break; @@ -3685,27 +3569,27 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(464); + setState(452); match(OPENING_BRACKET); - setState(465); + setState(453); string(); - setState(470); + setState(458); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(466); + setState(454); match(COMMA); - setState(467); + setState(455); string(); } } - setState(472); + setState(460); _errHandler.sync(this); _la = _input.LA(1); } - setState(473); + setState(461); match(CLOSING_BRACKET); } break; @@ -3777,16 +3661,16 @@ public class EsqlBaseParser extends ParserConfig { public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_parameter); + enterRule(_localctx, 66, RULE_parameter); try { - setState(479); + setState(467); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(477); + setState(465); match(PARAM); } break; @@ -3794,7 +3678,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(478); + setState(466); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3843,24 +3727,24 @@ public class EsqlBaseParser extends ParserConfig { public final IdentifierOrParameterContext identifierOrParameter() throws 
RecognitionException { IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_identifierOrParameter); + enterRule(_localctx, 68, RULE_identifierOrParameter); try { - setState(484); + setState(472); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(481); + setState(469); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(482); + setState(470); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(483); + setState(471); parameter(); } break; @@ -3903,13 +3787,13 @@ public class EsqlBaseParser extends ParserConfig { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_limitCommand); + enterRule(_localctx, 70, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(486); + setState(474); match(LIMIT); - setState(487); + setState(475); match(INTEGER_LITERAL); } } @@ -3959,32 +3843,32 @@ public class EsqlBaseParser extends ParserConfig { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_sortCommand); + enterRule(_localctx, 72, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(489); + setState(477); match(SORT); - setState(490); + setState(478); orderExpression(); - setState(495); + setState(483); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(491); + setState(479); match(COMMA); - 
setState(492); + setState(480); orderExpression(); } } } - setState(497); + setState(485); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -4033,19 +3917,19 @@ public class EsqlBaseParser extends ParserConfig { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_orderExpression); + enterRule(_localctx, 74, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(498); + setState(486); booleanExpression(0); - setState(500); + setState(488); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(499); + setState(487); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4059,14 +3943,14 @@ public class EsqlBaseParser extends ParserConfig { } break; } - setState(504); + setState(492); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(502); + setState(490); match(NULLS); - setState(503); + setState(491); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4121,13 +4005,13 @@ public class EsqlBaseParser extends ParserConfig { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_keepCommand); + enterRule(_localctx, 76, RULE_keepCommand); try { enterOuterAlt(_localctx, 1); { - setState(506); + setState(494); match(KEEP); - setState(507); + setState(495); qualifiedNamePatterns(); } } @@ -4170,13 +4054,13 @@ 
public class EsqlBaseParser extends ParserConfig { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_dropCommand); + enterRule(_localctx, 78, RULE_dropCommand); try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(497); match(DROP); - setState(510); + setState(498); qualifiedNamePatterns(); } } @@ -4226,32 +4110,32 @@ public class EsqlBaseParser extends ParserConfig { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_renameCommand); + enterRule(_localctx, 80, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(512); + setState(500); match(RENAME); - setState(513); + setState(501); renameClause(); - setState(518); + setState(506); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(514); + setState(502); match(COMMA); - setState(515); + setState(503); renameClause(); } } } - setState(520); + setState(508); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } } } @@ -4299,15 +4183,15 @@ public class EsqlBaseParser extends ParserConfig { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_renameClause); + enterRule(_localctx, 82, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(521); + setState(509); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(522); + setState(510); match(AS); - setState(523); + 
setState(511); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4356,22 +4240,22 @@ public class EsqlBaseParser extends ParserConfig { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_dissectCommand); + enterRule(_localctx, 84, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(525); + setState(513); match(DISSECT); - setState(526); + setState(514); primaryExpression(0); - setState(527); + setState(515); string(); - setState(529); + setState(517); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(528); + setState(516); commandOptions(); } break; @@ -4420,15 +4304,15 @@ public class EsqlBaseParser extends ParserConfig { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_grokCommand); + enterRule(_localctx, 86, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(531); + setState(519); match(GROK); - setState(532); + setState(520); primaryExpression(0); - setState(533); + setState(521); string(); } } @@ -4471,13 +4355,13 @@ public class EsqlBaseParser extends ParserConfig { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_mvExpandCommand); + enterRule(_localctx, 88, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(535); + setState(523); match(MV_EXPAND); - setState(536); + setState(524); qualifiedName(); } } @@ -4526,30 +4410,30 @@ public class EsqlBaseParser extends ParserConfig { public final CommandOptionsContext commandOptions() throws 
RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_commandOptions); + enterRule(_localctx, 90, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(538); + setState(526); commandOption(); - setState(543); + setState(531); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,50,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(539); + setState(527); match(COMMA); - setState(540); + setState(528); commandOption(); } } } - setState(545); + setState(533); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,50,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } } } @@ -4595,15 +4479,15 @@ public class EsqlBaseParser extends ParserConfig { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_commandOption); + enterRule(_localctx, 92, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(546); + setState(534); identifier(); - setState(547); + setState(535); match(ASSIGN); - setState(548); + setState(536); constant(); } } @@ -4644,12 +4528,12 @@ public class EsqlBaseParser extends ParserConfig { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_booleanValue); + enterRule(_localctx, 94, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(550); + setState(538); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4702,22 +4586,22 @@ public class EsqlBaseParser extends ParserConfig { public final NumericValueContext numericValue() throws 
RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_numericValue); + enterRule(_localctx, 96, RULE_numericValue); try { - setState(554); + setState(542); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(552); + setState(540); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(553); + setState(541); integerValue(); } break; @@ -4761,17 +4645,17 @@ public class EsqlBaseParser extends ParserConfig { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_decimalValue); + enterRule(_localctx, 98, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(545); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(556); + setState(544); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4784,7 +4668,7 @@ public class EsqlBaseParser extends ParserConfig { } } - setState(559); + setState(547); match(DECIMAL_LITERAL); } } @@ -4826,17 +4710,17 @@ public class EsqlBaseParser extends ParserConfig { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_integerValue); + enterRule(_localctx, 100, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(562); + setState(550); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(561); + setState(549); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4849,7 +4733,7 @@ public class EsqlBaseParser extends ParserConfig { 
} } - setState(564); + setState(552); match(INTEGER_LITERAL); } } @@ -4889,11 +4773,11 @@ public class EsqlBaseParser extends ParserConfig { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_string); + enterRule(_localctx, 102, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(566); + setState(554); match(QUOTED_STRING); } } @@ -4938,12 +4822,12 @@ public class EsqlBaseParser extends ParserConfig { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_comparisonOperator); + enterRule(_localctx, 104, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(568); + setState(556); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -4994,13 +4878,13 @@ public class EsqlBaseParser extends ParserConfig { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_explainCommand); + enterRule(_localctx, 106, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(570); + setState(558); match(EXPLAIN); - setState(571); + setState(559); subqueryExpression(); } } @@ -5044,15 +4928,15 @@ public class EsqlBaseParser extends ParserConfig { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_subqueryExpression); + enterRule(_localctx, 108, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(573); + setState(561); match(OPENING_BRACKET); - setState(574); + setState(562); 
query(0); - setState(575); + setState(563); match(CLOSING_BRACKET); } } @@ -5104,14 +4988,14 @@ public class EsqlBaseParser extends ParserConfig { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 114, RULE_showCommand); + enterRule(_localctx, 110, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(577); + setState(565); match(SHOW); - setState(578); + setState(566); match(INFO); } } @@ -5169,53 +5053,53 @@ public class EsqlBaseParser extends ParserConfig { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 116, RULE_enrichCommand); + enterRule(_localctx, 112, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(580); + setState(568); match(ENRICH); - setState(581); + setState(569); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(584); + setState(572); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(582); + setState(570); match(ON); - setState(583); + setState(571); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(595); + setState(583); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(586); + setState(574); match(WITH); - setState(587); + setState(575); enrichWithClause(); - setState(592); + setState(580); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,55,_ctx); + _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(588); + setState(576); match(COMMA); - setState(589); + setState(577); enrichWithClause(); } } } - setState(594); + setState(582); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,55,_ctx); + _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } } break; @@ -5266,23 +5150,23 @@ public class EsqlBaseParser extends ParserConfig { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 118, RULE_enrichWithClause); + enterRule(_localctx, 114, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(600); + setState(588); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(597); + setState(585); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(598); + setState(586); match(ASSIGN); } break; } - setState(602); + setState(590); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5331,17 +5215,17 @@ public class EsqlBaseParser extends ParserConfig { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 120, RULE_lookupCommand); + enterRule(_localctx, 116, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(604); + setState(592); match(DEV_LOOKUP); - setState(605); + setState(593); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(606); + setState(594); match(ON); - setState(607); + setState(595); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5390,22 +5274,22 @@ public class EsqlBaseParser extends ParserConfig { public final 
InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 122, RULE_inlinestatsCommand); + enterRule(_localctx, 118, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(609); + setState(597); match(DEV_INLINESTATS); - setState(610); + setState(598); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(613); + setState(601); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(611); + setState(599); match(BY); - setState(612); + setState(600); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5458,17 +5342,17 @@ public class EsqlBaseParser extends ParserConfig { public final JoinCommandContext joinCommand() throws RecognitionException { JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState()); - enterRule(_localctx, 124, RULE_joinCommand); + enterRule(_localctx, 120, RULE_joinCommand); int _la; try { enterOuterAlt(_localctx, 1); { - setState(616); + setState(604); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { { - setState(615); + setState(603); ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { @@ -5482,11 +5366,11 @@ public class EsqlBaseParser extends ParserConfig { } } - setState(618); + setState(606); match(DEV_JOIN); - setState(619); + setState(607); joinTarget(); - setState(620); + setState(608); joinCondition(); } } @@ -5534,21 +5418,21 @@ public class EsqlBaseParser extends ParserConfig { public final JoinTargetContext joinTarget() throws RecognitionException { JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); - enterRule(_localctx, 126, RULE_joinTarget); + 
enterRule(_localctx, 122, RULE_joinTarget); int _la; try { enterOuterAlt(_localctx, 1); { - setState(622); + setState(610); ((JoinTargetContext)_localctx).index = identifier(); - setState(625); + setState(613); _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { - setState(623); + setState(611); match(AS); - setState(624); + setState(612); ((JoinTargetContext)_localctx).alias = identifier(); } } @@ -5601,32 +5485,32 @@ public class EsqlBaseParser extends ParserConfig { public final JoinConditionContext joinCondition() throws RecognitionException { JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState()); - enterRule(_localctx, 128, RULE_joinCondition); + enterRule(_localctx, 124, RULE_joinCondition); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(627); + setState(615); match(ON); - setState(628); + setState(616); joinPredicate(); - setState(633); + setState(621); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,61,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(629); + setState(617); match(COMMA); - setState(630); + setState(618); joinPredicate(); } } } - setState(635); + setState(623); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,61,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } } } @@ -5668,11 +5552,11 @@ public class EsqlBaseParser extends ParserConfig { public final JoinPredicateContext joinPredicate() throws RecognitionException { JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState()); - enterRule(_localctx, 130, RULE_joinPredicate); + enterRule(_localctx, 126, RULE_joinPredicate); try { enterOuterAlt(_localctx, 1); { - setState(636); + setState(624); valueExpression(); } } @@ -5701,9 +5585,9 @@ public class EsqlBaseParser extends ParserConfig { return 
operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 33: + case 31: return identifierPattern_sempred((IdentifierPatternContext)_localctx, predIndex); - case 36: + case 34: return identifierOrParameter_sempred((IdentifierOrParameterContext)_localctx, predIndex); } return true; @@ -5774,7 +5658,7 @@ public class EsqlBaseParser extends ParserConfig { } public static final String _serializedATN = - "\u0004\u0001\u0080\u027f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0080\u0273\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5790,392 +5674,385 @@ public class EsqlBaseParser extends ParserConfig { ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ - ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ - "@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ - "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ - "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ - 
"\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ - "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00ef\b"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0003\b\u00f9\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ff"+ - "\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0107\b\t"+ - "\n\t\f\t\u010a\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0003\n\u0114\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0119\b\n"+ - "\n\n\f\n\u011c\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0005\u000b\u0124\b\u000b\n\u000b\f\u000b\u0127\t\u000b"+ - "\u0003\u000b\u0129\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ - "\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0005\u000f\u0137\b\u000f\n\u000f\f\u000f\u013a\t\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u013f\b\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0147"+ - "\b\u0011\n\u0011\f\u0011\u014a\t\u0011\u0001\u0011\u0003\u0011\u014d\b"+ - "\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0152\b\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0001\u0015\u0003\u0015\u015c\b\u0015\u0001\u0016\u0001\u0016\u0001"+ - 
"\u0016\u0001\u0016\u0005\u0016\u0162\b\u0016\n\u0016\f\u0016\u0165\t\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0005\u0018\u016f\b\u0018\n\u0018\f\u0018\u0172"+ - "\t\u0018\u0001\u0018\u0003\u0018\u0175\b\u0018\u0001\u0018\u0001\u0018"+ - "\u0003\u0018\u0179\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a"+ - "\u0001\u001a\u0003\u001a\u0180\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ - "\u0184\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b\u0189\b"+ - "\u001b\n\u001b\f\u001b\u018c\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0003\u001c\u0191\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d"+ - "\u0196\b\u001d\n\u001d\f\u001d\u0199\t\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0005\u001e\u019e\b\u001e\n\u001e\f\u001e\u01a1\t\u001e\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0005\u001f\u01a6\b\u001f\n\u001f\f\u001f\u01a9"+ - "\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01b0\b!\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0005\"\u01bf\b\"\n\"\f\"\u01c2\t\"\u0001\"\u0001\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01d5\b\"\n\"\f\"\u01d8"+ - "\t\"\u0001\"\u0001\"\u0003\"\u01dc\b\"\u0001#\u0001#\u0003#\u01e0\b#\u0001"+ - "$\u0001$\u0001$\u0003$\u01e5\b$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001"+ - "&\u0001&\u0005&\u01ee\b&\n&\f&\u01f1\t&\u0001\'\u0001\'\u0003\'\u01f5"+ - "\b\'\u0001\'\u0001\'\u0003\'\u01f9\b\'\u0001(\u0001(\u0001(\u0001)\u0001"+ - ")\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u0205\b*\n*\f*\u0208\t*\u0001"+ - "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003,\u0212\b,\u0001"+ - "-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0005"+ - "/\u021e\b/\n/\f/\u0221\t/\u00010\u00010\u00010\u00010\u00011\u00011\u0001"+ - 
"2\u00012\u00032\u022b\b2\u00013\u00033\u022e\b3\u00013\u00013\u00014\u0003"+ - "4\u0233\b4\u00014\u00014\u00015\u00015\u00016\u00016\u00017\u00017\u0001"+ - "7\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001"+ - ":\u0001:\u0003:\u0249\b:\u0001:\u0001:\u0001:\u0001:\u0005:\u024f\b:\n"+ - ":\f:\u0252\t:\u0003:\u0254\b:\u0001;\u0001;\u0001;\u0003;\u0259\b;\u0001"+ - ";\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001"+ - "=\u0003=\u0266\b=\u0001>\u0003>\u0269\b>\u0001>\u0001>\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0003?\u0272\b?\u0001@\u0001@\u0001@\u0001@\u0005@\u0278"+ - "\b@\n@\f@\u027b\t@\u0001A\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014"+ - "B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082"+ - "\u0000\t\u0001\u0000@A\u0001\u0000BD\u0002\u0000\u001e\u001eQQ\u0001\u0000"+ - "HI\u0002\u0000##((\u0002\u0000++..\u0002\u0000**88\u0002\u000099;?\u0001"+ - "\u0000\u0016\u0018\u0299\u0000\u0084\u0001\u0000\u0000\u0000\u0002\u0087"+ - "\u0001\u0000\u0000\u0000\u0004\u0098\u0001\u0000\u0000\u0000\u0006\u00ac"+ - "\u0001\u0000\u0000\u0000\b\u00ae\u0001\u0000\u0000\u0000\n\u00ce\u0001"+ - "\u0000\u0000\u0000\f\u00e9\u0001\u0000\u0000\u0000\u000e\u00eb\u0001\u0000"+ - "\u0000\u0000\u0010\u00f8\u0001\u0000\u0000\u0000\u0012\u00fe\u0001\u0000"+ - "\u0000\u0000\u0014\u0113\u0001\u0000\u0000\u0000\u0016\u011d\u0001\u0000"+ - "\u0000\u0000\u0018\u012c\u0001\u0000\u0000\u0000\u001a\u012e\u0001\u0000"+ - "\u0000\u0000\u001c\u0130\u0001\u0000\u0000\u0000\u001e\u0133\u0001\u0000"+ - "\u0000\u0000 \u013e\u0001\u0000\u0000\u0000\"\u0142\u0001\u0000\u0000"+ - "\u0000$\u0151\u0001\u0000\u0000\u0000&\u0155\u0001\u0000\u0000\u0000("+ - "\u0157\u0001\u0000\u0000\u0000*\u015b\u0001\u0000\u0000\u0000,\u015d\u0001"+ - "\u0000\u0000\u0000.\u0166\u0001\u0000\u0000\u00000\u016a\u0001\u0000\u0000"+ - 
"\u00002\u017a\u0001\u0000\u0000\u00004\u017d\u0001\u0000\u0000\u00006"+ - "\u0185\u0001\u0000\u0000\u00008\u018d\u0001\u0000\u0000\u0000:\u0192\u0001"+ - "\u0000\u0000\u0000<\u019a\u0001\u0000\u0000\u0000>\u01a2\u0001\u0000\u0000"+ - "\u0000@\u01aa\u0001\u0000\u0000\u0000B\u01af\u0001\u0000\u0000\u0000D"+ - "\u01db\u0001\u0000\u0000\u0000F\u01df\u0001\u0000\u0000\u0000H\u01e4\u0001"+ - "\u0000\u0000\u0000J\u01e6\u0001\u0000\u0000\u0000L\u01e9\u0001\u0000\u0000"+ - "\u0000N\u01f2\u0001\u0000\u0000\u0000P\u01fa\u0001\u0000\u0000\u0000R"+ - "\u01fd\u0001\u0000\u0000\u0000T\u0200\u0001\u0000\u0000\u0000V\u0209\u0001"+ - "\u0000\u0000\u0000X\u020d\u0001\u0000\u0000\u0000Z\u0213\u0001\u0000\u0000"+ - "\u0000\\\u0217\u0001\u0000\u0000\u0000^\u021a\u0001\u0000\u0000\u0000"+ - "`\u0222\u0001\u0000\u0000\u0000b\u0226\u0001\u0000\u0000\u0000d\u022a"+ - "\u0001\u0000\u0000\u0000f\u022d\u0001\u0000\u0000\u0000h\u0232\u0001\u0000"+ - "\u0000\u0000j\u0236\u0001\u0000\u0000\u0000l\u0238\u0001\u0000\u0000\u0000"+ - "n\u023a\u0001\u0000\u0000\u0000p\u023d\u0001\u0000\u0000\u0000r\u0241"+ - "\u0001\u0000\u0000\u0000t\u0244\u0001\u0000\u0000\u0000v\u0258\u0001\u0000"+ - "\u0000\u0000x\u025c\u0001\u0000\u0000\u0000z\u0261\u0001\u0000\u0000\u0000"+ - "|\u0268\u0001\u0000\u0000\u0000~\u026e\u0001\u0000\u0000\u0000\u0080\u0273"+ - "\u0001\u0000\u0000\u0000\u0082\u027c\u0001\u0000\u0000\u0000\u0084\u0085"+ - "\u0003\u0002\u0001\u0000\u0085\u0086\u0005\u0000\u0000\u0001\u0086\u0001"+ - "\u0001\u0000\u0000\u0000\u0087\u0088\u0006\u0001\uffff\uffff\u0000\u0088"+ - "\u0089\u0003\u0004\u0002\u0000\u0089\u008f\u0001\u0000\u0000\u0000\u008a"+ - "\u008b\n\u0001\u0000\u0000\u008b\u008c\u0005\u001d\u0000\u0000\u008c\u008e"+ - "\u0003\u0006\u0003\u0000\u008d\u008a\u0001\u0000\u0000\u0000\u008e\u0091"+ - "\u0001\u0000\u0000\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f\u0090"+ - "\u0001\u0000\u0000\u0000\u0090\u0003\u0001\u0000\u0000\u0000\u0091\u008f"+ - 
"\u0001\u0000\u0000\u0000\u0092\u0099\u0003n7\u0000\u0093\u0099\u0003\""+ - "\u0011\u0000\u0094\u0099\u0003\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000"+ - "\u0096\u0097\u0004\u0002\u0001\u0000\u0097\u0099\u00030\u0018\u0000\u0098"+ - "\u0092\u0001\u0000\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098"+ - "\u0094\u0001\u0000\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098"+ - "\u0096\u0001\u0000\u0000\u0000\u0099\u0005\u0001\u0000\u0000\u0000\u009a"+ - "\u00ad\u00032\u0019\u0000\u009b\u00ad\u0003\b\u0004\u0000\u009c\u00ad"+ - "\u0003P(\u0000\u009d\u00ad\u0003J%\u0000\u009e\u00ad\u00034\u001a\u0000"+ - "\u009f\u00ad\u0003L&\u0000\u00a0\u00ad\u0003R)\u0000\u00a1\u00ad\u0003"+ - "T*\u0000\u00a2\u00ad\u0003X,\u0000\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad"+ - "\u0003t:\u0000\u00a5\u00ad\u0003\\.\u0000\u00a6\u00a7\u0004\u0003\u0002"+ - "\u0000\u00a7\u00ad\u0003z=\u0000\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9"+ - "\u00ad\u0003x<\u0000\u00aa\u00ab\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003"+ - "|>\u0000\u00ac\u009a\u0001\u0000\u0000\u0000\u00ac\u009b\u0001\u0000\u0000"+ - "\u0000\u00ac\u009c\u0001\u0000\u0000\u0000\u00ac\u009d\u0001\u0000\u0000"+ - "\u0000\u00ac\u009e\u0001\u0000\u0000\u0000\u00ac\u009f\u0001\u0000\u0000"+ - "\u0000\u00ac\u00a0\u0001\u0000\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000"+ - "\u0000\u00ac\u00a2\u0001\u0000\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000"+ - "\u0000\u00ac\u00a4\u0001\u0000\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000"+ - "\u0000\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000"+ - "\u0000\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u0007\u0001\u0000\u0000"+ - "\u0000\u00ae\u00af\u0005\u0010\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000"+ - "\u00b0\t\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000"+ - "\u00b2\u00b3\u00051\u0000\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf"+ - "\u0003\u0010\b\u0000\u00b5\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003"+ - 
"\u0010\b\u0000\u00b7\u00b9\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000"+ - "\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000"+ - "\u0000\u0000\u00ba\u00bb\u0005,\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000"+ - "\u00bc\u00c1\u0003\u0010\b\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be"+ - "\u00c0\u0003\u0010\b\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3"+ - "\u0001\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2"+ - "\u0001\u0000\u0000\u0000\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1"+ - "\u0001\u0000\u0000\u0000\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001"+ - "\u0000\u0000\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-"+ - "\u0000\u0000\u00c8\u00ca\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000"+ - "\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000"+ - "\u0000\u00cb\u00cc\u00052\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000"+ - "\u00cd\u00cf\u0003\u000e\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000"+ - "\u00ce\u00b4\u0001\u0000\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000"+ - "\u00ce\u00b6\u0001\u0000\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000"+ - "\u00ce\u00cd\u0001\u0000\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000"+ - "\u00d0\u00d1\n\u0005\u0000\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2"+ - "\u00d7\u0003\n\u0005\u0006\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5"+ - "\u00054\u0000\u0000\u00d5\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001"+ - "\u0000\u0000\u0000\u00d6\u00d3\u0001\u0000\u0000\u0000\u00d7\u00da\u0001"+ - "\u0000\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001"+ - "\u0000\u0000\u0000\u00d9\u000b\u0001\u0000\u0000\u0000\u00da\u00d8\u0001"+ - "\u0000\u0000\u0000\u00db\u00dd\u0003\u0010\b\u0000\u00dc\u00de\u00051"+ - "\u0000\u0000\u00dd\u00dc\u0001\u0000\u0000\u0000\u00dd\u00de\u0001\u0000"+ - "\u0000\u0000\u00de\u00df\u0001\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000"+ - 
"\u0000\u00e0\u00e1\u0003j5\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2"+ - "\u00e4\u0003\u0010\b\u0000\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3"+ - "\u0001\u0000\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6"+ - "\u0001\u0000\u0000\u0000\u00e6\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003"+ - "j5\u0000\u00e8\u00ea\u0001\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000"+ - "\u0000\u00e9\u00e2\u0001\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000"+ - "\u00eb\u00ee\u0003:\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef"+ - "\u0003\u001a\r\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001"+ - "\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005"+ - "&\u0000\u0000\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000\u0000"+ - "\u0000\u00f3\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t\u0000"+ - "\u00f5\u00f6\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7\u00f9"+ - "\u0001\u0000\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8\u00f4"+ - "\u0001\u0000\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa\u00fb"+ - "\u0006\t\uffff\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc\u00fd"+ - "\u0007\u0000\u0000\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa\u0001"+ - "\u0000\u0000\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108\u0001"+ - "\u0000\u0000\u0000\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007\u0001"+ - "\u0000\u0000\u0102\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001\u0000"+ - "\u0000\u0104\u0105\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012\t\u0002"+ - "\u0106\u0100\u0001\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000\u0000"+ - "\u0107\u010a\u0001\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000\u0000"+ - "\u0108\u0109\u0001\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000\u0000"+ - "\u010a\u0108\u0001\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff\u0000"+ - "\u010c\u0114\u0003D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e\u0114"+ - 
"\u0003\u0016\u000b\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111\u0003"+ - "\n\u0005\u0000\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001\u0000"+ - "\u0000\u0000\u0113\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001\u0000"+ - "\u0000\u0000\u0113\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001\u0000"+ - "\u0000\u0000\u0114\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001\u0000"+ - "\u0000\u0116\u0117\u0005%\u0000\u0000\u0117\u0119\u0003\u001a\r\u0000"+ - "\u0118\u0115\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000"+ - "\u011a\u0118\u0001\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000\u0000"+ - "\u011b\u0015\u0001\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000\u0000"+ - "\u011d\u011e\u0003\u0018\f\u0000\u011e\u0128\u00050\u0000\u0000\u011f"+ - "\u0129\u0005B\u0000\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121\u0122"+ - "\u0005\'\u0000\u0000\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121\u0001"+ - "\u0000\u0000\u0000\u0124\u0127\u0001\u0000\u0000\u0000\u0125\u0123\u0001"+ - "\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000\u0126\u0129\u0001"+ - "\u0000\u0000\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u011f\u0001"+ - "\u0000\u0000\u0000\u0128\u0120\u0001\u0000\u0000\u0000\u0128\u0129\u0001"+ - "\u0000\u0000\u0000\u0129\u012a\u0001\u0000\u0000\u0000\u012a\u012b\u0005"+ - "7\u0000\u0000\u012b\u0017\u0001\u0000\u0000\u0000\u012c\u012d\u0003H$"+ - "\u0000\u012d\u0019\u0001\u0000\u0000\u0000\u012e\u012f\u0003@ \u0000\u012f"+ - "\u001b\u0001\u0000\u0000\u0000\u0130\u0131\u0005\f\u0000\u0000\u0131\u0132"+ - "\u0003\u001e\u000f\u0000\u0132\u001d\u0001\u0000\u0000\u0000\u0133\u0138"+ - "\u0003 \u0010\u0000\u0134\u0135\u0005\'\u0000\u0000\u0135\u0137\u0003"+ - " \u0010\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u013a\u0001\u0000"+ - "\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138\u0139\u0001\u0000"+ - "\u0000\u0000\u0139\u001f\u0001\u0000\u0000\u0000\u013a\u0138\u0001\u0000"+ - 
"\u0000\u0000\u013b\u013c\u0003:\u001d\u0000\u013c\u013d\u0005$\u0000\u0000"+ - "\u013d\u013f\u0001\u0000\u0000\u0000\u013e\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u013f\u0001\u0000\u0000\u0000\u013f\u0140\u0001\u0000\u0000\u0000"+ - "\u0140\u0141\u0003\n\u0005\u0000\u0141!\u0001\u0000\u0000\u0000\u0142"+ - "\u0143\u0005\u0006\u0000\u0000\u0143\u0148\u0003$\u0012\u0000\u0144\u0145"+ - "\u0005\'\u0000\u0000\u0145\u0147\u0003$\u0012\u0000\u0146\u0144\u0001"+ - "\u0000\u0000\u0000\u0147\u014a\u0001\u0000\u0000\u0000\u0148\u0146\u0001"+ - "\u0000\u0000\u0000\u0148\u0149\u0001\u0000\u0000\u0000\u0149\u014c\u0001"+ - "\u0000\u0000\u0000\u014a\u0148\u0001\u0000\u0000\u0000\u014b\u014d\u0003"+ - "*\u0015\u0000\u014c\u014b\u0001\u0000\u0000\u0000\u014c\u014d\u0001\u0000"+ - "\u0000\u0000\u014d#\u0001\u0000\u0000\u0000\u014e\u014f\u0003&\u0013\u0000"+ - "\u014f\u0150\u0005&\u0000\u0000\u0150\u0152\u0001\u0000\u0000\u0000\u0151"+ - "\u014e\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152"+ - "\u0153\u0001\u0000\u0000\u0000\u0153\u0154\u0003(\u0014\u0000\u0154%\u0001"+ - "\u0000\u0000\u0000\u0155\u0156\u0005Q\u0000\u0000\u0156\'\u0001\u0000"+ - "\u0000\u0000\u0157\u0158\u0007\u0002\u0000\u0000\u0158)\u0001\u0000\u0000"+ - "\u0000\u0159\u015c\u0003,\u0016\u0000\u015a\u015c\u0003.\u0017\u0000\u015b"+ - "\u0159\u0001\u0000\u0000\u0000\u015b\u015a\u0001\u0000\u0000\u0000\u015c"+ - "+\u0001\u0000\u0000\u0000\u015d\u015e\u0005P\u0000\u0000\u015e\u0163\u0005"+ - "Q\u0000\u0000\u015f\u0160\u0005\'\u0000\u0000\u0160\u0162\u0005Q\u0000"+ - "\u0000\u0161\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001\u0000\u0000"+ - "\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000"+ - "\u0000\u0164-\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000"+ - "\u0166\u0167\u0005F\u0000\u0000\u0167\u0168\u0003,\u0016\u0000\u0168\u0169"+ - "\u0005G\u0000\u0000\u0169/\u0001\u0000\u0000\u0000\u016a\u016b\u0005\u0013"+ - 
"\u0000\u0000\u016b\u0170\u0003$\u0012\u0000\u016c\u016d\u0005\'\u0000"+ - "\u0000\u016d\u016f\u0003$\u0012\u0000\u016e\u016c\u0001\u0000\u0000\u0000"+ - "\u016f\u0172\u0001\u0000\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000"+ - "\u0170\u0171\u0001\u0000\u0000\u0000\u0171\u0174\u0001\u0000\u0000\u0000"+ - "\u0172\u0170\u0001\u0000\u0000\u0000\u0173\u0175\u00036\u001b\u0000\u0174"+ - "\u0173\u0001\u0000\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175"+ - "\u0178\u0001\u0000\u0000\u0000\u0176\u0177\u0005!\u0000\u0000\u0177\u0179"+ - "\u0003\u001e\u000f\u0000\u0178\u0176\u0001\u0000\u0000\u0000\u0178\u0179"+ - "\u0001\u0000\u0000\u0000\u01791\u0001\u0000\u0000\u0000\u017a\u017b\u0005"+ - "\u0004\u0000\u0000\u017b\u017c\u0003\u001e\u000f\u0000\u017c3\u0001\u0000"+ - "\u0000\u0000\u017d\u017f\u0005\u000f\u0000\u0000\u017e\u0180\u00036\u001b"+ - "\u0000\u017f\u017e\u0001\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000"+ - "\u0000\u0180\u0183\u0001\u0000\u0000\u0000\u0181\u0182\u0005!\u0000\u0000"+ - "\u0182\u0184\u0003\u001e\u000f\u0000\u0183\u0181\u0001\u0000\u0000\u0000"+ - "\u0183\u0184\u0001\u0000\u0000\u0000\u01845\u0001\u0000\u0000\u0000\u0185"+ - "\u018a\u00038\u001c\u0000\u0186\u0187\u0005\'\u0000\u0000\u0187\u0189"+ - "\u00038\u001c\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0189\u018c\u0001"+ - "\u0000\u0000\u0000\u018a\u0188\u0001\u0000\u0000\u0000\u018a\u018b\u0001"+ - "\u0000\u0000\u0000\u018b7\u0001\u0000\u0000\u0000\u018c\u018a\u0001\u0000"+ - "\u0000\u0000\u018d\u0190\u0003 \u0010\u0000\u018e\u018f\u0005\u0010\u0000"+ - "\u0000\u018f\u0191\u0003\n\u0005\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+ - "\u0190\u0191\u0001\u0000\u0000\u0000\u01919\u0001\u0000\u0000\u0000\u0192"+ - "\u0197\u0003H$\u0000\u0193\u0194\u0005)\u0000\u0000\u0194\u0196\u0003"+ - "H$\u0000\u0195\u0193\u0001\u0000\u0000\u0000\u0196\u0199\u0001\u0000\u0000"+ - "\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0197\u0198\u0001\u0000\u0000"+ - 
"\u0000\u0198;\u0001\u0000\u0000\u0000\u0199\u0197\u0001\u0000\u0000\u0000"+ - "\u019a\u019f\u0003B!\u0000\u019b\u019c\u0005)\u0000\u0000\u019c\u019e"+ - "\u0003B!\u0000\u019d\u019b\u0001\u0000\u0000\u0000\u019e\u01a1\u0001\u0000"+ - "\u0000\u0000\u019f\u019d\u0001\u0000\u0000\u0000\u019f\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a0=\u0001\u0000\u0000\u0000\u01a1\u019f\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a7\u0003<\u001e\u0000\u01a3\u01a4\u0005\'\u0000\u0000"+ - "\u01a4\u01a6\u0003<\u001e\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6"+ - "\u01a9\u0001\u0000\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a7"+ - "\u01a8\u0001\u0000\u0000\u0000\u01a8?\u0001\u0000\u0000\u0000\u01a9\u01a7"+ - "\u0001\u0000\u0000\u0000\u01aa\u01ab\u0007\u0003\u0000\u0000\u01abA\u0001"+ - "\u0000\u0000\u0000\u01ac\u01b0\u0005U\u0000\u0000\u01ad\u01ae\u0004!\n"+ - "\u0000\u01ae\u01b0\u0003F#\u0000\u01af\u01ac\u0001\u0000\u0000\u0000\u01af"+ - "\u01ad\u0001\u0000\u0000\u0000\u01b0C\u0001\u0000\u0000\u0000\u01b1\u01dc"+ - "\u00052\u0000\u0000\u01b2\u01b3\u0003h4\u0000\u01b3\u01b4\u0005H\u0000"+ - "\u0000\u01b4\u01dc\u0001\u0000\u0000\u0000\u01b5\u01dc\u0003f3\u0000\u01b6"+ - "\u01dc\u0003h4\u0000\u01b7\u01dc\u0003b1\u0000\u01b8\u01dc\u0003F#\u0000"+ - "\u01b9\u01dc\u0003j5\u0000\u01ba\u01bb\u0005F\u0000\u0000\u01bb\u01c0"+ - "\u0003d2\u0000\u01bc\u01bd\u0005\'\u0000\u0000\u01bd\u01bf\u0003d2\u0000"+ - "\u01be\u01bc\u0001\u0000\u0000\u0000\u01bf\u01c2\u0001\u0000\u0000\u0000"+ - "\u01c0\u01be\u0001\u0000\u0000\u0000\u01c0\u01c1\u0001\u0000\u0000\u0000"+ - "\u01c1\u01c3\u0001\u0000\u0000\u0000\u01c2\u01c0\u0001\u0000\u0000\u0000"+ - "\u01c3\u01c4\u0005G\u0000\u0000\u01c4\u01dc\u0001\u0000\u0000\u0000\u01c5"+ - "\u01c6\u0005F\u0000\u0000\u01c6\u01cb\u0003b1\u0000\u01c7\u01c8\u0005"+ - "\'\u0000\u0000\u01c8\u01ca\u0003b1\u0000\u01c9\u01c7\u0001\u0000\u0000"+ - "\u0000\u01ca\u01cd\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000"+ - 
"\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000"+ - "\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005G\u0000\u0000"+ - "\u01cf\u01dc\u0001\u0000\u0000\u0000\u01d0\u01d1\u0005F\u0000\u0000\u01d1"+ - "\u01d6\u0003j5\u0000\u01d2\u01d3\u0005\'\u0000\u0000\u01d3\u01d5\u0003"+ - "j5\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d8\u0001\u0000\u0000"+ - "\u0000\u01d6\u01d4\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000"+ - "\u0000\u01d7\u01d9\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000"+ - "\u0000\u01d9\u01da\u0005G\u0000\u0000\u01da\u01dc\u0001\u0000\u0000\u0000"+ - "\u01db\u01b1\u0001\u0000\u0000\u0000\u01db\u01b2\u0001\u0000\u0000\u0000"+ - "\u01db\u01b5\u0001\u0000\u0000\u0000\u01db\u01b6\u0001\u0000\u0000\u0000"+ - "\u01db\u01b7\u0001\u0000\u0000\u0000\u01db\u01b8\u0001\u0000\u0000\u0000"+ - "\u01db\u01b9\u0001\u0000\u0000\u0000\u01db\u01ba\u0001\u0000\u0000\u0000"+ - "\u01db\u01c5\u0001\u0000\u0000\u0000\u01db\u01d0\u0001\u0000\u0000\u0000"+ - "\u01dcE\u0001\u0000\u0000\u0000\u01dd\u01e0\u00055\u0000\u0000\u01de\u01e0"+ - "\u0005E\u0000\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01df\u01de\u0001"+ - "\u0000\u0000\u0000\u01e0G\u0001\u0000\u0000\u0000\u01e1\u01e5\u0003@ "+ - "\u0000\u01e2\u01e3\u0004$\u000b\u0000\u01e3\u01e5\u0003F#\u0000\u01e4"+ - "\u01e1\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e5"+ - "I\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005\t\u0000\u0000\u01e7\u01e8"+ - "\u0005\u001f\u0000\u0000\u01e8K\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005"+ - "\u000e\u0000\u0000\u01ea\u01ef\u0003N\'\u0000\u01eb\u01ec\u0005\'\u0000"+ - "\u0000\u01ec\u01ee\u0003N\'\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000"+ - "\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000"+ - "\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0M\u0001\u0000\u0000\u0000\u01f1"+ - "\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0003\n\u0005\u0000\u01f3\u01f5"+ - 
"\u0007\u0004\u0000\u0000\u01f4\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5"+ - "\u0001\u0000\u0000\u0000\u01f5\u01f8\u0001\u0000\u0000\u0000\u01f6\u01f7"+ - "\u00053\u0000\u0000\u01f7\u01f9\u0007\u0005\u0000\u0000\u01f8\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f8\u01f9\u0001\u0000\u0000\u0000\u01f9O\u0001\u0000"+ - "\u0000\u0000\u01fa\u01fb\u0005\b\u0000\u0000\u01fb\u01fc\u0003>\u001f"+ - "\u0000\u01fcQ\u0001\u0000\u0000\u0000\u01fd\u01fe\u0005\u0002\u0000\u0000"+ - "\u01fe\u01ff\u0003>\u001f\u0000\u01ffS\u0001\u0000\u0000\u0000\u0200\u0201"+ - "\u0005\u000b\u0000\u0000\u0201\u0206\u0003V+\u0000\u0202\u0203\u0005\'"+ - "\u0000\u0000\u0203\u0205\u0003V+\u0000\u0204\u0202\u0001\u0000\u0000\u0000"+ - "\u0205\u0208\u0001\u0000\u0000\u0000\u0206\u0204\u0001\u0000\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207U\u0001\u0000\u0000\u0000\u0208"+ - "\u0206\u0001\u0000\u0000\u0000\u0209\u020a\u0003<\u001e\u0000\u020a\u020b"+ - "\u0005Y\u0000\u0000\u020b\u020c\u0003<\u001e\u0000\u020cW\u0001\u0000"+ - "\u0000\u0000\u020d\u020e\u0005\u0001\u0000\u0000\u020e\u020f\u0003\u0014"+ - "\n\u0000\u020f\u0211\u0003j5\u0000\u0210\u0212\u0003^/\u0000\u0211\u0210"+ - "\u0001\u0000\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212Y\u0001"+ - "\u0000\u0000\u0000\u0213\u0214\u0005\u0007\u0000\u0000\u0214\u0215\u0003"+ - "\u0014\n\u0000\u0215\u0216\u0003j5\u0000\u0216[\u0001\u0000\u0000\u0000"+ - "\u0217\u0218\u0005\n\u0000\u0000\u0218\u0219\u0003:\u001d\u0000\u0219"+ - "]\u0001\u0000\u0000\u0000\u021a\u021f\u0003`0\u0000\u021b\u021c\u0005"+ - "\'\u0000\u0000\u021c\u021e\u0003`0\u0000\u021d\u021b\u0001\u0000\u0000"+ - "\u0000\u021e\u0221\u0001\u0000\u0000\u0000\u021f\u021d\u0001\u0000\u0000"+ - "\u0000\u021f\u0220\u0001\u0000\u0000\u0000\u0220_\u0001\u0000\u0000\u0000"+ - "\u0221\u021f\u0001\u0000\u0000\u0000\u0222\u0223\u0003@ \u0000\u0223\u0224"+ - "\u0005$\u0000\u0000\u0224\u0225\u0003D\"\u0000\u0225a\u0001\u0000\u0000"+ - 
"\u0000\u0226\u0227\u0007\u0006\u0000\u0000\u0227c\u0001\u0000\u0000\u0000"+ - "\u0228\u022b\u0003f3\u0000\u0229\u022b\u0003h4\u0000\u022a\u0228\u0001"+ - "\u0000\u0000\u0000\u022a\u0229\u0001\u0000\u0000\u0000\u022be\u0001\u0000"+ - "\u0000\u0000\u022c\u022e\u0007\u0000\u0000\u0000\u022d\u022c\u0001\u0000"+ - "\u0000\u0000\u022d\u022e\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000"+ - "\u0000\u0000\u022f\u0230\u0005 \u0000\u0000\u0230g\u0001\u0000\u0000\u0000"+ - "\u0231\u0233\u0007\u0000\u0000\u0000\u0232\u0231\u0001\u0000\u0000\u0000"+ - "\u0232\u0233\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ - "\u0234\u0235\u0005\u001f\u0000\u0000\u0235i\u0001\u0000\u0000\u0000\u0236"+ - "\u0237\u0005\u001e\u0000\u0000\u0237k\u0001\u0000\u0000\u0000\u0238\u0239"+ - "\u0007\u0007\u0000\u0000\u0239m\u0001\u0000\u0000\u0000\u023a\u023b\u0005"+ - "\u0005\u0000\u0000\u023b\u023c\u0003p8\u0000\u023co\u0001\u0000\u0000"+ - "\u0000\u023d\u023e\u0005F\u0000\u0000\u023e\u023f\u0003\u0002\u0001\u0000"+ - "\u023f\u0240\u0005G\u0000\u0000\u0240q\u0001\u0000\u0000\u0000\u0241\u0242"+ - "\u0005\r\u0000\u0000\u0242\u0243\u0005i\u0000\u0000\u0243s\u0001\u0000"+ - "\u0000\u0000\u0244\u0245\u0005\u0003\u0000\u0000\u0245\u0248\u0005_\u0000"+ - "\u0000\u0246\u0247\u0005]\u0000\u0000\u0247\u0249\u0003<\u001e\u0000\u0248"+ - "\u0246\u0001\u0000\u0000\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249"+ - "\u0253\u0001\u0000\u0000\u0000\u024a\u024b\u0005^\u0000\u0000\u024b\u0250"+ - "\u0003v;\u0000\u024c\u024d\u0005\'\u0000\u0000\u024d\u024f\u0003v;\u0000"+ - "\u024e\u024c\u0001\u0000\u0000\u0000\u024f\u0252\u0001\u0000\u0000\u0000"+ - "\u0250\u024e\u0001\u0000\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000"+ - "\u0251\u0254\u0001\u0000\u0000\u0000\u0252\u0250\u0001\u0000\u0000\u0000"+ - "\u0253\u024a\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000\u0000\u0000"+ - "\u0254u\u0001\u0000\u0000\u0000\u0255\u0256\u0003<\u001e\u0000\u0256\u0257"+ - 
"\u0005$\u0000\u0000\u0257\u0259\u0001\u0000\u0000\u0000\u0258\u0255\u0001"+ - "\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000\u0259\u025a\u0001"+ - "\u0000\u0000\u0000\u025a\u025b\u0003<\u001e\u0000\u025bw\u0001\u0000\u0000"+ - "\u0000\u025c\u025d\u0005\u0012\u0000\u0000\u025d\u025e\u0003$\u0012\u0000"+ - "\u025e\u025f\u0005]\u0000\u0000\u025f\u0260\u0003>\u001f\u0000\u0260y"+ - "\u0001\u0000\u0000\u0000\u0261\u0262\u0005\u0011\u0000\u0000\u0262\u0265"+ - "\u00036\u001b\u0000\u0263\u0264\u0005!\u0000\u0000\u0264\u0266\u0003\u001e"+ - "\u000f\u0000\u0265\u0263\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000"+ - "\u0000\u0000\u0266{\u0001\u0000\u0000\u0000\u0267\u0269\u0007\b\u0000"+ - "\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000"+ - "\u0000\u0269\u026a\u0001\u0000\u0000\u0000\u026a\u026b\u0005\u0014\u0000"+ - "\u0000\u026b\u026c\u0003~?\u0000\u026c\u026d\u0003\u0080@\u0000\u026d"+ - "}\u0001\u0000\u0000\u0000\u026e\u0271\u0003@ \u0000\u026f\u0270\u0005"+ - "Y\u0000\u0000\u0270\u0272\u0003@ \u0000\u0271\u026f\u0001\u0000\u0000"+ - "\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u007f\u0001\u0000\u0000"+ - "\u0000\u0273\u0274\u0005]\u0000\u0000\u0274\u0279\u0003\u0082A\u0000\u0275"+ - "\u0276\u0005\'\u0000\u0000\u0276\u0278\u0003\u0082A\u0000\u0277\u0275"+ - "\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000\u0000\u0279\u0277"+ - "\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000\u0000\u027a\u0081"+ - "\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000\u0000\u027c\u027d"+ - "\u0003\u0010\b\u0000\u027d\u0083\u0001\u0000\u0000\u0000>\u008f\u0098"+ - "\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9\u00ee\u00f8"+ - "\u00fe\u0106\u0108\u0113\u011a\u0125\u0128\u0138\u013e\u0148\u014c\u0151"+ - "\u015b\u0163\u0170\u0174\u0178\u017f\u0183\u018a\u0190\u0197\u019f\u01a7"+ - "\u01af\u01c0\u01cb\u01d6\u01db\u01df\u01e4\u01ef\u01f4\u01f8\u0206\u0211"+ - 
"\u021f\u022a\u022d\u0232\u0248\u0250\u0253\u0258\u0265\u0268\u0271\u0279"; + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0001\u0000"+ + "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0005\u0001\u008a\b\u0001\n\u0001\f\u0001\u008d"+ + "\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002\u0095\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00a9\b\u0003\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b5\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00bc\b\u0005\n"+ + "\u0005\f\u0005\u00bf\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0003\u0005\u00c6\b\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0003\u0005\u00cb\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d3\b\u0005\n\u0005\f\u0005"+ + "\u00d6\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00da\b\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e1"+ + "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e6\b\u0006"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00eb\b\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ + "\b\u00f5\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00fb\b\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0103\b\t\n\t\f\t\u0106"+ + "\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003"+ + "\n\u0110\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0115\b\n\n\n\f\n\u0118\t"+ + 
"\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0005\u000b\u0120\b\u000b\n\u000b\f\u000b\u0123\t\u000b\u0003\u000b\u0125"+ + "\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ + "\u000f\u0133\b\u000f\n\u000f\f\u000f\u0136\t\u000f\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0003\u0010\u013b\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0143\b\u0011\n\u0011"+ + "\f\u0011\u0146\t\u0011\u0001\u0011\u0003\u0011\u0149\b\u0011\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0003\u0012\u014e\b\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0005\u0015\u015a\b\u0015\n\u0015\f\u0015\u015d"+ + "\t\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0163"+ + "\b\u0016\n\u0016\f\u0016\u0166\t\u0016\u0001\u0016\u0003\u0016\u0169\b"+ + "\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u016d\b\u0016\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0003\u0018\u0174\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0178\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0005\u0019\u017d\b\u0019\n\u0019\f\u0019\u0180\t\u0019\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0003\u001a\u0185\b\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0005\u001b\u018a\b\u001b\n\u001b\f\u001b\u018d\t\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0192\b\u001c\n\u001c\f\u001c"+ + "\u0195\t\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u019a\b"+ + "\u001d\n\u001d\f\u001d\u019d\t\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0003\u001f\u01a4\b\u001f\u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ + " \u0005 \u01b3\b \n \f \u01b6\t \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ + " \u0005 \u01be\b \n \f \u01c1\t 
\u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ + " \u0005 \u01c9\b \n \f \u01cc\t \u0001 \u0001 \u0003 \u01d0\b \u0001!"+ + "\u0001!\u0003!\u01d4\b!\u0001\"\u0001\"\u0001\"\u0003\"\u01d9\b\"\u0001"+ + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01e2\b$\n$\f$\u01e5"+ + "\t$\u0001%\u0001%\u0003%\u01e9\b%\u0001%\u0001%\u0003%\u01ed\b%\u0001"+ + "&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001(\u0005"+ + "(\u01f9\b(\n(\f(\u01fc\t(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ + "*\u0001*\u0003*\u0206\b*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001-\u0001-\u0001-\u0005-\u0212\b-\n-\f-\u0215\t-\u0001.\u0001.\u0001"+ + ".\u0001.\u0001/\u0001/\u00010\u00010\u00030\u021f\b0\u00011\u00031\u0222"+ + "\b1\u00011\u00011\u00012\u00032\u0227\b2\u00012\u00012\u00013\u00013\u0001"+ + "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00017\u0001"+ + "7\u00017\u00018\u00018\u00018\u00018\u00038\u023d\b8\u00018\u00018\u0001"+ + "8\u00018\u00058\u0243\b8\n8\f8\u0246\t8\u00038\u0248\b8\u00019\u00019"+ + "\u00019\u00039\u024d\b9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ + ":\u0001;\u0001;\u0001;\u0001;\u0003;\u025a\b;\u0001<\u0003<\u025d\b<\u0001"+ + "<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0003=\u0266\b=\u0001>\u0001"+ + ">\u0001>\u0001>\u0005>\u026c\b>\n>\f>\u026f\t>\u0001?\u0001?\u0001?\u0000"+ + "\u0004\u0002\n\u0012\u0014@\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz|~\u0000\t\u0001\u0000@A\u0001\u0000BD\u0002\u0000"+ + "\u001e\u001eQQ\u0001\u0000HI\u0002\u0000##((\u0002\u0000++..\u0002\u0000"+ + "**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u028e\u0000\u0080\u0001\u0000"+ + "\u0000\u0000\u0002\u0083\u0001\u0000\u0000\u0000\u0004\u0094\u0001\u0000"+ + "\u0000\u0000\u0006\u00a8\u0001\u0000\u0000\u0000\b\u00aa\u0001\u0000\u0000"+ + "\u0000\n\u00ca\u0001\u0000\u0000\u0000\f\u00e5\u0001\u0000\u0000\u0000"+ + 
"\u000e\u00e7\u0001\u0000\u0000\u0000\u0010\u00f4\u0001\u0000\u0000\u0000"+ + "\u0012\u00fa\u0001\u0000\u0000\u0000\u0014\u010f\u0001\u0000\u0000\u0000"+ + "\u0016\u0119\u0001\u0000\u0000\u0000\u0018\u0128\u0001\u0000\u0000\u0000"+ + "\u001a\u012a\u0001\u0000\u0000\u0000\u001c\u012c\u0001\u0000\u0000\u0000"+ + "\u001e\u012f\u0001\u0000\u0000\u0000 \u013a\u0001\u0000\u0000\u0000\""+ + "\u013e\u0001\u0000\u0000\u0000$\u014d\u0001\u0000\u0000\u0000&\u0151\u0001"+ + "\u0000\u0000\u0000(\u0153\u0001\u0000\u0000\u0000*\u0155\u0001\u0000\u0000"+ + "\u0000,\u015e\u0001\u0000\u0000\u0000.\u016e\u0001\u0000\u0000\u00000"+ + "\u0171\u0001\u0000\u0000\u00002\u0179\u0001\u0000\u0000\u00004\u0181\u0001"+ + "\u0000\u0000\u00006\u0186\u0001\u0000\u0000\u00008\u018e\u0001\u0000\u0000"+ + "\u0000:\u0196\u0001\u0000\u0000\u0000<\u019e\u0001\u0000\u0000\u0000>"+ + "\u01a3\u0001\u0000\u0000\u0000@\u01cf\u0001\u0000\u0000\u0000B\u01d3\u0001"+ + "\u0000\u0000\u0000D\u01d8\u0001\u0000\u0000\u0000F\u01da\u0001\u0000\u0000"+ + "\u0000H\u01dd\u0001\u0000\u0000\u0000J\u01e6\u0001\u0000\u0000\u0000L"+ + "\u01ee\u0001\u0000\u0000\u0000N\u01f1\u0001\u0000\u0000\u0000P\u01f4\u0001"+ + "\u0000\u0000\u0000R\u01fd\u0001\u0000\u0000\u0000T\u0201\u0001\u0000\u0000"+ + "\u0000V\u0207\u0001\u0000\u0000\u0000X\u020b\u0001\u0000\u0000\u0000Z"+ + "\u020e\u0001\u0000\u0000\u0000\\\u0216\u0001\u0000\u0000\u0000^\u021a"+ + "\u0001\u0000\u0000\u0000`\u021e\u0001\u0000\u0000\u0000b\u0221\u0001\u0000"+ + "\u0000\u0000d\u0226\u0001\u0000\u0000\u0000f\u022a\u0001\u0000\u0000\u0000"+ + "h\u022c\u0001\u0000\u0000\u0000j\u022e\u0001\u0000\u0000\u0000l\u0231"+ + "\u0001\u0000\u0000\u0000n\u0235\u0001\u0000\u0000\u0000p\u0238\u0001\u0000"+ + "\u0000\u0000r\u024c\u0001\u0000\u0000\u0000t\u0250\u0001\u0000\u0000\u0000"+ + "v\u0255\u0001\u0000\u0000\u0000x\u025c\u0001\u0000\u0000\u0000z\u0262"+ + "\u0001\u0000\u0000\u0000|\u0267\u0001\u0000\u0000\u0000~\u0270\u0001\u0000"+ + 
"\u0000\u0000\u0080\u0081\u0003\u0002\u0001\u0000\u0081\u0082\u0005\u0000"+ + "\u0000\u0001\u0082\u0001\u0001\u0000\u0000\u0000\u0083\u0084\u0006\u0001"+ + "\uffff\uffff\u0000\u0084\u0085\u0003\u0004\u0002\u0000\u0085\u008b\u0001"+ + "\u0000\u0000\u0000\u0086\u0087\n\u0001\u0000\u0000\u0087\u0088\u0005\u001d"+ + "\u0000\u0000\u0088\u008a\u0003\u0006\u0003\u0000\u0089\u0086\u0001\u0000"+ + "\u0000\u0000\u008a\u008d\u0001\u0000\u0000\u0000\u008b\u0089\u0001\u0000"+ + "\u0000\u0000\u008b\u008c\u0001\u0000\u0000\u0000\u008c\u0003\u0001\u0000"+ + "\u0000\u0000\u008d\u008b\u0001\u0000\u0000\u0000\u008e\u0095\u0003j5\u0000"+ + "\u008f\u0095\u0003\"\u0011\u0000\u0090\u0095\u0003\u001c\u000e\u0000\u0091"+ + "\u0095\u0003n7\u0000\u0092\u0093\u0004\u0002\u0001\u0000\u0093\u0095\u0003"+ + ",\u0016\u0000\u0094\u008e\u0001\u0000\u0000\u0000\u0094\u008f\u0001\u0000"+ + "\u0000\u0000\u0094\u0090\u0001\u0000\u0000\u0000\u0094\u0091\u0001\u0000"+ + "\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0095\u0005\u0001\u0000"+ + "\u0000\u0000\u0096\u00a9\u0003.\u0017\u0000\u0097\u00a9\u0003\b\u0004"+ + "\u0000\u0098\u00a9\u0003L&\u0000\u0099\u00a9\u0003F#\u0000\u009a\u00a9"+ + "\u00030\u0018\u0000\u009b\u00a9\u0003H$\u0000\u009c\u00a9\u0003N\'\u0000"+ + "\u009d\u00a9\u0003P(\u0000\u009e\u00a9\u0003T*\u0000\u009f\u00a9\u0003"+ + "V+\u0000\u00a0\u00a9\u0003p8\u0000\u00a1\u00a9\u0003X,\u0000\u00a2\u00a3"+ + "\u0004\u0003\u0002\u0000\u00a3\u00a9\u0003v;\u0000\u00a4\u00a5\u0004\u0003"+ + "\u0003\u0000\u00a5\u00a9\u0003t:\u0000\u00a6\u00a7\u0004\u0003\u0004\u0000"+ + "\u00a7\u00a9\u0003x<\u0000\u00a8\u0096\u0001\u0000\u0000\u0000\u00a8\u0097"+ + "\u0001\u0000\u0000\u0000\u00a8\u0098\u0001\u0000\u0000\u0000\u00a8\u0099"+ + "\u0001\u0000\u0000\u0000\u00a8\u009a\u0001\u0000\u0000\u0000\u00a8\u009b"+ + "\u0001\u0000\u0000\u0000\u00a8\u009c\u0001\u0000\u0000\u0000\u00a8\u009d"+ + "\u0001\u0000\u0000\u0000\u00a8\u009e\u0001\u0000\u0000\u0000\u00a8\u009f"+ + 
"\u0001\u0000\u0000\u0000\u00a8\u00a0\u0001\u0000\u0000\u0000\u00a8\u00a1"+ + "\u0001\u0000\u0000\u0000\u00a8\u00a2\u0001\u0000\u0000\u0000\u00a8\u00a4"+ + "\u0001\u0000\u0000\u0000\u00a8\u00a6\u0001\u0000\u0000\u0000\u00a9\u0007"+ + "\u0001\u0000\u0000\u0000\u00aa\u00ab\u0005\u0010\u0000\u0000\u00ab\u00ac"+ + "\u0003\n\u0005\u0000\u00ac\t\u0001\u0000\u0000\u0000\u00ad\u00ae\u0006"+ + "\u0005\uffff\uffff\u0000\u00ae\u00af\u00051\u0000\u0000\u00af\u00cb\u0003"+ + "\n\u0005\b\u00b0\u00cb\u0003\u0010\b\u0000\u00b1\u00cb\u0003\f\u0006\u0000"+ + "\u00b2\u00b4\u0003\u0010\b\u0000\u00b3\u00b5\u00051\u0000\u0000\u00b4"+ + "\u00b3\u0001\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000\u0000\u00b5"+ + "\u00b6\u0001\u0000\u0000\u0000\u00b6\u00b7\u0005,\u0000\u0000\u00b7\u00b8"+ + "\u00050\u0000\u0000\u00b8\u00bd\u0003\u0010\b\u0000\u00b9\u00ba\u0005"+ + "\'\u0000\u0000\u00ba\u00bc\u0003\u0010\b\u0000\u00bb\u00b9\u0001\u0000"+ + "\u0000\u0000\u00bc\u00bf\u0001\u0000\u0000\u0000\u00bd\u00bb\u0001\u0000"+ + "\u0000\u0000\u00bd\u00be\u0001\u0000\u0000\u0000\u00be\u00c0\u0001\u0000"+ + "\u0000\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c1\u00057\u0000"+ + "\u0000\u00c1\u00cb\u0001\u0000\u0000\u0000\u00c2\u00c3\u0003\u0010\b\u0000"+ + "\u00c3\u00c5\u0005-\u0000\u0000\u00c4\u00c6\u00051\u0000\u0000\u00c5\u00c4"+ + "\u0001\u0000\u0000\u0000\u00c5\u00c6\u0001\u0000\u0000\u0000\u00c6\u00c7"+ + "\u0001\u0000\u0000\u0000\u00c7\u00c8\u00052\u0000\u0000\u00c8\u00cb\u0001"+ + "\u0000\u0000\u0000\u00c9\u00cb\u0003\u000e\u0007\u0000\u00ca\u00ad\u0001"+ + "\u0000\u0000\u0000\u00ca\u00b0\u0001\u0000\u0000\u0000\u00ca\u00b1\u0001"+ + "\u0000\u0000\u0000\u00ca\u00b2\u0001\u0000\u0000\u0000\u00ca\u00c2\u0001"+ + "\u0000\u0000\u0000\u00ca\u00c9\u0001\u0000\u0000\u0000\u00cb\u00d4\u0001"+ + "\u0000\u0000\u0000\u00cc\u00cd\n\u0005\u0000\u0000\u00cd\u00ce\u0005\""+ + "\u0000\u0000\u00ce\u00d3\u0003\n\u0005\u0006\u00cf\u00d0\n\u0004\u0000"+ + 
"\u0000\u00d0\u00d1\u00054\u0000\u0000\u00d1\u00d3\u0003\n\u0005\u0005"+ + "\u00d2\u00cc\u0001\u0000\u0000\u0000\u00d2\u00cf\u0001\u0000\u0000\u0000"+ + "\u00d3\u00d6\u0001\u0000\u0000\u0000\u00d4\u00d2\u0001\u0000\u0000\u0000"+ + "\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u000b\u0001\u0000\u0000\u0000"+ + "\u00d6\u00d4\u0001\u0000\u0000\u0000\u00d7\u00d9\u0003\u0010\b\u0000\u00d8"+ + "\u00da\u00051\u0000\u0000\u00d9\u00d8\u0001\u0000\u0000\u0000\u00d9\u00da"+ + "\u0001\u0000\u0000\u0000\u00da\u00db\u0001\u0000\u0000\u0000\u00db\u00dc"+ + "\u0005/\u0000\u0000\u00dc\u00dd\u0003f3\u0000\u00dd\u00e6\u0001\u0000"+ + "\u0000\u0000\u00de\u00e0\u0003\u0010\b\u0000\u00df\u00e1\u00051\u0000"+ + "\u0000\u00e0\u00df\u0001\u0000\u0000\u0000\u00e0\u00e1\u0001\u0000\u0000"+ + "\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u00e3\u00056\u0000\u0000"+ + "\u00e3\u00e4\u0003f3\u0000\u00e4\u00e6\u0001\u0000\u0000\u0000\u00e5\u00d7"+ + "\u0001\u0000\u0000\u0000\u00e5\u00de\u0001\u0000\u0000\u0000\u00e6\r\u0001"+ + "\u0000\u0000\u0000\u00e7\u00ea\u00036\u001b\u0000\u00e8\u00e9\u0005%\u0000"+ + "\u0000\u00e9\u00eb\u0003\u001a\r\u0000\u00ea\u00e8\u0001\u0000\u0000\u0000"+ + "\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb\u00ec\u0001\u0000\u0000\u0000"+ + "\u00ec\u00ed\u0005&\u0000\u0000\u00ed\u00ee\u0003@ \u0000\u00ee\u000f"+ + "\u0001\u0000\u0000\u0000\u00ef\u00f5\u0003\u0012\t\u0000\u00f0\u00f1\u0003"+ + "\u0012\t\u0000\u00f1\u00f2\u0003h4\u0000\u00f2\u00f3\u0003\u0012\t\u0000"+ + "\u00f3\u00f5\u0001\u0000\u0000\u0000\u00f4\u00ef\u0001\u0000\u0000\u0000"+ + "\u00f4\u00f0\u0001\u0000\u0000\u0000\u00f5\u0011\u0001\u0000\u0000\u0000"+ + "\u00f6\u00f7\u0006\t\uffff\uffff\u0000\u00f7\u00fb\u0003\u0014\n\u0000"+ + "\u00f8\u00f9\u0007\u0000\u0000\u0000\u00f9\u00fb\u0003\u0012\t\u0003\u00fa"+ + "\u00f6\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000\u0000\u00fb"+ + "\u0104\u0001\u0000\u0000\u0000\u00fc\u00fd\n\u0002\u0000\u0000\u00fd\u00fe"+ + 
"\u0007\u0001\u0000\u0000\u00fe\u0103\u0003\u0012\t\u0003\u00ff\u0100\n"+ + "\u0001\u0000\u0000\u0100\u0101\u0007\u0000\u0000\u0000\u0101\u0103\u0003"+ + "\u0012\t\u0002\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00ff\u0001\u0000"+ + "\u0000\u0000\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001\u0000"+ + "\u0000\u0000\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0013\u0001\u0000"+ + "\u0000\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u0108\u0006\n\uffff"+ + "\uffff\u0000\u0108\u0110\u0003@ \u0000\u0109\u0110\u00036\u001b\u0000"+ + "\u010a\u0110\u0003\u0016\u000b\u0000\u010b\u010c\u00050\u0000\u0000\u010c"+ + "\u010d\u0003\n\u0005\u0000\u010d\u010e\u00057\u0000\u0000\u010e\u0110"+ + "\u0001\u0000\u0000\u0000\u010f\u0107\u0001\u0000\u0000\u0000\u010f\u0109"+ + "\u0001\u0000\u0000\u0000\u010f\u010a\u0001\u0000\u0000\u0000\u010f\u010b"+ + "\u0001\u0000\u0000\u0000\u0110\u0116\u0001\u0000\u0000\u0000\u0111\u0112"+ + "\n\u0001\u0000\u0000\u0112\u0113\u0005%\u0000\u0000\u0113\u0115\u0003"+ + "\u001a\r\u0000\u0114\u0111\u0001\u0000\u0000\u0000\u0115\u0118\u0001\u0000"+ + "\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000"+ + "\u0000\u0000\u0117\u0015\u0001\u0000\u0000\u0000\u0118\u0116\u0001\u0000"+ + "\u0000\u0000\u0119\u011a\u0003\u0018\f\u0000\u011a\u0124\u00050\u0000"+ + "\u0000\u011b\u0125\u0005B\u0000\u0000\u011c\u0121\u0003\n\u0005\u0000"+ + "\u011d\u011e\u0005\'\u0000\u0000\u011e\u0120\u0003\n\u0005\u0000\u011f"+ + "\u011d\u0001\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121"+ + "\u011f\u0001\u0000\u0000\u0000\u0121\u0122\u0001\u0000\u0000\u0000\u0122"+ + "\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124"+ + "\u011b\u0001\u0000\u0000\u0000\u0124\u011c\u0001\u0000\u0000\u0000\u0124"+ + "\u0125\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000\u0126"+ + "\u0127\u00057\u0000\u0000\u0127\u0017\u0001\u0000\u0000\u0000\u0128\u0129"+ + 
"\u0003D\"\u0000\u0129\u0019\u0001\u0000\u0000\u0000\u012a\u012b\u0003"+ + "<\u001e\u0000\u012b\u001b\u0001\u0000\u0000\u0000\u012c\u012d\u0005\f"+ + "\u0000\u0000\u012d\u012e\u0003\u001e\u000f\u0000\u012e\u001d\u0001\u0000"+ + "\u0000\u0000\u012f\u0134\u0003 \u0010\u0000\u0130\u0131\u0005\'\u0000"+ + "\u0000\u0131\u0133\u0003 \u0010\u0000\u0132\u0130\u0001\u0000\u0000\u0000"+ + "\u0133\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000"+ + "\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u001f\u0001\u0000\u0000\u0000"+ + "\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u0138\u00036\u001b\u0000\u0138"+ + "\u0139\u0005$\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a\u0137"+ + "\u0001\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013c"+ + "\u0001\u0000\u0000\u0000\u013c\u013d\u0003\n\u0005\u0000\u013d!\u0001"+ + "\u0000\u0000\u0000\u013e\u013f\u0005\u0006\u0000\u0000\u013f\u0144\u0003"+ + "$\u0012\u0000\u0140\u0141\u0005\'\u0000\u0000\u0141\u0143\u0003$\u0012"+ + "\u0000\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000"+ + "\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000"+ + "\u0000\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000"+ + "\u0000\u0147\u0149\u0003*\u0015\u0000\u0148\u0147\u0001\u0000\u0000\u0000"+ + "\u0148\u0149\u0001\u0000\u0000\u0000\u0149#\u0001\u0000\u0000\u0000\u014a"+ + "\u014b\u0003&\u0013\u0000\u014b\u014c\u0005&\u0000\u0000\u014c\u014e\u0001"+ + "\u0000\u0000\u0000\u014d\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0001"+ + "\u0000\u0000\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f\u0150\u0003"+ + "(\u0014\u0000\u0150%\u0001\u0000\u0000\u0000\u0151\u0152\u0005Q\u0000"+ + "\u0000\u0152\'\u0001\u0000\u0000\u0000\u0153\u0154\u0007\u0002\u0000\u0000"+ + "\u0154)\u0001\u0000\u0000\u0000\u0155\u0156\u0005P\u0000\u0000\u0156\u015b"+ + "\u0005Q\u0000\u0000\u0157\u0158\u0005\'\u0000\u0000\u0158\u015a\u0005"+ + 
"Q\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000"+ + "\u0000\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000"+ + "\u0000\u0000\u015c+\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ + "\u0000\u015e\u015f\u0005\u0013\u0000\u0000\u015f\u0164\u0003$\u0012\u0000"+ + "\u0160\u0161\u0005\'\u0000\u0000\u0161\u0163\u0003$\u0012\u0000\u0162"+ + "\u0160\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164"+ + "\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165"+ + "\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167"+ + "\u0169\u00032\u0019\u0000\u0168\u0167\u0001\u0000\u0000\u0000\u0168\u0169"+ + "\u0001\u0000\u0000\u0000\u0169\u016c\u0001\u0000\u0000\u0000\u016a\u016b"+ + "\u0005!\u0000\u0000\u016b\u016d\u0003\u001e\u000f\u0000\u016c\u016a\u0001"+ + "\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d-\u0001\u0000"+ + "\u0000\u0000\u016e\u016f\u0005\u0004\u0000\u0000\u016f\u0170\u0003\u001e"+ + "\u000f\u0000\u0170/\u0001\u0000\u0000\u0000\u0171\u0173\u0005\u000f\u0000"+ + "\u0000\u0172\u0174\u00032\u0019\u0000\u0173\u0172\u0001\u0000\u0000\u0000"+ + "\u0173\u0174\u0001\u0000\u0000\u0000\u0174\u0177\u0001\u0000\u0000\u0000"+ + "\u0175\u0176\u0005!\u0000\u0000\u0176\u0178\u0003\u001e\u000f\u0000\u0177"+ + "\u0175\u0001\u0000\u0000\u0000\u0177\u0178\u0001\u0000\u0000\u0000\u0178"+ + "1\u0001\u0000\u0000\u0000\u0179\u017e\u00034\u001a\u0000\u017a\u017b\u0005"+ + "\'\u0000\u0000\u017b\u017d\u00034\u001a\u0000\u017c\u017a\u0001\u0000"+ + "\u0000\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000"+ + "\u0000\u0000\u017e\u017f\u0001\u0000\u0000\u0000\u017f3\u0001\u0000\u0000"+ + "\u0000\u0180\u017e\u0001\u0000\u0000\u0000\u0181\u0184\u0003 \u0010\u0000"+ + "\u0182\u0183\u0005\u0010\u0000\u0000\u0183\u0185\u0003\n\u0005\u0000\u0184"+ + "\u0182\u0001\u0000\u0000\u0000\u0184\u0185\u0001\u0000\u0000\u0000\u0185"+ + 
"5\u0001\u0000\u0000\u0000\u0186\u018b\u0003D\"\u0000\u0187\u0188\u0005"+ + ")\u0000\u0000\u0188\u018a\u0003D\"\u0000\u0189\u0187\u0001\u0000\u0000"+ + "\u0000\u018a\u018d\u0001\u0000\u0000\u0000\u018b\u0189\u0001\u0000\u0000"+ + "\u0000\u018b\u018c\u0001\u0000\u0000\u0000\u018c7\u0001\u0000\u0000\u0000"+ + "\u018d\u018b\u0001\u0000\u0000\u0000\u018e\u0193\u0003>\u001f\u0000\u018f"+ + "\u0190\u0005)\u0000\u0000\u0190\u0192\u0003>\u001f\u0000\u0191\u018f\u0001"+ + "\u0000\u0000\u0000\u0192\u0195\u0001\u0000\u0000\u0000\u0193\u0191\u0001"+ + "\u0000\u0000\u0000\u0193\u0194\u0001\u0000\u0000\u0000\u01949\u0001\u0000"+ + "\u0000\u0000\u0195\u0193\u0001\u0000\u0000\u0000\u0196\u019b\u00038\u001c"+ + "\u0000\u0197\u0198\u0005\'\u0000\u0000\u0198\u019a\u00038\u001c\u0000"+ + "\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000"+ + "\u019b\u0199\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000"+ + "\u019c;\u0001\u0000\u0000\u0000\u019d\u019b\u0001\u0000\u0000\u0000\u019e"+ + "\u019f\u0007\u0003\u0000\u0000\u019f=\u0001\u0000\u0000\u0000\u01a0\u01a4"+ + "\u0005U\u0000\u0000\u01a1\u01a2\u0004\u001f\n\u0000\u01a2\u01a4\u0003"+ + "B!\u0000\u01a3\u01a0\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000"+ + "\u0000\u01a4?\u0001\u0000\u0000\u0000\u01a5\u01d0\u00052\u0000\u0000\u01a6"+ + "\u01a7\u0003d2\u0000\u01a7\u01a8\u0005H\u0000\u0000\u01a8\u01d0\u0001"+ + "\u0000\u0000\u0000\u01a9\u01d0\u0003b1\u0000\u01aa\u01d0\u0003d2\u0000"+ + "\u01ab\u01d0\u0003^/\u0000\u01ac\u01d0\u0003B!\u0000\u01ad\u01d0\u0003"+ + "f3\u0000\u01ae\u01af\u0005F\u0000\u0000\u01af\u01b4\u0003`0\u0000\u01b0"+ + "\u01b1\u0005\'\u0000\u0000\u01b1\u01b3\u0003`0\u0000\u01b2\u01b0\u0001"+ + "\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000\u0000\u0000\u01b4\u01b2\u0001"+ + "\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000\u0000\u01b5\u01b7\u0001"+ + "\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000\u0000\u01b7\u01b8\u0005"+ + 
"G\u0000\u0000\u01b8\u01d0\u0001\u0000\u0000\u0000\u01b9\u01ba\u0005F\u0000"+ + "\u0000\u01ba\u01bf\u0003^/\u0000\u01bb\u01bc\u0005\'\u0000\u0000\u01bc"+ + "\u01be\u0003^/\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01be\u01c1\u0001"+ + "\u0000\u0000\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000\u01bf\u01c0\u0001"+ + "\u0000\u0000\u0000\u01c0\u01c2\u0001\u0000\u0000\u0000\u01c1\u01bf\u0001"+ + "\u0000\u0000\u0000\u01c2\u01c3\u0005G\u0000\u0000\u01c3\u01d0\u0001\u0000"+ + "\u0000\u0000\u01c4\u01c5\u0005F\u0000\u0000\u01c5\u01ca\u0003f3\u0000"+ + "\u01c6\u01c7\u0005\'\u0000\u0000\u01c7\u01c9\u0003f3\u0000\u01c8\u01c6"+ + "\u0001\u0000\u0000\u0000\u01c9\u01cc\u0001\u0000\u0000\u0000\u01ca\u01c8"+ + "\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001\u0000\u0000\u0000\u01cb\u01cd"+ + "\u0001\u0000\u0000\u0000\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cd\u01ce"+ + "\u0005G\u0000\u0000\u01ce\u01d0\u0001\u0000\u0000\u0000\u01cf\u01a5\u0001"+ + "\u0000\u0000\u0000\u01cf\u01a6\u0001\u0000\u0000\u0000\u01cf\u01a9\u0001"+ + "\u0000\u0000\u0000\u01cf\u01aa\u0001\u0000\u0000\u0000\u01cf\u01ab\u0001"+ + "\u0000\u0000\u0000\u01cf\u01ac\u0001\u0000\u0000\u0000\u01cf\u01ad\u0001"+ + "\u0000\u0000\u0000\u01cf\u01ae\u0001\u0000\u0000\u0000\u01cf\u01b9\u0001"+ + "\u0000\u0000\u0000\u01cf\u01c4\u0001\u0000\u0000\u0000\u01d0A\u0001\u0000"+ + "\u0000\u0000\u01d1\u01d4\u00055\u0000\u0000\u01d2\u01d4\u0005E\u0000\u0000"+ + "\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3\u01d2\u0001\u0000\u0000\u0000"+ + "\u01d4C\u0001\u0000\u0000\u0000\u01d5\u01d9\u0003<\u001e\u0000\u01d6\u01d7"+ + "\u0004\"\u000b\u0000\u01d7\u01d9\u0003B!\u0000\u01d8\u01d5\u0001\u0000"+ + "\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d9E\u0001\u0000\u0000"+ + "\u0000\u01da\u01db\u0005\t\u0000\u0000\u01db\u01dc\u0005\u001f\u0000\u0000"+ + "\u01dcG\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u000e\u0000\u0000\u01de"+ + "\u01e3\u0003J%\u0000\u01df\u01e0\u0005\'\u0000\u0000\u01e0\u01e2\u0003"+ + 
"J%\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e2\u01e5\u0001\u0000\u0000"+ + "\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000"+ + "\u0000\u01e4I\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000"+ + "\u01e6\u01e8\u0003\n\u0005\u0000\u01e7\u01e9\u0007\u0004\u0000\u0000\u01e8"+ + "\u01e7\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9"+ + "\u01ec\u0001\u0000\u0000\u0000\u01ea\u01eb\u00053\u0000\u0000\u01eb\u01ed"+ + "\u0007\u0005\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed"+ + "\u0001\u0000\u0000\u0000\u01edK\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005"+ + "\b\u0000\u0000\u01ef\u01f0\u0003:\u001d\u0000\u01f0M\u0001\u0000\u0000"+ + "\u0000\u01f1\u01f2\u0005\u0002\u0000\u0000\u01f2\u01f3\u0003:\u001d\u0000"+ + "\u01f3O\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005\u000b\u0000\u0000\u01f5"+ + "\u01fa\u0003R)\u0000\u01f6\u01f7\u0005\'\u0000\u0000\u01f7\u01f9\u0003"+ + "R)\u0000\u01f8\u01f6\u0001\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000"+ + "\u0000\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000"+ + "\u0000\u01fbQ\u0001\u0000\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000"+ + "\u01fd\u01fe\u00038\u001c\u0000\u01fe\u01ff\u0005Y\u0000\u0000\u01ff\u0200"+ + "\u00038\u001c\u0000\u0200S\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u0001"+ + "\u0000\u0000\u0202\u0203\u0003\u0014\n\u0000\u0203\u0205\u0003f3\u0000"+ + "\u0204\u0206\u0003Z-\u0000\u0205\u0204\u0001\u0000\u0000\u0000\u0205\u0206"+ + "\u0001\u0000\u0000\u0000\u0206U\u0001\u0000\u0000\u0000\u0207\u0208\u0005"+ + "\u0007\u0000\u0000\u0208\u0209\u0003\u0014\n\u0000\u0209\u020a\u0003f"+ + "3\u0000\u020aW\u0001\u0000\u0000\u0000\u020b\u020c\u0005\n\u0000\u0000"+ + "\u020c\u020d\u00036\u001b\u0000\u020dY\u0001\u0000\u0000\u0000\u020e\u0213"+ + "\u0003\\.\u0000\u020f\u0210\u0005\'\u0000\u0000\u0210\u0212\u0003\\.\u0000"+ + "\u0211\u020f\u0001\u0000\u0000\u0000\u0212\u0215\u0001\u0000\u0000\u0000"+ + 
"\u0213\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000"+ + "\u0214[\u0001\u0000\u0000\u0000\u0215\u0213\u0001\u0000\u0000\u0000\u0216"+ + "\u0217\u0003<\u001e\u0000\u0217\u0218\u0005$\u0000\u0000\u0218\u0219\u0003"+ + "@ \u0000\u0219]\u0001\u0000\u0000\u0000\u021a\u021b\u0007\u0006\u0000"+ + "\u0000\u021b_\u0001\u0000\u0000\u0000\u021c\u021f\u0003b1\u0000\u021d"+ + "\u021f\u0003d2\u0000\u021e\u021c\u0001\u0000\u0000\u0000\u021e\u021d\u0001"+ + "\u0000\u0000\u0000\u021fa\u0001\u0000\u0000\u0000\u0220\u0222\u0007\u0000"+ + "\u0000\u0000\u0221\u0220\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000\u0223\u0224\u0005 \u0000"+ + "\u0000\u0224c\u0001\u0000\u0000\u0000\u0225\u0227\u0007\u0000\u0000\u0000"+ + "\u0226\u0225\u0001\u0000\u0000\u0000\u0226\u0227\u0001\u0000\u0000\u0000"+ + "\u0227\u0228\u0001\u0000\u0000\u0000\u0228\u0229\u0005\u001f\u0000\u0000"+ + "\u0229e\u0001\u0000\u0000\u0000\u022a\u022b\u0005\u001e\u0000\u0000\u022b"+ + "g\u0001\u0000\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022di\u0001"+ + "\u0000\u0000\u0000\u022e\u022f\u0005\u0005\u0000\u0000\u022f\u0230\u0003"+ + "l6\u0000\u0230k\u0001\u0000\u0000\u0000\u0231\u0232\u0005F\u0000\u0000"+ + "\u0232\u0233\u0003\u0002\u0001\u0000\u0233\u0234\u0005G\u0000\u0000\u0234"+ + "m\u0001\u0000\u0000\u0000\u0235\u0236\u0005\r\u0000\u0000\u0236\u0237"+ + "\u0005i\u0000\u0000\u0237o\u0001\u0000\u0000\u0000\u0238\u0239\u0005\u0003"+ + "\u0000\u0000\u0239\u023c\u0005_\u0000\u0000\u023a\u023b\u0005]\u0000\u0000"+ + "\u023b\u023d\u00038\u001c\u0000\u023c\u023a\u0001\u0000\u0000\u0000\u023c"+ + "\u023d\u0001\u0000\u0000\u0000\u023d\u0247\u0001\u0000\u0000\u0000\u023e"+ + "\u023f\u0005^\u0000\u0000\u023f\u0244\u0003r9\u0000\u0240\u0241\u0005"+ + "\'\u0000\u0000\u0241\u0243\u0003r9\u0000\u0242\u0240\u0001\u0000\u0000"+ + "\u0000\u0243\u0246\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000\u0000"+ + 
"\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0248\u0001\u0000\u0000"+ + "\u0000\u0246\u0244\u0001\u0000\u0000\u0000\u0247\u023e\u0001\u0000\u0000"+ + "\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248q\u0001\u0000\u0000\u0000"+ + "\u0249\u024a\u00038\u001c\u0000\u024a\u024b\u0005$\u0000\u0000\u024b\u024d"+ + "\u0001\u0000\u0000\u0000\u024c\u0249\u0001\u0000\u0000\u0000\u024c\u024d"+ + "\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u024f"+ + "\u00038\u001c\u0000\u024fs\u0001\u0000\u0000\u0000\u0250\u0251\u0005\u0012"+ + "\u0000\u0000\u0251\u0252\u0003$\u0012\u0000\u0252\u0253\u0005]\u0000\u0000"+ + "\u0253\u0254\u0003:\u001d\u0000\u0254u\u0001\u0000\u0000\u0000\u0255\u0256"+ + "\u0005\u0011\u0000\u0000\u0256\u0259\u00032\u0019\u0000\u0257\u0258\u0005"+ + "!\u0000\u0000\u0258\u025a\u0003\u001e\u000f\u0000\u0259\u0257\u0001\u0000"+ + "\u0000\u0000\u0259\u025a\u0001\u0000\u0000\u0000\u025aw\u0001\u0000\u0000"+ + "\u0000\u025b\u025d\u0007\b\u0000\u0000\u025c\u025b\u0001\u0000\u0000\u0000"+ + "\u025c\u025d\u0001\u0000\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000"+ + "\u025e\u025f\u0005\u0014\u0000\u0000\u025f\u0260\u0003z=\u0000\u0260\u0261"+ + "\u0003|>\u0000\u0261y\u0001\u0000\u0000\u0000\u0262\u0265\u0003<\u001e"+ + "\u0000\u0263\u0264\u0005Y\u0000\u0000\u0264\u0266\u0003<\u001e\u0000\u0265"+ + "\u0263\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266"+ + "{\u0001\u0000\u0000\u0000\u0267\u0268\u0005]\u0000\u0000\u0268\u026d\u0003"+ + "~?\u0000\u0269\u026a\u0005\'\u0000\u0000\u026a\u026c\u0003~?\u0000\u026b"+ + "\u0269\u0001\u0000\u0000\u0000\u026c\u026f\u0001\u0000\u0000\u0000\u026d"+ + "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ + "}\u0001\u0000\u0000\u0000\u026f\u026d\u0001\u0000\u0000\u0000\u0270\u0271"+ + "\u0003\u0010\b\u0000\u0271\u007f\u0001\u0000\u0000\u0000=\u008b\u0094"+ + "\u00a8\u00b4\u00bd\u00c5\u00ca\u00d2\u00d4\u00d9\u00e0\u00e5\u00ea\u00f4"+ + 
"\u00fa\u0102\u0104\u010f\u0116\u0121\u0124\u0134\u013a\u0144\u0148\u014d"+ + "\u015b\u0164\u0168\u016c\u0173\u0177\u017e\u0184\u018b\u0193\u019b\u01a3"+ + "\u01b4\u01bf\u01ca\u01cf\u01d3\u01d8\u01e3\u01e8\u01ec\u01fa\u0205\u0213"+ + "\u021e\u0221\u0226\u023c\u0244\u0247\u024c\u0259\u025c\u0265\u026d"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 6071219839ba..2ee0efe52dfb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -452,30 +452,6 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

The default implementation does nothing.

*/ @Override public void exitMetadata(EsqlBaseParser.MetadataContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterMetadataOption(EsqlBaseParser.MetadataOptionContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitMetadataOption(EsqlBaseParser.MetadataOptionContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index afe714692379..c5c1b0dfa7d7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -272,20 +272,6 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitMetadata(EsqlBaseParser.MetadataContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitMetadataOption(EsqlBaseParser.MetadataOptionContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 0faca2541c9a..f45184e92065 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -415,26 +415,6 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitMetadata(EsqlBaseParser.MetadataContext ctx); - /** - * Enter a parse tree produced by {@link EsqlBaseParser#metadataOption}. - * @param ctx the parse tree - */ - void enterMetadataOption(EsqlBaseParser.MetadataOptionContext ctx); - /** - * Exit a parse tree produced by {@link EsqlBaseParser#metadataOption}. - * @param ctx the parse tree - */ - void exitMetadataOption(EsqlBaseParser.MetadataOptionContext ctx); - /** - * Enter a parse tree produced by {@link EsqlBaseParser#deprecated_metadata}. - * @param ctx the parse tree - */ - void enterDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); - /** - * Exit a parse tree produced by {@link EsqlBaseParser#deprecated_metadata}. - * @param ctx the parse tree - */ - void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#metricsCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index e91cd6670e97..30c5e0ce7809 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -254,18 +254,6 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitMetadata(EsqlBaseParser.MetadataContext ctx); - /** - * Visit a parse tree produced by {@link EsqlBaseParser#metadataOption}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMetadataOption(EsqlBaseParser.MetadataOptionContext ctx); - /** - * Visit a parse tree produced by {@link EsqlBaseParser#deprecated_metadata}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 81d43bc68b79..eb81446f9dde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; @@ -686,12 +687,20 @@ public abstract class ExpressionBuilder extends IdentifierBuilder { RegexMatch result = switch (type) { case EsqlBaseParser.LIKE -> { try { - yield new WildcardLike(source, left, new WildcardPattern(pattern.fold().toString())); + yield new WildcardLike( + source, + left, + new WildcardPattern(pattern.fold(FoldContext.small() /* TODO remove me */).toString()) + ); } catch (InvalidArgumentException e) { throw new ParsingException(source, "Invalid pattern for LIKE [{}]: [{}]", pattern, e.getMessage()); } } - case EsqlBaseParser.RLIKE -> new RLike(source, left, new RLikePattern(pattern.fold().toString())); + case EsqlBaseParser.RLIKE -> new RLike( + source, + left, + new RLikePattern(pattern.fold(FoldContext.small() /* TODO remove me */).toString()) + ); default -> throw new ParsingException("Invalid predicate type for [{}]", source.text()); }; return ctx.NOT() == null ? 
result : new Not(source, result); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 49d77bc36fb2..4b7c0118acda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -34,7 +35,6 @@ import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.MetadataOptionContext; import org.elasticsearch.xpack.esql.plan.TableIdentifier; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; @@ -71,7 +71,6 @@ import java.util.Set; import java.util.function.Function; import static java.util.Collections.emptyList; -import static org.elasticsearch.common.logging.HeaderWarning.addWarning; import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; import static org.elasticsearch.xpack.esql.parser.ParserUtils.source; @@ -157,7 
+156,7 @@ public class LogicalPlanBuilder extends ExpressionBuilder { public PlanFactory visitGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { return p -> { Source source = source(ctx); - String pattern = visitString(ctx.string()).fold().toString(); + String pattern = visitString(ctx.string()).fold(FoldContext.small() /* TODO remove me */).toString(); Grok.Parser grokParser; try { grokParser = Grok.pattern(source, pattern); @@ -188,7 +187,7 @@ public class LogicalPlanBuilder extends ExpressionBuilder { @Override public PlanFactory visitDissectCommand(EsqlBaseParser.DissectCommandContext ctx) { return p -> { - String pattern = visitString(ctx.string()).fold().toString(); + String pattern = visitString(ctx.string()).fold(FoldContext.small() /* TODO remove me */).toString(); Map options = visitCommandOptions(ctx.commandOptions()); String appendSeparator = ""; for (Map.Entry item : options.entrySet()) { @@ -243,7 +242,7 @@ public class LogicalPlanBuilder extends ExpressionBuilder { } Map result = new HashMap<>(); for (EsqlBaseParser.CommandOptionContext option : ctx.commandOption()) { - result.put(visitIdentifier(option.identifier()), expression(option.constant()).fold()); + result.put(visitIdentifier(option.identifier()), expression(option.constant()).fold(FoldContext.small() /* TODO remove me */)); } return result; } @@ -260,21 +259,7 @@ public class LogicalPlanBuilder extends ExpressionBuilder { TableIdentifier table = new TableIdentifier(source, null, visitIndexPattern(ctx.indexPattern())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { - var deprecatedContext = ctx.metadata().deprecated_metadata(); - MetadataOptionContext metadataOptionContext = null; - if (deprecatedContext != null) { - var s = source(deprecatedContext).source(); - addWarning( - "Line {}:{}: Square brackets '[]' need to be removed in FROM METADATA declaration", - s.getLineNumber(), - s.getColumnNumber() - ); - metadataOptionContext = deprecatedContext.metadataOption(); 
- } else { - metadataOptionContext = ctx.metadata().metadataOption(); - - } - for (var c : metadataOptionContext.UNQUOTED_SOURCE()) { + for (var c : ctx.metadata().UNQUOTED_SOURCE()) { String id = c.getText(); Source src = source(c); if (MetadataAttribute.isSupported(id) == false // TODO: drop check below once METADATA_SCORE is no longer snapshot-only diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 6755a7fa30af..9b8106034981 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -149,7 +150,7 @@ public class Enrich extends UnaryPlan implements GeneratingPlan, PostAna out.writeNamedWriteable(policyName()); out.writeNamedWriteable(matchField()); if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { - out.writeString(BytesRefs.toString(policyName().fold())); // old policy name + out.writeString(BytesRefs.toString(policyName().fold(FoldContext.small() /* TODO remove me */))); // old policy name } policy().writeTo(out); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 57ba1c8016fe..072bae21da2a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; @@ -47,9 +48,11 @@ import static java.util.Collections.emptyList; public abstract class AbstractPhysicalOperationProviders implements PhysicalOperationProviders { private final AggregateMapper aggregateMapper = new AggregateMapper(); + private final FoldContext foldContext; private final AnalysisRegistry analysisRegistry; - AbstractPhysicalOperationProviders(AnalysisRegistry analysisRegistry) { + AbstractPhysicalOperationProviders(FoldContext foldContext, AnalysisRegistry analysisRegistry) { + this.foldContext = foldContext; this.analysisRegistry = analysisRegistry; } @@ -251,6 +254,7 @@ public abstract class AbstractPhysicalOperationProviders implements PhysicalOper private record AggFunctionSupplierContext(AggregatorFunctionSupplier supplier, AggregatorMode mode) {} private void aggregatesToFactory( + List aggregates, AggregatorMode mode, Layout layout, @@ -311,7 +315,11 @@ public abstract class AbstractPhysicalOperationProviders implements PhysicalOper // apply the filter only in the initial phase - as the rest of the data is already filtered if 
(aggregateFunction.hasFilter() && mode.isInputPartial() == false) { - EvalOperator.ExpressionEvaluator.Factory evalFactory = EvalMapper.toEvaluator(aggregateFunction.filter(), layout); + EvalOperator.ExpressionEvaluator.Factory evalFactory = EvalMapper.toEvaluator( + foldContext, + aggregateFunction.filter(), + layout + ); aggSupplier = new FilteredAggregatorFunctionSupplier(aggSupplier, evalFactory); } consumer.accept(new AggFunctionSupplierContext(aggSupplier, mode)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index b1fe0e7a7cf5..112f101ad842 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -47,6 +46,7 @@ import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; @@ -94,8 +94,8 @@ public class EsPhysicalOperationProviders extends 
AbstractPhysicalOperationProvi private final List shardContexts; - public EsPhysicalOperationProviders(List shardContexts, AnalysisRegistry analysisRegistry) { - super(analysisRegistry); + public EsPhysicalOperationProviders(FoldContext foldContext, List shardContexts, AnalysisRegistry analysisRegistry) { + super(foldContext, analysisRegistry); this.shardContexts = shardContexts; } @@ -161,7 +161,7 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi List sorts = esQueryExec.sorts(); assert esQueryExec.estimatedRowSize() != null : "estimated row size not initialized"; int rowEstimatedSize = esQueryExec.estimatedRowSize(); - int limit = esQueryExec.limit() != null ? (Integer) esQueryExec.limit().fold() : NO_LIMIT; + int limit = esQueryExec.limit() != null ? (Integer) esQueryExec.limit().fold(context.foldCtx()) : NO_LIMIT; boolean scoring = esQueryExec.attrs() .stream() .anyMatch(a -> a instanceof MetadataAttribute && a.name().equals(MetadataAttribute.SCORE)); @@ -217,7 +217,7 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi querySupplier(queryBuilder), context.queryPragmas().dataPartitioning(), context.queryPragmas().taskConcurrency(), - limit == null ? NO_LIMIT : (Integer) limit.fold() + limit == null ? NO_LIMIT : (Integer) limit.fold(context.foldCtx()) ); } @@ -338,16 +338,7 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi @Override public SearchLookup lookup() { - boolean syntheticSource = SourceFieldMapper.isSynthetic(indexSettings()); - var searchLookup = ctx.lookup(); - if (syntheticSource) { - // in the context of scripts and when synthetic source is used the search lookup can't always be reused between - // users of SearchLookup. This is only an issue when scripts fallback to _source, but since we can't always - // accurately determine whether a script uses _source, we should do this for all script usages. 
- // This lookup() method is only invoked for scripts / runtime fields, so it is ok to do here. - searchLookup = searchLookup.swapSourceProvider(ctx.createSourceProvider()); - } - return searchLookup; + return ctx.lookup(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index a1765977ee9c..c185bd572987 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; @@ -144,7 +145,7 @@ public final class EsqlExpressionTranslators { static Query translate(InsensitiveEquals bc) { TypedAttribute attribute = checkIsPushableAttribute(bc.left()); Source source = bc.source(); - BytesRef value = BytesRefs.toBytesRef(valueOf(bc.right())); + BytesRef value = BytesRefs.toBytesRef(valueOf(FoldContext.small() /* TODO remove me */, bc.right())); String name = pushableAttributeName(attribute); return new TermQuery(source, name, value.utf8ToString(), true); } @@ -188,7 +189,7 @@ public final class EsqlExpressionTranslators { TypedAttribute attribute = checkIsPushableAttribute(bc.left()); Source source = bc.source(); String name = handler.nameOf(attribute); - Object result = 
bc.right().fold(); + Object result = bc.right().fold(FoldContext.small() /* TODO remove me */); Object value = result; String format = null; boolean isDateLiteralComparison = false; @@ -269,7 +270,7 @@ public final class EsqlExpressionTranslators { return null; } Source source = bc.source(); - Object value = valueOf(bc.right()); + Object value = valueOf(FoldContext.small() /* TODO remove me */, bc.right()); // Comparisons with multi-values always return null in ESQL. if (value instanceof List) { @@ -369,7 +370,7 @@ public final class EsqlExpressionTranslators { if (f instanceof CIDRMatch cm) { if (cm.ipField() instanceof FieldAttribute fa && Expressions.foldable(cm.matches())) { String targetFieldName = handler.nameOf(fa.exactAttribute()); - Set set = new LinkedHashSet<>(Expressions.fold(cm.matches())); + Set set = new LinkedHashSet<>(Expressions.fold(FoldContext.small() /* TODO remove me */, cm.matches())); Query query = new TermsQuery(f.source(), targetFieldName, set); // CIDR_MATCH applies only to single values. 
@@ -420,7 +421,7 @@ public final class EsqlExpressionTranslators { String name = handler.nameOf(attribute); try { - Geometry shape = SpatialRelatesUtils.makeGeometryFromLiteral(constantExpression); + Geometry shape = SpatialRelatesUtils.makeGeometryFromLiteral(FoldContext.small() /* TODO remove me */, constantExpression); return new SpatialRelatesQuery(bc.source(), name, bc.queryRelation(), shape, attribute.dataType()); } catch (IllegalArgumentException e) { throw new QlIllegalArgumentException(e.getMessage(), e); @@ -461,7 +462,7 @@ public final class EsqlExpressionTranslators { queries.add(query); } } else { - terms.add(valueOf(rhs)); + terms.add(valueOf(FoldContext.small() /* TODO remove me */, rhs)); } } } @@ -487,8 +488,8 @@ public final class EsqlExpressionTranslators { } private static RangeQuery translate(Range r, TranslatorHandler handler) { - Object lower = valueOf(r.lower()); - Object upper = valueOf(r.upper()); + Object lower = valueOf(FoldContext.small() /* TODO remove me */, r.lower()); + Object upper = valueOf(FoldContext.small() /* TODO remove me */, r.upper()); String format = null; DataType dataType = r.value().dataType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index af38551c1ad0..ecd0284c7cb5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -22,7 +22,6 @@ import org.elasticsearch.compute.operator.ColumnLoadOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.EvalOperatorFactory; -import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import 
org.elasticsearch.compute.operator.FilterOperator.FilterOperatorFactory; import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.LocalSourceOperator.LocalSourceFactory; @@ -55,6 +54,7 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -161,13 +161,14 @@ public class LocalExecutionPlanner { /** * turn the given plan into a list of drivers to execute */ - public LocalExecutionPlan plan(PhysicalPlan localPhysicalPlan) { + public LocalExecutionPlan plan(FoldContext foldCtx, PhysicalPlan localPhysicalPlan) { var context = new LocalExecutionPlannerContext( new ArrayList<>(), new Holder<>(DriverParallelism.SINGLE), configuration.pragmas(), bigArrays, blockFactory, + foldCtx, settings ); @@ -397,7 +398,7 @@ public class LocalExecutionPlanner { PhysicalOperation source = plan(eval.child(), context); for (Alias field : eval.fields()) { - var evaluatorSupplier = EvalMapper.toEvaluator(field.child(), source.layout); + var evaluatorSupplier = EvalMapper.toEvaluator(context.foldCtx(), field.child(), source.layout); Layout.Builder layout = source.layout.builder(); layout.append(field.toAttribute()); source = source.with(new EvalOperatorFactory(evaluatorSupplier), layout.build()); @@ -418,7 +419,7 @@ public class LocalExecutionPlanner { source = source.with( new StringExtractOperator.StringExtractOperatorFactory( patternNames, - EvalMapper.toEvaluator(expr, layout), + EvalMapper.toEvaluator(context.foldCtx(), expr, layout), () -> (input) -> dissect.parser().parser().parse(input) 
), layout @@ -450,7 +451,7 @@ public class LocalExecutionPlanner { source = source.with( new ColumnExtractOperator.Factory( types, - EvalMapper.toEvaluator(grok.inputExpression(), layout), + EvalMapper.toEvaluator(context.foldCtx(), grok.inputExpression(), layout), () -> new GrokEvaluatorExtracter(grok.pattern().grok(), grok.pattern().pattern(), fieldToPos, fieldToType) ), layout @@ -599,10 +600,6 @@ public class LocalExecutionPlanner { ); } - private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { - return EvalMapper.toEvaluator(exp, layout); - } - private PhysicalOperation planLocal(LocalSourceExec localSourceExec, LocalExecutionPlannerContext context) { Layout.Builder layout = new Layout.Builder(); layout.append(localSourceExec.output()); @@ -657,12 +654,15 @@ public class LocalExecutionPlanner { private PhysicalOperation planFilter(FilterExec filter, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(filter.child(), context); // TODO: should this be extracted into a separate eval block? 
- return source.with(new FilterOperatorFactory(toEvaluator(filter.condition(), source.layout)), source.layout); + return source.with( + new FilterOperatorFactory(EvalMapper.toEvaluator(context.foldCtx(), filter.condition(), source.layout)), + source.layout + ); } private PhysicalOperation planLimit(LimitExec limit, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(limit.child(), context); - return source.with(new Factory((Integer) limit.limit().fold()), source.layout); + return source.with(new Factory((Integer) limit.limit().fold(context.foldCtx)), source.layout); } private PhysicalOperation planMvExpand(MvExpandExec mvExpandExec, LocalExecutionPlannerContext context) { @@ -783,6 +783,7 @@ public class LocalExecutionPlanner { QueryPragmas queryPragmas, BigArrays bigArrays, BlockFactory blockFactory, + FoldContext foldCtx, Settings settings ) { void addDriverFactory(DriverFactory driverFactory) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 5325145a77ad..2f4368155069 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -159,13 +160,18 @@ public class PlannerUtils { } } - public static PhysicalPlan localPlan(List searchContexts, Configuration 
configuration, PhysicalPlan plan) { - return localPlan(configuration, plan, SearchContextStats.from(searchContexts)); + public static PhysicalPlan localPlan( + List searchContexts, + Configuration configuration, + FoldContext foldCtx, + PhysicalPlan plan + ) { + return localPlan(configuration, foldCtx, plan, SearchContextStats.from(searchContexts)); } - public static PhysicalPlan localPlan(Configuration configuration, PhysicalPlan plan, SearchStats searchStats) { - final var logicalOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(configuration, searchStats)); - var physicalOptimizer = new LocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(configuration, searchStats)); + public static PhysicalPlan localPlan(Configuration configuration, FoldContext foldCtx, PhysicalPlan plan, SearchStats searchStats) { + final var logicalOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(configuration, foldCtx, searchStats)); + var physicalOptimizer = new LocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(configuration, foldCtx, searchStats)); return localPlan(plan, logicalOptimizer, physicalOptimizer); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java index 334875927eb9..4dea8a50b5c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java @@ -14,6 +14,9 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import 
org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; class TypeConverter { @@ -33,19 +36,26 @@ class TypeConverter { BigArrays.NON_RECYCLING_INSTANCE ) ); - return new TypeConverter( - convertFunction.functionName(), - convertFunction.toEvaluator(e -> driverContext -> new ExpressionEvaluator() { - @Override - public org.elasticsearch.compute.data.Block eval(Page page) { - // This is a pass-through evaluator, since it sits directly on the source loading (no prior expressions) - return page.getBlock(0); - } + return new TypeConverter(convertFunction.functionName(), convertFunction.toEvaluator(new EvaluatorMapper.ToEvaluator() { + @Override + public ExpressionEvaluator.Factory apply(Expression expression) { + return driverContext -> new ExpressionEvaluator() { + @Override + public org.elasticsearch.compute.data.Block eval(Page page) { + // This is a pass-through evaluator, since it sits directly on the source loading (no prior expressions) + return page.getBlock(0); + } - @Override - public void close() {} - }).get(driverContext1) - ); + @Override + public void close() {} + }; + } + + @Override + public FoldContext foldCtx() { + throw new IllegalStateException("not folding"); + } + }).get(driverContext1)); } public Block convert(Block block) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index e881eabb38c4..b8f539ea307c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.AggregatorMode; import 
org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -90,7 +91,7 @@ class MapperUtils { enrich.mode(), enrich.policy().getType(), enrich.matchField(), - BytesRefs.toString(enrich.policyName().fold()), + BytesRefs.toString(enrich.policyName().fold(FoldContext.small() /* TODO remove me */)), enrich.policy().getMatchField(), enrich.concreteIndices(), enrich.enrichFields() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7223e6988bb1..a38236fe6095 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -61,6 +61,7 @@ import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; @@ -140,6 +141,7 @@ public class ComputeService { CancellableTask rootTask, PhysicalPlan physicalPlan, Configuration configuration, + FoldContext foldContext, EsqlExecutionInfo execInfo, ActionListener listener ) { @@ -174,6 +176,7 @@ public class ComputeService { RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, + foldContext, null, null 
); @@ -226,6 +229,7 @@ public class ComputeService { RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, + foldContext, exchangeSource, null ), @@ -460,16 +464,16 @@ public class ComputeService { context.exchangeSink(), enrichLookupService, lookupFromIndexService, - new EsPhysicalOperationProviders(contexts, searchService.getIndicesService().getAnalysis()) + new EsPhysicalOperationProviders(context.foldCtx(), contexts, searchService.getIndicesService().getAnalysis()) ); LOGGER.debug("Received physical plan:\n{}", plan); - plan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration, plan); + plan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration, context.foldCtx(), plan); // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation - LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(plan); + LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.foldCtx(), plan); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Local execution plan:\n{}", localExecutionPlan.describe()); } @@ -715,7 +719,15 @@ public class ComputeService { }; acquireSearchContexts(clusterAlias, shardIds, configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH, ESQL_WORKER_THREAD_POOL_NAME); - var computeContext = new ComputeContext(sessionId, clusterAlias, searchContexts, configuration, null, exchangeSink); + var computeContext = new ComputeContext( + sessionId, + clusterAlias, + searchContexts, + configuration, + configuration.newFoldContext(), + null, + exchangeSink + ); runCompute(parentTask, computeContext, request.plan(), batchListener); }, batchListener::onFailure)); } @@ -766,6 +778,7 @@ 
public class ComputeService { request.clusterAlias(), List.of(), request.configuration(), + new FoldContext(request.pragmas().foldLimit().getBytes()), exchangeSource, externalSink ), @@ -901,7 +914,15 @@ public class ComputeService { exchangeSink.addCompletionListener(computeListener.acquireAvoid()); runCompute( parentTask, - new ComputeContext(localSessionId, clusterAlias, List.of(), configuration, exchangeSource, exchangeSink), + new ComputeContext( + localSessionId, + clusterAlias, + List.of(), + configuration, + configuration.newFoldContext(), + exchangeSource, + exchangeSink + ), coordinatorPlan, computeListener.acquireCompute(clusterAlias) ); @@ -925,6 +946,7 @@ public class ComputeService { String clusterAlias, List searchContexts, Configuration configuration, + FoldContext foldCtx, ExchangeSourceHandler exchangeSource, ExchangeSinkHandler exchangeSink ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index e4310a0bbf3a..89b4231a999d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -24,132 +24,16 @@ import java.util.Set; * examples below are *just* used for that. Don't make more of those - add them * to {@link EsqlCapabilities} instead. *

- * NOTE: You can't remove a feature now and probably never will be able to. + * NOTE: You can only remove features on major version boundaries. * Only add more of these if you need a fast CPU level check. *

*/ public class EsqlFeatures implements FeatureSpecification { - /** - * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. - * Added in #106095. - */ - private static final NodeFeature MV_SORT = new NodeFeature("esql.mv_sort", true); - - /** - * When we disabled some broken optimizations around {@code nullable}. - * Fixed in #105691. - */ - private static final NodeFeature DISABLE_NULLABLE_OPTS = new NodeFeature("esql.disable_nullable_opts", true); - - /** - * Introduction of {@code ST_X} and {@code ST_Y}. Added in #105768. - */ - private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y", true); - - /** - * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. - */ - private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source", true); - - /** - * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. - */ - private static final NodeFeature SPATIAL_SHAPES = new NodeFeature("esql.spatial_shapes", true); - - /** - * Support for spatial aggregation {@code ST_CENTROID}. Done in #104269. - */ - private static final NodeFeature ST_CENTROID_AGG = new NodeFeature("esql.st_centroid_agg", true); - - /** - * Support for spatial aggregation {@code ST_INTERSECTS}. Done in #104907. - */ - private static final NodeFeature ST_INTERSECTS = new NodeFeature("esql.st_intersects", true); - - /** - * Support for spatial aggregation {@code ST_CONTAINS} and {@code ST_WITHIN}. Done in #106503. - */ - private static final NodeFeature ST_CONTAINS_WITHIN = new NodeFeature("esql.st_contains_within", true); - - /** - * Support for spatial aggregation {@code ST_DISJOINT}. Done in #107007. - */ - private static final NodeFeature ST_DISJOINT = new NodeFeature("esql.st_disjoint", true); - - /** - * The introduction of the {@code VALUES} agg. 
- */ - private static final NodeFeature AGG_VALUES = new NodeFeature("esql.agg_values", true); - - /** - * Does ESQL support async queries. - */ - public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query", true); - - /** - * Does ESQL support FROM OPTIONS? - */ - @Deprecated - public static final NodeFeature FROM_OPTIONS = new NodeFeature("esql.from_options", true); - - /** - * Cast string literals to a desired data type. - */ - public static final NodeFeature STRING_LITERAL_AUTO_CASTING = new NodeFeature("esql.string_literal_auto_casting", true); - - /** - * Base64 encoding and decoding functions. - */ - public static final NodeFeature BASE64_DECODE_ENCODE = new NodeFeature("esql.base64_decode_encode", true); - - /** - * Support for the :: casting operator - */ - public static final NodeFeature CASTING_OPERATOR = new NodeFeature("esql.casting_operator", true); - - /** - * Blocks can be labelled with {@link org.elasticsearch.compute.data.Block.MvOrdering#SORTED_ASCENDING} for optimizations. - */ - public static final NodeFeature MV_ORDERING_SORTED_ASCENDING = new NodeFeature("esql.mv_ordering_sorted_ascending", true); - - /** - * Support for metrics counter fields - */ - public static final NodeFeature METRICS_COUNTER_FIELDS = new NodeFeature("esql.metrics_counter_fields", true); - - /** - * Cast string literals to a desired data type for IN predicate and more types for BinaryComparison. - */ - public static final NodeFeature STRING_LITERAL_AUTO_CASTING_EXTENDED = new NodeFeature( - "esql.string_literal_auto_casting_extended", - true - ); - - /** - * Support for metadata fields. 
- */ - public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields", true); - - /** - * Support for timespan units abbreviations - */ - public static final NodeFeature TIMESPAN_ABBREVIATIONS = new NodeFeature("esql.timespan_abbreviations", true); - - /** - * Support metrics counter types - */ - public static final NodeFeature COUNTER_TYPES = new NodeFeature("esql.counter_types", true); - /** * Support metrics syntax */ public static final NodeFeature METRICS_SYNTAX = new NodeFeature("esql.metrics_syntax"); - /** - * Internal resolve_fields API for ES|QL - */ - public static final NodeFeature RESOLVE_FIELDS_API = new NodeFeature("esql.resolve_fields_api", true); - private Set snapshotBuildFeatures() { assert Build.current().isSnapshot() : Build.current(); return Set.of(METRICS_SYNTAX); @@ -157,30 +41,7 @@ public class EsqlFeatures implements FeatureSpecification { @Override public Set getFeatures() { - Set features = Set.of( - ASYNC_QUERY, - AGG_VALUES, - BASE64_DECODE_ENCODE, - MV_SORT, - DISABLE_NULLABLE_OPTS, - ST_X_Y, - FROM_OPTIONS, - SPATIAL_POINTS_FROM_SOURCE, - SPATIAL_SHAPES, - ST_CENTROID_AGG, - ST_INTERSECTS, - ST_CONTAINS_WITHIN, - ST_DISJOINT, - STRING_LITERAL_AUTO_CASTING, - CASTING_OPERATOR, - MV_ORDERING_SORTED_ASCENDING, - METRICS_COUNTER_FIELDS, - STRING_LITERAL_AUTO_CASTING_EXTENDED, - METADATA_FIELDS, - TIMESPAN_ABBREVIATIONS, - COUNTER_TYPES, - RESOLVE_FIELDS_API - ); + Set features = Set.of(); if (Build.current().isSnapshot()) { return Collections.unmodifiableSet(Sets.union(features, snapshotBuildFeatures())); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java index 58e80e569ee5..2443c3f2cda6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java @@ -12,12 +12,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.esql.core.expression.Expression; import java.io.IOException; import java.util.Objects; @@ -57,6 +59,8 @@ public final class QueryPragmas implements Writeable { public static final Setting NODE_LEVEL_REDUCTION = Setting.boolSetting("node_level_reduction", true); + public static final Setting FOLD_LIMIT = Setting.memorySizeSetting("fold_limit", "5%"); + public static final QueryPragmas EMPTY = new QueryPragmas(Settings.EMPTY); private final Settings settings; @@ -134,6 +138,17 @@ public final class QueryPragmas implements Writeable { return NODE_LEVEL_REDUCTION.get(settings); } + /** + * The maximum amount of memory we can use for {@link Expression#fold} during planning. This + * defaults to 5% of memory available on the current node. If this method is called on the + * coordinating node, this is 5% of the coordinating node's memory. If it's called on a data + * node, it's 5% of the data node. That's an exciting inconsistency. But it's + * important. Bigger nodes have more space to do folding.
+ */ + public ByteSizeValue foldLimit() { + return FOLD_LIMIT.get(settings); + } + public boolean isEmpty() { return settings.isEmpty(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java index 8dee3478b3b6..61ac67674b25 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java @@ -28,10 +28,6 @@ final class ReinitializingSourceProvider implements SourceProvider { private PerThreadSourceProvider perThreadProvider; private final Supplier sourceProviderFactory; - // Keeping track of last seen doc and if current doc is before last seen doc then source provider is initialized: - // (when source mode is synthetic then _source is read from doc values and doc values don't support going backwards) - private int lastSeenDocId; - ReinitializingSourceProvider(Supplier sourceProviderFactory) { this.sourceProviderFactory = sourceProviderFactory; } @@ -40,15 +36,25 @@ final class ReinitializingSourceProvider implements SourceProvider { public Source getSource(LeafReaderContext ctx, int doc) throws IOException { var currentThread = Thread.currentThread(); PerThreadSourceProvider provider = perThreadProvider; - if (provider == null || provider.creatingThread != currentThread || doc < lastSeenDocId) { + if (provider == null || provider.creatingThread != currentThread || doc < provider.lastSeenDocId) { provider = new PerThreadSourceProvider(sourceProviderFactory.get(), currentThread); this.perThreadProvider = provider; } - lastSeenDocId = doc; + provider.lastSeenDocId = doc; return provider.source.getSource(ctx, doc); } - private record PerThreadSourceProvider(SourceProvider source, Thread creatingThread) { + private static final class 
PerThreadSourceProvider { + final SourceProvider source; + final Thread creatingThread; + // Keeping track of last seen doc and if current doc is before last seen doc then source provider is initialized: + // (when source mode is synthetic then _source is read from doc values and doc values don't support going backwards) + int lastSeenDocId; + + private PerThreadSourceProvider(SourceProvider source, Thread creatingThread) { + this.source = source; + this.creatingThread = creatingThread; + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index b44e249e3800..84173eeecc06 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.action.EsqlQueryTask; import org.elasticsearch.xpack.esql.core.async.AsyncTaskManagementService; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; @@ -189,11 +190,13 @@ public class TransportEsqlQueryAction extends HandledTransportAction computeService.execute( sessionId, (CancellableTask) task, plan, configuration, + foldCtx, executionInfo, resultListener ); @@ -201,6 +204,7 @@ public class TransportEsqlQueryAction extends HandledTransportAction new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) + .map( + e -> new EnrichPolicyResolver.UnresolvedPolicy( + (String) e.policyName().fold(FoldContext.small() /* 
TODO remove me*/), + e.mode() + ) + ) .collect(Collectors.toSet()); final List indices = preAnalysis.indices; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 0847f71b1fb0..eef0df6b89dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.Converter; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -239,9 +240,9 @@ public class EsqlDataTypeConverter { return null; } - public static TemporalAmount foldToTemporalAmount(Expression field, String sourceText, DataType expectedType) { + public static TemporalAmount foldToTemporalAmount(FoldContext ctx, Expression field, String sourceText, DataType expectedType) { if (field.foldable()) { - Object v = field.fold(); + Object v = field.fold(ctx); if (v instanceof BytesRef b) { try { return EsqlDataTypeConverter.parseTemporalAmount(b.utf8ToString(), expectedType); @@ -538,6 +539,10 @@ public class EsqlDataTypeConverter { return formatter == null ? dateTimeToString(dateTime) : formatter.formatMillis(dateTime); } + public static String nanoTimeToString(long dateTime, DateFormatter formatter) { + return formatter == null ? 
nanoTimeToString(dateTime) : formatter.formatNanos(dateTime); + } + public static BytesRef numericBooleanToString(Object field) { return new BytesRef(String.valueOf(field)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 7d4374934ab8..ed1ee71ff196 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.PreAnalyzer; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; @@ -240,7 +241,10 @@ public class CsvTests extends ESTestCase { * The csv tests support all but a few features. The unsupported features * are tested in integration tests. 
*/ - assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); + assumeFalse( + "metadata fields aren't supported", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.METADATA_FIELDS.capabilityName()) + ); assumeFalse( "enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.ENRICH_LOAD.capabilityName()) @@ -264,7 +268,7 @@ public class CsvTests extends ESTestCase { ); assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V10.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V11.capabilityName()) ); assumeFalse( "can't use TERM function in csv tests", @@ -482,15 +486,16 @@ public class CsvTests extends ESTestCase { return new CsvTestsDataLoader.MultiIndexTestDataset(indexName, datasets); } - private static TestPhysicalOperationProviders testOperationProviders(CsvTestsDataLoader.MultiIndexTestDataset datasets) - throws Exception { - var indexResolution = loadIndexResolution(datasets); + private static TestPhysicalOperationProviders testOperationProviders( + FoldContext foldCtx, + CsvTestsDataLoader.MultiIndexTestDataset datasets + ) throws Exception { var indexPages = new ArrayList(); for (CsvTestsDataLoader.TestDataset dataset : datasets.datasets()) { var testData = loadPageFromCsv(CsvTests.class.getResource("/data/" + dataset.dataFileName()), dataset.typeMapping()); indexPages.add(new TestPhysicalOperationProviders.IndexPage(dataset.indexName(), testData.v1(), testData.v2())); } - return TestPhysicalOperationProviders.create(indexPages); + return TestPhysicalOperationProviders.create(foldCtx, indexPages); } private ActualResults executePlan(BigArrays bigArrays) throws Exception { @@ -498,6 +503,7 @@ public class CsvTests extends ESTestCase { var testDatasets = testDatasets(parsed); LogicalPlan analyzed = analyzedPlan(parsed, 
testDatasets); + FoldContext foldCtx = FoldContext.small(); EsqlSession session = new EsqlSession( getTestName(), configuration, @@ -505,21 +511,21 @@ public class CsvTests extends ESTestCase { null, null, functionRegistry, - new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration)), + new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration, foldCtx)), mapper, TEST_VERIFIER, new PlanningMetrics(), null, EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER ); - TestPhysicalOperationProviders physicalOperationProviders = testOperationProviders(testDatasets); + TestPhysicalOperationProviders physicalOperationProviders = testOperationProviders(foldCtx, testDatasets); PlainActionFuture listener = new PlainActionFuture<>(); session.executeOptimizedPlan( new EsqlQueryRequest(), new EsqlExecutionInfo(randomBoolean()), - planRunner(bigArrays, physicalOperationProviders), + planRunner(bigArrays, foldCtx, physicalOperationProviders), session.optimizedPlan(analyzed), listener.delegateFailureAndWrap( // Wrap so we can capture the warnings in the calling thread @@ -579,12 +585,13 @@ public class CsvTests extends ESTestCase { testCase.assertWarnings(false).assertWarnings(normalized); } - PlanRunner planRunner(BigArrays bigArrays, TestPhysicalOperationProviders physicalOperationProviders) { - return (physicalPlan, listener) -> executeSubPlan(bigArrays, physicalOperationProviders, physicalPlan, listener); + PlanRunner planRunner(BigArrays bigArrays, FoldContext foldCtx, TestPhysicalOperationProviders physicalOperationProviders) { + return (physicalPlan, listener) -> executeSubPlan(bigArrays, foldCtx, physicalOperationProviders, physicalPlan, listener); } void executeSubPlan( BigArrays bigArrays, + FoldContext foldCtx, TestPhysicalOperationProviders physicalOperationProviders, PhysicalPlan physicalPlan, ActionListener listener @@ -630,12 +637,17 @@ public class CsvTests extends ESTestCase { // replace fragment inside the coordinator plan List drivers = new ArrayList<>(); 
- LocalExecutionPlan coordinatorNodeExecutionPlan = executionPlanner.plan(new OutputExec(coordinatorPlan, collectedPages::add)); + LocalExecutionPlan coordinatorNodeExecutionPlan = executionPlanner.plan( + foldCtx, + new OutputExec(coordinatorPlan, collectedPages::add) + ); drivers.addAll(coordinatorNodeExecutionPlan.createDrivers(getTestName())); if (dataNodePlan != null) { var searchStats = new DisabledSearchStats(); - var logicalTestOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(configuration, searchStats)); - var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(configuration, searchStats)); + var logicalTestOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(configuration, foldCtx, searchStats)); + var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer( + new LocalPhysicalOptimizerContext(configuration, foldCtx, searchStats) + ); var csvDataNodePhysicalPlan = PlannerUtils.localPlan(dataNodePlan, logicalTestOptimizer, physicalTestOptimizer); exchangeSource.addRemoteSink( @@ -646,7 +658,7 @@ public class CsvTests extends ESTestCase { throw new AssertionError("expected no failure", e); }) ); - LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan(csvDataNodePhysicalPlan); + LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan(foldCtx, csvDataNodePhysicalPlan); drivers.addAll(dataNodeExecutionPlan.createDrivers(getTestName())); Randomness.shuffle(drivers); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index 134981d3c3b0..ebfe1c814707 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -63,7 
+63,12 @@ public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCa String name = randomAlphaOfLength(4); Operator.Status status = randomBoolean() ? null - : new AbstractPageMappingOperator.Status(randomNonNegativeLong(), between(0, Integer.MAX_VALUE)); + : new AbstractPageMappingOperator.Status( + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); return new DriverStatus.OperatorStatus(name, status); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 4b9ae0d1692e..1d49409dc964 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -683,7 +683,7 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase 0"); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(MAX_LIMIT)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(MAX_LIMIT)); var filter = as(limit.child(), Filter.class); var eval = as(filter.child(), Eval.class); limit = as(eval.child(), Limit.class); @@ -1114,7 +1114,7 @@ public class AnalyzerTests extends ESTestCase { for (var breaker : List.of("stats c = count(salary) by last_name", "sort salary")) { var plan = analyze("from test | limit 100000 | " + breaker); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(MAX_LIMIT)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(MAX_LIMIT)); } } @@ -1122,7 +1122,7 @@ public class AnalyzerTests extends ESTestCase { for (var breaker : List.of("stats c = count(salary) by last_name", "eval c = salary | sort c")) { var plan = analyze("from test | " + breaker + " | eval cc = c * 10 | where cc > 0"); var limit = 
as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(DEFAULT_LIMIT)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(DEFAULT_LIMIT)); } } @@ -1198,21 +1198,21 @@ public class AnalyzerTests extends ESTestCase { verifyUnsupported(""" from test | eval date_format(int) - """, "first argument of [date_format(int)] must be [datetime], found value [int] type [integer]"); + """, "first argument of [date_format(int)] must be [datetime or date_nanos], found value [int] type [integer]"); } public void testDateFormatOnFloat() { verifyUnsupported(""" from test | eval date_format(float) - """, "first argument of [date_format(float)] must be [datetime], found value [float] type [double]"); + """, "first argument of [date_format(float)] must be [datetime or date_nanos], found value [float] type [double]"); } public void testDateFormatOnText() { verifyUnsupported(""" from test | eval date_format(keyword) - """, "first argument of [date_format(keyword)] must be [datetime], found value [keyword] type [keyword]"); + """, "first argument of [date_format(keyword)] must be [datetime or date_nanos], found value [keyword] type [keyword]"); } public void testDateFormatWithNumericFormat() { @@ -1428,7 +1428,7 @@ public class AnalyzerTests extends ESTestCase { var plan = analyzeWithEmptyFieldCapsResponse(query); var limit = as(plan, Limit.class); limit = as(limit.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(0)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(0)); var orderBy = as(limit.child(), OrderBy.class); var agg = as(orderBy.child(), Aggregate.class); assertEmptyEsRelation(agg.child()); @@ -1443,7 +1443,7 @@ public class AnalyzerTests extends ESTestCase { var plan = analyzeWithEmptyFieldCapsResponse(query); var limit = as(plan, Limit.class); limit = as(limit.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(2)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(2)); var project = 
as(limit.child(), EsqlProject.class); var eval = as(project.child(), Eval.class); assertEmptyEsRelation(eval.child()); @@ -1460,7 +1460,7 @@ public class AnalyzerTests extends ESTestCase { var agg = as(limit.child(), Aggregate.class); var eval = as(agg.child(), Eval.class); limit = as(eval.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(10)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(10)); assertEmptyEsRelation(limit.child()); } @@ -2054,10 +2054,10 @@ public class AnalyzerTests extends ESTestCase { } LogicalPlan plan = analyze(query); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(1000)); var lookup = as(limit.child(), Lookup.class); - assertThat(lookup.tableName().fold(), equalTo("int_number_names")); + assertThat(as(lookup.tableName(), Literal.class).value(), equalTo("int_number_names")); assertMap(lookup.matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat( lookup.localRelation().output().stream().map(Object::toString).toList(), @@ -2140,7 +2140,7 @@ public class AnalyzerTests extends ESTestCase { } public void testLookupJoinUnknownIndex() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String errorMessage = "Unknown index [foobar]"; IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); @@ -2169,7 +2169,7 @@ public class AnalyzerTests extends ESTestCase { } public void testLookupJoinUnknownField() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name"; String errorMessage = "1:45: 
Unknown column [last_name] in right side of join"; @@ -2192,7 +2192,7 @@ public class AnalyzerTests extends ESTestCase { } public void testMultipleLookupJoinsGiveDifferentAttributes() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); // The field attributes that get contributed by different LOOKUP JOIN commands must have different name ids, // even if they have the same names. Otherwise, things like dependency analysis - like in PruneColumns - cannot work based on @@ -2222,7 +2222,7 @@ public class AnalyzerTests extends ESTestCase { } public void testLookupJoinIndexMode() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); var indexResolution = AnalyzerTestUtils.expandedDefaultIndexResolution(); var lookupResolution = AnalyzerTestUtils.defaultLookupResolution(); @@ -2336,7 +2336,7 @@ public class AnalyzerTests extends ESTestCase { projection = as(projections.get(3), ReferenceAttribute.class); assertEquals(projection.name(), "w"); assertEquals(projection.dataType(), DataType.DOUBLE); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(as(limit.limit(), Literal.class).value(), equalTo(1000)); } public void testNamedParamsForIdentifiers() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 180e32fb7c15..2ee6cf613611 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -113,7 +113,7 @@ public class ParsingTests extends ESTestCase { } public void testJoinOnConstant() { - 
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertEquals( "1:55: JOIN ON clause only supports fields at the moment, found [123]", error("row languages = 1, gender = \"f\" | lookup join test on 123") @@ -129,7 +129,7 @@ public class ParsingTests extends ESTestCase { } public void testJoinOnMultipleFields() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertEquals( "1:35: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on gender, languages") @@ -137,7 +137,7 @@ public class ParsingTests extends ESTestCase { } public void testJoinTwiceOnTheSameField() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertEquals( "1:35: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on languages, languages") @@ -145,7 +145,7 @@ public class ParsingTests extends ESTestCase { } public void testJoinTwiceOnTheSameField_TwoLookups() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertEquals( "1:80: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on languages | eval x = 1 | lookup join test on gender, gender") diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index d78c4bfa21ce..f932992e8155 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1984,7 +1984,7 @@ public class VerifierTests extends ESTestCase { } public void testLookupJoinDataTypeMismatch() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java new file mode 100644 index 000000000000..a204e93b0d16 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class LookupFromIndexOperatorStatusTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return LookupFromIndexOperator.Status::new; + } + + @Override + protected LookupFromIndexOperator.Status createTestInstance() { + return new LookupFromIndexOperator.Status( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomLongBetween(0, TimeValue.timeValueHours(1).millis()), + randomNonNegativeLong(), + randomNonNegativeLong() + ); + } + + @Override + protected LookupFromIndexOperator.Status mutateInstance(LookupFromIndexOperator.Status in) throws IOException { + long receivedPages = in.receivedPages(); + long completedPages = in.completedPages(); + long totalTimeInMillis = in.totalTimeInMillis(); + long totalTerms = in.totalTerms(); + long emittedPages = in.emittedPages(); + switch (randomIntBetween(0, 4)) { + case 0 -> receivedPages = randomValueOtherThan(receivedPages, ESTestCase::randomNonNegativeLong); + case 1 -> completedPages = randomValueOtherThan(completedPages, ESTestCase::randomNonNegativeLong); + case 2 -> totalTimeInMillis = randomValueOtherThan(totalTimeInMillis, ESTestCase::randomNonNegativeLong); + case 3 -> totalTerms = randomValueOtherThan(totalTerms, ESTestCase::randomNonNegativeLong); + case 4 -> emittedPages = randomValueOtherThan(emittedPages, ESTestCase::randomNonNegativeLong); + default -> throw new UnsupportedOperationException(); + } + return new LookupFromIndexOperator.Status(receivedPages, completedPages, totalTimeInMillis, totalTerms, emittedPages); + } + + public void testToXContent() { + var 
status = new LookupFromIndexOperator.Status(100, 50, TimeValue.timeValueSeconds(10).millis(), 120, 88); + String json = Strings.toString(status, true, true); + assertThat(json, equalTo(""" + { + "received_pages" : 100, + "completed_pages" : 50, + "total_time_in_millis" : 10000, + "total_time" : "10s", + "emitted_pages" : 88, + "total_terms" : 120 + }""")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java new file mode 100644 index 000000000000..098ea9eaa0c2 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.esql.TestBlockFactory; +import org.junit.After; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +public class LookupFromIndexServiceResponseTests extends AbstractWireSerializingTestCase { + private final List breakers = new ArrayList<>(); + + LookupFromIndexService.LookupResponse createTestInstance(BlockFactory blockFactory) { + return new LookupFromIndexService.LookupResponse(randomList(0, 10, () -> testPage(blockFactory)), blockFactory); + } + + /** + * Build a {@link Page} to test serialization. If we had nice random + * {@linkplain Page} generation we'd use that happily, but it's off + * in the tests for compute, and we're in ESQL. And we don't + * really need a fully random one to verify serialization + * here. 
+ */ + Page testPage(BlockFactory blockFactory) { + try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(3)) { + builder.appendInt(1); + builder.appendInt(2); + builder.appendInt(3); + return new Page(builder.build().asBlock()); + } + } + + @Override + protected LookupFromIndexService.LookupResponse createTestInstance() { + // Can't use a real block factory for the basic serialization tests because they don't release. + return createTestInstance(TestBlockFactory.getNonBreakingInstance()); + } + + @Override + protected Writeable.Reader instanceReader() { + return in -> new LookupFromIndexService.LookupResponse(in, TestBlockFactory.getNonBreakingInstance()); + } + + @Override + protected LookupFromIndexService.LookupResponse mutateInstance(LookupFromIndexService.LookupResponse instance) throws IOException { + assertThat(instance.blockFactory, sameInstance(TestBlockFactory.getNonBreakingInstance())); + List pages = new ArrayList<>(instance.pages().size()); + pages.addAll(instance.pages()); + pages.add(testPage(TestBlockFactory.getNonBreakingInstance())); + return new LookupFromIndexService.LookupResponse(pages, instance.blockFactory); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(IntBlock.ENTRY)); + } + + public void testWithBreaker() throws IOException { + BlockFactory origFactory = blockFactory(); + BlockFactory copyFactory = blockFactory(); + LookupFromIndexService.LookupResponse orig = createTestInstance(origFactory); + try { + LookupFromIndexService.LookupResponse copy = copyInstance( + orig, + getNamedWriteableRegistry(), + (out, v) -> v.writeTo(out), + in -> new LookupFromIndexService.LookupResponse(in, copyFactory), + TransportVersion.current() + ); + try { + assertThat(copy, equalTo(orig)); + } finally { + copy.decRef(); + } + assertThat(copyFactory.breaker().getUsed(), equalTo(0L)); + } finally { + orig.decRef(); + } + 
assertThat(origFactory.breaker().getUsed(), equalTo(0L)); + } + + /** + * Tests that we don't reserve any memory other than that in the {@link Page}s we + * hold, and calling {@link LookupFromIndexService.LookupResponse#takePages} + * gives us those pages. If we then close those pages, we should have 0 + * reserved memory. + */ + public void testTakePages() { + BlockFactory factory = blockFactory(); + LookupFromIndexService.LookupResponse orig = createTestInstance(factory); + try { + if (orig.pages().isEmpty()) { + assertThat(factory.breaker().getUsed(), equalTo(0L)); + return; + } + List pages = orig.takePages(); + Releasables.closeExpectNoException(Releasables.wrap(Iterators.map(pages.iterator(), page -> page::releaseBlocks))); + assertThat(factory.breaker().getUsed(), equalTo(0L)); + assertThat(orig.takePages(), nullValue()); + } finally { + orig.decRef(); + } + assertThat(factory.breaker().getUsed(), equalTo(0L)); + } + + private BlockFactory blockFactory() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(4 /* more than we need*/)) + .withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new BlockFactory(breaker, bigArrays); + } + + @After + public void allBreakersEmpty() throws Exception { + // first check that all big arrays are released, which can affect breakers + MockBigArrays.ensureAllArraysAreReleased(); + + for (CircuitBreaker breaker : breakers) { + assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapperTests.java new file mode 100644 index 000000000000..828f9e061686 --- /dev/null +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapperTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.evaluator.mapper; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.hamcrest.Matchers; + +public class EvaluatorMapperTests extends ESTestCase { + public void testFoldCompletesWithPlentyOfMemory() { + Add add = new Add( + Source.synthetic("shouldn't break"), + new Literal(Source.EMPTY, 1, DataType.INTEGER), + new Literal(Source.EMPTY, 3, DataType.INTEGER) + ); + assertEquals(add.fold(new FoldContext(100)), 4); + } + + public void testFoldBreaksWithLittleMemory() { + Add add = new Add( + Source.synthetic("should break"), + new Literal(Source.EMPTY, 1, DataType.INTEGER), + new Literal(Source.EMPTY, 3, DataType.INTEGER) + ); + Exception e = expectThrows(FoldContext.FoldTooMuchMemoryException.class, () -> add.fold(new FoldContext(10))); + assertThat( + e.getMessage(), + Matchers.equalTo( + "line -1:-1: Folding query used more than 10b. " + + "The expression that pushed past the limit is [should break] which needed 32b." 
+ ) + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index c086245d6fd6..87ea6315d4f3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; @@ -40,6 +41,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -263,12 +265,12 @@ public abstract class AbstractAggregationTestCase extends AbstractFunctionTestCa assertTrue(evaluableExpression.foldable()); if (testCase.foldingExceptionClass() != null) { - Throwable t = expectThrows(testCase.foldingExceptionClass(), evaluableExpression::fold); + Throwable t = expectThrows(testCase.foldingExceptionClass(), () -> evaluableExpression.fold(FoldContext.small())); assertThat(t.getMessage(), equalTo(testCase.foldingExceptionMessage())); return; } - Object result = evaluableExpression.fold(); + Object result = 
evaluableExpression.fold(FoldContext.small()); // Decode unsigned longs into BigIntegers if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { result = NumericUtils.unsignedLongAsBigInteger((Long) result); @@ -289,7 +291,7 @@ public abstract class AbstractAggregationTestCase extends AbstractFunctionTestCa expression = resolveSurrogates(expression); // As expressions may be composed of multiple functions, we need to fold nulls bottom-up - expression = expression.transformUp(e -> new FoldNull().rule(e)); + expression = expression.transformUp(e -> new FoldNull().rule(e, unboundLogicalOptimizerContext())); assertThat(expression.dataType(), equalTo(testCase.expectedType())); Expression.TypeResolution resolution = expression.typeResolved(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 03b9dba29895..1cf087cf55cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; @@ -44,6 +45,7 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import 
org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; @@ -97,6 +99,7 @@ import java.util.stream.Stream; import static java.util.Map.entry; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.endsWith; @@ -530,14 +533,28 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { return build(testCase.getSource(), testCase.getDataAsLiterals()); } + public static EvaluatorMapper.ToEvaluator toEvaluator() { + return new EvaluatorMapper.ToEvaluator() { + @Override + public ExpressionEvaluator.Factory apply(Expression expression) { + return evaluator(expression); + } + + @Override + public FoldContext foldCtx() { + return FoldContext.small(); + } + }; + } + /** * Convert an {@link Expression} tree into a {@link ExpressionEvaluator.Factory} * for {@link ExpressionEvaluator}s in the same way as our planner. 
*/ public static ExpressionEvaluator.Factory evaluator(Expression e) { - e = new FoldNull().rule(e); + e = new FoldNull().rule(e, unboundLogicalOptimizerContext()); if (e.foldable()) { - e = new Literal(e.source(), e.fold(), e.dataType()); + e = new Literal(e.source(), e.fold(FoldContext.small()), e.dataType()); } Layout.Builder builder = new Layout.Builder(); buildLayout(builder, e); @@ -545,7 +562,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { if (resolution.unresolved()) { throw new AssertionError("expected resolved " + resolution.message()); } - return EvalMapper.toEvaluator(e, builder.build()); + return EvalMapper.toEvaluator(FoldContext.small(), e, builder.build()); } protected final Page row(List values) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 65b9c447170f..944515e54af7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -20,6 +20,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.optimizer.rules.logical.FoldNull; @@ -38,6 +39,7 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -132,7 +134,7 @@ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTes if (resolution.unresolved()) { throw new AssertionError("expected resolved " + resolution.message()); } - expression = new FoldNull().rule(expression); + expression = new FoldNull().rule(expression, unboundLogicalOptimizerContext()); assertThat(expression.dataType(), equalTo(testCase.expectedType())); logger.info("Result type: " + expression.dataType()); @@ -363,11 +365,11 @@ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTes return; } assertFalse("expected resolved", expression.typeResolved().unresolved()); - Expression nullOptimized = new FoldNull().rule(expression); + Expression nullOptimized = new FoldNull().rule(expression, unboundLogicalOptimizerContext()); assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType())); assertTrue(nullOptimized.foldable()); if (testCase.foldingExceptionClass() == null) { - Object result = nullOptimized.fold(); + Object result = nullOptimized.fold(FoldContext.small()); // Decode unsigned longs into BigIntegers if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { result = NumericUtils.unsignedLongAsBigInteger((Long) result); @@ -380,7 +382,7 @@ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTes assertWarnings(testCase.getExpectedWarnings()); } } else { - Throwable t = expectThrows(testCase.foldingExceptionClass(), nullOptimized::fold); + Throwable t = expectThrows(testCase.foldingExceptionClass(), () -> nullOptimized.fold(FoldContext.small())); assertThat(t.getMessage(), equalTo(testCase.foldingExceptionMessage())); } } @@ -419,7 +421,7 @@ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTes String 
typeNameOverflow = dataType.typeName().toLowerCase(Locale.ROOT) + " overflow"; return new TestCaseSupplier( "<" + typeNameOverflow + ">", - List.of(dataType), + List.of(dataType, dataType), () -> new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(lhsSupplier.get(), dataType, "lhs"), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java index 19af9892015b..e507640c7b23 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -80,7 +81,9 @@ public class CheckLicenseTests extends ESTestCase { var plan = parser.createStatement(esql); plan = plan.transformDown( Limit.class, - l -> Objects.equals(l.limit().fold(), 10) ? new LicensedLimit(l.source(), l.limit(), l.child(), functionLicenseFeature) : l + l -> Objects.equals(l.limit().fold(FoldContext.small()), 10) + ? 
new LicensedLimit(l.source(), l.limit(), l.child(), functionLicenseFeature) + : l ); return analyzer(registry, operationMode).analyze(plan); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 72d816a65e63..9bf063518d4b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1007,6 +1007,17 @@ public record TestCaseSupplier(String name, List types, Supplier + * For multi-row parameters, see {@link MultiRowTestCaseSupplier#dateCases}. + *

+ * Helper function for if you want to specify your min and max range as dates instead of longs. + */ + public static List dateCases(Instant min, Instant max) { + return dateCases(min.toEpochMilli(), max.toEpochMilli()); + } + /** * Generate cases for {@link DataType#DATETIME}. *

@@ -1045,6 +1056,19 @@ public record TestCaseSupplier(String name, List types, Supplier dateFormatCases() { + return List.of( + new TypedDataSupplier("", () -> new BytesRef(ESTestCase.randomDateFormatterPattern()), DataType.KEYWORD), + new TypedDataSupplier("", () -> new BytesRef(ESTestCase.randomDateFormatterPattern()), DataType.TEXT), + new TypedDataSupplier("", () -> new BytesRef("yyyy"), DataType.KEYWORD), + new TypedDataSupplier("", () -> new BytesRef("yyyy"), DataType.TEXT) + ); + } + /** * Generate cases for {@link DataType#DATE_NANOS}. * @@ -1388,7 +1412,7 @@ public record TestCaseSupplier(String name, List types, Supplier types, Supplier types, Supplier { assertTrue(caseExpr.foldable()); - return caseExpr.fold(); + return caseExpr.fold(FoldContext.small()); }); } @@ -265,22 +267,31 @@ public class CaseExtraTests extends ESTestCase { public void testCaseIsLazy() { Case caseExpr = caseExpr(true, 1, true, 2); DriverContext driveContext = driverContext(); - EvalOperator.ExpressionEvaluator evaluator = caseExpr.toEvaluator(child -> { - Object value = child.fold(); - if (value != null && value.equals(2)) { - return dvrCtx -> new EvalOperator.ExpressionEvaluator() { - @Override - public Block eval(Page page) { - fail("Unexpected evaluation of 4th argument"); - return null; - } + EvaluatorMapper.ToEvaluator toEvaluator = new EvaluatorMapper.ToEvaluator() { + @Override + public EvalOperator.ExpressionEvaluator.Factory apply(Expression expression) { + Object value = expression.fold(FoldContext.small()); + if (value != null && value.equals(2)) { + return dvrCtx -> new EvalOperator.ExpressionEvaluator() { + @Override + public Block eval(Page page) { + fail("Unexpected evaluation of 4th argument"); + return null; + } - @Override - public void close() {} - }; + @Override + public void close() {} + }; + } + return AbstractFunctionTestCase.evaluator(expression); } - return AbstractFunctionTestCase.evaluator(child); - }).get(driveContext); + + @Override + public 
FoldContext foldCtx() { + return FoldContext.small(); + } + }; + EvalOperator.ExpressionEvaluator evaluator = caseExpr.toEvaluator(toEvaluator).get(driveContext); Page page = new Page(driveContext.blockFactory().newConstantIntBlockWith(0, 1)); try (Block block = evaluator.eval(page)) { assertEquals(1, toJavaObject(block, 0)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 05923246520f..23a0f2307171 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.Build; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; @@ -779,7 +780,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { return; } assertThat(e.foldable(), equalTo(true)); - Object result = e.fold(); + Object result = e.fold(FoldContext.small()); if (testCase.getExpectedBuildEvaluatorWarnings() != null) { assertWarnings(testCase.getExpectedBuildEvaluatorWarnings()); } @@ -799,18 +800,18 @@ public class CaseTests extends AbstractScalarFunctionTestCase { } Case c = (Case) buildFieldExpression(testCase); if (extra().expectedPartialFold == null) { - assertThat(c.partiallyFold(), sameInstance(c)); + assertThat(c.partiallyFold(FoldContext.small()), sameInstance(c)); return; } if (extra().expectedPartialFold.size() == 1) { - 
assertThat(c.partiallyFold(), equalTo(extra().expectedPartialFold.get(0).asField())); + assertThat(c.partiallyFold(FoldContext.small()), equalTo(extra().expectedPartialFold.get(0).asField())); return; } Case expected = build( Source.synthetic("expected"), extra().expectedPartialFold.stream().map(TestCaseSupplier.TypedData::asField).toList() ); - assertThat(c.partiallyFold(), equalTo(expected)); + assertThat(c.partiallyFold(FoldContext.small()), equalTo(expected)); } private static Function addWarnings(List warnings) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java index 9abbfbd61c1a..8060326365d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java @@ -71,7 +71,7 @@ public class ToDatePeriodTests extends AbstractScalarFunctionTestCase { })); } } - return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java index 6486f6efcdd3..980eed1af910 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java @@ -70,7 +70,7 @@ public class ToTimeDurationTests extends AbstractScalarFunctionTestCase { })); } } - 
return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index da069e3c37cc..b4a37b029757 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -99,7 +99,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { equalTo(0) ); })); - return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } public void testDateDiffFunction() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index be978eda0675..cd27ce511b31 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; @@ -99,7 +100,7 @@ public class DateExtractTests extends AbstractConfigurationFunctionTestCase { EsqlTestUtils.TEST_CFG ); - assertThat(instance.fold(), is(date.getLong(value))); + assertThat(instance.fold(FoldContext.small()), is(date.getLong(value))); assertThat( DateExtract.process(epochMilli, new BytesRef(value.name()), EsqlTestUtils.TEST_CFG.zoneId()), is(date.getLong(value)) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatErrorTests.java index 985f1144fbcf..c7eebdd82be5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatErrorTests.java @@ -28,14 +28,25 @@ public class DateFormatErrorTests extends ErrorsForCasesWithoutExamplesTestCase @Override protected Expression build(Source source, List args) { - return new DateFormat(source, args.get(0), args.get(1), EsqlTestUtils.TEST_CFG); + return new DateFormat(source, args.get(0), args.size() == 2 ? args.get(1) : null, EsqlTestUtils.TEST_CFG); } @Override protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + // Single argument version + String source = sourceForSignature(signature); + String name = signature.get(0).typeName(); + if (signature.size() == 1) { + return equalTo("first argument of [" + source + "] must be [datetime or date_nanos], found value [] type [" + name + "]"); + } + // Two argument version + // Handle the weird case where we're calling the two argument version with the date first instead of the format. 
+ if (signature.get(0).isDate()) { + return equalTo("first argument of [" + source + "] must be [string], found value [] type [" + name + "]"); + } return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) { case 0 -> "string"; - case 1 -> "datetime"; + case 1 -> "datetime or date_nanos"; default -> ""; })); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java index 8dfdd1ba486c..1167b91a81e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java @@ -11,18 +11,22 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; import org.elasticsearch.xpack.esql.session.Configuration; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import java.time.Instant; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.matchesPattern; public class DateFormatTests extends AbstractConfigurationFunctionTestCase { public 
DateFormatTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -31,39 +35,60 @@ public class DateFormatTests extends AbstractConfigurationFunctionTestCase { @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( - true, - List.of( - new TestCaseSupplier( - List.of(DataType.KEYWORD, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataType.KEYWORD, "formatter"), - new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "val") - ), - "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", - DataType.KEYWORD, - equalTo(BytesRefs.toBytesRef("2023")) - ) + List suppliers = new ArrayList<>(); + // Formatter supplied cases + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + (format, value) -> new BytesRef( + DateFormatter.forPattern(((BytesRef) format).utf8ToString()).formatMillis(((Instant) value).toEpochMilli()) ), - new TestCaseSupplier( - List.of(DataType.TEXT, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataType.TEXT, "formatter"), - new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "val") - ), - "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", - DataType.KEYWORD, - equalTo(BytesRefs.toBytesRef("2023")) - ) - ) + DataType.KEYWORD, + TestCaseSupplier.dateFormatCases(), + TestCaseSupplier.dateCases(Instant.parse("1900-01-01T00:00:00.00Z"), Instant.parse("9999-12-31T00:00:00.00Z")), + matchesPattern( + "DateFormatMillisEvaluator\\[val=Attribute\\[channel=1], formatter=Attribute\\[(channel=0|\\w+)], locale=en_US]" + ), + (lhs, rhs) -> List.of(), + false ) ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + (format, value) -> new BytesRef( + DateFormatter.forPattern(((BytesRef) 
format).utf8ToString()).formatNanos(DateUtils.toLong((Instant) value)) + ), + DataType.KEYWORD, + TestCaseSupplier.dateFormatCases(), + TestCaseSupplier.dateNanosCases(), + matchesPattern( + "DateFormatNanosEvaluator\\[val=Attribute\\[channel=1], formatter=Attribute\\[(channel=0|\\w+)], locale=en_US]" + ), + (lhs, rhs) -> List.of(), + false + ) + ); + // Default formatter cases + TestCaseSupplier.unary( + suppliers, + "DateFormatMillisConstantEvaluator[val=Attribute[channel=0], formatter=format[strict_date_optional_time] locale[]]", + TestCaseSupplier.dateCases(Instant.parse("1900-01-01T00:00:00.00Z"), Instant.parse("9999-12-31T00:00:00.00Z")), + DataType.KEYWORD, + (value) -> new BytesRef(EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER.formatMillis(((Instant) value).toEpochMilli())), + List.of() + ); + TestCaseSupplier.unary( + suppliers, + "DateFormatNanosConstantEvaluator[val=Attribute[channel=0], formatter=format[strict_date_optional_time] locale[]]", + TestCaseSupplier.dateNanosCases(), + DataType.KEYWORD, + (value) -> new BytesRef(EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER.formatNanos(DateUtils.toLong((Instant) value))), + List.of() + ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override protected Expression buildWithConfiguration(Source source, List args, Configuration configuration) { - return new DateFormat(source, args.get(0), args.get(1), configuration); + return new DateFormat(source, args.get(0), args.size() == 2 ? 
args.get(1) : null, configuration); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java index ce6ee1702ee6..c667747a8ba7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java @@ -32,7 +32,8 @@ public class NowTests extends AbstractConfigurationFunctionTestCase { @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData( + return parameterSuppliersFromTypedDataWithDefaultChecks( + true, List.of( new TestCaseSupplier( "Now Test", @@ -44,7 +45,8 @@ public class NowTests extends AbstractConfigurationFunctionTestCase { equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) ) ) - ) + ), + (valid, position) -> "" ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java index 278c9123e30b..6531e7bee90a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java @@ -56,7 +56,7 @@ public class AcosTests extends AbstractScalarFunctionTestCase { ) ) ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java index 04fec5a20b43..410dc61ec5fa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java @@ -56,7 +56,7 @@ public class AsinTests extends AbstractScalarFunctionTestCase { ) ) ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java index bfe35a08b8ba..d702e28baf9d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java @@ -72,7 +72,7 @@ public class CbrtTests extends AbstractScalarFunctionTestCase { ); suppliers = anyNullIsNull(true, suppliers); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java index 50ed71262e5d..f3922a355180 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java @@ -29,14 +29,22 @@ public class ETests extends AbstractScalarFunctionTestCase { @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("E Test", List.of(DataType.INTEGER), () -> { - return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(1, DataType.INTEGER, "foo")), - "LiteralsEvaluator[lit=2.718281828459045]", - DataType.DOUBLE, - equalTo(Math.E) - ); - }))); + return parameterSuppliersFromTypedDataWithDefaultChecks( + true, + List.of( + new TestCaseSupplier( + "E Test", + List.of(), + () -> new TestCaseSupplier.TestCase( + List.of(), + "LiteralsEvaluator[lit=2.718281828459045]", + DataType.DOUBLE, + equalTo(Math.E) + ) + ) + ), + (v, p) -> "" + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java index d42f4ffde060..bc5faf1b2560 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java @@ -75,7 +75,7 @@ public class ExpTests extends AbstractScalarFunctionTestCase { suppliers = anyNullIsNull(true, suppliers); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 44ad4547481d..7942320656f3 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -124,7 +124,7 @@ public class Log10Tests extends AbstractScalarFunctionTestCase { ) ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java index 671cffe9e7f9..0ee277dbcadb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java @@ -187,11 +187,7 @@ public class LogTests extends AbstractScalarFunctionTestCase { ) ); - // Add null cases before the rest of the error cases, so messages are correct. 
- suppliers = anyNullIsNull(true, suppliers); - - // Negative cases - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java index e93bc7b43d98..79742952dbf5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java @@ -29,14 +29,22 @@ public class PiTests extends AbstractScalarFunctionTestCase { @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Pi Test", List.of(DataType.INTEGER), () -> { - return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(1, DataType.INTEGER, "foo")), - "LiteralsEvaluator[lit=3.141592653589793]", - DataType.DOUBLE, - equalTo(Math.PI) - ); - }))); + return parameterSuppliersFromTypedDataWithDefaultChecks( + true, + List.of( + new TestCaseSupplier( + "Pi Test", + List.of(), + () -> new TestCaseSupplier.TestCase( + List.of(), + "LiteralsEvaluator[lit=3.141592653589793]", + DataType.DOUBLE, + equalTo(Math.PI) + ) + ) + ), + (v, p) -> "numeric" + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java index 9d8b87bab887..2fc139a5458c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java @@ -77,7 +77,7 @@ public class PowTests extends AbstractScalarFunctionTestCase { ) ) ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java index 8c612e5e664e..4bf1351969d7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java @@ -72,7 +72,7 @@ public class SignumTests extends AbstractScalarFunctionTestCase { suppliers = anyNullIsNull(true, suppliers); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index 23f2adc6c02e..7cba5d6d57d4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -109,7 +109,7 @@ public class SqrtTests extends AbstractScalarFunctionTestCase { "Line -1:-1: java.lang.ArithmeticException: Square root of negative" ) ); - return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "numeric")); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java index 1a622b25b335..40e66333f953 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java @@ -29,14 +29,22 @@ public class TauTests extends AbstractScalarFunctionTestCase { @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Tau Test", List.of(DataType.INTEGER), () -> { - return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(1, DataType.INTEGER, "foo")), - "LiteralsEvaluator[lit=6.283185307179586]", - DataType.DOUBLE, - equalTo(Tau.TAU) - ); - }))); + return parameterSuppliersFromTypedDataWithDefaultChecks( + true, + List.of( + new TestCaseSupplier( + "Tau Test", + List.of(), + () -> new TestCaseSupplier.TestCase( + List.of(), + "LiteralsEvaluator[lit=6.283185307179586]", + DataType.DOUBLE, + equalTo(Tau.TAU) + ) + ) + ), + (v, p) -> "numeric" + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java index 156fc4bfe7c3..0c905b28ac93 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java @@ -32,22 +32,20 @@ public class MvPSeriesWeightedSumTests extends AbstractScalarFunctionTestCase { @ParametersFactory public static Iterable parameters() { List cases = new ArrayList<>(); + doubles(cases); - cases = randomizeBytesRefsOffset(cases); - cases = anyNullIsNull( - cases, + return parameterSuppliersFromTypedDataWithDefaultChecks( (nullPosition, nullValueDataType, original) -> nullValueDataType == DataType.NULL ? DataType.NULL : original.expectedType(), (nullPosition, nullData, original) -> { if (nullData.isForceLiteral()) { return equalTo("LiteralsEvaluator[lit=null]"); } return nullData.type() == DataType.NULL ? equalTo("LiteralsEvaluator[lit=null]") : original; - } + }, + cases, + (valid, position) -> "double" ); - cases = errorsForCasesWithoutExamples(cases, (valid, position) -> "double"); - - return parameterSuppliersFromTypedData(cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index e5f240c811bd..5d21c135e467 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -18,9 +18,11 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matchers; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; import 
java.util.function.Supplier; @@ -42,6 +44,12 @@ public class MvSortTests extends AbstractScalarFunctionTestCase { doubles(suppliers); bytesRefs(suppliers); nulls(suppliers); + + suppliers = anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 0 ? nullValueDataType : original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? Matchers.equalTo("LiteralsEvaluator[lit=null]") : original + ); return parameterSuppliersFromTypedData(suppliers); } @@ -143,75 +151,22 @@ public class MvSortTests extends AbstractScalarFunctionTestCase { } private static void bytesRefs(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD, DataType.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); - BytesRef order = new BytesRef("DESC"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field, DataType.KEYWORD, "field"), - new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() - ), - "MvSortBytesRef[field=Attribute[channel=0], order=false]", - DataType.KEYWORD, - equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) - ); - })); - - suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT, DataType.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); - BytesRef order = new BytesRef("ASC"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field, DataType.TEXT, "field"), - new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() - ), - "MvSortBytesRef[field=Attribute[channel=0], order=true]", - DataType.TEXT, - equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted().toList()) - ); - })); - - suppliers.add(new TestCaseSupplier(List.of(DataType.SEMANTIC_TEXT, DataType.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataType.SEMANTIC_TEXT).value()); - BytesRef order = new BytesRef("ASC"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field, DataType.SEMANTIC_TEXT, "field"), - new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() - ), - "MvSortBytesRef[field=Attribute[channel=0], order=true]", - DataType.SEMANTIC_TEXT, - equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted().toList()) - ); - })); - - suppliers.add(new TestCaseSupplier(List.of(DataType.IP, DataType.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); - BytesRef order = new BytesRef("DESC"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field, DataType.IP, "field"), - new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() - ), - "MvSortBytesRef[field=Attribute[channel=0], order=false]", - DataType.IP, - equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) - ); - })); - - suppliers.add(new TestCaseSupplier(List.of(DataType.VERSION, DataType.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataType.VERSION).value()); - BytesRef order = new BytesRef("ASC"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field, DataType.VERSION, "field"), - new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() - ), - "MvSortBytesRef[field=Attribute[channel=0], order=true]", - DataType.VERSION, - equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted().toList()) - ); - })); + for (DataType type : List.of(DataType.KEYWORD, DataType.TEXT, DataType.SEMANTIC_TEXT, DataType.IP, DataType.VERSION)) { + suppliers.add(new TestCaseSupplier(List.of(type, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> (BytesRef) randomLiteral(type).value()); + boolean order = randomBoolean(); + var sortedData = field.stream().sorted(order ? Comparator.naturalOrder() : Comparator.reverseOrder()).toList(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field, type, "field"), + new TestCaseSupplier.TypedData(new BytesRef(order ? "ASC" : "DESC"), DataType.KEYWORD, "order").forceLiteral() + ), + "MvSortBytesRef[field=Attribute[channel=0], order=" + order + "]", + type, + equalTo(sortedData.size() == 1 ? sortedData.get(0) : sortedData) + ); + })); + } } private static void nulls(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index 26d113359a6f..89b148144fc8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -65,7 +65,7 @@ public class MvSumTests extends AbstractMultivalueFunctionTestCase { data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); return data; })); - return parameterSuppliersFromTypedData(anyNullIsNull(false, cases)); + return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "numeric"); } private static TestCaseSupplier arithmeticExceptionCase(DataType dataType, Supplier dataSupplier) { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index 797c99992815..688341ebaa2b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -187,19 +188,27 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { Layout.Builder builder = new Layout.Builder(); buildLayout(builder, exp); Layout layout = builder.build(); - EvaluatorMapper.ToEvaluator toEvaluator = child -> { - if (child == evil) { - return dvrCtx -> new EvalOperator.ExpressionEvaluator() { - @Override - public Block eval(Page page) { - throw new AssertionError("shouldn't be called"); - } + EvaluatorMapper.ToEvaluator toEvaluator = new EvaluatorMapper.ToEvaluator() { + @Override + public EvalOperator.ExpressionEvaluator.Factory apply(Expression expression) { + if (expression == evil) { + return dvrCtx -> new EvalOperator.ExpressionEvaluator() { + @Override + public Block eval(Page page) { + throw new AssertionError("shouldn't be called"); + } - @Override - public void close() {} - }; + @Override + public void close() {} + }; + } + return EvalMapper.toEvaluator(FoldContext.small(), expression, layout); + } + + @Override + public 
FoldContext foldCtx() { + return FoldContext.small(); } - return EvalMapper.toEvaluator(child, layout); }; try ( EvalOperator.ExpressionEvaluator eval = exp.toEvaluator(toEvaluator).get(driverContext()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index ac060b945456..e58819beb803 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -76,8 +76,6 @@ public class LocateTests extends AbstractScalarFunctionTestCase { } } - suppliers = anyNullIsNull(true, suppliers); - // Here follows some non-randomized examples that we want to cover on every run suppliers.add(supplier("a tiger", "a t", null, 1)); suppliers.add(supplier("a tiger", "a", null, 1)); @@ -128,7 +126,7 @@ public class LocateTests extends AbstractScalarFunctionTestCase { suppliers.add(supplier("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0, 2)); suppliers.add(supplier("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0, 3)); - return parameterSuppliersFromTypedData(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 4f8adf3abaae..6c41552a9fc5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -12,6 +12,7 @@ import 
com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -159,8 +160,8 @@ public class RLikeTests extends AbstractScalarFunctionTestCase { Expression expression = args.get(0); Literal pattern = (Literal) args.get(1); Literal caseInsensitive = args.size() > 2 ? (Literal) args.get(2) : null; - String patternString = ((BytesRef) pattern.fold()).utf8ToString(); - boolean caseInsensitiveBool = caseInsensitive != null ? (boolean) caseInsensitive.fold() : false; + String patternString = ((BytesRef) pattern.fold(FoldContext.small())).utf8ToString(); + boolean caseInsensitiveBool = caseInsensitive != null ? (boolean) caseInsensitive.fold(FoldContext.small()) : false; logger.info("pattern={} caseInsensitive={}", patternString, caseInsensitiveBool); return caseInsensitiveBool diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java index 8b4ea066fdcc..5eb654b0d823 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java @@ -122,13 +122,11 @@ public class RepeatTests extends AbstractScalarFunctionTestCase { .withFoldingException(IllegalArgumentException.class, "Number parameter cannot be negative, found [" + number + "]"); })); - cases = anyNullIsNull(true, cases); - cases = errorsForCasesWithoutExamples(cases, (v, p) -> switch (p) { + 
return parameterSuppliersFromTypedDataWithDefaultChecks(true, cases, (v, p) -> switch (p) { case 0 -> "string"; case 1 -> "integer"; default -> ""; }); - return parameterSuppliersFromTypedData(cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java index 58d52cc02b54..397fb8064626 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java @@ -39,7 +39,7 @@ public class ReverseTests extends AbstractScalarFunctionTestCase { } } - return parameterSuppliersFromTypedData(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "string"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java index b355feb6130a..f779dd038454 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -54,7 +55,7 @@ 
public class ToLowerTests extends AbstractConfigurationFunctionTestCase { String testString = randomAlphaOfLength(10); Configuration cfg = randomLocaleConfig(); ToLower func = new ToLower(Source.EMPTY, new Literal(Source.EMPTY, testString, DataType.KEYWORD), cfg); - assertThat(BytesRefs.toBytesRef(testString.toLowerCase(cfg.locale())), equalTo(func.fold())); + assertThat(BytesRefs.toBytesRef(testString.toLowerCase(cfg.locale())), equalTo(func.fold(FoldContext.small()))); } private Configuration randomLocaleConfig() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java index fdae4f953a0f..3957c2e1fb2c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -54,7 +55,7 @@ public class ToUpperTests extends AbstractConfigurationFunctionTestCase { String testString = randomAlphaOfLength(10); Configuration cfg = randomLocaleConfig(); ToUpper func = new ToUpper(Source.EMPTY, new Literal(Source.EMPTY, testString, DataType.KEYWORD), cfg); - assertThat(BytesRefs.toBytesRef(testString.toUpperCase(cfg.locale())), equalTo(func.fold())); + assertThat(BytesRefs.toBytesRef(testString.toUpperCase(cfg.locale())), 
equalTo(func.fold(FoldContext.small()))); } private Configuration randomLocaleConfig() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index eed2c7379e9e..6626ac50d60b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -12,6 +12,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -78,8 +79,8 @@ public class WildcardLikeTests extends AbstractScalarFunctionTestCase { Literal pattern = (Literal) args.get(1); if (args.size() > 2) { Literal caseInsensitive = (Literal) args.get(2); - assertThat(caseInsensitive.fold(), equalTo(false)); + assertThat(caseInsensitive.fold(FoldContext.small()), equalTo(false)); } - return new WildcardLike(source, expression, new WildcardPattern(((BytesRef) pattern.fold()).utf8ToString())); + return new WildcardLike(source, expression, new WildcardPattern(((BytesRef) pattern.fold(FoldContext.small())).utf8ToString())); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index aa4c037e5e96..62cb0bd868ee 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -35,7 +35,6 @@ import static org.elasticsearch.xpack.esql.core.util.DateUtils.asDateTime; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asMillis; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @@ -247,33 +246,7 @@ public class AddTests extends AbstractScalarFunctionTestCase { ); }).toList()); - // Datetime tests are split in two, depending on their permissiveness of null-injection, which cannot happen "automatically" for - // Datetime + Period/Duration, since the expression will take the non-null arg's type. - suppliers = anyNullIsNull( - suppliers, - (nullPosition, nullType, original) -> original.expectedType(), - (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
equalTo("LiteralsEvaluator[lit=null]") : original - ); - suppliers = errorsForCasesWithoutExamples(suppliers, AddTests::addErrorMessageString); - - // Cases that should generate warnings - suppliers.add(new TestCaseSupplier("MV", List.of(DataType.INTEGER, DataType.INTEGER), () -> { - // Ensure we don't have an overflow - int rhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - int lhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - int lhs2 = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(List.of(lhs, lhs2), DataType.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataType.INTEGER, "rhs") - ), - "AddIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataType.INTEGER, - is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); - })); - // exact math arithmetic exceptions + // Exact math arithmetic exceptions suppliers.add( arithmeticExceptionOverflowCase( DataType.INTEGER, @@ -315,6 +288,16 @@ public class AddTests extends AbstractScalarFunctionTestCase { ) ); + // Datetime tests are split in two, depending on their permissiveness of null-injection, which cannot happen "automatically" for + // Datetime + Period/Duration, since the expression will take the non-null arg's type. + suppliers = anyNullIsNull( + suppliers, + (nullPosition, nullType, original) -> original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
equalTo("LiteralsEvaluator[lit=null]") : original + ); + suppliers = errorsForCasesWithoutExamples(suppliers, AddTests::addErrorMessageString); + + // Cannot use parameterSuppliersFromTypedDataWithDefaultChecks as error messages are non-trivial return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index c6b607ded799..70536cd0036e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -89,8 +89,6 @@ public class DivTests extends AbstractScalarFunctionTestCase { ) ); - suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), DivTests::divErrorMessageString); - // Divide by zero cases - all of these should warn and return null TestCaseSupplier.NumericTypeTestConfigs typeStuff = new TestCaseSupplier.NumericTypeTestConfigs<>( new TestCaseSupplier.NumericTypeTestConfig<>( @@ -163,6 +161,9 @@ public class DivTests extends AbstractScalarFunctionTestCase { ) ); + suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), DivTests::divErrorMessageString); + + // Cannot use parameterSuppliersFromTypedDataWithDefaultChecks as error messages are non-trivial return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index 65335cdeeb4e..2ef1cbe89e6a 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -76,8 +76,6 @@ public class ModTests extends AbstractScalarFunctionTestCase { ) ); - suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), ModTests::modErrorMessageString); - // Divide by zero cases - all of these should warn and return null TestCaseSupplier.NumericTypeTestConfigs typeStuff = new TestCaseSupplier.NumericTypeTestConfigs<>( new TestCaseSupplier.NumericTypeTestConfig<>( @@ -150,6 +148,9 @@ public class ModTests extends AbstractScalarFunctionTestCase { ) ); + suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), ModTests::modErrorMessageString); + + // Cannot use parameterSuppliersFromTypedDataWithDefaultChecks as error messages are non-trivial return parameterSuppliersFromTypedData(suppliers); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java index a2ca331cb261..12b685eaeeb8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; @@ -124,7 +125,20 @@ public class MulTests extends AbstractScalarFunctionTestCase { ) ); - return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); + suppliers 
= errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), MulTests::mulErrorMessageString); + + // Cannot use parameterSuppliersFromTypedDataWithDefaultChecks as error messages are non-trivial + return parameterSuppliersFromTypedData(suppliers); + } + + private static String mulErrorMessageString(boolean includeOrdinal, List> validPerPosition, List types) { + try { + return typeErrorMessage(includeOrdinal, validPerPosition, types, (a, b) -> "numeric"); + } catch (IllegalStateException e) { + // This means all the positional args were okay, so the expected error is from the combination + return "[*] has arguments with incompatible types [" + types.get(0).typeName() + "] and [" + types.get(1).typeName() + "]"; + + } } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index a8c7b5b5a83f..15860d35539e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -12,6 +12,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -154,7 +155,7 @@ public class NegTests extends AbstractScalarFunctionTestCase { private Object foldTemporalAmount(Object val) { Neg neg = new Neg(Source.EMPTY, new Literal(Source.EMPTY, val, typeOf(val))); - return neg.fold(); + return neg.fold(FoldContext.small()); } 
private static DataType typeOf(Object val) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index 3a7d4e6482d7..600db229e90a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -24,6 +24,7 @@ import java.time.Period; import java.time.ZonedDateTime; import java.time.temporal.TemporalAmount; import java.util.List; +import java.util.Set; import java.util.function.BinaryOperator; import java.util.function.Supplier; import java.util.function.ToLongBiFunction; @@ -33,8 +34,6 @@ import static org.elasticsearch.xpack.esql.core.util.DateUtils.asDateTime; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asMillis; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; public class SubTests extends AbstractScalarFunctionTestCase { @@ -245,7 +244,8 @@ public class SubTests extends AbstractScalarFunctionTestCase { "SubUnsignedLongsEvaluator" ) ); - suppliers = anyNullIsNull(suppliers, (nullPosition, nullValueDataType, original) -> { + + suppliers = errorsForCasesWithoutExamples(anyNullIsNull(suppliers, (nullPosition, nullValueDataType, original) -> { if (nullValueDataType == DataType.NULL) { return original.getData().get(nullPosition == 0 ? 
1 : 0).type(); } @@ -255,28 +255,30 @@ public class SubTests extends AbstractScalarFunctionTestCase { return equalTo("LiteralsEvaluator[lit=null]"); } return original; - }); - - suppliers.add(new TestCaseSupplier("MV", List.of(DataType.INTEGER, DataType.INTEGER), () -> { - // Ensure we don't have an overflow - int rhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - int lhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - int lhs2 = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(List.of(lhs, lhs2), DataType.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataType.INTEGER, "rhs") - ), - "SubIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataType.INTEGER, - is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); - })); + }), SubTests::subErrorMessageString); + // Cannot use parameterSuppliersFromTypedDataWithDefaultChecks as error messages are non-trivial return parameterSuppliersFromTypedData(suppliers); } + private static String subErrorMessageString(boolean includeOrdinal, List> validPerPosition, List types) { + if (types.get(1) == DataType.DATETIME) { + if (types.get(0).isNumeric() || DataType.isMillisOrNanos(types.get(0))) { + return "[-] has arguments with incompatible types [" + types.get(0).typeName() + "] and [datetime]"; + } + if (DataType.isNull(types.get(0))) { + return "[-] arguments are in unsupported order: cannot subtract a [DATETIME] value [datetime] from a [NULL] amount [null]"; + } + } + + try { + return typeErrorMessage(includeOrdinal, validPerPosition, types, (a, b) -> "date_nanos, datetime or numeric"); + } catch (IllegalStateException e) { + // This 
means all the positional args were okay, so the expected error is from the combination + return "[-] has arguments with incompatible types [" + types.get(0).typeName() + "] and [" + types.get(1).typeName() + "]"; + } + } + @Override protected Expression build(Source source, List args) { return new Sub(source, args.get(0), args.get(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java index 8e46f7d28540..4b9f21187f42 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java @@ -236,7 +236,7 @@ public class EqualsTests extends AbstractScalarFunctionTestCase { ) ); - return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index f00e8a19f9da..45cfef518d39 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -158,7 +158,7 @@ public class GreaterThanOrEqualTests extends AbstractScalarFunctionTestCase { ) ); - return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, 
suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index 169f1886af47..6ab55ddbcf40 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -172,7 +172,7 @@ public class GreaterThanTests extends AbstractScalarFunctionTestCase { ) ); - return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java index b004adca351a..80f67ec8e5e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -48,27 +49,27 @@ public class InTests extends AbstractFunctionTestCase { public void testInWithContainedValue() { In in = new In(EMPTY, TWO, 
Arrays.asList(ONE, TWO, THREE)); - assertTrue((Boolean) in.fold()); + assertTrue((Boolean) in.fold(FoldContext.small())); } public void testInWithNotContainedValue() { In in = new In(EMPTY, THREE, Arrays.asList(ONE, TWO)); - assertFalse((Boolean) in.fold()); + assertFalse((Boolean) in.fold(FoldContext.small())); } public void testHandleNullOnLeftValue() { In in = new In(EMPTY, NULL, Arrays.asList(ONE, TWO, THREE)); - assertNull(in.fold()); + assertNull(in.fold(FoldContext.small())); in = new In(EMPTY, NULL, Arrays.asList(ONE, NULL, THREE)); - assertNull(in.fold()); + assertNull(in.fold(FoldContext.small())); } public void testHandleNullsOnRightValue() { In in = new In(EMPTY, THREE, Arrays.asList(ONE, NULL, THREE)); - assertTrue((Boolean) in.fold()); + assertTrue((Boolean) in.fold(FoldContext.small())); in = new In(EMPTY, ONE, Arrays.asList(TWO, NULL, THREE)); - assertNull(in.fold()); + assertNull(in.fold(FoldContext.small())); } private static Literal L(Object value) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsTests.java index faf0a0d8f418..6fa1112f23f4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.of; @@ -18,37 +19,37 @@ import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; public class InsensitiveEqualsTests extends ESTestCase { public void testFold() { - assertTrue(insensitiveEquals(l("foo"), l("foo")).fold()); - assertTrue(insensitiveEquals(l("Foo"), l("foo")).fold()); - assertTrue(insensitiveEquals(l("Foo"), l("fOO")).fold()); - assertTrue(insensitiveEquals(l("foo*"), l("foo*")).fold()); - assertTrue(insensitiveEquals(l("foo*"), l("FOO*")).fold()); - assertTrue(insensitiveEquals(l("foo?bar"), l("foo?bar")).fold()); - assertTrue(insensitiveEquals(l("foo?bar"), l("FOO?BAR")).fold()); - assertFalse(insensitiveEquals(l("Foo"), l("fo*")).fold()); - assertFalse(insensitiveEquals(l("Fox"), l("fo?")).fold()); - assertFalse(insensitiveEquals(l("Foo"), l("*OO")).fold()); - assertFalse(insensitiveEquals(l("BarFooBaz"), l("*O*")).fold()); - assertFalse(insensitiveEquals(l("BarFooBaz"), l("bar*baz")).fold()); - assertFalse(insensitiveEquals(l("foo"), l("*")).fold()); + assertTrue(insensitiveEquals(l("foo"), l("foo")).fold(FoldContext.small())); + assertTrue(insensitiveEquals(l("Foo"), l("foo")).fold(FoldContext.small())); + assertTrue(insensitiveEquals(l("Foo"), l("fOO")).fold(FoldContext.small())); + assertTrue(insensitiveEquals(l("foo*"), l("foo*")).fold(FoldContext.small())); + assertTrue(insensitiveEquals(l("foo*"), l("FOO*")).fold(FoldContext.small())); + assertTrue(insensitiveEquals(l("foo?bar"), l("foo?bar")).fold(FoldContext.small())); + assertTrue(insensitiveEquals(l("foo?bar"), l("FOO?BAR")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("Foo"), l("fo*")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("Fox"), l("fo?")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("Foo"), l("*OO")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("BarFooBaz"), l("*O*")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("BarFooBaz"), 
l("bar*baz")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("foo"), l("*")).fold(FoldContext.small())); - assertFalse(insensitiveEquals(l("foo*bar"), l("foo\\*bar")).fold()); - assertFalse(insensitiveEquals(l("foo?"), l("foo\\?")).fold()); - assertFalse(insensitiveEquals(l("foo?bar"), l("foo\\?bar")).fold()); - assertFalse(insensitiveEquals(l(randomAlphaOfLength(10)), l("*")).fold()); - assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("???")).fold()); + assertFalse(insensitiveEquals(l("foo*bar"), l("foo\\*bar")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("foo?"), l("foo\\?")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("foo?bar"), l("foo\\?bar")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(10)), l("*")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("???")).fold(FoldContext.small())); - assertFalse(insensitiveEquals(l("foo"), l("bar")).fold()); - assertFalse(insensitiveEquals(l("foo"), l("ba*")).fold()); - assertFalse(insensitiveEquals(l("foo"), l("*a*")).fold()); - assertFalse(insensitiveEquals(l(""), l("bar")).fold()); - assertFalse(insensitiveEquals(l("foo"), l("")).fold()); - assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("??")).fold()); - assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("????")).fold()); + assertFalse(insensitiveEquals(l("foo"), l("bar")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("foo"), l("ba*")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("foo"), l("*a*")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l(""), l("bar")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l("foo"), l("")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("??")).fold(FoldContext.small())); + assertFalse(insensitiveEquals(l(randomAlphaOfLength(3)), l("????")).fold(FoldContext.small())); - 
assertNull(insensitiveEquals(l("foo"), Literal.NULL).fold()); - assertNull(insensitiveEquals(Literal.NULL, l("foo")).fold()); - assertNull(insensitiveEquals(Literal.NULL, Literal.NULL).fold()); + assertNull(insensitiveEquals(l("foo"), Literal.NULL).fold(FoldContext.small())); + assertNull(insensitiveEquals(Literal.NULL, l("foo")).fold(FoldContext.small())); + assertNull(insensitiveEquals(Literal.NULL, Literal.NULL).fold(FoldContext.small())); } public void testProcess() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 7eccc2c8cc86..8d48d5812d9d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -158,7 +158,7 @@ public class LessThanOrEqualTests extends AbstractScalarFunctionTestCase { ) ); - return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index a17be8aa9797..f5f4bae5b2af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -172,7 +172,7 @@ public class LessThanTests extends AbstractScalarFunctionTestCase { ) ); - return 
parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java index 296b4b37cfa7..95001366f1ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java @@ -232,7 +232,8 @@ public class NotEqualsTests extends AbstractScalarFunctionTestCase { false ) ); - return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); + + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 0c03556241d2..11cd123c731e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import 
org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; @@ -70,6 +71,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.greaterThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForExistingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; @@ -93,7 +95,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { mapping = loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()); analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, EsqlTestUtils.emptyPolicyResolution()), @@ -161,7 +163,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { assertThat(Expressions.names(eval.fields()), contains("last_name")); var alias = as(eval.fields().get(0), Alias.class); var literal = as(alias.child(), Literal.class); - assertThat(literal.fold(), is(nullValue())); + assertThat(literal.value(), is(nullValue())); assertThat(literal.dataType(), is(DataType.KEYWORD)); var limit = as(eval.child(), Limit.class); @@ -304,7 +306,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { var alias = as(eval.fields().get(0), Alias.class); var literal = as(alias.child(), Literal.class); - assertThat(literal.fold(), is(nullValue())); + assertThat(literal.value(), 
is(nullValue())); assertThat(literal.dataType(), is(DataType.INTEGER)); var limit = as(eval.child(), Limit.class); @@ -402,7 +404,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { EsIndex index = new EsIndex("large", large, Map.of("large", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(index); - var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + var logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()); var analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, EsqlTestUtils.emptyPolicyResolution()), @@ -411,7 +413,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { var analyzed = analyzer.analyze(parser.createStatement(query)); var optimized = logicalOptimizer.optimize(analyzed); - var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, searchStats); + var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small(), searchStats); var plan = new LocalLogicalPlanOptimizer(localContext).localOptimize(optimized); var project = as(plan, Project.class); @@ -423,7 +425,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { var eval = as(project.child(), Eval.class); var field = eval.fields().get(0); assertThat(Expressions.name(field), is("field005")); - assertThat(Alias.unwrap(field).fold(), Matchers.nullValue()); + assertThat(Alias.unwrap(field).fold(FoldContext.small()), Matchers.nullValue()); } // InferIsNotNull @@ -561,7 +563,7 @@ public class LocalLogicalPlanOptimizerTests extends ESTestCase { } private LogicalPlan localPlan(LogicalPlan plan, SearchStats searchStats) { - var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, searchStats); + var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small(), searchStats); // System.out.println(plan); 
var localPlan = new LocalLogicalPlanOptimizer(localContext).localOptimize(plan); // System.out.println(localPlan); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 6dee34323443..1536ed7f99fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -90,6 +91,7 @@ import static org.elasticsearch.compute.aggregation.AggregatorMode.FINAL; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; import static org.hamcrest.Matchers.contains; @@ -407,7 +409,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { @SuppressWarnings("unchecked") public void testSingleCountWithStatsFilter() { // an optimizer that filters out the ExtractAggregateCommonFilter rule - var logicalOptimizer = new 
LogicalPlanOptimizer(new LogicalOptimizerContext(config)) { + var logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()) { @Override protected List> batches() { var oldBatches = super.batches(); @@ -486,7 +488,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var expected = QueryBuilders.queryStringQuery("last_name: Smith"); assertThat(query.query().toString(), is(expected.toString())); } @@ -515,7 +517,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(2, 37, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); @@ -550,7 +552,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(2, 37, "cidr_match(ip, \"127.0.0.1/32\")"); var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); @@ -585,7 +587,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = 
as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(3, 8, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); @@ -618,7 +620,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var queryStringLeft = QueryBuilders.queryStringQuery("last_name: Smith"); var queryStringRight = QueryBuilders.queryStringQuery("emp_no: [10010 TO *]"); @@ -647,7 +649,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var expected = QueryBuilders.matchQuery("last_name", "Smith").lenient(true); assertThat(query.query().toString(), is(expected.toString())); } @@ -676,7 +678,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(2, 38, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); @@ -711,7 +713,7 @@ public class 
LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(2, 32, "cidr_match(ip, \"127.0.0.1/32\")"); var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); @@ -745,7 +747,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(3, 8, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); @@ -777,7 +779,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var queryStringLeft = QueryBuilders.matchQuery("last_name", "Smith").lenient(true); var queryStringRight = QueryBuilders.matchQuery("first_name", "John").lenient(true); @@ -806,7 +808,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var expected = 
kqlQueryBuilder("last_name: Smith"); assertThat(query.query().toString(), is(expected.toString())); } @@ -835,7 +837,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(2, 36, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); @@ -870,7 +872,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(2, 36, "cidr_match(ip, \"127.0.0.1/32\")"); var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); @@ -905,7 +907,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); Source filterSource = new Source(3, 8, "emp_no > 10000"); var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); @@ -938,7 +940,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), 
EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var kqlQueryLeft = kqlQueryBuilder("last_name: Smith"); var kqlQueryRight = kqlQueryBuilder("emp_no > 10010"); @@ -1004,7 +1006,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var expected = QueryBuilders.existsQuery("emp_no"); assertThat(query.query().toString(), is(expected.toString())); } @@ -1028,7 +1030,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var expected = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("emp_no")); assertThat(query.query().toString(), is(expected.toString())); } @@ -1599,7 +1601,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { var project = as(exchange.child(), ProjectExec.class); var field = as(project.child(), FieldExtractExec.class); var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); + assertThat(as(query.limit(), Literal.class).value(), is(1000)); var expected = QueryBuilders.termQuery("last_name", "Smith"); assertThat(query.query().toString(), is(expected.toString())); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 
4d175dea0507..a8f8054fbc6b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -151,6 +152,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; import static org.elasticsearch.xpack.esql.EsqlTestUtils.referenceAttribute; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; @@ -188,6 +190,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static EsqlParser parser; private static Analyzer analyzer; + private static LogicalOptimizerContext logicalOptimizerCtx; private static LogicalPlanOptimizer logicalOptimizer; private static Map mapping; private static Map mappingAirports; @@ -203,7 +206,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Analyzer metricsAnalyzer; private static class SubstitutionOnlyOptimizer extends LogicalPlanOptimizer { - static SubstitutionOnlyOptimizer 
INSTANCE = new SubstitutionOnlyOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + static SubstitutionOnlyOptimizer INSTANCE = new SubstitutionOnlyOptimizer(unboundLogicalOptimizerContext()); SubstitutionOnlyOptimizer(LogicalOptimizerContext optimizerContext) { super(optimizerContext); @@ -218,7 +221,8 @@ public class LogicalPlanOptimizerTests extends ESTestCase { @BeforeClass public static void init() { parser = new EsqlParser(); - logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + logicalOptimizerCtx = unboundLogicalOptimizerContext(); + logicalOptimizer = new LogicalPlanOptimizer(logicalOptimizerCtx); enrichResolution = new EnrichResolution(); AnalyzerTestUtils.loadEnrichPolicyResolution(enrichResolution, "languages_idx", "id", "languages_idx", "mapping-languages.json"); @@ -325,7 +329,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(exprs.size(), equalTo(1)); var alias = as(exprs.get(0), Alias.class); assertThat(alias.name(), equalTo("x")); - assertThat(alias.child().fold(), equalTo(1)); + assertThat(alias.child().fold(FoldContext.small()), equalTo(1)); } /** @@ -361,11 +365,11 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(exprs.size(), equalTo(1)); var alias = as(exprs.get(0), Alias.class); assertThat(alias.name(), equalTo("x")); - assertThat(alias.child().fold(), equalTo(1)); + assertThat(alias.child().fold(FoldContext.small()), equalTo(1)); var filterCondition = as(filter.condition(), GreaterThan.class); assertThat(Expressions.name(filterCondition.left()), equalTo("languages")); - assertThat(filterCondition.right().fold(), equalTo(1)); + assertThat(filterCondition.right().fold(FoldContext.small()), equalTo(1)); } public void testCombineProjections() { @@ -625,7 +629,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(alias.name(), is("sum(salary) + 1 where false")); var add = as(alias.child(), Add.class); var literal = 
as(add.right(), Literal.class); - assertThat(literal.fold(), is(1)); + assertThat(literal.value(), is(1)); var limit = as(eval.child(), Limit.class); var source = as(limit.child(), LocalRelation.class); @@ -658,7 +662,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var alias = as(eval.fields().getFirst(), Alias.class); assertTrue(alias.child().foldable()); - assertThat(alias.child().fold(), nullValue()); + assertThat(alias.child().fold(FoldContext.small()), nullValue()); assertThat(alias.child().dataType(), is(LONG)); alias = as(eval.fields().getLast(), Alias.class); @@ -695,7 +699,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var alias = as(eval.fields().getFirst(), Alias.class); assertTrue(alias.child().foldable()); - assertThat(alias.child().fold(), nullValue()); + assertThat(alias.child().fold(FoldContext.small()), nullValue()); assertThat(alias.child().dataType(), is(LONG)); alias = as(eval.fields().get(1), Alias.class); @@ -703,7 +707,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { alias = as(eval.fields().getLast(), Alias.class); assertTrue(alias.child().foldable()); - assertThat(alias.child().fold(), nullValue()); + assertThat(alias.child().fold(FoldContext.small()), nullValue()); assertThat(alias.child().dataType(), is(LONG)); var limit = as(eval.child(), Limit.class); @@ -752,7 +756,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(alias.name(), is("count_distinct(salary + 2) + 3 where false")); var add = as(alias.child(), Add.class); var literal = as(add.right(), Literal.class); - assertThat(literal.fold(), is(3)); + assertThat(literal.value(), is(3)); var limit = as(eval.child(), Limit.class); var source = as(limit.child(), LocalRelation.class); @@ -788,13 +792,13 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var alias = as(eval.fields().getFirst(), Alias.class); assertThat(Expressions.name(alias), containsString("max_a")); 
assertTrue(alias.child().foldable()); - assertThat(alias.child().fold(), nullValue()); + assertThat(alias.child().fold(FoldContext.small()), nullValue()); assertThat(alias.child().dataType(), is(INTEGER)); alias = as(eval.fields().getLast(), Alias.class); assertThat(Expressions.name(alias), containsString("min_a")); assertTrue(alias.child().foldable()); - assertThat(alias.child().fold(), nullValue()); + assertThat(alias.child().fold(FoldContext.small()), nullValue()); assertThat(alias.child().dataType(), is(INTEGER)); var limit = as(eval.child(), Limit.class); @@ -933,7 +937,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var gt = as(filter.condition(), GreaterThan.class); assertThat(Expressions.name(gt.left()), is("emp_no")); assertTrue(gt.right().foldable()); - assertThat(gt.right().fold(), is(1)); + assertThat(gt.right().fold(FoldContext.small()), is(1)); var source = as(filter.child(), EsRelation.class); } @@ -1053,7 +1057,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var gt = as(filter.condition(), GreaterThan.class); // name is "emp_no > 1 + 1" assertThat(Expressions.name(gt.left()), is("emp_no")); assertTrue(gt.right().foldable()); - assertThat(gt.right().fold(), is(2)); + assertThat(gt.right().fold(FoldContext.small()), is(2)); var source = as(filter.child(), EsRelation.class); } @@ -1083,12 +1087,12 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var lt = as(and.left(), LessThan.class); assertThat(Expressions.name(lt.left()), is("emp_no")); assertTrue(lt.right().foldable()); - assertThat(lt.right().fold(), is(10)); + assertThat(lt.right().fold(FoldContext.small()), is(10)); var equals = as(and.right(), Equals.class); assertThat(Expressions.name(equals.left()), is("last_name")); assertTrue(equals.right().foldable()); - assertThat(equals.right().fold(), is(BytesRefs.toBytesRef("Doe"))); + assertThat(equals.right().fold(FoldContext.small()), is(BytesRefs.toBytesRef("Doe"))); var source = as(filter.child(), 
EsRelation.class); } @@ -1303,7 +1307,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var anotherLimit = new Limit(EMPTY, L(limitValues[secondLimit]), oneLimit); assertEquals( new Limit(EMPTY, L(Math.min(limitValues[0], limitValues[1])), emptySource()), - new PushDownAndCombineLimits().rule(anotherLimit) + new PushDownAndCombineLimits().rule(anotherLimit, logicalOptimizerCtx) ); } @@ -1322,7 +1326,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var limit = new Limit(EMPTY, L(10), join); - var optimizedPlan = new PushDownAndCombineLimits().rule(limit); + var optimizedPlan = new PushDownAndCombineLimits().rule(limit, logicalOptimizerCtx); assertEquals(join.replaceChildren(limit.replaceChild(join.left()), join.right()), optimizedPlan); } @@ -1340,10 +1344,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var value = i == limitWithMinimum ? minimum : randomIntBetween(100, 1000); plan = new Limit(EMPTY, L(value), plan); } - assertEquals( - new Limit(EMPTY, L(minimum), relation), - new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)).optimize(plan) - ); + assertEquals(new Limit(EMPTY, L(minimum), relation), logicalOptimizer.optimize(plan)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") @@ -1864,7 +1865,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(mvExpand.limit(), equalTo(1000)); var keep = as(mvExpand.child(), EsqlProject.class); var limitPastMvExpand = as(keep.child(), Limit.class); - assertThat(limitPastMvExpand.limit().fold(), equalTo(1000)); + assertThat(limitPastMvExpand.limit().fold(FoldContext.small()), equalTo(1000)); as(limitPastMvExpand.child(), EsRelation.class); } @@ -1887,7 +1888,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(mvExpand.limit(), equalTo(10)); var project = as(mvExpand.child(), EsqlProject.class); var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(), 
equalTo(1)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1)); as(limit.child(), EsRelation.class); } @@ -1921,7 +1922,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(5)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); var mvExp = as(topN.child(), MvExpand.class); assertThat(mvExp.limit(), equalTo(5)); @@ -1931,7 +1932,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { mvExp = as(filter.child(), MvExpand.class); assertThat(mvExp.limit(), equalTo(10)); topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(10)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); filter = as(topN.child(), Filter.class); as(filter.child(), EsRelation.class); } @@ -1955,11 +1956,11 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(5)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary", "first_name")); var mvExp = as(topN.child(), MvExpand.class); topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); assertThat(orderNames(topN), contains("emp_no")); as(topN.child(), EsRelation.class); } @@ -1985,14 +1986,14 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(5)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("first_name")); topN = as(topN.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(5)); + 
assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); var mvExp = as(topN.child(), MvExpand.class); topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); assertThat(orderNames(topN), contains("emp_no")); as(topN.child(), EsRelation.class); } @@ -2021,11 +2022,11 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var limit = as(plan, Limit.class); var filter = as(limit.child(), Filter.class); - assertThat(limit.limit().fold(), equalTo(5)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(5)); var agg = as(filter.child(), Aggregate.class); var mvExp = as(agg.child(), MvExpand.class); var topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(50)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(50)); assertThat(orderNames(topN), contains("emp_no")); as(topN.child(), EsRelation.class); } @@ -2052,13 +2053,13 @@ public class LogicalPlanOptimizerTests extends ESTestCase { | limit 5"""); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(5)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(5)); var filter = as(limit.child(), Filter.class); var agg = as(filter.child(), Aggregate.class); var mvExp = as(agg.child(), MvExpand.class); assertThat(mvExp.limit(), equalTo(50)); limit = as(mvExp.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(50)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(50)); as(limit.child(), EsRelation.class); } @@ -2083,12 +2084,12 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(5)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); var eval = 
as(topN.child(), Eval.class); var mvExp = as(eval.child(), MvExpand.class); topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); assertThat(orderNames(topN), contains("first_name")); as(topN.child(), EsRelation.class); } @@ -2114,7 +2115,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(1000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(1000)); assertThat(orderNames(topN), contains("salary", "first_name")); var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); @@ -2143,7 +2144,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var mvExp = as(plan, MvExpand.class); assertThat(mvExp.limit(), equalTo(10)); var topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(10)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); assertThat(orderNames(topN), contains("emp_no")); var filter = as(topN.child(), Filter.class); as(filter.child(), EsRelation.class); @@ -2168,7 +2169,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { | sort first_name"""); var topN = as(plan, TopN.class); - assertThat(topN.limit().fold(), equalTo(1000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(1000)); assertThat(orderNames(topN), contains("first_name")); var mvExpand = as(topN.child(), MvExpand.class); var filter = as(mvExpand.child(), Filter.class); @@ -2200,11 +2201,11 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(mvExpand.limit(), equalTo(10000)); var project = as(mvExpand.child(), EsqlProject.class); var topN = as(project.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(7300)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(7300)); 
assertThat(orderNames(topN), contains("a")); mvExpand = as(topN.child(), MvExpand.class); var limit = as(mvExpand.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(7300)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(7300)); as(limit.child(), LocalRelation.class); } @@ -2224,7 +2225,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var topN = as(plan, TopN.class); assertThat(orderNames(topN), contains("first_name")); - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); var mvExpand = as(topN.child(), MvExpand.class); var topN2 = as(mvExpand.child(), TopN.class); // TODO is it correct? Double-check AddDefaultTopN rule as(topN2.child(), EsRelation.class); @@ -2252,7 +2253,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(15)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(15)); assertThat(orderNames(topN), contains("salary", "first_name")); var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); @@ -2260,7 +2261,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { topN = as(mvExp.child(), TopN.class); // the filter acts on first_name (the one used in mv_expand), so the limit 15 is not pushed down past mv_expand // instead the default limit is added - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); assertThat(orderNames(topN), contains("emp_no")); as(topN.child(), EsRelation.class); } @@ -2287,7 +2288,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(15)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(15)); 
assertThat(orderNames(topN), contains("salary", "first_name")); var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); @@ -2295,7 +2296,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { topN = as(mvExp.child(), TopN.class); // the filters after mv_expand do not act on the expanded field values, as such the limit 15 is the one being pushed down // otherwise that limit wouldn't have pushed down and the default limit was instead being added by default before mv_expanded - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); assertThat(orderNames(topN), contains("emp_no")); as(topN.child(), EsRelation.class); } @@ -2323,14 +2324,14 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); - assertThat(topN.limit().fold(), equalTo(15)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(15)); assertThat(orderNames(topN), contains("salary", "first_name")); var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); topN = as(mvExp.child(), TopN.class); // the filter uses an alias ("x") to the expanded field ("first_name"), so the default limit is used and not the one provided - assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); assertThat(orderNames(topN), contains("gender")); as(topN.child(), EsRelation.class); } @@ -2369,7 +2370,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var expand = as(plan, MvExpand.class); assertThat(expand.limit(), equalTo(20)); var topN = as(expand.child(), TopN.class); - assertThat(topN.limit().fold(), is(20)); + assertThat(topN.limit().fold(FoldContext.small()), is(20)); var row = as(topN.child(), EsRelation.class); } @@ -2390,7 +2391,7 @@ public class 
LogicalPlanOptimizerTests extends ESTestCase { var expand = as(plan, MvExpand.class); assertThat(expand.limit(), equalTo(1000)); var limit2 = as(expand.child(), Limit.class); - assertThat(limit2.limit().fold(), is(1000)); + assertThat(limit2.limit().fold(FoldContext.small()), is(1000)); var row = as(limit2.child(), LocalRelation.class); } @@ -2583,7 +2584,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertTrue(filter.condition() instanceof Equals); Equals equals = as(filter.condition(), Equals.class); - assertEquals(BytesRefs.toBytesRef("foo"), equals.right().fold()); + assertEquals(BytesRefs.toBytesRef("foo"), equals.right().fold(FoldContext.small())); assertTrue(filter.child() instanceof EsRelation); } @@ -2609,7 +2610,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertTrue(filter.condition() instanceof Equals); Equals equals = as(filter.condition(), Equals.class); - assertEquals(BytesRefs.toBytesRef("foo"), equals.right().fold()); + assertEquals(BytesRefs.toBytesRef("foo"), equals.right().fold(FoldContext.small())); assertTrue(filter.child() instanceof EsRelation); } @@ -2773,7 +2774,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { """); var enrich = as(plan, Enrich.class); assertTrue(enrich.policyName().resolved()); - assertThat(enrich.policyName().fold(), is(BytesRefs.toBytesRef("languages_idx"))); + assertThat(enrich.policyName().fold(FoldContext.small()), is(BytesRefs.toBytesRef("languages_idx"))); var eval = as(enrich.child(), Eval.class); var limit = as(eval.child(), Limit.class); as(limit.child(), EsRelation.class); @@ -2819,7 +2820,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var filter = as(limit.child(), Filter.class); var enrich = as(filter.child(), Enrich.class); assertTrue(enrich.policyName().resolved()); - assertThat(enrich.policyName().fold(), is(BytesRefs.toBytesRef("languages_idx"))); + assertThat(enrich.policyName().fold(FoldContext.small()), 
is(BytesRefs.toBytesRef("languages_idx"))); var eval = as(enrich.child(), Eval.class); as(eval.child(), EsRelation.class); } @@ -2940,7 +2941,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var a = as(aggs.get(0), Alias.class); var per = as(a.child(), Percentile.class); var literal = as(per.percentile(), Literal.class); - assertThat((int) QuantileStates.MEDIAN, is(literal.fold())); + assertThat((int) QuantileStates.MEDIAN, is(literal.value())); assertThat(Expressions.names(agg.groupings()), contains("last_name")); } @@ -2949,7 +2950,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { FieldAttribute fa = getFieldAttribute("foo"); In in = new In(EMPTY, ONE, List.of(TWO, THREE, fa, L(null))); Or expected = new Or(EMPTY, new In(EMPTY, ONE, List.of(TWO, THREE)), new In(EMPTY, ONE, List.of(fa, L(null)))); - assertThat(new SplitInWithFoldableValue().rule(in), equalTo(expected)); + assertThat(new SplitInWithFoldableValue().rule(in, logicalOptimizerCtx), equalTo(expected)); } public void testReplaceFilterWithExact() { @@ -3706,7 +3707,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var alias = as(exp, Alias.class); var af = as(alias.child(), aggType); var field = af.field(); - var name = field.foldable() ? BytesRefs.toString(field.fold()) : Expressions.name(field); + var name = field.foldable() ? 
BytesRefs.toString(field.fold(FoldContext.small())) : Expressions.name(field); assertThat(name, is(fieldName)); } @@ -4118,7 +4119,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var value = Alias.unwrap(fields.get(0)); var math = as(value, Mod.class); assertThat(Expressions.name(math.left()), is("emp_no")); - assertThat(math.right().fold(), is(2)); + assertThat(math.right().fold(FoldContext.small()), is(2)); // languages + emp_no % 2 var add = as(Alias.unwrap(fields.get(1).canonical()), Add.class); if (add.left() instanceof Mod mod) { @@ -4127,7 +4128,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(Expressions.name(add.left()), is("languages")); var mod = as(add.right().canonical(), Mod.class); assertThat(Expressions.name(mod.left()), is("emp_no")); - assertThat(mod.right().fold(), is(2)); + assertThat(mod.right().fold(FoldContext.small()), is(2)); } /** @@ -4156,7 +4157,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var value = Alias.unwrap(fields.get(0).canonical()); var math = as(value, Mod.class); assertThat(Expressions.name(math.left()), is("emp_no")); - assertThat(math.right().fold(), is(2)); + assertThat(math.right().fold(FoldContext.small()), is(2)); // languages + salary var add = as(Alias.unwrap(fields.get(1).canonical()), Add.class); assertThat(Expressions.name(add.left()), anyOf(is("languages"), is("salary"))); @@ -4173,7 +4174,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(Expressions.name(add3.right()), anyOf(is("salary"), is("languages"))); // emp_no % 2 assertThat(Expressions.name(mod.left()), is("emp_no")); - assertThat(mod.right().fold(), is(2)); + assertThat(mod.right().fold(FoldContext.small()), is(2)); } /** @@ -4611,8 +4612,8 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var mvCoalesce = as(mul.left(), Coalesce.class); assertThat(mvCoalesce.children().size(), equalTo(2)); var mvCount = as(mvCoalesce.children().get(0), MvCount.class); - 
assertThat(mvCount.fold(), equalTo(2)); - assertThat(mvCoalesce.children().get(1).fold(), equalTo(0)); + assertThat(mvCount.fold(FoldContext.small()), equalTo(2)); + assertThat(mvCoalesce.children().get(1).fold(FoldContext.small()), equalTo(0)); var count = as(mul.right(), ReferenceAttribute.class); assertThat(count.name(), equalTo("$$COUNT$s$0")); @@ -4623,8 +4624,8 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var mvCoalesce_expr = as(mul_expr.left(), Coalesce.class); assertThat(mvCoalesce_expr.children().size(), equalTo(2)); var mvCount_expr = as(mvCoalesce_expr.children().get(0), MvCount.class); - assertThat(mvCount_expr.fold(), equalTo(1)); - assertThat(mvCoalesce_expr.children().get(1).fold(), equalTo(0)); + assertThat(mvCount_expr.fold(FoldContext.small()), equalTo(1)); + assertThat(mvCoalesce_expr.children().get(1).fold(FoldContext.small()), equalTo(0)); var count_expr = as(mul_expr.right(), ReferenceAttribute.class); assertThat(count_expr.name(), equalTo("$$COUNT$s$0")); @@ -4636,7 +4637,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(mvCoalesce_null.children().size(), equalTo(2)); var mvCount_null = as(mvCoalesce_null.children().get(0), MvCount.class); assertThat(mvCount_null.field(), equalTo(NULL)); - assertThat(mvCoalesce_null.children().get(1).fold(), equalTo(0)); + assertThat(mvCoalesce_null.children().get(1).fold(FoldContext.small()), equalTo(0)); var count_null = as(mul_null.right(), ReferenceAttribute.class); assertThat(count_null.name(), equalTo("$$COUNT$s$0")); } @@ -4675,7 +4676,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(s.name(), equalTo("s")); var mul = as(s.child(), Mul.class); var mvSum = as(mul.left(), MvSum.class); - assertThat(mvSum.fold(), equalTo(3)); + assertThat(mvSum.fold(FoldContext.small()), equalTo(3)); var count = as(mul.right(), ReferenceAttribute.class); assertThat(count.name(), equalTo("$$COUNT$s$0")); @@ -4684,7 +4685,7 @@ public class 
LogicalPlanOptimizerTests extends ESTestCase { assertThat(s_expr.name(), equalTo("s_expr")); var mul_expr = as(s_expr.child(), Mul.class); var mvSum_expr = as(mul_expr.left(), MvSum.class); - assertThat(mvSum_expr.fold(), equalTo(3.14)); + assertThat(mvSum_expr.fold(FoldContext.small()), equalTo(3.14)); var count_expr = as(mul_expr.right(), ReferenceAttribute.class); assertThat(count_expr.name(), equalTo("$$COUNT$s$0")); @@ -4833,7 +4834,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var s = as(exprs.get(0), Alias.class); assertThat(s.source().toString(), containsString(LoggerMessageFormat.format(null, testCase.aggFunctionTemplate, "[1,2]"))); assertEquals(s.child(), testCase.replacementForConstant.apply(new Literal(EMPTY, List.of(1, 2), INTEGER))); - assertEquals(s.child().fold(), testCase.aggMultiValue.apply(new int[] { 1, 2 })); + assertEquals(s.child().fold(FoldContext.small()), testCase.aggMultiValue.apply(new int[] { 1, 2 })); var s_expr = as(exprs.get(1), Alias.class); assertThat(s_expr.source().toString(), containsString(LoggerMessageFormat.format(null, testCase.aggFunctionTemplate, "314.0/100"))); @@ -4841,7 +4842,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { s_expr.child(), testCase.replacementForConstant.apply(new Div(EMPTY, new Literal(EMPTY, 314.0, DOUBLE), new Literal(EMPTY, 100, INTEGER))) ); - assertEquals(s_expr.child().fold(), testCase.aggSingleValue.apply(3.14)); + assertEquals(s_expr.child().fold(FoldContext.small()), testCase.aggSingleValue.apply(3.14)); var s_null = as(exprs.get(2), Alias.class); assertThat(s_null.source().toString(), containsString(LoggerMessageFormat.format(null, testCase.aggFunctionTemplate, "null"))); @@ -4927,7 +4928,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { } public void testPlanSanityCheckWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", 
EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); var plan = optimizedPlan(""" FROM test @@ -5598,8 +5599,8 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static void assertSemanticMatching(Expression fieldAttributeExp, Expression unresolvedAttributeExp) { Expression unresolvedUpdated = unresolvedAttributeExp.transformUp( LITERALS_ON_THE_RIGHT.expressionToken(), - LITERALS_ON_THE_RIGHT::rule - ).transformUp(x -> x.foldable() ? new Literal(x.source(), x.fold(), x.dataType()) : x); + be -> LITERALS_ON_THE_RIGHT.rule(be, logicalOptimizerCtx) + ).transformUp(x -> x.foldable() ? new Literal(x.source(), x.fold(FoldContext.small()), x.dataType()) : x); List resolvedFields = fieldAttributeExp.collectFirstChildren(x -> x instanceof FieldAttribute); for (Expression field : resolvedFields) { @@ -5679,8 +5680,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { } private void assertNullLiteral(Expression expression) { - assertEquals(Literal.class, expression.getClass()); - assertNull(expression.fold()); + assertNull(as(expression, Literal.class).value()); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108519") @@ -5776,7 +5776,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var filter = as(limit.child(), Filter.class); var insensitive = as(filter.condition(), InsensitiveEquals.class); as(insensitive.left(), FieldAttribute.class); - var bRef = as(insensitive.right().fold(), BytesRef.class); + var bRef = as(insensitive.right().fold(FoldContext.small()), BytesRef.class); assertThat(bRef.utf8ToString(), is(value)); as(filter.child(), EsRelation.class); } @@ -5792,7 +5792,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var not = as(filter.condition(), Not.class); var insensitive = as(not.field(), InsensitiveEquals.class); as(insensitive.left(), FieldAttribute.class); - var bRef = as(insensitive.right().fold(), BytesRef.class); + var bRef = as(insensitive.right().fold(FoldContext.small()), 
BytesRef.class); assertThat(bRef.utf8ToString(), is(value)); as(filter.child(), EsRelation.class); } @@ -5805,7 +5805,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var insensitive = as(filter.condition(), InsensitiveEquals.class); var field = as(insensitive.left(), FieldAttribute.class); assertThat(field.fieldName(), is("first_name")); - var bRef = as(insensitive.right().fold(), BytesRef.class); + var bRef = as(insensitive.right().fold(FoldContext.small()), BytesRef.class); assertThat(bRef.utf8ToString(), is("VALÜ")); as(filter.child(), EsRelation.class); } @@ -5856,7 +5856,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var left = as(join.left(), EsqlProject.class); assertThat(left.output().toString(), containsString("int{r}")); var limit = as(left.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); @@ -5925,7 +5925,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { } var plan = optimizedPlan(query); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); var agg = as(limit.child(), Aggregate.class); assertMap( @@ -6003,7 +6003,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String query = """ FROM test @@ -6017,7 +6017,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { 
assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); var project = as(join.left(), Project.class); var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); var filter = as(limit.child(), Filter.class); // assert that the rename has been undone var op = as(filter.condition(), GreaterThan.class); @@ -6045,7 +6045,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnLeftSideField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String query = """ FROM test @@ -6061,7 +6061,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var project = as(join.left(), Project.class); var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); var filter = as(limit.child(), Filter.class); var op = as(filter.condition(), GreaterThan.class); var field = as(op.left(), FieldAttribute.class); @@ -6088,7 +6088,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownDisabledForLookupField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String query = """ FROM test @@ -6100,7 +6100,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var plan = optimizedPlan(query); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); var 
filter = as(limit.child(), Filter.class); var op = as(filter.condition(), Equals.class); @@ -6132,7 +6132,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String query = """ FROM test @@ -6144,7 +6144,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var plan = optimizedPlan(query); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); // filter kept in place, working on the right side var filter = as(limit.child(), Filter.class); EsqlBinaryComparison op = as(filter.condition(), Equals.class); @@ -6183,7 +6183,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); String query = """ FROM test @@ -6195,7 +6195,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { var plan = optimizedPlan(query); var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(), equalTo(1000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); var filter = as(limit.child(), Filter.class); var or = as(filter.condition(), Or.class); @@ -6289,7 +6289,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { as(addEval.child(), EsRelation.class); assertThat(Expressions.attribute(mul.left()).id(), 
equalTo(finalAggs.aggregates().get(1).id())); - assertThat(mul.right().fold(), equalTo(1.1)); + assertThat(mul.right().fold(FoldContext.small()), equalTo(1.1)); assertThat(finalAggs.aggregateType(), equalTo(Aggregate.AggregateType.STANDARD)); Max maxRate = as(Alias.unwrap(finalAggs.aggregates().get(0)), Max.class); @@ -6304,7 +6304,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { ToPartial toPartialMaxCost = as(Alias.unwrap(aggsByTsid.aggregates().get(1)), ToPartial.class); assertThat(Expressions.attribute(toPartialMaxCost.field()).id(), equalTo(addEval.fields().get(0).id())); assertThat(Expressions.attribute(add.left()).name(), equalTo("network.cost")); - assertThat(add.right().fold(), equalTo(0.2)); + assertThat(add.right().fold(FoldContext.small()), equalTo(0.2)); } public void testTranslateMetricsGroupedByOneDimension() { @@ -6533,7 +6533,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { assertThat(Expressions.attribute(finalAgg.groupings().get(1)).id(), equalTo(aggsByTsid.aggregates().get(1).id())); assertThat(Expressions.attribute(mul.left()).id(), equalTo(aggsByTsid.aggregates().get(0).id())); - assertThat(mul.right().fold(), equalTo(1.05)); + assertThat(mul.right().fold(FoldContext.small()), equalTo(1.05)); assertThat(aggsByTsid.aggregateType(), equalTo(Aggregate.AggregateType.METRICS)); Rate rate = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), Rate.class); assertThat(Expressions.attribute(rate.field()).name(), equalTo("network.total_bytes_in")); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index ff710a90e815..75825f4e8f48 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ 
-51,6 +51,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -164,6 +165,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; @@ -245,7 +247,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { @Before public void init() { parser = new EsqlParser(); - logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(); @@ -1117,7 +1119,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var fieldExtract = as(project.child(), FieldExtractExec.class); var source = source(fieldExtract.child()); assertThat(source.estimatedRowSize(), equalTo(allFieldRowSize + Integer.BYTES)); - assertThat(source.limit().fold(), is(10)); + 
assertThat(source.limit().fold(FoldContext.small()), is(10)); } /** @@ -1199,7 +1201,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var leaves = extract.collectLeaves(); assertEquals(1, leaves.size()); var source = as(leaves.get(0), EsQueryExec.class); - assertThat(source.limit().fold(), is(10)); + assertThat(source.limit().fold(FoldContext.small()), is(10)); // extra ints for doc id and emp_no_10 assertThat(source.estimatedRowSize(), equalTo(allFieldRowSize + Integer.BYTES * 2)); } @@ -1246,7 +1248,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var source = source(extract.child()); assertThat(source.estimatedRowSize(), equalTo(allFieldRowSize + Integer.BYTES * 2)); - assertThat(source.limit().fold(), is(10)); + assertThat(source.limit().fold(FoldContext.small()), is(10)); var rq = as(sv(source.query(), "emp_no"), RangeQueryBuilder.class); assertThat(rq.fieldName(), equalTo("emp_no")); assertThat(rq.from(), equalTo(0)); @@ -2616,7 +2618,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { } public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); // Do not assert serialization: // This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator. 
@@ -2902,14 +2904,14 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var eval = as(project.child(), EvalExec.class); var limit = as(eval.child(), LimitExec.class); assertThat(limit.limit(), instanceOf(Literal.class)); - assertThat(limit.limit().fold(), equalTo(10000)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(10000)); var aggFinal = as(limit.child(), AggregateExec.class); assertThat(aggFinal.getMode(), equalTo(FINAL)); var aggPartial = as(aggFinal.child(), AggregateExec.class); assertThat(aggPartial.getMode(), equalTo(INITIAL)); limit = as(aggPartial.child(), LimitExec.class); assertThat(limit.limit(), instanceOf(Literal.class)); - assertThat(limit.limit().fold(), equalTo(10)); + assertThat(limit.limit().fold(FoldContext.small()), equalTo(10)); var exchange = as(limit.child(), ExchangeExec.class); project = as(exchange.child(), ProjectExec.class); @@ -2918,7 +2920,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var fieldExtract = as(project.child(), FieldExtractExec.class); assertThat(Expressions.names(fieldExtract.attributesToExtract()), is(expectedFields)); var source = source(fieldExtract.child()); - assertThat(source.limit().fold(), equalTo(10)); + assertThat(source.limit().fold(FoldContext.small()), equalTo(10)); } /** @@ -4849,15 +4851,15 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var alias1 = as(eval.fields().get(0), Alias.class); assertThat(alias1.name(), is("poi")); var poi = as(alias1.child(), Literal.class); - assertThat(poi.fold(), instanceOf(BytesRef.class)); + assertThat(poi.value(), instanceOf(BytesRef.class)); var alias2 = as(eval.fields().get(1), Alias.class); assertThat(alias2.name(), is("distance")); var stDistance = as(alias2.child(), StDistance.class); var location = as(stDistance.left(), FieldAttribute.class); assertThat(location.fieldName(), is("location")); var poiRef = as(stDistance.right(), Literal.class); - assertThat(poiRef.fold(), instanceOf(BytesRef.class)); - 
assertThat(poiRef.fold().toString(), is(poi.fold().toString())); + assertThat(poiRef.value(), instanceOf(BytesRef.class)); + assertThat(poiRef.value().toString(), is(poi.value().toString())); // Validate the filter condition var and = as(filter.condition(), And.class); @@ -6205,15 +6207,15 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var alias1 = as(evalExec.fields().get(0), Alias.class); assertThat(alias1.name(), is("poi")); var poi = as(alias1.child(), Literal.class); - assertThat(poi.fold(), instanceOf(BytesRef.class)); + assertThat(poi.value(), instanceOf(BytesRef.class)); var alias2 = as(evalExec.fields().get(1), Alias.class); assertThat(alias2.name(), is("distance")); var stDistance = as(alias2.child(), StDistance.class); var location = as(stDistance.left(), FieldAttribute.class); assertThat(location.fieldName(), is("location")); var poiRef = as(stDistance.right(), Literal.class); - assertThat(poiRef.fold(), instanceOf(BytesRef.class)); - assertThat(poiRef.fold().toString(), is(poi.fold().toString())); + assertThat(poiRef.value(), instanceOf(BytesRef.class)); + assertThat(poiRef.value().toString(), is(poi.value().toString())); extract = as(evalExec.child(), FieldExtractExec.class); assertThat(names(extract.attributesToExtract()), contains("location")); var source = source(extract.child()); @@ -6294,7 +6296,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var alias1 = as(evalExec.fields().get(0), Alias.class); assertThat(alias1.name(), is("poi")); var poi = as(alias1.child(), Literal.class); - assertThat(poi.fold(), instanceOf(BytesRef.class)); + assertThat(poi.value(), instanceOf(BytesRef.class)); var alias4 = as(evalExec.fields().get(3), Alias.class); assertThat(alias4.name(), is("loc2")); as(alias4.child(), FieldAttribute.class); @@ -6307,8 +6309,8 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var refLocation = as(stDistance.left(), ReferenceAttribute.class); assertThat(refLocation.name(), is("loc3")); 
var poiRef = as(stDistance.right(), Literal.class); - assertThat(poiRef.fold(), instanceOf(BytesRef.class)); - assertThat(poiRef.fold().toString(), is(poi.fold().toString())); + assertThat(poiRef.value(), instanceOf(BytesRef.class)); + assertThat(poiRef.value().toString(), is(poi.value().toString())); var alias7 = as(evalExec.fields().get(6), Alias.class); assertThat(alias7.name(), is("distance")); as(alias7.child(), ReferenceAttribute.class); @@ -6391,15 +6393,15 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var alias1 = as(evalExec.fields().get(0), Alias.class); assertThat(alias1.name(), is("poi")); var poi = as(alias1.child(), Literal.class); - assertThat(poi.fold(), instanceOf(BytesRef.class)); + assertThat(poi.value(), instanceOf(BytesRef.class)); var alias2 = as(evalExec.fields().get(1), Alias.class); assertThat(alias2.name(), is("distance")); var stDistance = as(alias2.child(), StDistance.class); var location = as(stDistance.left(), FieldAttribute.class); assertThat(location.fieldName(), is("location")); var poiRef = as(stDistance.right(), Literal.class); - assertThat(poiRef.fold(), instanceOf(BytesRef.class)); - assertThat(poiRef.fold().toString(), is(poi.fold().toString())); + assertThat(poiRef.value(), instanceOf(BytesRef.class)); + assertThat(poiRef.value().toString(), is(poi.value().toString())); extract = as(evalExec.child(), FieldExtractExec.class); assertThat(names(extract.attributesToExtract()), contains("location")); var source = source(extract.child()); @@ -6931,7 +6933,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var fragment = as(exchange.child(), FragmentExec.class); var partialTopN = as(fragment.fragment(), TopN.class); var enrich2 = as(partialTopN.child(), Enrich.class); - assertThat(BytesRefs.toString(enrich2.policyName().fold()), equalTo("departments")); + assertThat(BytesRefs.toString(enrich2.policyName().fold(FoldContext.small())), equalTo("departments")); assertThat(enrich2.mode(), 
equalTo(Enrich.Mode.ANY)); var eval = as(enrich2.child(), Eval.class); as(eval.child(), EsRelation.class); @@ -6957,7 +6959,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { var fragment = as(exchange.child(), FragmentExec.class); var partialTopN = as(fragment.fragment(), TopN.class); var enrich2 = as(partialTopN.child(), Enrich.class); - assertThat(BytesRefs.toString(enrich2.policyName().fold()), equalTo("departments")); + assertThat(BytesRefs.toString(enrich2.policyName().fold(FoldContext.small())), equalTo("departments")); assertThat(enrich2.mode(), equalTo(Enrich.Mode.ANY)); var eval = as(enrich2.child(), Eval.class); as(eval.child(), EsRelation.class); @@ -7299,7 +7301,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { } public void testLookupJoinFieldLoading() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz"))); @@ -7376,7 +7378,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { } public void testLookupJoinFieldLoadingTwoLookups() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); TestDataSource data = dataSetWithLookupIndices( Map.of( @@ -7430,7 +7432,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/119082") public void testLookupJoinFieldLoadingTwoLookupsProjectInBetween() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); TestDataSource data = dataSetWithLookupIndices( Map.of( @@ -7471,7 
+7473,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/118778") public void testLookupJoinFieldLoadingDropAllFields() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz"))); @@ -7549,7 +7551,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { // The TopN needs an estimated row size for the planner to work var plans = PlannerUtils.breakPlanBetweenCoordinatorAndDataNode(EstimatesRowSize.estimateRowSize(0, plan), config); plan = useDataNodePlan ? plans.v2() : plans.v1(); - plan = PlannerUtils.localPlan(List.of(), config, plan); + plan = PlannerUtils.localPlan(List.of(), config, FoldContext.small(), plan); LocalExecutionPlanner planner = new LocalExecutionPlanner( "test", "", @@ -7562,10 +7564,10 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { new ExchangeSinkHandler(null, 10, () -> 10), null, null, - new EsPhysicalOperationProviders(List.of(), null) + new EsPhysicalOperationProviders(FoldContext.small(), List.of(), null) ); - return planner.plan(plan); + return planner.plan(FoldContext.small(), plan); } private List> findFieldNamesInLookupJoinDescription(LocalExecutionPlanner.LocalExecutionPlan physicalOperations) { @@ -7662,7 +7664,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { PhysicalPlan reduction = PlannerUtils.reductionPlan(plans.v2()); TopNExec reductionTopN = as(reduction, TopNExec.class); assertThat(reductionTopN.estimatedRowSize(), equalTo(allFieldRowSize)); - assertThat(reductionTopN.limit().fold(), equalTo(limit)); + assertThat(reductionTopN.limit().fold(FoldContext.small()), equalTo(limit)); } public void testReductionPlanForAggs() { @@ -7818,7 +7820,7 @@ public class 
PhysicalPlanOptimizerTests extends ESTestCase { // individually hence why here the plan is kept as is var l = p.transformUp(FragmentExec.class, fragment -> { - var localPlan = PlannerUtils.localPlan(config, fragment, searchStats); + var localPlan = PlannerUtils.localPlan(config, FoldContext.small(), fragment, searchStats); return EstimatesRowSize.estimateRowSize(fragment.estimatedRowSize(), localPlan); }); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java index 9fe479dbb862..e6a7d110f8c0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -26,7 +27,7 @@ public class TestPlannerOptimizer { private final Configuration config; public TestPlannerOptimizer(Configuration config, Analyzer analyzer) { - this(config, analyzer, new LogicalPlanOptimizer(new LogicalOptimizerContext(config))); + this(config, analyzer, new LogicalPlanOptimizer(new LogicalOptimizerContext(config, FoldContext.small()))); } public TestPlannerOptimizer(Configuration config, Analyzer analyzer, LogicalPlanOptimizer logicalOptimizer) { @@ -61,8 +62,13 @@ public class TestPlannerOptimizer { // this is of no use in the unit tests, which checks the plan as a whole instead of each // individually hence why here the plan is kept as is - var logicalTestOptimizer = new 
LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(config, searchStats)); - var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(config, searchStats), true); + var logicalTestOptimizer = new LocalLogicalPlanOptimizer( + new LocalLogicalOptimizerContext(config, FoldContext.small(), searchStats) + ); + var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer( + new LocalPhysicalOptimizerContext(config, FoldContext.small(), searchStats), + true + ); var l = PlannerUtils.localPlan(physicalPlan, logicalTestOptimizer, physicalTestOptimizer); // handle local reduction alignment diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/LogicalOptimizerContextTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/LogicalOptimizerContextTests.java new file mode 100644 index 000000000000..5d2fec0fc818 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/LogicalOptimizerContextTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.xpack.esql.ConfigurationTestUtils; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; +import org.elasticsearch.xpack.esql.session.Configuration; + +import static org.hamcrest.Matchers.equalTo; + +public class LogicalOptimizerContextTests extends ESTestCase { + public void testToString() { + // Random looking numbers for FoldContext are indeed random. Just so we have consistent numbers to assert on in toString. + LogicalOptimizerContext ctx = new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG, new FoldContext(102)); + ctx.foldCtx().trackAllocation(Source.EMPTY, 99); + assertThat( + ctx.toString(), + equalTo("LogicalOptimizerContext[configuration=" + EsqlTestUtils.TEST_CFG + ", foldCtx=FoldContext[3/102]]") + ); + } + + public void testEqualsAndHashCode() { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(randomLogicalOptimizerContext(), this::copy, this::mutate); + } + + private LogicalOptimizerContext randomLogicalOptimizerContext() { + return new LogicalOptimizerContext(ConfigurationTestUtils.randomConfiguration(), randomFoldContext()); + } + + private LogicalOptimizerContext copy(LogicalOptimizerContext c) { + return new LogicalOptimizerContext(c.configuration(), c.foldCtx()); + } + + private LogicalOptimizerContext mutate(LogicalOptimizerContext c) { + Configuration configuration = c.configuration(); + FoldContext foldCtx = c.foldCtx(); + if (randomBoolean()) { + configuration = randomValueOtherThan(configuration, ConfigurationTestUtils::randomConfiguration); + } else { + foldCtx = randomValueOtherThan(foldCtx, this::randomFoldContext); + } + return new 
LogicalOptimizerContext(configuration, foldCtx); + } + + private FoldContext randomFoldContext() { + FoldContext ctx = new FoldContext(randomNonNegativeLong()); + if (randomBoolean()) { + ctx.trackAllocation(Source.EMPTY, randomLongBetween(0, ctx.initialAllowedBytes())); + } + return ctx; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java index 08c8612d8097..c0c145aee538 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java @@ -22,25 +22,26 @@ import java.util.List; import static java.util.Arrays.asList; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.notEqualsOf; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; public class BooleanFunctionEqualsEliminationTests extends ESTestCase { + private Expression booleanFunctionEqualElimination(BinaryComparison e) { + return new BooleanFunctionEqualsElimination().rule(e, unboundLogicalOptimizerContext()); + } public void testBoolEqualsSimplificationOnExpressions() { - BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), new Literal(EMPTY, 0, DataType.INTEGER), null); - assertEquals(exp, 
s.rule(new Equals(EMPTY, exp, TRUE))); + assertEquals(exp, booleanFunctionEqualElimination(new Equals(EMPTY, exp, TRUE))); // TODO: Replace use of QL Not with ESQL Not - assertEquals(new Not(EMPTY, exp), s.rule(new Equals(EMPTY, exp, FALSE))); + assertEquals(new Not(EMPTY, exp), booleanFunctionEqualElimination(new Equals(EMPTY, exp, FALSE))); } public void testBoolEqualsSimplificationOnFields() { - BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); - FieldAttribute field = getFieldAttribute(); List comparisons = asList( @@ -55,7 +56,7 @@ public class BooleanFunctionEqualsEliminationTests extends ESTestCase { ); for (BinaryComparison comparison : comparisons) { - assertEquals(comparison, s.rule(comparison)); + assertEquals(comparison, booleanFunctionEqualElimination(comparison)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java index 3b1f8cfc83af..5b4bf806518d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java @@ -9,9 +9,11 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static 
org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; @@ -20,33 +22,31 @@ public class BooleanSimplificationTests extends ESTestCase { private static final Expression DUMMY_EXPRESSION = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 0); + private Expression booleanSimplification(ScalarFunction e) { + return new BooleanSimplification().rule(e, unboundLogicalOptimizerContext()); + } + public void testBoolSimplifyOr() { - BooleanSimplification simplification = new BooleanSimplification(); + assertEquals(TRUE, booleanSimplification(new Or(EMPTY, TRUE, TRUE))); + assertEquals(TRUE, booleanSimplification(new Or(EMPTY, TRUE, DUMMY_EXPRESSION))); + assertEquals(TRUE, booleanSimplification(new Or(EMPTY, DUMMY_EXPRESSION, TRUE))); - assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, TRUE))); - assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, DUMMY_EXPRESSION))); - assertEquals(TRUE, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, TRUE))); - - assertEquals(FALSE, simplification.rule(new Or(EMPTY, FALSE, FALSE))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, FALSE, DUMMY_EXPRESSION))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, FALSE))); + assertEquals(FALSE, booleanSimplification(new Or(EMPTY, FALSE, FALSE))); + assertEquals(DUMMY_EXPRESSION, booleanSimplification(new Or(EMPTY, FALSE, DUMMY_EXPRESSION))); + assertEquals(DUMMY_EXPRESSION, booleanSimplification(new Or(EMPTY, DUMMY_EXPRESSION, FALSE))); } public void testBoolSimplifyAnd() { - BooleanSimplification simplification = new BooleanSimplification(); + assertEquals(TRUE, booleanSimplification(new And(EMPTY, TRUE, TRUE))); + assertEquals(DUMMY_EXPRESSION, booleanSimplification(new And(EMPTY, TRUE, DUMMY_EXPRESSION))); + assertEquals(DUMMY_EXPRESSION, booleanSimplification(new And(EMPTY, DUMMY_EXPRESSION, TRUE))); - 
assertEquals(TRUE, simplification.rule(new And(EMPTY, TRUE, TRUE))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, TRUE, DUMMY_EXPRESSION))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, TRUE))); - - assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, FALSE))); - assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, DUMMY_EXPRESSION))); - assertEquals(FALSE, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, FALSE))); + assertEquals(FALSE, booleanSimplification(new And(EMPTY, FALSE, FALSE))); + assertEquals(FALSE, booleanSimplification(new And(EMPTY, FALSE, DUMMY_EXPRESSION))); + assertEquals(FALSE, booleanSimplification(new And(EMPTY, DUMMY_EXPRESSION, FALSE))); } public void testBoolCommonFactorExtraction() { - BooleanSimplification simplification = new BooleanSimplification(); - Expression a1 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); Expression a2 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); Expression b = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 2); @@ -55,7 +55,7 @@ public class BooleanSimplificationTests extends ESTestCase { Or actual = new Or(EMPTY, new And(EMPTY, a1, b), new And(EMPTY, a2, c)); And expected = new And(EMPTY, a1, new Or(EMPTY, b, c)); - assertEquals(expected, simplification.rule(actual)); + assertEquals(expected, booleanSimplification(actual)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java index d388369e0b16..a0d23731ae82 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; @@ -37,6 +38,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.greaterThanOrEqualOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOrEqualOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.notEqualsOf; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; @@ -45,19 +47,17 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; public class CombineBinaryComparisonsTests extends ESTestCase { - - private static final Expression DUMMY_EXPRESSION = - new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 0); + private Expression combine(BinaryLogic e) { + return new CombineBinaryComparisons().rule(e, unboundLogicalOptimizerContext()); + } public void testCombineBinaryComparisonsNotComparable() { FieldAttribute fa = getFieldAttribute(); LessThanOrEqual lte = 
lessThanOrEqualOf(fa, SIX); LessThan lt = lessThanOf(fa, FALSE); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - And and = new And(EMPTY, lte, lt); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(exp, and); } @@ -67,9 +67,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { LessThanOrEqual lte = lessThanOrEqualOf(fa, SIX); LessThan lt = lessThanOf(fa, FIVE); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, lte, lt)); + Expression exp = combine(new And(EMPTY, lte, lt)); assertEquals(LessThan.class, exp.getClass()); LessThan r = (LessThan) exp; assertEquals(FIVE, r.right()); @@ -81,9 +79,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, SIX); GreaterThan gt = greaterThanOf(fa, FIVE); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, gte, gt)); + Expression exp = combine(new And(EMPTY, gte, gt)); assertEquals(GreaterThanOrEqual.class, exp.getClass()); GreaterThanOrEqual r = (GreaterThanOrEqual) exp; assertEquals(SIX, r.right()); @@ -95,9 +91,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, FIVE); GreaterThan gt = greaterThanOf(fa, FIVE); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, gte, gt)); + Expression exp = combine(new And(EMPTY, gte, gt)); assertEquals(GreaterThan.class, exp.getClass()); GreaterThan r = (GreaterThan) exp; assertEquals(FIVE, r.right()); @@ -111,9 +105,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { LessThanOrEqual lte = lessThanOrEqualOf(fa, L(7)); LessThan lt = lessThanOf(fa, SIX); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, gte, new And(EMPTY, gt, new And(EMPTY, lt, 
lte)))); + Expression exp = combine(new And(EMPTY, gte, new And(EMPTY, gt, new And(EMPTY, lt, lte)))); assertEquals(And.class, exp.getClass()); And and = (And) exp; assertEquals(gt, and.left()); @@ -128,10 +120,8 @@ public class CombineBinaryComparisonsTests extends ESTestCase { LessThanOrEqual lte = lessThanOrEqualOf(fa, L(7)); Expression ne = notEqualsOf(fa, FIVE); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - // TRUE AND a != 5 AND 4 < a <= 7 - Expression exp = rule.rule(new And(EMPTY, gte, new And(EMPTY, TRUE, new And(EMPTY, gt, new And(EMPTY, ne, lte))))); + Expression exp = combine(new And(EMPTY, gte, new And(EMPTY, TRUE, new And(EMPTY, gt, new And(EMPTY, ne, lte))))); assertEquals(And.class, exp.getClass()); And and = ((And) exp); assertEquals(And.class, and.right().getClass()); @@ -150,8 +140,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, ONE); LessThan lt = lessThanOf(fa, FIVE); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(new And(EMPTY, gte, lt)); + Expression exp = combine(new And(EMPTY, gte, lt)); assertEquals(And.class, exp.getClass()); And and = (And) exp; @@ -167,8 +156,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { GreaterThan gt = greaterThanOf(fa, THREE); And and = new And(EMPTY, neq, gt); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(gt, exp); } @@ -180,8 +168,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); And and = new And(EMPTY, neq, gte); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(GreaterThan.class, exp.getClass()); GreaterThan gt = (GreaterThan) exp; assertEquals(TWO, gt.right()); @@ -195,8 +182,7 @@ 
public class CombineBinaryComparisonsTests extends ESTestCase { GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, ONE); And and = new And(EMPTY, neq, gte); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(And.class, exp.getClass()); // can't optimize } @@ -208,8 +194,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { LessThanOrEqual lte = lessThanOrEqualOf(fa, THREE); And and = new And(EMPTY, neq, lte); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(and, exp); // can't optimize } @@ -221,8 +206,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { LessThanOrEqual lte = lessThanOrEqualOf(fa, TWO); And and = new And(EMPTY, neq, lte); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(LessThan.class, exp.getClass()); LessThan lt = (LessThan) exp; assertEquals(TWO, lt.right()); @@ -236,8 +220,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { LessThanOrEqual lte = lessThanOrEqualOf(fa, ONE); And and = new And(EMPTY, neq, lte); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals(lte, exp); } @@ -251,8 +234,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { Or or = new Or(EMPTY, gt1, gt2); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); + Expression exp = combine(or); assertEquals(exp, or); } @@ -266,8 +248,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { Or or = new Or(EMPTY, gt1, new Or(EMPTY, gt2, gt3)); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); + Expression exp = combine(or); 
assertEquals(GreaterThan.class, exp.getClass()); GreaterThan gt = (GreaterThan) exp; @@ -284,8 +265,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { Or or = new Or(EMPTY, new Or(EMPTY, gt1, gt2), gte3); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); + Expression exp = combine(or); assertEquals(GreaterThan.class, exp.getClass()); GreaterThan gt = (GreaterThan) exp; @@ -302,8 +282,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { Or or = new Or(EMPTY, new Or(EMPTY, lt1, lt2), lt3); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); + Expression exp = combine(or); assertEquals(LessThan.class, exp.getClass()); LessThan lt = (LessThan) exp; @@ -320,8 +299,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { Or or = new Or(EMPTY, lt2, new Or(EMPTY, lte2, lt1)); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); + Expression exp = combine(or); assertEquals(LessThanOrEqual.class, exp.getClass()); LessThanOrEqual lte = (LessThanOrEqual) exp; @@ -340,8 +318,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { Or or = new Or(EMPTY, new Or(EMPTY, lt2, gt3), new Or(EMPTY, lt1, gt4)); - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); + Expression exp = combine(or); assertEquals(Or.class, exp.getClass()); Or ro = (Or) exp; @@ -367,7 +344,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { And right = new And(EMPTY, a2, common); Or or = new Or(EMPTY, left, right); - Expression exp = new BooleanSimplification().rule(or); + Expression exp = new BooleanSimplification().rule(or, unboundLogicalOptimizerContext()); assertEquals(new And(EMPTY, common, new Or(EMPTY, a1, a2)), exp); } @@ -391,8 +368,7 @@ public class CombineBinaryComparisonsTests extends ESTestCase { ); for (And and : testCases) { - 
CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); + Expression exp = combine(and); assertEquals("Rule should not have transformed [" + and.nodeString() + "]", and, exp); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java index 043d18dac9fd..bb5f2fd3505e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java @@ -38,16 +38,24 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.elasticsearch.xpack.esql.EsqlTestUtils.relation; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; public class CombineDisjunctionsTests extends ESTestCase { + private Expression combineDisjunctions(Or e) { + return new CombineDisjunctions().rule(e, unboundLogicalOptimizerContext()); + } + + private LogicalPlan combineDisjunctions(LogicalPlan l) { + return new CombineDisjunctions().apply(l, unboundLogicalOptimizerContext()); + } public void testTwoEqualsWithOr() { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); - Expression e = new CombineDisjunctions().rule(or); + Expression e = combineDisjunctions(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -58,7 +66,7 @@ public class CombineDisjunctionsTests extends ESTestCase { 
FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); - Expression e = new CombineDisjunctions().rule(or); + Expression e = combineDisjunctions(or); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); @@ -69,7 +77,7 @@ public class CombineDisjunctionsTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, List.of(TWO))); - Expression e = new CombineDisjunctions().rule(or); + Expression e = combineDisjunctions(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -80,7 +88,7 @@ public class CombineDisjunctionsTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); - Expression e = new CombineDisjunctions().rule(or); + Expression e = combineDisjunctions(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -92,7 +100,7 @@ public class CombineDisjunctionsTests extends ESTestCase { Equals equals = equalsOf(fa, ONE); Or or = new Or(EMPTY, equals, new In(EMPTY, fa, List.of(ONE))); - Expression e = new CombineDisjunctions().rule(or); + Expression e = combineDisjunctions(or); assertEquals(equals, e); } @@ -101,7 +109,7 @@ public class CombineDisjunctionsTests extends ESTestCase { And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); Filter dummy = new Filter(EMPTY, relation(), and); - LogicalPlan transformed = new CombineDisjunctions().apply(dummy); + LogicalPlan transformed = combineDisjunctions(dummy); assertSame(dummy, transformed); assertEquals(and, ((Filter) transformed).condition()); } @@ -111,7 +119,7 @@ public class CombineDisjunctionsTests extends ESTestCase { FieldAttribute fieldTwo = getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); - Expression e = new 
CombineDisjunctions().rule(or); + Expression e = combineDisjunctions(or); assertEquals(or, e); } @@ -120,7 +128,7 @@ public class CombineDisjunctionsTests extends ESTestCase { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TWO))); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); - Expression e = new CombineDisjunctions().rule(secondOr); + Expression e = combineDisjunctions(secondOr); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -132,7 +140,7 @@ public class CombineDisjunctionsTests extends ESTestCase { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), lessThanOf(fa, TWO)); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); - Expression e = new CombineDisjunctions().rule(secondOr); + Expression e = combineDisjunctions(secondOr); assertEquals(Or.class, e.getClass()); Or or = (Or) e; assertEquals(or.left(), firstOr.right()); @@ -160,7 +168,7 @@ public class CombineDisjunctionsTests extends ESTestCase { cidrs.add(new CIDRMatch(EMPTY, faa, ipa2)); cidrs.add(new CIDRMatch(EMPTY, fab, ipb2)); Or oldOr = (Or) Predicates.combineOr(cidrs); - Expression e = new CombineDisjunctions().rule(oldOr); + Expression e = combineDisjunctions(oldOr); assertEquals(Or.class, e.getClass()); Or newOr = (Or) e; assertEquals(CIDRMatch.class, newOr.left().getClass()); @@ -211,7 +219,7 @@ public class CombineDisjunctionsTests extends ESTestCase { Or oldOr = (Or) Predicates.combineOr(all); - Expression e = new CombineDisjunctions().rule(oldOr); + Expression e = combineDisjunctions(oldOr); assertEquals(Or.class, e.getClass()); Or newOr = (Or) e; assertEquals(Or.class, newOr.left().getClass()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java index 01af91271e1b..8a8585b8d0ab 
100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; @@ -45,68 +46,77 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOrEqualOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.notEqualsOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.of; import static org.elasticsearch.xpack.esql.EsqlTestUtils.rangeOf; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; public class ConstantFoldingTests extends ESTestCase { + private Expression constantFolding(Expression e) { + return new ConstantFolding().rule(e, unboundLogicalOptimizerContext()); + } public void testConstantFolding() { Expression exp = new Add(EMPTY, TWO, THREE); assertTrue(exp.foldable()); - Expression result = new ConstantFolding().rule(exp); - assertTrue(result instanceof Literal); - assertEquals(5, ((Literal) result).value()); + Expression result = constantFolding(exp); + assertEquals(5, as(result, Literal.class).value()); // check now with an alias - 
result = new ConstantFolding().rule(new Alias(EMPTY, "a", exp)); + result = constantFolding(new Alias(EMPTY, "a", exp)); assertEquals("a", Expressions.name(result)); assertEquals(Alias.class, result.getClass()); } public void testConstantFoldingBinaryComparison() { - assertEquals(FALSE, new ConstantFolding().rule(greaterThanOf(TWO, THREE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(greaterThanOrEqualOf(TWO, THREE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(equalsOf(TWO, THREE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(notEqualsOf(TWO, THREE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(lessThanOrEqualOf(TWO, THREE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(lessThanOf(TWO, THREE)).canonical()); + assertEquals(FALSE, constantFolding(greaterThanOf(TWO, THREE)).canonical()); + assertEquals(FALSE, constantFolding(greaterThanOrEqualOf(TWO, THREE)).canonical()); + assertEquals(FALSE, constantFolding(equalsOf(TWO, THREE)).canonical()); + assertEquals(TRUE, constantFolding(notEqualsOf(TWO, THREE)).canonical()); + assertEquals(TRUE, constantFolding(lessThanOrEqualOf(TWO, THREE)).canonical()); + assertEquals(TRUE, constantFolding(lessThanOf(TWO, THREE)).canonical()); } public void testConstantFoldingBinaryLogic() { - assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, greaterThanOf(TWO, THREE), TRUE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, greaterThanOrEqualOf(TWO, THREE), TRUE)).canonical()); + assertEquals(FALSE, constantFolding(new And(EMPTY, greaterThanOf(TWO, THREE), TRUE)).canonical()); + assertEquals(TRUE, constantFolding(new Or(EMPTY, greaterThanOrEqualOf(TWO, THREE), TRUE)).canonical()); } public void testConstantFoldingBinaryLogic_WithNullHandling() { - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, TRUE)).canonical().nullable()); - assertEquals(Nullability.TRUE, new 
ConstantFolding().rule(new And(EMPTY, TRUE, NULL)).canonical().nullable()); - assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, NULL, FALSE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, FALSE, NULL)).canonical()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, NULL)).canonical().nullable()); + assertEquals(Nullability.TRUE, constantFolding(new And(EMPTY, NULL, TRUE)).canonical().nullable()); + assertEquals(Nullability.TRUE, constantFolding(new And(EMPTY, TRUE, NULL)).canonical().nullable()); + assertEquals(FALSE, constantFolding(new And(EMPTY, NULL, FALSE)).canonical()); + assertEquals(FALSE, constantFolding(new And(EMPTY, FALSE, NULL)).canonical()); + assertEquals(Nullability.TRUE, constantFolding(new And(EMPTY, NULL, NULL)).canonical().nullable()); - assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, TRUE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, TRUE, NULL)).canonical()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, FALSE)).canonical().nullable()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, FALSE, NULL)).canonical().nullable()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, NULL)).canonical().nullable()); + assertEquals(TRUE, constantFolding(new Or(EMPTY, NULL, TRUE)).canonical()); + assertEquals(TRUE, constantFolding(new Or(EMPTY, TRUE, NULL)).canonical()); + assertEquals(Nullability.TRUE, constantFolding(new Or(EMPTY, NULL, FALSE)).canonical().nullable()); + assertEquals(Nullability.TRUE, constantFolding(new Or(EMPTY, FALSE, NULL)).canonical().nullable()); + assertEquals(Nullability.TRUE, constantFolding(new Or(EMPTY, NULL, NULL)).canonical().nullable()); } public void testConstantFoldingRange() { - assertEquals(true, new ConstantFolding().rule(rangeOf(FIVE, FIVE, true, new Literal(EMPTY, 10, DataType.INTEGER), false)).fold()); - 
assertEquals(false, new ConstantFolding().rule(rangeOf(FIVE, FIVE, false, new Literal(EMPTY, 10, DataType.INTEGER), false)).fold()); + assertEquals( + true, + constantFolding(rangeOf(FIVE, FIVE, true, new Literal(EMPTY, 10, DataType.INTEGER), false)).fold(FoldContext.small()) + ); + assertEquals( + false, + constantFolding(rangeOf(FIVE, FIVE, false, new Literal(EMPTY, 10, DataType.INTEGER), false)).fold(FoldContext.small()) + ); } public void testConstantNot() { - assertEquals(FALSE, new ConstantFolding().rule(new Not(EMPTY, TRUE))); - assertEquals(TRUE, new ConstantFolding().rule(new Not(EMPTY, FALSE))); + assertEquals(FALSE, constantFolding(new Not(EMPTY, TRUE))); + assertEquals(TRUE, constantFolding(new Not(EMPTY, FALSE))); } public void testConstantFoldingLikes() { - assertEquals(TRUE, new ConstantFolding().rule(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical()); + assertEquals(TRUE, constantFolding(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical()); + assertEquals(TRUE, constantFolding(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical()); } public void testArithmeticFolding() { @@ -125,7 +135,7 @@ public class ConstantFoldingTests extends ESTestCase { Expression value = new Literal(EMPTY, 12, DataType.INTEGER); Range range = new Range(EMPTY, value, lowerBound, randomBoolean(), upperBound, randomBoolean(), randomZone()); - Expression folded = new ConstantFolding().rule(range); + Expression folded = constantFolding(range); assertTrue((Boolean) as(folded, Literal.class).value()); } @@ -156,16 +166,15 @@ public class ConstantFoldingTests extends ESTestCase { // Just applying this to the range directly won't perform a transformDown. 
LogicalPlan filter = new Filter(EMPTY, emptySource(), range); - Filter foldedOnce = as(new ConstantFolding().apply(filter), Filter.class); + Filter foldedOnce = as(new ConstantFolding().apply(filter, unboundLogicalOptimizerContext()), Filter.class); // We need to run the rule twice, because during the first run only the boundaries can be folded - the range doesn't know it's // foldable, yet. - Filter foldedTwice = as(new ConstantFolding().apply(foldedOnce), Filter.class); + Filter foldedTwice = as(new ConstantFolding().apply(foldedOnce, unboundLogicalOptimizerContext()), Filter.class); assertFalse((Boolean) as(foldedTwice.condition(), Literal.class).value()); } - private static Object foldOperator(BinaryOperator b) { - return ((Literal) new ConstantFolding().rule(b)).value(); + private Object foldOperator(BinaryOperator b) { + return ((Literal) constantFolding(b)).value(); } - } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java index ae3157618493..252b25a214bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; @@ -67,9 +68,11 @@ import java.lang.reflect.Constructor; import java.util.List; import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; +import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.greaterThanOf; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; @@ -85,28 +88,27 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; public class FoldNullTests extends ESTestCase { + private Expression foldNull(Expression e) { + return new FoldNull().rule(e, unboundLogicalOptimizerContext()); + } public void testBasicNullFolding() { - FoldNull rule = new FoldNull(); - assertNullLiteral(rule.rule(new Add(EMPTY, L(randomInt()), Literal.NULL))); - assertNullLiteral(rule.rule(new Round(EMPTY, Literal.NULL, null))); - assertNullLiteral(rule.rule(new Pow(EMPTY, Literal.NULL, Literal.NULL))); - assertNullLiteral(rule.rule(new DateFormat(EMPTY, Literal.NULL, Literal.NULL, null))); - assertNullLiteral(rule.rule(new DateParse(EMPTY, Literal.NULL, Literal.NULL))); - assertNullLiteral(rule.rule(new DateTrunc(EMPTY, Literal.NULL, Literal.NULL))); - assertNullLiteral(rule.rule(new Substring(EMPTY, Literal.NULL, Literal.NULL, Literal.NULL))); + assertNullLiteral(foldNull(new Add(EMPTY, L(randomInt()), Literal.NULL))); + assertNullLiteral(foldNull(new Round(EMPTY, Literal.NULL, null))); + assertNullLiteral(foldNull(new Pow(EMPTY, Literal.NULL, Literal.NULL))); + assertNullLiteral(foldNull(new DateFormat(EMPTY, Literal.NULL, Literal.NULL, null))); + assertNullLiteral(foldNull(new DateParse(EMPTY, Literal.NULL, Literal.NULL))); + assertNullLiteral(foldNull(new DateTrunc(EMPTY, 
Literal.NULL, Literal.NULL))); + assertNullLiteral(foldNull(new Substring(EMPTY, Literal.NULL, Literal.NULL, Literal.NULL))); } public void testNullFoldingIsNotNull() { - FoldNull foldNull = new FoldNull(); - assertEquals(true, foldNull.rule(new IsNotNull(EMPTY, TRUE)).fold()); - assertEquals(false, foldNull.rule(new IsNotNull(EMPTY, NULL)).fold()); + assertEquals(true, foldNull(new IsNotNull(EMPTY, TRUE)).fold(FoldContext.small())); + assertEquals(false, foldNull(new IsNotNull(EMPTY, NULL)).fold(FoldContext.small())); } @SuppressWarnings("unchecked") public void testNullFoldingDoesNotApplyOnAbstractMultivalueFunction() throws Exception { - FoldNull rule = new FoldNull(); - List> items = List.of( MvDedupe.class, MvFirst.class, @@ -119,119 +121,112 @@ public class FoldNullTests extends ESTestCase { for (Class clazz : items) { Constructor ctor = clazz.getConstructor(Source.class, Expression.class); AbstractMultivalueFunction conditionalFunction = ctor.newInstance(EMPTY, getFieldAttribute("a")); - assertEquals(conditionalFunction, rule.rule(conditionalFunction)); + assertEquals(conditionalFunction, foldNull(conditionalFunction)); conditionalFunction = ctor.newInstance(EMPTY, NULL); - assertEquals(NULL, rule.rule(conditionalFunction)); + assertEquals(NULL, foldNull(conditionalFunction)); } // avg and count ar different just because they know the return type in advance (all the others infer the type from the input) MvAvg avg = new MvAvg(EMPTY, getFieldAttribute("a")); - assertEquals(avg, rule.rule(avg)); + assertEquals(avg, foldNull(avg)); avg = new MvAvg(EMPTY, NULL); - assertEquals(new Literal(EMPTY, null, DOUBLE), rule.rule(avg)); + assertEquals(new Literal(EMPTY, null, DOUBLE), foldNull(avg)); MvCount count = new MvCount(EMPTY, getFieldAttribute("a")); - assertEquals(count, rule.rule(count)); + assertEquals(count, foldNull(count)); count = new MvCount(EMPTY, NULL); - assertEquals(new Literal(EMPTY, null, INTEGER), rule.rule(count)); + assertEquals(new Literal(EMPTY, 
null, INTEGER), foldNull(count)); } public void testNullFoldingIsNull() { - FoldNull foldNull = new FoldNull(); - assertEquals(true, foldNull.rule(new IsNull(EMPTY, NULL)).fold()); - assertEquals(false, foldNull.rule(new IsNull(EMPTY, TRUE)).fold()); + assertEquals(true, foldNull(new IsNull(EMPTY, NULL)).fold(FoldContext.small())); + assertEquals(false, foldNull(new IsNull(EMPTY, TRUE)).fold(FoldContext.small())); } public void testGenericNullableExpression() { FoldNull rule = new FoldNull(); // arithmetic - assertNullLiteral(rule.rule(new Add(EMPTY, getFieldAttribute("a"), NULL))); + assertNullLiteral(foldNull(new Add(EMPTY, getFieldAttribute("a"), NULL))); // comparison - assertNullLiteral(rule.rule(greaterThanOf(getFieldAttribute("a"), NULL))); + assertNullLiteral(foldNull(greaterThanOf(getFieldAttribute("a"), NULL))); // regex - assertNullLiteral(rule.rule(new RLike(EMPTY, NULL, new RLikePattern("123")))); + assertNullLiteral(foldNull(new RLike(EMPTY, NULL, new RLikePattern("123")))); // date functions - assertNullLiteral(rule.rule(new DateExtract(EMPTY, NULL, NULL, configuration("")))); + assertNullLiteral(foldNull(new DateExtract(EMPTY, NULL, NULL, configuration("")))); // math functions - assertNullLiteral(rule.rule(new Cos(EMPTY, NULL))); + assertNullLiteral(foldNull(new Cos(EMPTY, NULL))); // string functions - assertNullLiteral(rule.rule(new LTrim(EMPTY, NULL))); + assertNullLiteral(foldNull(new LTrim(EMPTY, NULL))); // spatial - assertNullLiteral(rule.rule(new SpatialCentroid(EMPTY, NULL))); + assertNullLiteral(foldNull(new SpatialCentroid(EMPTY, NULL))); // ip - assertNullLiteral(rule.rule(new CIDRMatch(EMPTY, NULL, List.of(NULL)))); + assertNullLiteral(foldNull(new CIDRMatch(EMPTY, NULL, List.of(NULL)))); // conversion - assertNullLiteral(rule.rule(new ToString(EMPTY, NULL))); + assertNullLiteral(foldNull(new ToString(EMPTY, NULL))); } public void testNullFoldingDoesNotApplyOnLogicalExpressions() { - FoldNull rule = new FoldNull(); - Or or = new 
Or(EMPTY, NULL, TRUE); - assertEquals(or, rule.rule(or)); + assertEquals(or, foldNull(or)); or = new Or(EMPTY, NULL, NULL); - assertEquals(or, rule.rule(or)); + assertEquals(or, foldNull(or)); And and = new And(EMPTY, NULL, TRUE); - assertEquals(and, rule.rule(and)); + assertEquals(and, foldNull(and)); and = new And(EMPTY, NULL, NULL); - assertEquals(and, rule.rule(and)); + assertEquals(and, foldNull(and)); } @SuppressWarnings("unchecked") public void testNullFoldingDoesNotApplyOnAggregate() throws Exception { - FoldNull rule = new FoldNull(); - List> items = List.of(Max.class, Min.class); for (Class clazz : items) { Constructor ctor = clazz.getConstructor(Source.class, Expression.class); AggregateFunction conditionalFunction = ctor.newInstance(EMPTY, getFieldAttribute("a")); - assertEquals(conditionalFunction, rule.rule(conditionalFunction)); + assertEquals(conditionalFunction, foldNull(conditionalFunction)); conditionalFunction = ctor.newInstance(EMPTY, NULL); - assertEquals(NULL, rule.rule(conditionalFunction)); + assertEquals(NULL, foldNull(conditionalFunction)); } Avg avg = new Avg(EMPTY, getFieldAttribute("a")); - assertEquals(avg, rule.rule(avg)); + assertEquals(avg, foldNull(avg)); avg = new Avg(EMPTY, NULL); - assertEquals(new Literal(EMPTY, null, DOUBLE), rule.rule(avg)); + assertEquals(new Literal(EMPTY, null, DOUBLE), foldNull(avg)); Count count = new Count(EMPTY, getFieldAttribute("a")); - assertEquals(count, rule.rule(count)); + assertEquals(count, foldNull(count)); count = new Count(EMPTY, NULL); - assertEquals(count, rule.rule(count)); + assertEquals(count, foldNull(count)); CountDistinct countd = new CountDistinct(EMPTY, getFieldAttribute("a"), getFieldAttribute("a")); - assertEquals(countd, rule.rule(countd)); + assertEquals(countd, foldNull(countd)); countd = new CountDistinct(EMPTY, NULL, NULL); - assertEquals(new Literal(EMPTY, null, LONG), rule.rule(countd)); + assertEquals(new Literal(EMPTY, null, LONG), foldNull(countd)); Median median = new 
Median(EMPTY, getFieldAttribute("a")); - assertEquals(median, rule.rule(median)); + assertEquals(median, foldNull(median)); median = new Median(EMPTY, NULL); - assertEquals(new Literal(EMPTY, null, DOUBLE), rule.rule(median)); + assertEquals(new Literal(EMPTY, null, DOUBLE), foldNull(median)); MedianAbsoluteDeviation medianad = new MedianAbsoluteDeviation(EMPTY, getFieldAttribute("a")); - assertEquals(medianad, rule.rule(medianad)); + assertEquals(medianad, foldNull(medianad)); medianad = new MedianAbsoluteDeviation(EMPTY, NULL); - assertEquals(new Literal(EMPTY, null, DOUBLE), rule.rule(medianad)); + assertEquals(new Literal(EMPTY, null, DOUBLE), foldNull(medianad)); Percentile percentile = new Percentile(EMPTY, getFieldAttribute("a"), getFieldAttribute("a")); - assertEquals(percentile, rule.rule(percentile)); + assertEquals(percentile, foldNull(percentile)); percentile = new Percentile(EMPTY, NULL, NULL); - assertEquals(new Literal(EMPTY, null, DOUBLE), rule.rule(percentile)); + assertEquals(new Literal(EMPTY, null, DOUBLE), foldNull(percentile)); Sum sum = new Sum(EMPTY, getFieldAttribute("a")); - assertEquals(sum, rule.rule(sum)); + assertEquals(sum, foldNull(sum)); sum = new Sum(EMPTY, NULL); - assertEquals(new Literal(EMPTY, null, DOUBLE), rule.rule(sum)); + assertEquals(new Literal(EMPTY, null, DOUBLE), foldNull(sum)); } public void testNullFoldableDoesNotApplyToIsNullAndNotNull() { - FoldNull rule = new FoldNull(); - DataType numericType = randomFrom(INTEGER, LONG, DOUBLE); DataType genericType = randomFrom(INTEGER, LONG, DOUBLE, UNSIGNED_LONG, KEYWORD, TEXT, GEO_POINT, GEO_SHAPE, VERSION, IP); List items = List.of( @@ -260,29 +255,26 @@ public class FoldNullTests extends ESTestCase { ); for (Expression item : items) { Expression isNull = new IsNull(EMPTY, item); - Expression transformed = rule.rule(isNull); + Expression transformed = foldNull(isNull); assertEquals(isNull, transformed); IsNotNull isNotNull = new IsNotNull(EMPTY, item); - transformed = 
rule.rule(isNotNull); + transformed = foldNull(isNotNull); assertEquals(isNotNull, transformed); } } public void testNullBucketGetsFolded() { - FoldNull foldNull = new FoldNull(); - assertEquals(NULL, foldNull.rule(new Bucket(EMPTY, NULL, NULL, NULL, NULL))); + assertEquals(NULL, foldNull(new Bucket(EMPTY, NULL, NULL, NULL, NULL))); } public void testNullCategorizeGroupingNotFolded() { - FoldNull foldNull = new FoldNull(); Categorize categorize = new Categorize(EMPTY, NULL); - assertEquals(categorize, foldNull.rule(categorize)); + assertEquals(categorize, foldNull(categorize)); } private void assertNullLiteral(Expression expression) { - assertEquals(Literal.class, expression.getClass()); - assertNull(expression.fold()); + assertNull(as(expression, Literal.class).value()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRightTests.java index 17e69e81444c..1664e9f4653b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/LiteralsOnTheRightTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equ import static org.elasticsearch.xpack.esql.EsqlTestUtils.FIVE; import static org.elasticsearch.xpack.esql.EsqlTestUtils.equalsOf; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; @@ -22,7 +23,7 @@ public class LiteralsOnTheRightTests extends ESTestCase { public void testLiteralsOnTheRight() { Alias a = new Alias(EMPTY, "a", new Literal(EMPTY, 10, INTEGER)); - Expression result = new 
LiteralsOnTheRight().rule(equalsOf(FIVE, a)); + Expression result = new LiteralsOnTheRight().rule(equalsOf(FIVE, a), unboundLogicalOptimizerContext()); assertTrue(result instanceof Equals); Equals eq = (Equals) result; assertEquals(a, eq.left()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java index 55091653e75d..a6c0d838b2c2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; @@ -37,11 +38,15 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOrEqualOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.notEqualsOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.rangeOf; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; public class PropagateEqualsTests 
extends ESTestCase { + private Expression propagateEquals(BinaryLogic e) { + return new PropagateEquals().rule(e, unboundLogicalOptimizerContext()); + } // a == 1 AND a == 2 -> FALSE public void testDualEqualsConjunction() { @@ -49,8 +54,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq1 = equalsOf(fa, ONE); Equals eq2 = equalsOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); + Expression exp = propagateEquals(new And(EMPTY, eq1, eq2)); assertEquals(FALSE, exp); } @@ -60,8 +64,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq1 = equalsOf(fa, new Literal(EMPTY, 10, DataType.INTEGER)); Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataType.INTEGER), false); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, r)); + Expression exp = propagateEquals(new And(EMPTY, eq1, r)); assertEquals(FALSE, exp); } @@ -71,8 +74,7 @@ public class PropagateEqualsTests extends ESTestCase { NotEquals neq = notEqualsOf(fa, THREE); Equals eq = equalsOf(fa, THREE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, neq, eq)); + Expression exp = propagateEquals(new And(EMPTY, neq, eq)); assertEquals(FALSE, exp); } @@ -82,8 +84,7 @@ public class PropagateEqualsTests extends ESTestCase { NotEquals neq = notEqualsOf(fa, FOUR); Equals eq = equalsOf(fa, THREE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, neq, eq)); + Expression exp = propagateEquals(new And(EMPTY, neq, eq)); assertEquals(Equals.class, exp.getClass()); assertEquals(eq, exp); } @@ -94,8 +95,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThan lt = lessThanOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, lt)); + Expression exp = propagateEquals(new And(EMPTY, eq, lt)); 
assertEquals(FALSE, exp); } @@ -105,8 +105,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThanOrEqual lt = lessThanOrEqualOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, lt)); + Expression exp = propagateEquals(new And(EMPTY, eq, lt)); assertEquals(eq, exp); } @@ -116,8 +115,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThanOrEqual lt = lessThanOrEqualOf(fa, ONE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, lt)); + Expression exp = propagateEquals(new And(EMPTY, eq, lt)); assertEquals(FALSE, exp); } @@ -127,8 +125,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, gt)); + Expression exp = propagateEquals(new And(EMPTY, eq, gt)); assertEquals(FALSE, exp); } @@ -138,8 +135,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, gte)); + Expression exp = propagateEquals(new And(EMPTY, eq, gte)); assertEquals(eq, exp); } @@ -149,8 +145,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, THREE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, gt)); + Expression exp = propagateEquals(new And(EMPTY, eq, gt)); assertEquals(FALSE, exp); } @@ -162,9 +157,8 @@ public class PropagateEqualsTests extends ESTestCase { GreaterThan gt = greaterThanOf(fa, ONE); NotEquals neq = notEqualsOf(fa, FOUR); - PropagateEquals rule = new PropagateEquals(); Expression and = Predicates.combineAnd(asList(eq, lt, 
gt, neq)); - Expression exp = rule.rule((And) and); + Expression exp = propagateEquals((And) and); assertEquals(eq, exp); } @@ -176,9 +170,8 @@ public class PropagateEqualsTests extends ESTestCase { GreaterThan gt = greaterThanOf(fa, new Literal(EMPTY, 0, DataType.INTEGER)); NotEquals neq = notEqualsOf(fa, FOUR); - PropagateEquals rule = new PropagateEquals(); Expression and = Predicates.combineAnd(asList(eq, range, gt, neq)); - Expression exp = rule.rule((And) and); + Expression exp = propagateEquals((And) and); assertEquals(eq, exp); } @@ -188,8 +181,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, ONE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, gt)); + Expression exp = propagateEquals(new Or(EMPTY, eq, gt)); assertEquals(gt, exp); } @@ -199,8 +191,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, gt)); + Expression exp = propagateEquals(new Or(EMPTY, eq, gt)); assertEquals(GreaterThanOrEqual.class, exp.getClass()); GreaterThanOrEqual gte = (GreaterThanOrEqual) exp; assertEquals(TWO, gte.right()); @@ -212,8 +203,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); LessThan lt = lessThanOf(fa, THREE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, lt)); + Expression exp = propagateEquals(new Or(EMPTY, eq, lt)); assertEquals(lt, exp); } @@ -223,8 +213,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, THREE); LessThan lt = lessThanOf(fa, THREE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, lt)); + Expression exp = propagateEquals(new Or(EMPTY, eq, lt)); assertEquals(LessThanOrEqual.class, 
exp.getClass()); LessThanOrEqual lte = (LessThanOrEqual) exp; assertEquals(THREE, lte.right()); @@ -236,8 +225,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, ONE, false, THREE, false); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, range)); + Expression exp = propagateEquals(new Or(EMPTY, eq, range)); assertEquals(range, exp); } @@ -247,8 +235,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, TWO, false, THREE, false); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, range)); + Expression exp = propagateEquals(new Or(EMPTY, eq, range)); assertEquals(Range.class, exp.getClass()); Range r = (Range) exp; assertEquals(TWO, r.lower()); @@ -263,8 +250,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, THREE); Range range = rangeOf(fa, TWO, false, THREE, false); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, range)); + Expression exp = propagateEquals(new Or(EMPTY, eq, range)); assertEquals(Range.class, exp.getClass()); Range r = (Range) exp; assertEquals(TWO, r.lower()); @@ -279,8 +265,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, neq)); + Expression exp = propagateEquals(new Or(EMPTY, eq, neq)); assertEquals(TRUE, exp); } @@ -290,8 +275,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq = equalsOf(fa, TWO); NotEquals neq = notEqualsOf(fa, FIVE); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, neq)); + Expression exp = propagateEquals(new Or(EMPTY, eq, neq)); assertEquals(NotEquals.class, exp.getClass()); NotEquals ne = 
(NotEquals) exp; assertEquals(FIVE, ne.right()); @@ -305,8 +289,7 @@ public class PropagateEqualsTests extends ESTestCase { GreaterThan gt = greaterThanOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt))); + Expression exp = propagateEquals((Or) Predicates.combineOr(asList(eq, range, neq, gt))); assertEquals(TRUE, exp); } @@ -317,8 +300,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq2 = equalsOf(fa, TWO); And and = new And(EMPTY, eq1, eq2); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(and); + Expression exp = propagateEquals(and); assertEquals(and, exp); } @@ -328,8 +310,7 @@ public class PropagateEqualsTests extends ESTestCase { Equals eq1 = equalsOf(fa, ONE); Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataType.INTEGER), false); - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, r)); + Expression exp = propagateEquals(new And(EMPTY, eq1, r)); assertEquals(eq1, exp); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java index d1d6a7fbaa20..d35890e5b56b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java @@ -31,11 +31,20 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.greaterThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.lessThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.relation; +import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; public class PropagateNullableTests extends ESTestCase { + private Expression propagateNullable(And e) { + return new PropagateNullable().rule(e, unboundLogicalOptimizerContext()); + } + + private LogicalPlan propagateNullable(LogicalPlan p) { + return new PropagateNullable().apply(p, unboundLogicalOptimizerContext()); + } + private Literal nullOf(DataType dataType) { return new Literal(Source.EMPTY, null, dataType); } @@ -45,7 +54,7 @@ public class PropagateNullableTests extends ESTestCase { FieldAttribute fa = getFieldAttribute(); And and = new And(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, fa)); - assertEquals(FALSE, new PropagateNullable().rule(and)); + assertEquals(FALSE, propagateNullable(and)); } // a IS NULL AND b IS NOT NULL AND c IS NULL AND d IS NOT NULL AND e IS NULL AND a IS NOT NULL => false @@ -58,7 +67,7 @@ public class PropagateNullableTests extends ESTestCase { And and = new And(EMPTY, andOne, new And(EMPTY, andThree, andTwo)); - assertEquals(FALSE, new PropagateNullable().rule(and)); + assertEquals(FALSE, propagateNullable(and)); } // a IS NULL AND a > 1 => a IS NULL AND false @@ -67,7 +76,7 @@ public class PropagateNullableTests extends ESTestCase { IsNull isNull = new IsNull(EMPTY, fa); And and = new And(EMPTY, isNull, greaterThanOf(fa, ONE)); - assertEquals(new And(EMPTY, isNull, nullOf(BOOLEAN)), new PropagateNullable().rule(and)); + assertEquals(new And(EMPTY, isNull, nullOf(BOOLEAN)), propagateNullable(and)); } // a IS NULL AND b < 1 AND c < 1 AND a < 1 => a IS NULL AND b < 1 AND c < 1 => a IS NULL AND b < 1 AND c < 1 @@ -79,7 +88,7 @@ public class PropagateNullableTests extends ESTestCase { And and = new And(EMPTY, isNull, nestedAnd); And top = new 
And(EMPTY, and, lessThanOf(fa, ONE)); - Expression optimized = new PropagateNullable().rule(top); + Expression optimized = propagateNullable(top); Expression expected = new And(EMPTY, and, nullOf(BOOLEAN)); assertEquals(Predicates.splitAnd(expected), Predicates.splitAnd(optimized)); } @@ -97,7 +106,7 @@ public class PropagateNullableTests extends ESTestCase { Expression kept = new And(EMPTY, isNull, lessThanOf(getFieldAttribute("b"), THREE)); And and = new And(EMPTY, nullified, kept); - Expression optimized = new PropagateNullable().rule(and); + Expression optimized = propagateNullable(and); Expression expected = new And(EMPTY, new And(EMPTY, nullOf(BOOLEAN), nullOf(BOOLEAN)), kept); assertEquals(Predicates.splitAnd(expected), Predicates.splitAnd(optimized)); @@ -110,13 +119,13 @@ public class PropagateNullableTests extends ESTestCase { Or or = new Or(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, fa)); Filter dummy = new Filter(EMPTY, relation(), or); - LogicalPlan transformed = new PropagateNullable().apply(dummy); + LogicalPlan transformed = propagateNullable(dummy); assertSame(dummy, transformed); assertEquals(or, ((Filter) transformed).condition()); or = new Or(EMPTY, new IsNull(EMPTY, fa), greaterThanOf(fa, ONE)); dummy = new Filter(EMPTY, relation(), or); - transformed = new PropagateNullable().apply(dummy); + transformed = propagateNullable(dummy); assertSame(dummy, transformed); assertEquals(or, ((Filter) transformed).condition()); } @@ -129,7 +138,7 @@ public class PropagateNullableTests extends ESTestCase { Or or = new Or(EMPTY, isNull, greaterThanOf(fa, THREE)); And and = new And(EMPTY, new Add(EMPTY, fa, ONE), or); - assertEquals(and, new PropagateNullable().rule(and)); + assertEquals(and, propagateNullable(and)); } public void testDoNotOptimizeIsNullAndMultipleComparisonWithConstants() { @@ -141,7 +150,7 @@ public class PropagateNullableTests extends ESTestCase { And aIsNull_AND_bLT1_AND_cLT1 = new And(EMPTY, aIsNull, bLT1_AND_cLT1); And 
aIsNull_AND_bLT1_AND_cLT1_AND_aLT1 = new And(EMPTY, aIsNull_AND_bLT1_AND_cLT1, lessThanOf(a, ONE)); - Expression optimized = new PropagateNullable().rule(aIsNull_AND_bLT1_AND_cLT1_AND_aLT1); + Expression optimized = propagateNullable(aIsNull_AND_bLT1_AND_cLT1_AND_aLT1); Literal nullLiteral = new Literal(EMPTY, null, BOOLEAN); assertEquals(asList(aIsNull, nullLiteral, nullLiteral, nullLiteral), Predicates.splitAnd(optimized)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java index c7206c6971bd..b9ffc39e5e13 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java @@ -10,8 +10,10 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; @@ -20,16 +22,20 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equ import static java.util.Arrays.asList; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; +import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; public class ReplaceRegexMatchTests extends ESTestCase { + private Expression replaceRegexMatch(RegexMatch e) { + return new ReplaceRegexMatch().rule(e, unboundLogicalOptimizerContext()); + } public void testMatchAllWildcardLikeToExist() { for (String s : asList("*", "**", "***")) { WildcardPattern pattern = new WildcardPattern(s); FieldAttribute fa = getFieldAttribute(); WildcardLike l = new WildcardLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); + Expression e = replaceRegexMatch(l); assertEquals(IsNotNull.class, e.getClass()); IsNotNull inn = (IsNotNull) e; assertEquals(fa, inn.field()); @@ -40,7 +46,7 @@ public class ReplaceRegexMatchTests extends ESTestCase { RLikePattern pattern = new RLikePattern(".*"); FieldAttribute fa = getFieldAttribute(); RLike l = new RLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); + Expression e = replaceRegexMatch(l); assertEquals(IsNotNull.class, e.getClass()); IsNotNull inn = (IsNotNull) e; assertEquals(fa, inn.field()); @@ -51,11 +57,11 @@ public class ReplaceRegexMatchTests extends ESTestCase { WildcardPattern pattern = new WildcardPattern(s); FieldAttribute fa = getFieldAttribute(); WildcardLike l = new WildcardLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); + Expression e = replaceRegexMatch(l); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); - assertEquals(s.replace("\\", StringUtils.EMPTY), eq.right().fold()); + assertEquals(s.replace("\\", StringUtils.EMPTY), eq.right().fold(FoldContext.small())); } } @@ -63,11 +69,11 @@ public class ReplaceRegexMatchTests extends ESTestCase { RLikePattern pattern = new RLikePattern("abc"); FieldAttribute fa = getFieldAttribute(); RLike l = new RLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); + 
Expression e = replaceRegexMatch(l); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); - assertEquals("abc", eq.right().fold()); + assertEquals("abc", eq.right().fold(FoldContext.small())); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java index 2429bcb1a1b0..90c8ae103232 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -417,7 +418,7 @@ public class PushTopNToSourceTests extends ESTestCase { private static PhysicalPlan pushTopNToSource(TopNExec topNExec) { var configuration = EsqlTestUtils.configuration("from test"); - var ctx = new LocalPhysicalOptimizerContext(configuration, SearchStats.EMPTY); + var ctx = new LocalPhysicalOptimizerContext(configuration, FoldContext.small(), SearchStats.EMPTY); var pushTopNToSource = new PushTopNToSource(); return pushTopNToSource.rule(topNExec, ctx); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java index e6fef186721a..99a04b6ed8f1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java @@ -151,8 +151,12 @@ abstract class AbstractStatementParserTests extends ESTestCase { expectInvalidIndexNameErrorWithLineNumber(query, "\"" + indexString + "\"", lineNumber, indexString); } - void expectInvalidIndexNameErrorWithLineNumber(String query, String indexString, String lineNumber, String error) { - expectError(LoggerMessageFormat.format(null, query, indexString), lineNumber + "Invalid index name [" + error); + void expectInvalidIndexNameErrorWithLineNumber(String query, String indexString, String lineNumber, String name) { + expectError(LoggerMessageFormat.format(null, query, indexString), lineNumber + "Invalid index name [" + name); + } + + void expectInvalidIndexNameErrorWithLineNumber(String query, String indexString, String lineNumber, String name, String error) { + expectError(LoggerMessageFormat.format(null, query, indexString), lineNumber + "Invalid index name [" + name + "], " + error); } void expectDateMathErrorWithLineNumber(String query, String arg, String lineNumber, String error) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 710637c05a90..85d4017b166f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.esql.parser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Alias; import 
org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; @@ -617,16 +618,14 @@ public class ExpressionTests extends ESTestCase { Equals eq = (Equals) e; assertThat(eq.left(), instanceOf(UnresolvedAttribute.class)); assertThat(((UnresolvedAttribute) eq.left()).name(), equalTo("a")); - assertThat(eq.right(), instanceOf(Literal.class)); - assertThat(eq.right().fold(), equalTo(1)); + assertThat(as(eq.right(), Literal.class).value(), equalTo(1)); e = whereExpression("1 IN (a)"); assertThat(e, instanceOf(Equals.class)); eq = (Equals) e; assertThat(eq.right(), instanceOf(UnresolvedAttribute.class)); assertThat(((UnresolvedAttribute) eq.right()).name(), equalTo("a")); - assertThat(eq.left(), instanceOf(Literal.class)); - assertThat(eq.left().fold(), equalTo(1)); + assertThat(eq.left().fold(FoldContext.small()), equalTo(1)); e = whereExpression("1 NOT IN (a)"); assertThat(e, instanceOf(Not.class)); @@ -635,9 +634,7 @@ public class ExpressionTests extends ESTestCase { eq = (Equals) e; assertThat(eq.right(), instanceOf(UnresolvedAttribute.class)); assertThat(((UnresolvedAttribute) eq.right()).name(), equalTo("a")); - assertThat(eq.left(), instanceOf(Literal.class)); - assertThat(eq.left().fold(), equalTo(1)); - + assertThat(eq.left().fold(FoldContext.small()), equalTo(1)); } private Expression whereExpression(String e) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index b83892ea4704..a6243d25ba57 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -577,6 +578,9 @@ public class StatementParserTests extends AbstractStatementParserTests { expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern"); + expectInvalidIndexNameErrorWithLineNumber(command, "\"- , -\"", commands.get(command), "", "must not be empty"); + expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern,-\"", commands.get(command), "", "must not be empty"); + clustersAndIndices(command, "indexpattern", "*-"); clustersAndIndices(command, "indexpattern", "-indexpattern"); } @@ -621,11 +625,8 @@ public class StatementParserTests extends AbstractStatementParserTests { expectError("FROM \"foo\"bar\"", ": token recognition error at: '\"'"); expectError("FROM \"foo\"\"bar\"", ": extraneous input '\"bar\"' expecting "); - expectError("FROM \"\"\"foo\"\"\"bar\"\"\"", ": mismatched input 'bar' expecting {, '|', ',', OPENING_BRACKET, 'metadata'}"); - expectError( - "FROM \"\"\"foo\"\"\"\"\"\"bar\"\"\"", - ": mismatched input '\"bar\"' expecting {, '|', ',', OPENING_BRACKET, 'metadata'}" - ); + expectError("FROM 
\"\"\"foo\"\"\"bar\"\"\"", ": mismatched input 'bar' expecting {, '|', ',', 'metadata'}"); + expectError("FROM \"\"\"foo\"\"\"\"\"\"bar\"\"\"", ": mismatched input '\"bar\"' expecting {, '|', ',', 'metadata'}"); } public void testInvalidQuotingAsMetricsIndexPattern() { @@ -905,10 +906,7 @@ public class StatementParserTests extends AbstractStatementParserTests { public void testMetadataFieldOnOtherSources() { expectError("row a = 1 metadata _index", "line 1:20: extraneous input '_index' expecting "); expectError("show info metadata _index", "line 1:11: token recognition error at: 'm'"); - expectError( - "explain [from foo] metadata _index", - "line 1:20: mismatched input 'metadata' expecting {'|', ',', OPENING_BRACKET, ']', 'metadata'}" - ); + expectError("explain [from foo] metadata _index", "line 1:20: mismatched input 'metadata' expecting {'|', ',', ']', 'metadata'}"); } public void testMetadataFieldMultipleDeclarations() { @@ -1129,51 +1127,51 @@ public class StatementParserTests extends AbstractStatementParserTests { assertThat(field.name(), is("x")); assertThat(field, instanceOf(Alias.class)); Alias alias = (Alias) field; - assertThat(alias.child().fold(), is(1)); + assertThat(alias.child().fold(FoldContext.small()), is(1)); field = row.fields().get(1); assertThat(field.name(), is("y")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is("2")); + assertThat(alias.child().fold(FoldContext.small()), is("2")); field = row.fields().get(2); assertThat(field.name(), is("a")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is("2 days")); + assertThat(alias.child().fold(FoldContext.small()), is("2 days")); field = row.fields().get(3); assertThat(field.name(), is("b")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is("4 hours")); + assertThat(alias.child().fold(FoldContext.small()), is("4 hours")); field = 
row.fields().get(4); assertThat(field.name(), is("c")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold().getClass(), is(String.class)); - assertThat(alias.child().fold().toString(), is("1.2.3")); + assertThat(alias.child().fold(FoldContext.small()).getClass(), is(String.class)); + assertThat(alias.child().fold(FoldContext.small()).toString(), is("1.2.3")); field = row.fields().get(5); assertThat(field.name(), is("d")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold().getClass(), is(String.class)); - assertThat(alias.child().fold().toString(), is("127.0.0.1")); + assertThat(alias.child().fold(FoldContext.small()).getClass(), is(String.class)); + assertThat(alias.child().fold(FoldContext.small()).toString(), is("127.0.0.1")); field = row.fields().get(6); assertThat(field.name(), is("e")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is(9)); + assertThat(alias.child().fold(FoldContext.small()), is(9)); field = row.fields().get(7); assertThat(field.name(), is("f")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is(11)); + assertThat(alias.child().fold(FoldContext.small()), is(11)); } public void testMissingInputParams() { @@ -1190,13 +1188,13 @@ public class StatementParserTests extends AbstractStatementParserTests { assertThat(field.name(), is("x")); assertThat(field, instanceOf(Alias.class)); Alias alias = (Alias) field; - assertThat(alias.child().fold(), is(1)); + assertThat(alias.child().fold(FoldContext.small()), is(1)); field = row.fields().get(1); assertThat(field.name(), is("y")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is(1)); + assertThat(alias.child().fold(FoldContext.small()), is(1)); } public void testInvalidNamedParams() { @@ -1237,13 +1235,13 @@ public class StatementParserTests 
extends AbstractStatementParserTests { assertThat(field.name(), is("x")); assertThat(field, instanceOf(Alias.class)); Alias alias = (Alias) field; - assertThat(alias.child().fold(), is(1)); + assertThat(alias.child().fold(FoldContext.small()), is(1)); field = row.fields().get(1); assertThat(field.name(), is("y")); assertThat(field, instanceOf(Alias.class)); alias = (Alias) field; - assertThat(alias.child().fold(), is(1)); + assertThat(alias.child().fold(FoldContext.small()), is(1)); } public void testInvalidPositionalParams() { @@ -2057,7 +2055,7 @@ public class StatementParserTests extends AbstractStatementParserTests { var plan = statement(statement); var lookup = as(plan, Lookup.class); var tableName = as(lookup.tableName(), Literal.class); - assertThat(tableName.fold(), equalTo(string)); + assertThat(tableName.fold(FoldContext.small()), equalTo(string)); } public void testIdPatternUnquoted() throws Exception { @@ -2125,7 +2123,7 @@ public class StatementParserTests extends AbstractStatementParserTests { var plan = statement(query); var lookup = as(plan, Lookup.class); var tableName = as(lookup.tableName(), Literal.class); - assertThat(tableName.fold(), equalTo("t")); + assertThat(tableName.fold(FoldContext.small()), equalTo("t")); assertThat(lookup.matchFields(), hasSize(1)); var matchField = as(lookup.matchFields().get(0), UnresolvedAttribute.class); assertThat(matchField.name(), equalTo("j")); @@ -2306,7 +2304,7 @@ public class StatementParserTests extends AbstractStatementParserTests { var match = (Match) filter.condition(); var matchField = (UnresolvedAttribute) match.field(); assertThat(matchField.name(), equalTo("field")); - assertThat(match.query().fold(), equalTo("value")); + assertThat(match.query().fold(FoldContext.small()), equalTo("value")); } public void testInvalidMatchOperator() { @@ -2341,7 +2339,7 @@ public class StatementParserTests extends AbstractStatementParserTests { var toInteger = (ToInteger) function.children().get(0); var matchField = 
(UnresolvedAttribute) toInteger.field(); assertThat(matchField.name(), equalTo("field")); - assertThat(function.children().get(1).fold(), equalTo("value")); + assertThat(function.children().get(1).fold(FoldContext.small()), equalTo("value")); } public void testMatchOperatorFieldCasting() { @@ -2351,6 +2349,13 @@ public class StatementParserTests extends AbstractStatementParserTests { var toInteger = (ToInteger) match.field(); var matchField = (UnresolvedAttribute) toInteger.field(); assertThat(matchField.name(), equalTo("field")); - assertThat(match.query().fold(), equalTo("value")); + assertThat(match.query().fold(FoldContext.small()), equalTo("value")); + } + + public void testFailingMetadataWithSquareBrackets() { + expectError( + "FROM test [METADATA _index] | STATS count(*)", + "line 1:11: mismatched input '[' expecting {, '|', ',', 'metadata'}" + ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java index a254207865ad..2f47a672a68d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; @@ -42,7 +43,7 @@ public class QueryPlanTests extends ESTestCase { assertEquals(Limit.class, transformed.getClass()); Limit l = (Limit) transformed; - assertEquals(24, 
l.limit().fold()); + assertEquals(24, l.limit().fold(FoldContext.small())); } public void testTransformWithExpressionTree() throws Exception { @@ -53,7 +54,7 @@ public class QueryPlanTests extends ESTestCase { assertEquals(OrderBy.class, transformed.getClass()); OrderBy order = (OrderBy) transformed; assertEquals(Limit.class, order.child().getClass()); - assertEquals(24, ((Limit) order.child()).limit().fold()); + assertEquals(24, ((Limit) order.child()).limit().fold(FoldContext.small())); } public void testTransformWithExpressionTopLevelInCollection() throws Exception { @@ -83,12 +84,12 @@ public class QueryPlanTests extends ESTestCase { List list = new ArrayList<>(); project.forEachExpression(Literal.class, l -> { - if (l.fold().equals(42)) { - list.add(l.fold()); + if (l.value().equals(42)) { + list.add(l.value()); } }); - assertEquals(singletonList(one.child().fold()), list); + assertEquals(singletonList(one.child().fold(FoldContext.small())), list); } public void testForEachWithExpressionTree() throws Exception { @@ -97,12 +98,12 @@ public class QueryPlanTests extends ESTestCase { List list = new ArrayList<>(); o.forEachExpressionDown(Literal.class, l -> { - if (l.fold().equals(42)) { - list.add(l.fold()); + if (l.value().equals(42)) { + list.add(l.value()); } }); - assertEquals(singletonList(limit.limit().fold()), list); + assertEquals(singletonList(limit.limit().fold(FoldContext.small())), list); } public void testForEachWithExpressionTopLevelInCollection() throws Exception { @@ -129,12 +130,12 @@ public class QueryPlanTests extends ESTestCase { List list = new ArrayList<>(); project.forEachExpression(Literal.class, l -> { - if (l.fold().equals(42)) { - list.add(l.fold()); + if (l.value().equals(42)) { + list.add(l.value()); } }); - assertEquals(singletonList(one.child().fold()), list); + assertEquals(singletonList(one.child().fold(FoldContext.small())), list); } public void testPlanExpressions() { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index 5a7547d011c0..e2eb05b0c14d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.SerializationTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; @@ -145,7 +146,7 @@ public class EvalMapperTests extends ESTestCase { lb.append(LONG); Layout layout = lb.build(); - var supplier = EvalMapper.toEvaluator(expression, layout); + var supplier = EvalMapper.toEvaluator(FoldContext.small(), expression, layout); EvalOperator.ExpressionEvaluator evaluator1 = supplier.get(driverContext()); EvalOperator.ExpressionEvaluator evaluator2 = supplier.get(driverContext()); assertNotNull(evaluator1); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index 55f32d07fc2c..4191f42f0823 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import 
org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; -import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; @@ -52,6 +51,7 @@ import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomConfiguration; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.core.util.Queries.Clause.FILTER; @@ -78,7 +78,7 @@ public class FilterTests extends ESTestCase { Map mapping = loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(EsqlTestUtils.TEST_CFG)); mapper = new Mapper(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 5d8da21c6faa..a1648c67d9bd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -83,6 +84,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { public void testLuceneSourceOperatorHugeRowSize() throws IOException { int estimatedRowSize = randomEstimatedRowSize(estimatedRowSizeIsHuge); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + FoldContext.small(), new EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, null, null, estimatedRowSize) ); assertThat(plan.driverFactories.size(), lessThanOrEqualTo(pragmas.taskConcurrency())); @@ -98,6 +100,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { EsQueryExec.FieldSort sort = new EsQueryExec.FieldSort(sortField, Order.OrderDirection.ASC, Order.NullsPosition.LAST); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + FoldContext.small(), new EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, limit, List.of(sort), estimatedRowSize) ); assertThat(plan.driverFactories.size(), lessThanOrEqualTo(pragmas.taskConcurrency())); @@ -113,6 +116,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { EsQueryExec.GeoDistanceSort sort = new EsQueryExec.GeoDistanceSort(sortField, Order.OrderDirection.ASC, 1, -1); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = 
planner().plan( + FoldContext.small(), new EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, limit, List.of(sort), estimatedRowSize) ); assertThat(plan.driverFactories.size(), lessThanOrEqualTo(pragmas.taskConcurrency())); @@ -187,7 +191,7 @@ public class LocalExecutionPlannerTests extends MapperServiceTestCase { ); } releasables.add(searcher); - return new EsPhysicalOperationProviders(shardContexts, null); + return new EsPhysicalOperationProviders(FoldContext.small(), shardContexts, null); } private IndexReader reader() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 01dd4db123ee..628737aa36c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; @@ -71,13 +72,13 @@ import static org.apache.lucene.tests.util.LuceneTestCase.createTempDir; public class TestPhysicalOperationProviders extends AbstractPhysicalOperationProviders { private final List indexPages; - private TestPhysicalOperationProviders(List indexPages, AnalysisRegistry analysisRegistry) { - super(analysisRegistry); + private TestPhysicalOperationProviders(FoldContext foldContext, List indexPages, 
AnalysisRegistry analysisRegistry) { + super(foldContext, analysisRegistry); this.indexPages = indexPages; } - public static TestPhysicalOperationProviders create(List indexPages) throws IOException { - return new TestPhysicalOperationProviders(indexPages, createAnalysisRegistry()); + public static TestPhysicalOperationProviders create(FoldContext foldContext, List indexPages) throws IOException { + return new TestPhysicalOperationProviders(foldContext, indexPages, createAnalysisRegistry()); } public record IndexPage(String index, Page page, List columnNames) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java index f2a619f0dbd8..f3b1d84e507a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; -import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; @@ -39,10 +38,10 @@ import java.util.Map; import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomConfiguration; import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomTables; -import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_CFG; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptyPolicyResolution; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.hamcrest.Matchers.equalTo; @@ -187,7 +186,7 @@ public class ClusterRequestTests extends AbstractWireSerializingTestCase mapping = loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(test); - var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(TEST_CFG)); + var logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()); var analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, emptyPolicyResolution()), TEST_VERIFIER diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java index 2cc733c2ea2e..fac3495697da 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.index.EsIndex; @@ -289,7 +290,7 @@ public class DataNodeRequestSerializationTests extends AbstractWireSerializingTe Map mapping = 
loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(test); - var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(TEST_CFG)); + var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(TEST_CFG, FoldContext.small())); var analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, emptyPolicyResolution()), TEST_VERIFIER diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 4db4f7925d4f..b1c9030db7a4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -1365,7 +1365,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( "FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code", Set.of("languages", "languages.*", "language_code", "language_code.*"), @@ -1374,7 +1374,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testLookupJoinKeep() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM employees @@ -1388,7 +1388,7 @@ public class IndexResolverFieldNamesTests 
extends ESTestCase { } public void testLookupJoinKeepWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM employees @@ -1402,7 +1402,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1415,7 +1415,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1429,7 +1429,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1454,7 +1454,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1481,7 +1481,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinKeepAfterWildcard() { - 
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1495,7 +1495,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndex() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1509,7 +1509,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndexKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1524,7 +1524,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndexKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1550,7 +1550,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase { } public void testMultiLookupJoinSameIndexKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V10.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); assertFieldNames( """ FROM sample_data diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 539cd0314a4d..a3c5cd9168b4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.session.EsqlSession; @@ -119,6 +120,7 @@ public class PlanExecutorMetricsTests extends ESTestCase { request, randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, + FoldContext.small(), enrichResolver, new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, @@ -149,6 +151,7 @@ public class PlanExecutorMetricsTests extends ESTestCase { request, randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, + FoldContext.small(), enrichResolver, new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java index 05b75fe6b01c..0b106f6e304d 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java @@ -8,36 +8,22 @@ package org.elasticsearch.xpack.frozen; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.EnginePlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestHandler; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; import org.elasticsearch.xpack.frozen.action.TransportFreezeIndexAction; -import org.elasticsearch.xpack.frozen.rest.action.RestFreezeIndexAction; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Optional; -import java.util.function.Predicate; -import java.util.function.Supplier; public class FrozenIndices extends Plugin implements ActionPlugin, EnginePlugin { @@ -63,19 +49,4 @@ public class FrozenIndices extends Plugin implements ActionPlugin, EnginePlugin actions.add(new ActionHandler<>(FreezeIndexAction.INSTANCE, TransportFreezeIndexAction.class)); return actions; } - - @Override - public List getRestHandlers( - Settings settings, - NamedWriteableRegistry namedWriteableRegistry, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter 
settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster, - Predicate clusterSupportsFeature - ) { - return Collections.singletonList(new RestFreezeIndexAction()); - } } diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java deleted file mode 100644 index f07d17fee87f..000000000000 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.frozen.rest.action; - -import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; - -import java.util.List; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestUtils.getAckTimeout; -import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; - -public final class RestFreezeIndexAction extends BaseRestHandler { - - private static final String FREEZE_REMOVED = "It is no longer possible to freeze indices, but existing frozen indices can still be " - + "unfrozen"; - - private static final 
String UNFREEZE_DEPRECATED = "Frozen indices are deprecated because they provide no benefit given improvements " - + "in heap memory utilization. They will be removed in a future release."; - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_INDEXING) - // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. - @Override - public List routes() { - return List.of( - // Route.builder(POST, "/{index}/_unfreeze").deprecated(UNFREEZE_DEPRECATED, RestApiVersion.V_8).build() - Route.builder(POST, "/{index}/_unfreeze").deprecateAndKeep(UNFREEZE_DEPRECATED).build() - ); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - final var freezeRequest = new FreezeRequest( - getMasterNodeTimeout(request), - getAckTimeout(request), - Strings.splitStringByCommaToArray(request.param("index")) - ); - freezeRequest.indicesOptions(IndicesOptions.fromRequest(request, freezeRequest.indicesOptions())); - String waitForActiveShards = request.param("wait_for_active_shards"); - if (waitForActiveShards != null) { - freezeRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); - } - freezeRequest.setFreeze(false); - return channel -> client.execute(FreezeIndexAction.INSTANCE, freezeRequest, new RestToXContentListener<>(channel)); - } - - @Override - public String getName() { - return "freeze_index"; - } -} diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java 
b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java index d3892d6d52f7..ea2c30fac00b 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java @@ -108,7 +108,7 @@ public class ClusterStateWaitThresholdBreachTests extends ESIntegTestCase { String[] firstAttemptShrinkIndexName = new String[1]; assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -118,7 +118,7 @@ public class ClusterStateWaitThresholdBreachTests extends ESIntegTestCase { // let's check ILM for the managed index is waiting in the `shrunk-shards-allocated` step assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -160,7 +160,7 @@ public class ClusterStateWaitThresholdBreachTests extends ESIntegTestCase { String[] secondCycleShrinkIndexName = new String[1]; assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new 
ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -180,7 +180,9 @@ public class ClusterStateWaitThresholdBreachTests extends ESIntegTestCase { // situation and allow for shrink to complete by reducing the number of shards for the shrunk index to 0 setReplicaCount(0, secondCycleShrinkIndexName[0]); assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(secondCycleShrinkIndexName[0]); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( + secondCycleShrinkIndexName[0] + ); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses() .get(secondCycleShrinkIndexName[0]); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java index 21924634ff6a..d9cc4de344a3 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java @@ -149,7 +149,10 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { List backingIndices = getBackingIndices(dataStreamName); String firstGenerationIndex = backingIndices.get(0); String secondGenerationIndex = backingIndices.get(1); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(firstGenerationIndex, 
secondGenerationIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( + firstGenerationIndex, + secondGenerationIndex + ); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse firstGenerationExplain = explainResponse.getIndexResponses().get(firstGenerationIndex); @@ -193,7 +196,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String firstGenerationIndex = backingIndices.get(0); String secondGenerationIndex = backingIndices.get(1); String writeIndex = backingIndices.get(2); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, writeIndex @@ -270,7 +273,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String secondGenerationIndex = backingIndices.get(1); String thirdGenerationIndex = backingIndices.get(2); String writeIndex = backingIndices.get(3); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, thirdGenerationIndex, @@ -348,7 +351,10 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { List backingIndices = getBackingIndices(dataStreamName); String firstGenerationIndex = backingIndices.get(0); String secondGenerationIndex = backingIndices.get(1); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(firstGenerationIndex, secondGenerationIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( + firstGenerationIndex, + secondGenerationIndex + ); ExplainLifecycleResponse explainResponse = 
client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse firstGenerationExplain = explainResponse.getIndexResponses().get(firstGenerationIndex); @@ -401,7 +407,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String firstGenerationIndex = backingIndices.get(0); String secondGenerationIndex = backingIndices.get(1); String thirdGenerationIndex = backingIndices.get(2); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, thirdGenerationIndex @@ -465,7 +471,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String secondGenerationIndex = backingIndices.get(1); String thirdGenerationIndex = backingIndices.get(2); String writeIndex = backingIndices.get(3); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, thirdGenerationIndex, @@ -543,7 +549,10 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { List backingIndices = getBackingIndices(dataStreamName); String firstGenerationIndex = backingIndices.get(0); String secondGenerationIndex = backingIndices.get(1); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(firstGenerationIndex, secondGenerationIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( + firstGenerationIndex, + secondGenerationIndex + ); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse firstGenerationExplain = explainResponse.getIndexResponses().get(firstGenerationIndex); @@ -591,7 +600,7 @@ 
public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String firstGenerationIndex = backingIndices.get(0); String secondGenerationIndex = backingIndices.get(1); String writeIndex = backingIndices.get(2); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, writeIndex @@ -663,7 +672,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String secondGenerationIndex = backingIndices.get(1); String thirdGenerationIndex = backingIndices.get(2); String writeIndex = backingIndices.get(3); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, thirdGenerationIndex, @@ -720,7 +729,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String secondGenerationIndex = backingIndices.get(1); String thirdGenerationIndex = backingIndices.get(2); String writeIndex = backingIndices.get(3); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices( + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( firstGenerationIndex, secondGenerationIndex, thirdGenerationIndex, @@ -846,7 +855,7 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { assertThat(index.isManagedByLifecycle(), is(true)); } - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(writeIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(writeIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse writeIndexExplain = 
explainResponse.getIndexResponses().get(writeIndex); @@ -875,7 +884,10 @@ public class DataStreamAndIndexLifecycleMixingTests extends ESIntegTestCase { String thirdGenerationIndex = backingIndices.get(2); String writeIndex = backingIndices.get(3); - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(thirdGenerationIndex, writeIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices( + thirdGenerationIndex, + writeIndex + ); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse thirdGenerationExplain = explainResponse.getIndexResponses().get(thirdGenerationIndex); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java index 6d409bf474cf..d48340193183 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java @@ -116,7 +116,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { assertTrue(res.isAcknowledged()); assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -127,7 +127,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { logger.info("starting a warm data node"); internalCluster().startNode(warmNode(Settings.EMPTY)); 
assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -140,7 +140,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { // wait for lifecycle to complete in the cold phase after the index has been migrated to the cold node assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -177,7 +177,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { assertTrue(res.isAcknowledged()); assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -197,7 +197,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { // 1. start another cold node so both the primary and replica can relocate to the cold nodes // 2. 
remove the tier routing setting from the index again (we're doing this below) assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); @@ -210,7 +210,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { // wait for lifecycle to complete in the cold phase assertBusy(() -> { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest().indices(managedIndex); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices(managedIndex); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); IndexLifecycleExplainResponse indexLifecycleExplainResponse = explainResponse.getIndexResponses().get(managedIndex); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java index 2c4c1c9e20bb..3f92abc7e3da 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java @@ -99,8 +99,10 @@ public class ILMMultiNodeIT extends ESIntegTestCase { prepareIndex(index).setCreate(true).setId("1").setSource("@timestamp", "2020-09-09").get(); assertBusy(() -> { - ExplainLifecycleResponse explain = client().execute(ExplainLifecycleAction.INSTANCE, new ExplainLifecycleRequest().indices("*")) - .get(); + ExplainLifecycleResponse explain = client().execute( + 
ExplainLifecycleAction.INSTANCE, + new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices("*") + ).get(); logger.info("--> explain: {}", Strings.toString(explain)); String backingIndexName = DataStream.getDefaultBackingIndexName(index, 1); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java index b91a309a23ae..c695dc8f87b6 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java @@ -99,8 +99,10 @@ public class ILMMultiNodeWithCCRDisabledIT extends ESIntegTestCase { prepareIndex(index).setCreate(true).setId("1").setSource("@timestamp", "2020-09-09").get(); assertBusy(() -> { - ExplainLifecycleResponse explain = client().execute(ExplainLifecycleAction.INSTANCE, new ExplainLifecycleRequest().indices("*")) - .get(); + ExplainLifecycleResponse explain = client().execute( + ExplainLifecycleAction.INSTANCE, + new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT).indices("*") + ).get(); logger.info("--> explain: {}", Strings.toString(explain)); String backingIndexName = DataStream.getDefaultBackingIndexName(index, 1); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java index c6536c6501a5..91934cf46b7c 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java @@ -389,7 +389,7 @@ public class IndexLifecycleInitialisationTests extends 
ESIntegTestCase { private IndexLifecycleExplainResponse executeExplainRequestAndGetTestIndexResponse(String indexName) throws ExecutionException, InterruptedException { - ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(); + ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(TEST_REQUEST_TIMEOUT); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); assertThat(explainResponse.getIndexResponses().size(), equalTo(1)); return explainResponse.getIndexResponses().get(indexName); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java index 195f989eab05..3950edf648e1 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java @@ -36,12 +36,11 @@ public class RestExplainLifecycleAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String[] indexes = Strings.splitStringByCommaToArray(restRequest.param("index")); - ExplainLifecycleRequest explainLifecycleRequest = new ExplainLifecycleRequest(); + ExplainLifecycleRequest explainLifecycleRequest = new ExplainLifecycleRequest(getMasterNodeTimeout(restRequest)); explainLifecycleRequest.indices(indexes); explainLifecycleRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, IndicesOptions.strictExpandOpen())); explainLifecycleRequest.onlyManaged(restRequest.paramAsBoolean("only_managed", false)); explainLifecycleRequest.onlyErrors(restRequest.paramAsBoolean("only_errors", false)); - explainLifecycleRequest.masterNodeTimeout(getMasterNodeTimeout(restRequest)); return channel -> client.execute(ExplainLifecycleAction.INSTANCE, 
explainLifecycleRequest, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBasicLicenseIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBasicLicenseIT.java new file mode 100644 index 000000000000..4400ad8bbb53 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBasicLicenseIT.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; + +import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.mockSparseServiceModelConfig; + +public class InferenceBasicLicenseIT extends InferenceLicenseBaseRestTest { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "basic") + .setting("xpack.security.enabled", "true") + .user("x_pack_rest_user", "x-pack-test-password") + .plugin("inference-service-test") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + String 
token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + public void testPutModel_RestrictedWithBasicLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + var modelConfig = mockSparseServiceModelConfig(null, true); + sendRestrictedRequest("PUT", endpoint, modelConfig); + } + + public void testUpdateModel_RestrictedWithBasicLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s/_update?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + var requestBody = """ + { + "task_settings": { + "num_threads": 2 + } + } + """; + sendRestrictedRequest("PUT", endpoint, requestBody); + } + + public void testPerformInference_RestrictedWithBasicLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + var requestBody = """ + { + "input": ["washing", "machine"] + } + """; + sendRestrictedRequest("POST", endpoint, requestBody); + } + + public void testGetServices_NonRestrictedWithBasicLicense() throws Exception { + var endpoint = "_inference/_services"; + sendNonRestrictedRequest("GET", endpoint, null, 200, false); + } + + public void testGetModels_NonRestrictedWithBasicLicense() throws Exception { + var endpoint = "_inference/_all"; + sendNonRestrictedRequest("GET", endpoint, null, 200, false); + } + + public void testDeleteModel_NonRestrictedWithBasicLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + sendNonRestrictedRequest("DELETE", endpoint, null, 404, true); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 50f83dc7b02e..d2be50cb5e84 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -242,12 +242,7 @@ public class InferenceCrudIT extends InferenceBaseRestTest { @SuppressWarnings("unchecked") public void testGetServicesWithCompletionTaskType() throws IOException { List services = getServices(TaskType.COMPLETION); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(10)); - } else { - assertThat(services.size(), equalTo(9)); - } + assertThat(services.size(), equalTo(9)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -269,9 +264,30 @@ public class InferenceCrudIT extends InferenceBaseRestTest { ) ); + assertArrayEquals(providers, providerList.toArray()); + } + + @SuppressWarnings("unchecked") + public void testGetServicesWithChatCompletionTaskType() throws IOException { + List services = getServices(TaskType.CHAT_COMPLETION); if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(6, "elastic"); + assertThat(services.size(), equalTo(2)); + } else { + assertThat(services.size(), equalTo(1)); + } + + String[] providers = new String[services.size()]; + for (int i = 0; i < services.size(); i++) { + Map serviceConfig = (Map) services.get(i); + providers[i] = (String) serviceConfig.get("service"); + } + + var providerList = new 
ArrayList<>(List.of("openai")); + + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { + providerList.addFirst("elastic"); } assertArrayEquals(providers, providerList.toArray()); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceLicenseBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceLicenseBaseRestTest.java new file mode 100644 index 000000000000..43183bae7325 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceLicenseBaseRestTest.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public class InferenceLicenseBaseRestTest extends ESRestTestCase { + protected void sendRestrictedRequest(String method, String endpoint, String body) throws IOException { + var request = new Request(method, endpoint); + request.setJsonEntity(body); + + var exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); + assertEquals(403, exception.getResponse().getStatusLine().getStatusCode()); + assertThat(exception.getMessage(), containsString("current license is non-compliant for [inference]")); + } + + protected void sendNonRestrictedRequest(String method, String endpoint, String body, int expectedStatusCode, boolean exceptionExpected) + throws IOException { + var request = new Request(method, endpoint); + request.setJsonEntity(body); + + int actualStatusCode; + if (exceptionExpected) { + var exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); + actualStatusCode = exception.getResponse().getStatusLine().getStatusCode(); + } else { + var response = client().performRequest(request); + actualStatusCode = response.getStatusLine().getStatusCode(); + } + assertEquals(expectedStatusCode, actualStatusCode); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceTrialLicenseIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceTrialLicenseIT.java new file mode 100644 index 000000000000..c7066a827fa7 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceTrialLicenseIT.java @@ -0,0 +1,84 @@ 
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; + +import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.mockSparseServiceModelConfig; + +public class InferenceTrialLicenseIT extends InferenceLicenseBaseRestTest { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .user("x_pack_rest_user", "x-pack-test-password") + .plugin("inference-service-test") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + public void testPutModel_NonRestrictedWithTrialLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + var modelConfig = mockSparseServiceModelConfig(null, true); + sendNonRestrictedRequest("PUT", endpoint, modelConfig, 200, false); + } + + public void testUpdateModel_NonRestrictedWithTrialLicense() 
throws Exception { + var endpoint = Strings.format("_inference/%s/%s/_update?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + var requestBody = """ + { + "task_settings": { + "num_threads": 2 + } + } + """; + sendNonRestrictedRequest("PUT", endpoint, requestBody, 404, true); + } + + public void testPerformInference_NonRestrictedWithTrialLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + var requestBody = """ + { + "input": ["washing", "machine"] + } + """; + sendNonRestrictedRequest("POST", endpoint, requestBody, 404, true); + } + + public void testGetServices_NonRestrictedWithBasicLicense() throws Exception { + var endpoint = "_inference/_services"; + sendNonRestrictedRequest("GET", endpoint, null, 200, false); + } + + public void testGetModels_NonRestrictedWithBasicLicense() throws Exception { + var endpoint = "_inference/_all"; + sendNonRestrictedRequest("GET", endpoint, null, 200, false); + } + + public void testDeleteModel_NonRestrictedWithBasicLicense() throws Exception { + var endpoint = Strings.format("_inference/%s/%s?error_trace", TaskType.SPARSE_EMBEDDING, "endpoint-id"); + sendNonRestrictedRequest("DELETE", endpoint, null, 404, true); + } +} diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index d38503a88409..880557c59f11 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -12,8 +12,11 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.client.Request; import 
org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.upgrades.AbstractRollingUpgradeTestCase; +import org.elasticsearch.upgrades.ParameterizedRollingUpgradeTestCase; +import org.junit.ClassRule; import java.io.IOException; import java.util.LinkedList; @@ -22,7 +25,7 @@ import java.util.Map; import static org.elasticsearch.core.Strings.format; -public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { +public class InferenceUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { static final String MODELS_RENAMED_TO_ENDPOINTS = "8.15.0"; @@ -30,6 +33,20 @@ public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { super(upgradedNodes); } + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(NODE_NUM) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + protected static String getUrl(MockWebServer webServer) { return format("http://%s:%s", webServer.getHostName(), webServer.getPort()); } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 90a9dd3355b3..9da6b5255549 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; @@ -73,7 +74,7 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(Utils.TestInferencePlugin.class); + return Arrays.asList(LocalStateInferencePlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index be6b3725b0f3..fd0480b14198 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import 
org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; @@ -76,7 +76,7 @@ public class ModelRegistryIT extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return pluginList(ReindexPlugin.class, InferencePlugin.class); + return pluginList(ReindexPlugin.class, LocalStateInferencePlugin.class); } public void testStoreModel() throws Exception { @@ -434,7 +434,10 @@ public class ModelRegistryIT extends ESSingleNodeTestCase { assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(2)); - expectThrows(IndexNotFoundException.class, () -> client().admin().indices().prepareGetIndex().addIndices(".inference").get()); + expectThrows( + IndexNotFoundException.class, + () -> client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".inference").get() + ); // this time check the index is created blockingCall(listener -> modelRegistry.getAllModels(true, listener), modelHolder, exceptionHolder); @@ -552,7 +555,7 @@ public class ModelRegistryIT extends ESSingleNodeTestCase { } private void assertInferenceIndexExists() { - var indexResponse = client().admin().indices().prepareGetIndex().addIndices(".inference").get(); + var indexResponse = client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".inference").get(); assertNotNull(indexResponse.getSettings()); assertNotNull(indexResponse.getMappings()); } diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 53974657e4e2..1c2240e8c521 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -34,6 +34,7 @@ module org.elasticsearch.inference { requires software.amazon.awssdk.retries.api; requires org.reactivestreams; requires org.elasticsearch.logging; + requires org.elasticsearch.sslconfig; exports org.elasticsearch.xpack.inference.action; exports 
org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index cf75c1bfe640..7d6069572ba2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -11,8 +11,6 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsMapper; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; -import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; -import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; import java.util.Set; @@ -26,18 +24,6 @@ import static org.elasticsearch.xpack.inference.queries.SemanticSparseVectorQuer */ public class InferenceFeatures implements FeatureSpecification { - @Override - public Set getFeatures() { - return Set.of( - TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, - RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, - SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, - SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS, - SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2, - TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED - ); - } - private static final NodeFeature SEMANTIC_TEXT_HIGHLIGHTER = new NodeFeature("semantic_text.highlighter"); @Override @@ -52,7 +38,8 @@ public class InferenceFeatures implements FeatureSpecification { SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED, SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, 
SemanticInferenceMetadataFieldsMapper.EXPLICIT_NULL_FIXES, - SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED + SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index b16c53a428d7..6ba529cb66ea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -29,6 +29,9 @@ import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; @@ -47,6 +50,7 @@ import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceDiagnosticsAction; @@ -56,6 +60,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import 
org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceEndpointAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceDiagnosticsAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; @@ -124,7 +129,6 @@ import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.singletonList; @@ -150,6 +154,12 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP Setting.Property.Dynamic ); + public static final LicensedFeature.Momentary INFERENCE_API_FEATURE = LicensedFeature.momentary( + "inference", + "api", + License.OperationMode.ENTERPRISE + ); + public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; @@ -158,6 +168,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); + private final SetOnce elasicInferenceServiceFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); // This is mainly so that the rest handlers can access the ThreadPool in a way that avoids potential null pointers from it // not being initialized yet @@ -244,31 +255,31 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic 
Inference Service URL without exposing - // internal names like "eis" or "gateway". - ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - - String elasticInferenceUrl = null; - - if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); - } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - log.warn( - "Deprecated flag {} detected for enabling {}. Please use {}.", - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, - ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + if (isElasticInferenceServiceEnabled()) { + // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). + var elasticInferenceServiceHttpClientManager = HttpClientManager.create( + settings, + services.threadPool(), + services.clusterService(), + throttlerManager, + getSslService() ); - elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); - } - if (elasticInferenceUrl != null) { + var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( + serviceComponents.get(), + elasticInferenceServiceHttpClientManager, + services.clusterService() + ); + elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); + + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( () -> List.of( context -> new ElasticInferenceService( - httpFactory.get(), + elasicInferenceServiceFactory.get(), serviceComponents.get(), elasticInferenceServiceComponents.get() ) @@ -392,16 +403,21 @@ public class InferencePlugin extends Plugin implements ActionPlugin, 
ExtensibleP @Override public List> getSettings() { - return Stream.of( - HttpSettings.getSettingsDefinitions(), - HttpClientManager.getSettingsDefinitions(), - ThrottlerManager.getSettingsDefinitions(), - RetrySettings.getSettingsDefinitions(), - ElasticInferenceServiceSettings.getSettingsDefinitions(), - Truncator.getSettingsDefinitions(), - RequestExecutorServiceSettings.getSettingsDefinitions(), - List.of(SKIP_VALIDATE_AND_START) - ).flatMap(Collection::stream).collect(Collectors.toList()); + ArrayList> settings = new ArrayList<>(); + settings.addAll(HttpSettings.getSettingsDefinitions()); + settings.addAll(HttpClientManager.getSettingsDefinitions()); + settings.addAll(ThrottlerManager.getSettingsDefinitions()); + settings.addAll(RetrySettings.getSettingsDefinitions()); + settings.addAll(Truncator.getSettingsDefinitions()); + settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions()); + settings.add(SKIP_VALIDATE_AND_START); + + // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled. 
+ if (isElasticInferenceServiceEnabled()) { + settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); + } + + return settings; } @Override @@ -458,7 +474,10 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP @Override public List> getRetrievers() { return List.of( - new RetrieverSpec<>(new ParseField(TextSimilarityRankBuilder.NAME), TextSimilarityRankRetrieverBuilder::fromXContent), + new RetrieverSpec<>( + new ParseField(TextSimilarityRankBuilder.NAME), + (parser, context) -> TextSimilarityRankRetrieverBuilder.fromXContent(parser, context, getLicenseState()) + ), new RetrieverSpec<>(new ParseField(RandomRankBuilder.NAME), RandomRankRetrieverBuilder::fromXContent) ); } @@ -467,4 +486,36 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public Map getHighlighters() { return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter()); } + + // Get Elastic Inference service URL based on feature flags to support transitioning + // to the new Elastic Inference Service URL. + private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. 
Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = settings.getEisGatewayUrl(); + } + + return elasticInferenceUrl; + } + + protected Boolean isElasticInferenceServiceEnabled() { + return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); + } + + protected SSLService getSslService() { + return XPackPlugin.getSharedSslService(); + } + + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java index 2a0e8e177527..b6c7d26b36f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java @@ -23,9 +23,12 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; @@ -38,6 +41,7 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import static 
org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; @@ -48,6 +52,7 @@ public abstract class BaseTransportInferenceAction requestReader ) { super(inferenceActionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + this.licenseState = licenseState; this.modelRegistry = modelRegistry; this.serviceRegistry = serviceRegistry; this.inferenceStats = inferenceStats; @@ -72,6 +79,11 @@ public abstract class BaseTransportInferenceAction listener) { + if (INFERENCE_API_FEATURE.check(licenseState) == false) { + listener.onFailure(LicenseUtils.newComplianceException(XPackField.INFERENCE)); + return; + } + var timer = InferenceTimer.start(); var getModelListener = ActionListener.wrap((UnparsedModel unparsedModel) -> { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 08e6d869a553..24ef0d7d610d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; @@ -28,6 
+29,7 @@ public class TransportInferenceAction extends BaseTransportInferenceAction listener ) throws Exception { + if (INFERENCE_API_FEATURE.check(licenseState) == false) { + listener.onFailure(LicenseUtils.newComplianceException(XPackField.INFERENCE)); + return; + } + var requestAsMap = requestToMap(request); var resolvedTaskType = ServiceUtils.resolveTaskType(request.getTaskType(), (String) requestAsMap.remove(TaskType.NAME)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java index f0906231d8f4..1478130f6a6c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; @@ -30,6 +31,7 @@ public class TransportUnifiedCompletionInferenceAction extends BaseTransportInfe public TransportUnifiedCompletionInferenceAction( TransportService transportService, ActionFilters actionFilters, + XPackLicenseState licenseState, ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, @@ -39,6 +41,7 @@ public class TransportUnifiedCompletionInferenceAction extends BaseTransportInfe UnifiedCompletionAction.NAME, transportService, actionFilters, + licenseState, modelRegistry, serviceRegistry, 
inferenceStats, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java index 3c47de1ad64d..4a90f7372243 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java @@ -34,12 +34,15 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelDeploymentAction; @@ -57,6 +60,7 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; import static org.elasticsearch.xpack.inference.services.ServiceUtils.resolveTaskType; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS; @@ -66,6 +70,7 @@ public class TransportUpdateInferenceModelAction extends TransportMasterNodeActi private static 
final Logger logger = LogManager.getLogger(TransportUpdateInferenceModelAction.class); + private final XPackLicenseState licenseState; private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; private final Client client; @@ -77,6 +82,7 @@ public class TransportUpdateInferenceModelAction extends TransportMasterNodeActi ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + XPackLicenseState licenseState, ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, Client client @@ -92,6 +98,7 @@ public class TransportUpdateInferenceModelAction extends TransportMasterNodeActi UpdateInferenceModelAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.licenseState = licenseState; this.modelRegistry = modelRegistry; this.serviceRegistry = serviceRegistry; this.client = client; @@ -104,6 +111,11 @@ public class TransportUpdateInferenceModelAction extends TransportMasterNodeActi ClusterState state, ActionListener masterListener ) { + if (INFERENCE_API_FEATURE.check(licenseState) == false) { + masterListener.onFailure(LicenseUtils.newComplianceException(XPackField.INFERENCE)); + return; + } + var bodyTaskType = request.getContentAsSettings().taskType(); var resolvedTaskType = resolveTaskType(request.getTaskType(), bodyTaskType != null ? 
bodyTaskType.toString() : null); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index e5d76b9bb557..6d09c9e67b36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -7,9 +7,14 @@ package org.elasticsearch.xpack.inference.external.http; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; +import org.apache.http.nio.conn.NoopIOSessionStrategy; +import org.apache.http.nio.conn.SchemeIOSessionStrategy; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.http.pool.PoolStats; @@ -21,6 +26,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; @@ -28,11 +34,13 @@ import java.io.IOException; import java.util.List; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX; public class HttpClientManager implements Closeable { private static final Logger logger = LogManager.getLogger(HttpClientManager.class); /** * The 
maximum number of total connections the connection pool can lease to all routes. + * The configuration applies to each instance of HTTPClientManager (max_total_connections=10 and instances=5 leads to 50 connections). * From googling around the connection pools maxTotal value should be close to the number of available threads. * * https://stackoverflow.com/questions/30989637/how-to-decide-optimal-settings-for-setmaxtotal-and-setdefaultmaxperroute @@ -47,6 +55,7 @@ public class HttpClientManager implements Closeable { /** * The max number of connections a single route can lease. + * This configuration applies to each instance of HttpClientManager. */ public static final Setting MAX_ROUTE_CONNECTIONS = Setting.intSetting( "xpack.inference.http.max_route_connections", @@ -98,6 +107,22 @@ public class HttpClientManager implements Closeable { return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } + public static HttpClientManager create( + Settings settings, + ThreadPool threadPool, + ClusterService clusterService, + ThrottlerManager throttlerManager, + SSLService sslService + ) { + // Set the sslStrategy to ensure an encrypted connection, as Elastic Inference Service requires it. 
+ SSLIOSessionStrategy sslioSessionStrategy = sslService.sslIOSessionStrategy( + sslService.getSSLConfiguration(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX) + ); + + PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(sslioSessionStrategy); + return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); + } + // Default for testing HttpClientManager( Settings settings, @@ -121,6 +146,25 @@ public class HttpClientManager implements Closeable { this.addSettingsUpdateConsumers(clusterService); } + private static PoolingNHttpClientConnectionManager createConnectionManager(SSLIOSessionStrategy sslStrategy) { + ConnectingIOReactor ioReactor; + try { + var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); + ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); + } catch (IOReactorException e) { + var message = "Failed to initialize HTTP client manager with SSL."; + logger.error(message, e); + throw new ElasticsearchException(message, e); + } + + Registry registry = RegistryBuilder.create() + .register("http", NoopIOSessionStrategy.INSTANCE) + .register("https", sslStrategy) + .build(); + + return new PoolingNHttpClientConnectionManager(ioReactor, registry); + } + private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntity.java index 1fc61d3331d2..0804cb332b15 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntity.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntity.java @@ -39,7 +39,7 @@ public record AlibabaCloudSearchEmbeddingsRequestEntity(List input, Alib builder.startObject(); builder.field(TEXTS_FIELD, input); - String inputType = covertToString(taskSettings.getInputType()); + String inputType = convertToString(taskSettings.getInputType()); if (inputType != null) { builder.field(INPUT_TYPE_FIELD, inputType); } @@ -49,7 +49,7 @@ public record AlibabaCloudSearchEmbeddingsRequestEntity(List input, Alib } // default for testing - static String covertToString(InputType inputType) { + static String convertToString(InputType inputType) { if (inputType == null) { return null; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchSparseRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchSparseRequestEntity.java index 8fae9408b860..22676c11dc54 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchSparseRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchSparseRequestEntity.java @@ -34,7 +34,7 @@ public record AlibabaCloudSearchSparseRequestEntity(List input, AlibabaC public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(TEXTS_FIELD, input); - String inputType = AlibabaCloudSearchEmbeddingsRequestEntity.covertToString(taskSettings.getInputType()); + String inputType = AlibabaCloudSearchEmbeddingsRequestEntity.convertToString(taskSettings.getInputType()); if (inputType != null) { builder.field(INPUT_TYPE_FIELD, inputType); } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index 63cc5c3cb726..a972cbbac959 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -51,7 +51,7 @@ public record CohereEmbeddingsRequestEntity( } if (taskSettings.getInputType() != null) { - builder.field(INPUT_TYPE_FIELD, covertToString(taskSettings.getInputType())); + builder.field(INPUT_TYPE_FIELD, convertToString(taskSettings.getInputType())); } if (embeddingType != null) { @@ -67,7 +67,7 @@ public record CohereEmbeddingsRequestEntity( } // default for testing - static String covertToString(InputType inputType) { + static String convertToString(InputType inputType) { return switch (inputType) { case INGEST -> SEARCH_DOCUMENT; case SEARCH -> SEARCH_QUERY; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 0d2cae9335b7..115d0d7947de 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -104,8 +104,6 @@ import static org.elasticsearch.xpack.inference.services.elasticsearch.Elasticse * A {@link FieldMapper} for semantic text fields. 
*/ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFieldMapper { - public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id", true); - public static final NodeFeature SEMANTIC_TEXT_DEFAULT_ELSER_2 = new NodeFeature("semantic_text.default_elser_2", true); public static final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); public static final NodeFeature SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index dbf41cc8b25e..285739fe0936 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -51,9 +50,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; public class SemanticQueryBuilder extends AbstractQueryBuilder { - // **** THE semantic_text.inner_hits CLUSTER FEATURE IS DEFUNCT, NEVER USE IT **** - public static 
final NodeFeature SEMANTIC_TEXT_INNER_HITS = new NodeFeature("semantic_text.inner_hits", true); - public static final String NAME = "semantic"; private static final ParseField FIELD_FIELD = new ParseField("field"); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilder.java index 7236b0141a86..bdc833f0c3f4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilder.java @@ -8,12 +8,12 @@ package org.elasticsearch.xpack.inference.rank.random; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -30,8 +30,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr */ public class RandomRankRetrieverBuilder extends RetrieverBuilder { - public static final NodeFeature RANDOM_RERANKER_RETRIEVER_SUPPORTED = new NodeFeature("random_reranker_retriever_supported", true); - public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField RANK_WINDOW_SIZE_FIELD = new ParseField("rank_window_size"); @@ -48,7 +46,12 @@ public class 
RandomRankRetrieverBuilder extends RetrieverBuilder { }); static { - PARSER.declareNamedObject(constructorArg(), (p, c, n) -> p.namedObject(RetrieverBuilder.class, n, c), RETRIEVER_FIELD); + PARSER.declareField( + constructorArg(), + RandomRankRetrieverBuilder::parseRetrieverBuilder, + RETRIEVER_FIELD, + ObjectParser.ValueType.OBJECT + ); PARSER.declareString(optionalConstructorArg(), FIELD_FIELD); PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); PARSER.declareInt(optionalConstructorArg(), SEED_FIELD); @@ -57,12 +60,25 @@ public class RandomRankRetrieverBuilder extends RetrieverBuilder { } public static RandomRankRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { - if (context.clusterSupportsFeature(RANDOM_RERANKER_RETRIEVER_SUPPORTED) == false) { - throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + RandomRankBuilder.NAME + "]"); - } return PARSER.apply(parser, context); } + private static RetrieverBuilder parseRetrieverBuilder(XContentParser parser, RetrieverParserContext context) throws IOException { + assert parser.currentToken() == XContentParser.Token.START_OBJECT; + parser.nextToken(); + if (parser.currentToken() == XContentParser.Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "empty [" + RETRIEVER_FIELD + "] object"); + } + assert parser.currentToken() == XContentParser.Token.FIELD_NAME; + final RetrieverBuilder builder = parser.namedObject(RetrieverBuilder.class, parser.currentName(), context); + parser.nextToken(); + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "unexpected field [" + parser.currentName() + "]"); + } + assert parser.currentToken() == XContentParser.Token.END_OBJECT; + return builder; + } + private final RetrieverBuilder retrieverBuilder; private final String field; private final int rankWindowSize; diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 1b6fb55659a8..42248d246d3d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; @@ -21,7 +21,6 @@ import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; import java.util.List; @@ -36,13 +35,8 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr */ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder { - public static final NodeFeature TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED = new NodeFeature( - "text_similarity_reranker_retriever_supported", - true - ); - public static final NodeFeature TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED = new NodeFeature( - "text_similarity_reranker_retriever_composition_supported", - true + public static final 
NodeFeature TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX = new NodeFeature( + "text_similarity_reranker_alias_handling_fix" ); public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); @@ -74,17 +68,12 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder RetrieverBuilder.declareBaseParserFields(TextSimilarityRankBuilder.NAME, PARSER); } - public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) - throws IOException { - if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED) == false) { - throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); - } - if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED) == false) { - throw new IllegalArgumentException( - "[text_similarity_reranker] retriever composition feature is not supported by all nodes in the cluster" - ); - } - if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { + public static TextSimilarityRankRetrieverBuilder fromXContent( + XContentParser parser, + RetrieverParserContext context, + XPackLicenseState licenceState + ) throws IOException { + if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(licenceState) == false) { throw LicenseUtils.newComplianceException(TextSimilarityRankBuilder.NAME); } return PARSER.apply(parser, context); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java index c46f211bb26a..57c06df8d8df 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java @@ -30,12 +30,15 @@ public final class Paths { + "}/{" + 
INFERENCE_ID + "}/_stream"; - static final String UNIFIED_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/_unified"; + + public static final String UNIFIED_SUFFIX = "_unified"; + static final String UNIFIED_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + UNIFIED_SUFFIX; static final String UNIFIED_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/{" + INFERENCE_ID - + "}/_unified"; + + "}/" + + UNIFIED_SUFFIX; private Paths() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index 7ea68b2c9bf1..ac6e57d31b74 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -73,7 +73,7 @@ public abstract class SenderService implements InferenceService { private static InferenceInputs createInput(Model model, List input, @Nullable String query, boolean stream) { return switch (model.getTaskType()) { - case COMPLETION -> new ChatCompletionInput(input, stream); + case COMPLETION, CHAT_COMPLETION -> new ChatCompletionInput(input, stream); case RERANK -> new QueryAndDocsInputs(query, input, stream); case TEXT_EMBEDDING, SPARSE_EMBEDDING -> new DocumentsOnlyInput(input, stream); default -> throw new ElasticsearchStatusException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 7d05bac363fb..1ddae3cc8df9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ 
-42,6 +42,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.ENABLED; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.MAX_NUMBER_OF_ALLOCATIONS; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.MIN_NUMBER_OF_ALLOCATIONS; +import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_SUFFIX; import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; public final class ServiceUtils { @@ -780,5 +781,24 @@ public final class ServiceUtils { throw new UnsupportedOperationException(Strings.format("The %s service does not support unified completion", serviceName)); } + public static String unsupportedTaskTypeForInference(Model model, EnumSet supportedTaskTypes) { + return Strings.format( + "Inference entity [%s] does not support task type [%s] for inference, the task type must be one of %s.", + model.getInferenceEntityId(), + model.getTaskType(), + supportedTaskTypes + ); + } + + public static String useChatCompletionUrlMessage(Model model) { + return org.elasticsearch.common.Strings.format( + "The task type for the inference entity is %s, please use the _inference/%s/%s/%s URL.", + model.getTaskType(), + model.getTaskType(), + model.getInferenceEntityId(), + UNIFIED_SUFFIX + ); + } + private ServiceUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 48416faac6a0..cb554cf28812 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -27,6 
+27,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; @@ -41,6 +42,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.UnifiedChatInput; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.telemetry.TraceContext; @@ -61,6 +63,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersi import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.useChatCompletionUrlMessage; public class ElasticInferenceService extends SenderService { @@ -69,8 +72,16 @@ public class ElasticInferenceService extends SenderService { private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; - private static final EnumSet supportedTaskTypes = EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.COMPLETION); + // The task types 
exposed via the _inference/_services API + private static final EnumSet SUPPORTED_TASK_TYPES_FOR_SERVICES_API = EnumSet.of( + TaskType.SPARSE_EMBEDDING, + TaskType.CHAT_COMPLETION + ); private static final String SERVICE_NAME = "Elastic"; + /** + * The task types that the {@link InferenceAction.Request} can accept. + */ + private static final EnumSet SUPPORTED_INFERENCE_ACTION_TASK_TYPES = EnumSet.of(TaskType.SPARSE_EMBEDDING); public ElasticInferenceService( HttpRequestSender.Factory factory, @@ -83,7 +94,7 @@ public class ElasticInferenceService extends SenderService { @Override public Set supportedStreamingTasks() { - return COMPLETION_ONLY; + return EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY); } @Override @@ -129,6 +140,15 @@ public class ElasticInferenceService extends SenderService { TimeValue timeout, ActionListener listener ) { + if (SUPPORTED_INFERENCE_ACTION_TASK_TYPES.contains(model.getTaskType()) == false) { + var responseString = ServiceUtils.unsupportedTaskTypeForInference(model, SUPPORTED_INFERENCE_ACTION_TASK_TYPES); + + if (model.getTaskType() == TaskType.CHAT_COMPLETION) { + responseString = responseString + " " + useChatCompletionUrlMessage(model); + } + listener.onFailure(new ElasticsearchStatusException(responseString, RestStatus.BAD_REQUEST)); + } + if (model instanceof ElasticInferenceServiceExecutableActionModel == false) { listener.onFailure(createInvalidModelException(model)); return; @@ -207,7 +227,7 @@ public class ElasticInferenceService extends SenderService { @Override public EnumSet supportedTaskTypes() { - return supportedTaskTypes; + return SUPPORTED_TASK_TYPES_FOR_SERVICES_API; } private static ElasticInferenceServiceModel createModel( @@ -383,7 +403,7 @@ public class ElasticInferenceService extends SenderService { return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) - .setTaskTypes(supportedTaskTypes) + .setTaskTypes(SUPPORTED_TASK_TYPES_FOR_SERVICES_API) 
.setConfigurations(configurationMap) .build(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 5146cec1552a..0c1a032dc892 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -9,7 +9,9 @@ package org.elasticsearch.xpack.inference.services.elastic; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; +import java.util.ArrayList; import java.util.List; /** @@ -21,6 +23,8 @@ public class ElasticInferenceServiceSettings { @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope @@ -35,11 +39,27 @@ public class ElasticInferenceServiceSettings { public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); - } + public static final SSLConfigurationSettings ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS = SSLConfigurationSettings.withPrefix( + ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX, + false + ); + + public static final Setting ELASTIC_INFERENCE_SERVICE_SSL_ENABLED = Setting.boolSetting( + ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX + "enabled", + true, 
+ Setting.Property.NodeScope + ); + public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); + ArrayList> settings = new ArrayList<>(); + settings.add(EIS_GATEWAY_URL); + settings.add(ELASTIC_INFERENCE_SERVICE_URL); + settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); + settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); + + return settings; } @Deprecated diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index ba9dea8ace8e..3efd7c44c3e9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -27,6 +27,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; @@ -63,6 +64,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFrom import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.useChatCompletionUrlMessage; import static 
org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.EMBEDDING_MAX_BATCH_SIZE; import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.ORGANIZATION; @@ -70,7 +72,16 @@ public class OpenAiService extends SenderService { public static final String NAME = "openai"; private static final String SERVICE_NAME = "OpenAI"; - private static final EnumSet supportedTaskTypes = EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION); + // The task types exposed via the _inference/_services API + private static final EnumSet SUPPORTED_TASK_TYPES_FOR_SERVICES_API = EnumSet.of( + TaskType.TEXT_EMBEDDING, + TaskType.COMPLETION, + TaskType.CHAT_COMPLETION + ); + /** + * The task types that the {@link InferenceAction.Request} can accept. + */ + private static final EnumSet SUPPORTED_INFERENCE_ACTION_TASK_TYPES = EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION); public OpenAiService(HttpRequestSender.Factory factory, ServiceComponents serviceComponents) { super(factory, serviceComponents); @@ -164,7 +175,7 @@ public class OpenAiService extends SenderService { secretSettings, context ); - case COMPLETION -> new OpenAiChatCompletionModel( + case COMPLETION, CHAT_COMPLETION -> new OpenAiChatCompletionModel( inferenceEntityId, taskType, NAME, @@ -236,7 +247,7 @@ public class OpenAiService extends SenderService { @Override public EnumSet supportedTaskTypes() { - return supportedTaskTypes; + return SUPPORTED_TASK_TYPES_FOR_SERVICES_API; } @Override @@ -248,6 +259,15 @@ public class OpenAiService extends SenderService { TimeValue timeout, ActionListener listener ) { + if (SUPPORTED_INFERENCE_ACTION_TASK_TYPES.contains(model.getTaskType()) == false) { + var responseString = ServiceUtils.unsupportedTaskTypeForInference(model, SUPPORTED_INFERENCE_ACTION_TASK_TYPES); + + if (model.getTaskType() == TaskType.CHAT_COMPLETION) { + responseString = responseString + " " + useChatCompletionUrlMessage(model); + } + listener.onFailure(new 
ElasticsearchStatusException(responseString, RestStatus.BAD_REQUEST)); + } + if (model instanceof OpenAiModel == false) { listener.onFailure(createInvalidModelException(model)); return; @@ -356,7 +376,7 @@ public class OpenAiService extends SenderService { @Override public Set supportedStreamingTasks() { - return COMPLETION_ONLY; + return EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION, TaskType.ANY); } /** @@ -444,7 +464,7 @@ public class OpenAiService extends SenderService { return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) - .setTaskTypes(supportedTaskTypes) + .setTaskTypes(SUPPORTED_TASK_TYPES_FOR_SERVICES_API) .setConfigurations(configurationMap) .build(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java index 54aa72f3e926..c09afaaaaac1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.hamcrest.Matchers; import java.util.Arrays; @@ -28,7 +28,7 @@ public class SemanticTextClusterMetadataTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(InferencePlugin.class); + return List.of(LocalStateInferencePlugin.class); } public void testCreateIndexWithSemanticTextField() { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java new file mode 100644 index 000000000000..d1db5b8b12cc --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; +import org.junit.After; +import org.junit.Before; + +import static org.hamcrest.Matchers.is; + +public class InferencePluginTests extends ESTestCase { + private InferencePlugin inferencePlugin; + + private Boolean elasticInferenceServiceEnabled = true; + + private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { + this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; + } + + @Before + public void setUp() throws Exception { + super.setUp(); + + Settings settings = Settings.builder().build(); + inferencePlugin = new InferencePlugin(settings) { + @Override + protected Boolean isElasticInferenceServiceEnabled() { + return elasticInferenceServiceEnabled; + } + }; + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + } + + public void testElasticInferenceServiceSettingsPresent() throws Exception { + setElasticInferenceServiceEnabled(true); // enable elastic inference service + boolean anyMatch = inferencePlugin.getSettings() + .stream() + .map(Setting::getKey) + .anyMatch(key -> 
key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); + + assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); + } + + public void testElasticInferenceServiceSettingsNotPresent() throws Exception { + setElasticInferenceServiceEnabled(false); // disable elastic inference service + boolean noneMatch = inferencePlugin.getSettings() + .stream() + .map(Setting::getKey) + .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); + + assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java new file mode 100644 index 000000000000..68ea175bd987 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.action.support.MappedActionFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; +import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; + +import java.nio.file.Path; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static java.util.stream.Collectors.toList; + +public class LocalStateInferencePlugin extends LocalStateCompositeXPackPlugin { + private final InferencePlugin inferencePlugin; + + public LocalStateInferencePlugin(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + LocalStateInferencePlugin thisVar = this; + this.inferencePlugin = new InferencePlugin(settings) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + + @Override + public List getInferenceServiceFactories() { + return List.of( + TestSparseInferenceServiceExtension.TestInferenceService::new, + TestDenseInferenceServiceExtension.TestInferenceService::new + ); + } + }; + plugins.add(inferencePlugin); + } + + @Override + public List> getRetrievers() { + return this.filterPlugins(SearchPlugin.class).stream().flatMap(p -> p.getRetrievers().stream()).collect(toList()); + } + + @Override + public Map getMappers() { + return inferencePlugin.getMappers(); + } + + @Override + public Collection getMappedActionFilters() { + return 
inferencePlugin.getMappedActionFilters(); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index e02ac7b8853a..1260b89034e6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; -import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -149,33 +148,24 @@ public final class Utils { latch.await(); } - public static class TestInferencePlugin extends InferencePlugin { - public TestInferencePlugin(Settings settings) { - super(settings); - } - - @Override - public List getInferenceServiceFactories() { - return List.of( - TestSparseInferenceServiceExtension.TestInferenceService::new, - TestDenseInferenceServiceExtension.TestInferenceService::new - ); - } - } - - public static Model getInvalidModel(String inferenceEntityId, String serviceName) { + public static Model getInvalidModel(String inferenceEntityId, String serviceName, TaskType taskType) { var mockConfigs = mock(ModelConfigurations.class); when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); when(mockConfigs.getService()).thenReturn(serviceName); - when(mockConfigs.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + when(mockConfigs.getTaskType()).thenReturn(taskType); var mockModel = mock(Model.class); + when(mockModel.getInferenceEntityId()).thenReturn(inferenceEntityId); 
when(mockModel.getConfigurations()).thenReturn(mockConfigs); - when(mockModel.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + when(mockModel.getTaskType()).thenReturn(taskType); return mockModel; } + public static Model getInvalidModel(String inferenceEntityId, String serviceName) { + return getInvalidModel(inferenceEntityId, serviceName, TaskType.TEXT_EMBEDDING); + } + public static SimilarityMeasure randomSimilarityMeasure() { return randomFrom(SimilarityMeasure.values()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index 47f3a0e0b57a..a723e5a9dffd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -18,11 +18,13 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -49,6 +51,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public abstract class 
BaseTransportInferenceActionTestCase extends ESTestCase { + private MockLicenseState licenseState; private ModelRegistry modelRegistry; private StreamingTaskManager streamingTaskManager; private BaseTransportInferenceAction action; @@ -64,16 +67,28 @@ public abstract class BaseTransportInferenceActionTestCase createAction( TransportService transportService, ActionFilters actionFilters, + MockLicenseState licenseState, ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, @@ -361,4 +376,8 @@ public abstract class BaseTransportInferenceActionTestCase createAction( TransportService transportService, ActionFilters actionFilters, + MockLicenseState licenseState, ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, @@ -31,6 +33,7 @@ public class TransportInferenceActionTests extends BaseTransportInferenceActionT return new TransportInferenceAction( transportService, actionFilters, + licenseState, modelRegistry, serviceRegistry, inferenceStats, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index 4c943599ce52..3856a3d111b6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; @@ -36,6 +37,7 @@ public class TransportUnifiedCompletionActionTests extends BaseTransportInferenc protected BaseTransportInferenceAction createAction( TransportService transportService, ActionFilters actionFilters, + MockLicenseState licenseState, ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, @@ -44,6 +46,7 @@ public class TransportUnifiedCompletionActionTests extends BaseTransportInferenc return new TransportUnifiedCompletionInferenceAction( transportService, actionFilters, + licenseState, modelRegistry, serviceRegistry, inferenceStats, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index 3552a9d209f2..51d48b1c5cd5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -32,11 +32,13 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import 
org.elasticsearch.rest.RestStatus; @@ -588,7 +590,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { } public static StaticModel createRandomInstance() { - TestModel testModel = TestModel.createRandomInstance(); + TestModel testModel = randomModel(randomFrom(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)); return new StaticModel( testModel.getInferenceEntityId(), testModel.getTaskType(), @@ -611,4 +613,18 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { return resultMap.containsKey(text); } } + + private static TestModel randomModel(TaskType taskType) { + var dimensions = taskType == TaskType.TEXT_EMBEDDING ? randomIntBetween(2, 64) : null; + var similarity = taskType == TaskType.TEXT_EMBEDDING ? randomFrom(SimilarityMeasure.values()) : null; + var elementType = taskType == TaskType.TEXT_EMBEDDING ? DenseVectorFieldMapper.ElementType.FLOAT : null; + return new TestModel( + randomAlphaOfLength(4), + taskType, + randomAlphaOfLength(10), + new TestModel.TestServiceSettings(randomAlphaOfLength(4), dimensions, similarity, elementType), + new TestModel.TestTaskSettings(randomInt(3)), + new TestModel.TestSecretSettings(randomAlphaOfLength(4)) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntityTests.java index 6aaab219c331..39eeef3a9552 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/alibabacloudsearch/AlibabaCloudSearchEmbeddingsRequestEntityTests.java @@ -50,7 +50,7 @@ public class 
AlibabaCloudSearchEmbeddingsRequestEntityTests extends ESTestCase { public void testConvertToString_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { var thrownException = expectThrows( AssertionError.class, - () -> AlibabaCloudSearchEmbeddingsRequestEntity.covertToString(InputType.UNSPECIFIED) + () -> AlibabaCloudSearchEmbeddingsRequestEntity.convertToString(InputType.UNSPECIFIED) ); MatcherAssert.assertThat(thrownException.getMessage(), is("received invalid input type value [unspecified]")); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java index 0690bf56893c..8c8aeba4a0a0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java @@ -84,7 +84,10 @@ public class CohereEmbeddingsRequestEntityTests extends ESTestCase { } public void testConvertToString_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { - var thrownException = expectThrows(AssertionError.class, () -> CohereEmbeddingsRequestEntity.covertToString(InputType.UNSPECIFIED)); + var thrownException = expectThrows( + AssertionError.class, + () -> CohereEmbeddingsRequestEntity.convertToString(InputType.UNSPECIFIED) + ); MatcherAssert.assertThat(thrownException.getMessage(), is("received invalid input type value [unspecified]")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java similarity index 97% rename from 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldMapperTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java index 8fcc0df0093c..57d71a48a4ae 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.inference.InferencePlugin; import java.util.Collection; import java.util.Collections; -public class SemanticInferenceMetadataFieldMapperTests extends MapperServiceTestCase { +public class SemanticInferenceMetadataFieldsMapperTests extends MapperServiceTestCase { @Override protected Collection getPlugins() { return Collections.singletonList(new InferencePlugin(Settings.EMPTY)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsRecoveryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsRecoveryTests.java new file mode 100644 index 000000000000..2fa6d520fcc6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsRecoveryTests.java @@ -0,0 +1,281 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.mapper; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineTestCase; +import org.elasticsearch.index.engine.LuceneChangesSnapshot; +import org.elasticsearch.index.engine.LuceneSyntheticSourceChangesSnapshot; +import org.elasticsearch.index.engine.SearchBasedChangesSnapshot; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.inference.ChunkedInference; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.model.TestModel; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomChunkedInferenceEmbeddingByte; +import static 
org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomChunkedInferenceEmbeddingSparse; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.semanticTextFieldFromChunkedInferenceResults; +import static org.hamcrest.Matchers.equalTo; + +public class SemanticInferenceMetadataFieldsRecoveryTests extends EngineTestCase { + private final Model model1; + private final Model model2; + private final boolean useSynthetic; + private final boolean useIncludesExcludes; + + public SemanticInferenceMetadataFieldsRecoveryTests(boolean useSynthetic, boolean useIncludesExcludes) { + this.model1 = randomModel(TaskType.TEXT_EMBEDDING); + this.model2 = randomModel(TaskType.SPARSE_EMBEDDING); + this.useSynthetic = useSynthetic; + this.useIncludesExcludes = useIncludesExcludes; + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return List.of(new Object[] { false, false }, new Object[] { false, true }, new Object[] { true, false }); + } + + @Override + protected List extraMappers() { + return List.of(new InferencePlugin(Settings.EMPTY)); + } + + @Override + protected Settings indexSettings() { + var builder = Settings.builder() + .put(super.indexSettings()) + .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), false); + if (useSynthetic) { + builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()); + builder.put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true); + } + return builder.build(); + } + + @Override + protected String defaultMapping() { + XContentBuilder builder = null; + try { + builder = JsonXContent.contentBuilder().startObject(); + if (useIncludesExcludes) { + builder.startObject(SourceFieldMapper.NAME).array("excludes", "field").endObject(); + } + builder.field("dynamic", false); + builder.startObject("properties"); + + builder.startObject("field"); + builder.field("type", "keyword"); + 
builder.endObject(); + + builder.startObject("semantic_1"); + builder.field("type", "semantic_text"); + builder.field("inference_id", model1.getInferenceEntityId()); + builder.startObject("model_settings"); + builder.field("task_type", model1.getTaskType().name()); + builder.field("dimensions", model1.getServiceSettings().dimensions()); + builder.field("similarity", model1.getServiceSettings().similarity().name()); + builder.field("element_type", model1.getServiceSettings().elementType().name()); + builder.endObject(); + builder.endObject(); + + builder.startObject("semantic_2"); + builder.field("type", "semantic_text"); + builder.field("inference_id", model2.getInferenceEntityId()); + builder.startObject("model_settings"); + builder.field("task_type", model2.getTaskType().name()); + builder.endObject(); + builder.endObject(); + + builder.endObject(); + builder.endObject(); + return BytesReference.bytes(builder).utf8ToString(); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } + + public void testSnapshotRecovery() throws IOException { + List expectedOperations = new ArrayList<>(); + int size = randomIntBetween(10, 50); + for (int i = 0; i < size; i++) { + var source = randomSource(); + var sourceToParse = new SourceToParse(Integer.toString(i), source, XContentType.JSON, null); + var doc = mapperService.documentMapper().parse(sourceToParse); + assertNull(doc.dynamicMappingsUpdate()); + if (useSynthetic) { + assertNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME)); + assertNotNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME)); + } else { + if (useIncludesExcludes) { + assertNotNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME)); + var originalSource = new BytesArray(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME).binaryValue()); + var map = XContentHelper.convertToMap(originalSource, false, XContentType.JSON); + assertThat(map.v2().size(), equalTo(1)); + 
assertNull(map.v2().remove(InferenceMetadataFieldsMapper.NAME)); + } else { + assertNull(doc.rootDoc().getField(SourceFieldMapper.RECOVERY_SOURCE_NAME)); + } + } + var op = indexForDoc(doc); + expectedOperations.add(op); + engine.index(op); + if (frequently()) { + engine.flush(); + } + } + engine.flush(); + + var searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); + try ( + var snapshot = newRandomSnapshot( + engine.config().getMapperService(), + searcher, + SearchBasedChangesSnapshot.DEFAULT_BATCH_SIZE, + 0, + size - 1, + true, + randomBoolean(), + randomBoolean(), + IndexVersion.current() + ) + ) { + for (int i = 0; i < size; i++) { + var op = snapshot.next(); + assertThat(op.opType(), equalTo(Translog.Operation.Type.INDEX)); + Translog.Index indexOp = (Translog.Index) op; + assertThat(indexOp.id(), equalTo(expectedOperations.get(i).id())); + assertThat(indexOp.routing(), equalTo(expectedOperations.get(i).routing())); + assertToXContentEquivalent(indexOp.source(), expectedOperations.get(i).source(), XContentType.JSON); + } + assertNull(snapshot.next()); + } + } + + private Translog.Snapshot newRandomSnapshot( + MapperService mapperService, + Engine.Searcher engineSearcher, + int searchBatchSize, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean singleConsumer, + boolean accessStats, + IndexVersion indexVersionCreated + ) throws IOException { + if (useSynthetic) { + return new LuceneSyntheticSourceChangesSnapshot( + mapperService, + engineSearcher, + searchBatchSize, + randomLongBetween(0, ByteSizeValue.ofBytes(Integer.MAX_VALUE).getBytes()), + fromSeqNo, + toSeqNo, + requiredFullRange, + accessStats, + indexVersionCreated + ); + } else { + return new LuceneChangesSnapshot( + mapperService, + engineSearcher, + searchBatchSize, + fromSeqNo, + toSeqNo, + requiredFullRange, + singleConsumer, + accessStats, + indexVersionCreated + ); + } + } + + private static Model randomModel(TaskType taskType) { + var dimensions = 
taskType == TaskType.TEXT_EMBEDDING ? randomIntBetween(2, 64) : null; + var similarity = taskType == TaskType.TEXT_EMBEDDING ? randomFrom(SimilarityMeasure.values()) : null; + var elementType = taskType == TaskType.TEXT_EMBEDDING ? DenseVectorFieldMapper.ElementType.BYTE : null; + return new TestModel( + randomAlphaOfLength(4), + taskType, + randomAlphaOfLength(10), + new TestModel.TestServiceSettings(randomAlphaOfLength(4), dimensions, similarity, elementType), + new TestModel.TestTaskSettings(randomInt(3)), + new TestModel.TestSecretSettings(randomAlphaOfLength(4)) + ); + } + + private BytesReference randomSource() throws IOException { + var builder = JsonXContent.contentBuilder().startObject(); + builder.field("field", randomAlphaOfLengthBetween(10, 30)); + if (rarely()) { + return BytesReference.bytes(builder.endObject()); + } + SemanticTextFieldMapperTests.addSemanticTextInferenceResults( + false, + builder, + List.of( + randomSemanticText(false, "semantic_2", model2, randomInputs(), XContentType.JSON), + randomSemanticText(false, "semantic_1", model1, randomInputs(), XContentType.JSON) + ) + ); + builder.endObject(); + return BytesReference.bytes(builder); + } + + private static SemanticTextField randomSemanticText( + boolean useLegacyFormat, + String fieldName, + Model model, + List inputs, + XContentType contentType + ) throws IOException { + ChunkedInference results = switch (model.getTaskType()) { + case TEXT_EMBEDDING -> switch (model.getServiceSettings().elementType()) { + case BYTE -> randomChunkedInferenceEmbeddingByte(model, inputs); + default -> throw new AssertionError("invalid element type: " + model.getServiceSettings().elementType().name()); + }; + case SPARSE_EMBEDDING -> randomChunkedInferenceEmbeddingSparse(inputs, false); + default -> throw new AssertionError("invalid task type: " + model.getTaskType().name()); + }; + return semanticTextFieldFromChunkedInferenceResults(useLegacyFormat, fieldName, model, inputs, results, contentType); + } + + 
private static List randomInputs() { + int size = randomIntBetween(1, 5); + List resp = new ArrayList<>(); + for (int i = 0; i < size; i++) { + resp.add(randomAlphaOfLengthBetween(10, 50)); + } + return resp; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index b056f5880b21..881bde41a30c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -115,7 +115,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { var settings = Settings.builder() .put( IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), - SemanticInferenceMetadataFieldMapperTests.getRandomCompatibleIndexVersion(useLegacyFormat) + SemanticInferenceMetadataFieldsMapperTests.getRandomCompatibleIndexVersion(useLegacyFormat) ) .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) .build(); @@ -920,7 +920,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { mappingBuilder.endObject(); } - private static void addSemanticTextInferenceResults( + public static void addSemanticTextInferenceResults( boolean useLegacyFormat, XContentBuilder sourceBuilder, List semanticTextInferenceResults diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index 29ca71d38e1b..6a25ed506c2a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingByte; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.ml.search.WeightedToken; @@ -183,12 +184,26 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase inputs) { + List chunks = new ArrayList<>(); + for (String input : inputs) { + byte[] values = new byte[model.getServiceSettings().dimensions()]; + for (int j = 0; j < values.length; j++) { + values[j] = randomByte(); + } + chunks.add( + new ChunkedInferenceEmbeddingByte.ByteEmbeddingChunk(values, input, new ChunkedInference.TextOffset(0, input.length())) + ); + } + return new ChunkedInferenceEmbeddingByte(chunks); + } + public static ChunkedInferenceEmbeddingFloat randomChunkedInferenceEmbeddingFloat(Model model, List inputs) { List chunks = new ArrayList<>(); for (String input : inputs) { float[] values = new float[model.getServiceSettings().dimensions()]; for (int j = 0; j < values.length; j++) { - values[j] = (float) randomDouble(); + values[j] = randomFloat(); } chunks.add( new ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk(values, input, new ChunkedInference.TextOffset(0, input.length())) @@ -198,11 +213,15 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase inputs) { + return randomChunkedInferenceEmbeddingSparse(inputs, true); + } + + public static ChunkedInferenceEmbeddingSparse randomChunkedInferenceEmbeddingSparse(List inputs, boolean withFloats) { List chunks = new ArrayList<>(); for 
(String input : inputs) { var tokens = new ArrayList(); for (var token : input.split("\\s+")) { - tokens.add(new WeightedToken(token, randomFloat())); + tokens.add(new WeightedToken(token, withFloats ? randomFloat() : randomIntBetween(1, 255))); } chunks.add( new ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk(tokens, input, new ChunkedInference.TextOffset(0, input.length())) @@ -219,7 +238,10 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase randomChunkedInferenceEmbeddingFloat(model, inputs); + case TEXT_EMBEDDING -> switch (model.getServiceSettings().elementType()) { + case FLOAT -> randomChunkedInferenceEmbeddingFloat(model, inputs); + case BIT, BYTE -> randomChunkedInferenceEmbeddingByte(model, inputs); + }; case SPARSE_EMBEDDING -> randomChunkedInferenceEmbeddingSparse(inputs); default -> throw new AssertionError("invalid task type: " + model.getTaskType().name()); }; @@ -363,8 +385,8 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase one chunk) to calculate the offset values. 
* * @param useLegacyFormat Whether the old format should be used - * @param chunk The chunk to get/calculate offset values for - * @param matchedText The matched text to calculate offset values for + * @param chunk The chunk to get/calculate offset values for + * @param matchedText The matched text to calculate offset values for * @return A {@link ChunkedInference.TextOffset} instance with valid offset values */ private static ChunkedInference.TextOffset createOffset(boolean useLegacyFormat, SemanticTextField.Chunk chunk, String matchedText) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java index 1f58c4165056..24183b21f73e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.inference.mapper; import org.elasticsearch.index.mapper.NonDynamicFieldMapperTests; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; import org.junit.Before; @@ -26,7 +27,7 @@ public class SemanticTextNonDynamicFieldMapperTests extends NonDynamicFieldMappe @Override protected Collection> getPlugins() { - return List.of(Utils.TestInferencePlugin.class); + return List.of(LocalStateInferencePlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilderTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilderTests.java index c0ef4e45f101..2d91877da97b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/random/RandomRankRetrieverBuilderTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.rank.random; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.core.Predicates; import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; import org.elasticsearch.search.retriever.TestRetrieverBuilder; @@ -15,6 +16,7 @@ import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.usage.SearchUsage; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; @@ -23,6 +25,7 @@ import java.util.ArrayList; import java.util.List; import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; +import static org.hamcrest.Matchers.containsString; public class RandomRankRetrieverBuilderTests extends AbstractXContentTestCase { @@ -49,10 +52,7 @@ public class RandomRankRetrieverBuilderTests extends AbstractXContentTestCase nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED - ) + new RetrieverParserContext(new SearchUsage(), Predicates.never()) ); } @@ -99,4 +99,65 @@ public class RandomRankRetrieverBuilderTests extends AbstractXContentTestCase RandomRankRetrieverBuilder.PARSER.parse(parser, null) + ); + assertThat(ex.getMessage(), containsString("[random_reranker] failed to parse field [retriever]")); + 
assertThat(ex.getCause().getMessage(), containsString("empty [retriever] object")); + } + } + + public void testParserWrongRetrieverName() throws IOException { + String json = """ + { + "retriever": { + "test2": { + "value": "my-test-retriever" + } + }, + "field": "my-field" + }"""; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> RandomRankRetrieverBuilder.PARSER.parse(parser, null) + ); + assertThat(ex.getMessage(), containsString("[random_reranker] failed to parse field [retriever]")); + assertThat(ex.getCause().getMessage(), containsString("unknown field [test2]")); + } + } + + public void testExtraContent() throws IOException { + String json = """ + { + "retriever": { + "test": { + "value": "my-test-retriever" + }, + "field2": "my-field" + }, + "field": "my-field" + }"""; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> RandomRankRetrieverBuilder.PARSER.parse(parser, null) + ); + assertThat(ex.getMessage(), containsString("[random_reranker] failed to parse field [retriever]")); + assertThat(ex.getCause().getMessage(), containsString("unexpected field [field2]")); + } + } + } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java index 6d6403b69ea1..daed03c198e0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; 
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import java.util.Collection; import java.util.List; @@ -40,7 +40,7 @@ public class TextSimilarityRankMultiNodeTests extends AbstractRerankerIT { @Override protected Collection> pluginsNeeded() { - return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } public void testQueryPhaseShardThrowingAllShardsFail() throws Exception { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java index 478f3b2f33c9..b6d455dd233b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -68,12 +69,7 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes protected TextSimilarityRankRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { return (TextSimilarityRankRetrieverBuilder) 
RetrieverBuilder.parseTopLevelRetrieverBuilder( parser, - new RetrieverParserContext( - new SearchUsage(), - nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED - || nf == TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED - || nf == TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED - ) + new RetrieverParserContext(new SearchUsage(), Predicates.never()) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java index 084a7f3de4a5..ba6924ba0ff3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java @@ -24,8 +24,7 @@ import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.junit.Before; import java.io.IOException; @@ -47,7 +46,7 @@ public class TextSimilarityRankRetrieverTelemetryTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return List.of(InferencePlugin.class, XPackPlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index a042fca44fdb..f81f2965c392 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.junit.Before; import java.util.Collection; @@ -108,7 +108,7 @@ public class TextSimilarityRankTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } @Before diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettingsTests.java index 3a5f56c0b424..167a3f268829 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettingsTests.java @@ -8,9 +8,7 @@ package org.elasticsearch.xpack.inference.services.alibabacloudsearch.completion; import 
org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.alibabacloudsearch.AlibabaCloudSearchServiceSettings; import org.elasticsearch.xpack.inference.services.alibabacloudsearch.AlibabaCloudSearchServiceSettingsTests; import org.hamcrest.MatcherAssert; @@ -29,10 +27,6 @@ public class AlibabaCloudSearchCompletionServiceSettingsTests extends AbstractWi } public void testFromMap() { - var url = "https://www.abc.com"; - var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); - var dims = 1536; - var maxInputTokens = 512; var model = "model"; var host = "host"; var workspaceName = "default"; @@ -40,14 +34,6 @@ public class AlibabaCloudSearchCompletionServiceSettingsTests extends AbstractWi var serviceSettings = AlibabaCloudSearchCompletionServiceSettings.fromMap( new HashMap<>( Map.of( - ServiceFields.URL, - url, - ServiceFields.SIMILARITY, - similarity, - ServiceFields.DIMENSIONS, - dims, - ServiceFields.MAX_INPUT_TOKENS, - maxInputTokens, AlibabaCloudSearchServiceSettings.HOST, host, AlibabaCloudSearchServiceSettings.SERVICE_ID, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 8a826e99c3c0..89bf1355ee76 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -319,7 +319,7 @@ public class ElasticInferenceServiceTests extends ESTestCase { var factory = mock(HttpRequestSender.Factory.class); 
when(factory.createSender()).thenReturn(sender); - var mockModel = getInvalidModel("model_id", "service_name"); + var mockModel = getInvalidModel("model_id", "service_name", TaskType.SPARSE_EMBEDDING); try ( var service = new ElasticInferenceService( @@ -355,6 +355,98 @@ public class ElasticInferenceServiceTests extends ESTestCase { verifyNoMoreInteractions(sender); } + public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name", TaskType.TEXT_EMBEDDING); + + try ( + var service = new ElasticInferenceService( + factory, + createWithEmptySettings(threadPool), + new ElasticInferenceServiceComponents(null) + ) + ) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + mockModel, + null, + List.of(""), + false, + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + MatcherAssert.assertThat( + thrownException.getMessage(), + is( + "Inference entity [model_id] does not support task type [text_embedding] " + + "for inference, the task type must be one of [sparse_embedding]." 
+ ) + ); + + verify(factory, times(1)).createSender(); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name", TaskType.CHAT_COMPLETION); + + try ( + var service = new ElasticInferenceService( + factory, + createWithEmptySettings(threadPool), + new ElasticInferenceServiceComponents(null) + ) + ) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + mockModel, + null, + List.of(""), + false, + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + MatcherAssert.assertThat( + thrownException.getMessage(), + is( + "Inference entity [model_id] does not support task type [chat_completion] " + + "for inference, the task type must be one of [sparse_embedding]. " + + "The task type for the inference entity is chat_completion, " + + "please use the _inference/chat_completion/model_id/_unified URL." 
+ ) + ); + + verify(factory, times(1)).createSender(); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + public void testInfer_SendsEmbeddingsRequest() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); var eisGatewayUrl = getUrl(webServer); @@ -482,7 +574,7 @@ public class ElasticInferenceServiceTests extends ESTestCase { { "service": "elastic", "name": "Elastic", - "task_types": ["sparse_embedding" , "completion"], + "task_types": ["sparse_embedding", "chat_completion"], "configurations": { "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 678c4528a3f4..da912cd6e5d1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -864,6 +864,86 @@ public class OpenAiServiceTests extends ESTestCase { verifyNoMoreInteractions(sender); } + public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name", TaskType.SPARSE_EMBEDDING); + + try (var service = new OpenAiService(factory, createWithEmptySettings(threadPool))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + mockModel, + null, + List.of(""), + false, + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + 
listener + ); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is( + "Inference entity [model_id] does not support task type [sparse_embedding] " + + "for inference, the task type must be one of [text_embedding, completion]." + ) + ); + + verify(factory, times(1)).createSender(); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name", TaskType.CHAT_COMPLETION); + + try (var service = new OpenAiService(factory, createWithEmptySettings(threadPool))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + mockModel, + null, + List.of(""), + false, + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is( + "Inference entity [model_id] does not support task type [chat_completion] " + + "for inference, the task type must be one of [text_embedding, completion]. " + + "The task type for the inference entity is chat_completion, " + + "please use the _inference/chat_completion/model_id/_unified URL." 
+ ) + ); + + verify(factory, times(1)).createSender(); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + public void testInfer_SendsRequest() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -1661,7 +1741,7 @@ public class OpenAiServiceTests extends ESTestCase { { "service": "openai", "name": "OpenAI", - "task_types": ["text_embedding", "completion"], + "task_types": ["text_embedding", "completion", "chat_completion"], "configurations": { "api_key": { "description": "The OpenAI API authentication key. For more details about generating OpenAI API keys, refer to the https://platform.openai.com/account/api-keys.", diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 2f3bcfae600e..71db83e4667d 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -173,10 +173,6 @@ setup: --- "Query using a sparse embedding model via a search inference ID": - - requires: - cluster_features: "semantic_text.search_inference_id" - reason: search_inference_id introduced in 8.16.0 - - skip: features: [ "headers", "close_to" ] @@ -357,10 +353,6 @@ setup: --- "Query using a dense embedding model via a search inference ID": - - requires: - cluster_features: "semantic_text.search_inference_id" - reason: search_inference_id introduced in 8.16.0 - - skip: features: [ "headers", "close_to" ] @@ -737,10 +729,6 @@ setup: --- "Query a field with a search inference ID that uses the wrong task type": - - requires: - cluster_features: "semantic_text.search_inference_id" - 
reason: search_inference_id introduced in 8.16.0 - - do: indices.put_mapping: index: test-sparse-index @@ -778,10 +766,6 @@ setup: --- "Query a field with a search inference ID that uses the wrong dimension count": - - requires: - cluster_features: "semantic_text.search_inference_id" - reason: search_inference_id introduced in 8.16.0 - - do: inference.put: task_type: text_embedding @@ -835,10 +819,6 @@ setup: --- "Query a field with an invalid search inference ID": - - requires: - cluster_features: "semantic_text.search_inference_id" - reason: search_inference_id introduced in 8.16.0 - - do: indices.put_mapping: index: test-dense-index diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml index 2980d42d22c3..88569daaa607 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml @@ -4,8 +4,6 @@ setup: - close_to - contains - requires: - cluster_features: "text_similarity_reranker_retriever_supported" - reason: semantic reranking introduced in 8.15.0 test_runner_features: "close_to" - do: @@ -216,3 +214,82 @@ setup: - close_to: { hits.hits.0._explanation.value: { value: 0.4, error: 0.000001 } } - match: {hits.hits.0._explanation.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } - match: {hits.hits.0._explanation.details.0.description: "/weight.*science.*/" } + +--- +"text similarity reranker properly handles aliases": + - requires: + cluster_features: "text_similarity_reranker_alias_handling_fix" + reason: Test for alias handling fix + + # Create an empty index that will have an earlier shard index 
than the index with the desired result when referenced + # via the alias + - do: + indices.create: + index: first-test-index + body: + mappings: + properties: + text: + type: text + topic: + type: keyword + subtopic: + type: keyword + + - do: + indices.create: + index: second-test-index + body: + settings: + number_of_shards: 2 + number_of_replicas: 0 + mappings: + properties: + text: + type: text + topic: + type: keyword + subtopic: + type: keyword + + - do: + indices.put_alias: + index: first-test-index + name: test-alias + + - do: + indices.put_alias: + index: second-test-index + name: test-alias + + - do: + index: + index: second-test-index + id: doc_1 + body: + text: "As seen from Earth, a solar eclipse happens when the Moon is directly between the Earth and the Sun." + topic: [ "science" ] + subtopic: [ "technology" ] + refresh: true + + - do: + search: + index: test-alias + body: + track_total_hits: true + retriever: + text_similarity_reranker: + retriever: + standard: + query: + term: + topic: "science" + rank_window_size: 10 + inference_id: my-rerank-model + inference_text: "How often does the moon hide the sun?" 
+ field: text + size: 10 + + - match: { hits.total.value: 1 } + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "doc_1" } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java index 2a517ce6ffed..99f04ad8f798 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java @@ -14,8 +14,8 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.monitor.metrics.IndexModeStatsActionType; @@ -23,24 +23,19 @@ import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.XPackFeatures; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; import org.elasticsearch.xpack.core.application.LogsDBFeatureSetUsage; -import static org.elasticsearch.index.mapper.SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING; - public class LogsDBUsageTransportAction extends XPackUsageFeatureTransportAction { private final ClusterService clusterService; - private final FeatureService featureService; private 
final Client client; @Inject public LogsDBUsageTransportAction( TransportService transportService, ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, ActionFilters actionFilters, Client client, @@ -55,7 +50,6 @@ public class LogsDBUsageTransportAction extends XPackUsageFeatureTransportAction indexNameExpressionResolver ); this.clusterService = clusterService; - this.featureService = featureService; this.client = client; } @@ -71,38 +65,30 @@ public class LogsDBUsageTransportAction extends XPackUsageFeatureTransportAction for (IndexMetadata indexMetadata : state.metadata().getProject()) { if (indexMetadata.getIndexMode() == IndexMode.LOGSDB) { numIndices++; - if (INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexMetadata.getSettings()) == SourceFieldMapper.Mode.SYNTHETIC) { + if (IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexMetadata.getSettings()) == SourceFieldMapper.Mode.SYNTHETIC) { numIndicesWithSyntheticSources++; } } } final boolean enabled = LogsDBPlugin.CLUSTER_LOGSDB_ENABLED.get(clusterService.getSettings()); final boolean hasCustomCutoffDate = System.getProperty(SyntheticSourceLicenseService.CUTOFF_DATE_SYS_PROP_NAME) != null; - if (featureService.clusterHasFeature(state, XPackFeatures.LOGSDB_TELMETRY_STATS)) { - final DiscoveryNode[] nodes = state.nodes().getDataNodes().values().toArray(DiscoveryNode[]::new); - final var statsRequest = new IndexModeStatsActionType.StatsRequest(nodes); - final int finalNumIndices = numIndices; - final int finalNumIndicesWithSyntheticSources = numIndicesWithSyntheticSources; - client.execute(IndexModeStatsActionType.TYPE, statsRequest, listener.map(statsResponse -> { - final var indexStats = statsResponse.stats().get(IndexMode.LOGSDB); - return new XPackUsageFeatureResponse( - new LogsDBFeatureSetUsage( - true, - enabled, - finalNumIndices, - finalNumIndicesWithSyntheticSources, - indexStats.numDocs(), - indexStats.numBytes(), - hasCustomCutoffDate - ) - ); - })); - } else { - 
listener.onResponse( - new XPackUsageFeatureResponse( - new LogsDBFeatureSetUsage(true, enabled, numIndices, numIndicesWithSyntheticSources, 0L, 0L, hasCustomCutoffDate) + final DiscoveryNode[] nodes = state.nodes().getDataNodes().values().toArray(DiscoveryNode[]::new); + final var statsRequest = new IndexModeStatsActionType.StatsRequest(nodes); + final int finalNumIndices = numIndices; + final int finalNumIndicesWithSyntheticSources = numIndicesWithSyntheticSources; + client.execute(IndexModeStatsActionType.TYPE, statsRequest, listener.map(statsResponse -> { + final var indexStats = statsResponse.stats().get(IndexMode.LOGSDB); + return new XPackUsageFeatureResponse( + new LogsDBFeatureSetUsage( + true, + enabled, + finalNumIndices, + finalNumIndicesWithSyntheticSources, + indexStats.numDocs(), + indexStats.numBytes(), + hasCustomCutoffDate ) ); - } + })); } } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java index 720bfa1054ee..db3e477f0956 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexMode; @@ -30,12 +31,14 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; +import 
org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_PATH; @@ -44,6 +47,7 @@ import static org.elasticsearch.xpack.logsdb.LogsDBPlugin.CLUSTER_LOGSDB_ENABLED final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { private static final Logger LOGGER = LogManager.getLogger(LogsdbIndexModeSettingsProvider.class); private static final String LOGS_PATTERN = "logs-*-*"; + private static final Set MAPPING_INCLUDES = Set.of("_doc._source.*", "_doc.properties.host**", "_doc.subobjects"); private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final SetOnce> mapperServiceFactory = new SetOnce<>(); @@ -117,7 +121,7 @@ final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { if (settingsBuilder == null) { settingsBuilder = Settings.builder(); } - settingsBuilder.put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()); + settingsBuilder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()); } } @@ -206,7 +210,7 @@ final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings); var indexMode = tmpIndexMetadata.getIndexMode(); boolean hasSyntheticSourceUsage = false; - if (SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.exists(tmpIndexMetadata.getSettings()) + if (IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.exists(tmpIndexMetadata.getSettings()) || indexMode == IndexMode.LOGSDB || indexMode == IndexMode.TIME_SERIES) { // In case when index mode is tsdb or logsdb and only _source.mode mapping attribute is specified, 
then the default @@ -214,7 +218,7 @@ final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { // then configuring the index.mapping.source.mode setting to stored has no effect. Additionally _source.mode can't be set // to disabled, because that isn't allowed with logsdb/tsdb. In other words setting index.mapping.source.mode setting to // stored when _source.mode mapping attribute is stored is fine as it has no effect, but avoids creating MapperService. - var sourceMode = SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(tmpIndexMetadata.getSettings()); + var sourceMode = IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(tmpIndexMetadata.getSettings()); hasSyntheticSourceUsage = sourceMode == SourceFieldMapper.Mode.SYNTHETIC; if (IndexSortConfig.INDEX_SORT_FIELD_SETTING.get(indexTemplateAndCreateRequestSettings).isEmpty() == false) { // Custom sort config, no point for further checks on [host.name] field. @@ -232,6 +236,19 @@ final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { // combinedTemplateMappings can be empty when creating a normal index that doesn't match any template and without mapping. if (combinedTemplateMappings == null || combinedTemplateMappings.isEmpty()) { combinedTemplateMappings = List.of(new CompressedXContent("{}")); + } else { + // Filter the mapping to contain only the part this index settings provider is interested in. + // This reduces the overhead of loading mappings, since mappings can be very large. + // The _doc._source.mode is needed to determine synthetic source usage. + // The _doc.properties.host* is needed to determine whether host.name field can be injected. + // The _doc.subobjects is needed to determine whether subobjects is enabled. 
+ List filteredMappings = new ArrayList<>(combinedTemplateMappings.size()); + for (CompressedXContent mappingSource : combinedTemplateMappings) { + var ref = mappingSource.compressedReference(); + var map = XContentHelper.convertToMap(ref, true, XContentType.JSON, MAPPING_INCLUDES, Set.of()).v2(); + filteredMappings.add(new CompressedXContent(map)); + } + combinedTemplateMappings = filteredMappings; } mapperService.merge(MapperService.SINGLE_MAPPING_NAME, combinedTemplateMappings, MapperService.MergeReason.INDEX_TEMPLATE); Mapper hostName = mapperService.mappingLookup().getMapper("host.name"); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java index f8f307b572f3..b2a533f6b76c 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.xpack.logsdb; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.license.AbstractLicensesIntegrationTestCase; import org.elasticsearch.license.GetFeatureUsageRequest; import org.elasticsearch.license.GetFeatureUsageResponse; @@ -104,11 +104,11 @@ public class LegacyLicenceIntegrationTests extends AbstractLicensesIntegrationTe } private void createIndexWithSyntheticSourceAndAssertExpectedType(String indexName, String expectedType) { - var settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "synthetic").build(); + var settings = Settings.builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), 
"synthetic").build(); createIndex(indexName, settings); var response = admin().indices().getSettings(new GetSettingsRequest().indices(indexName)).actionGet(); assertThat( - response.getIndexToSettings().get(indexName).get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), + response.getIndexToSettings().get(indexName).get(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo(expectedType) ); } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index d70d0ea425b0..dd0bfaf62e82 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -642,7 +642,7 @@ public class LogsdbIndexModeSettingsProviderTests extends ESTestCase { ); Metadata metadata = mb.build(); LogsdbIndexModeSettingsProvider provider = withSyntheticSourceDemotionSupport(false); - Settings settings = builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + Settings settings = builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) .build(); Settings result = provider.getAdditionalIndexSettings( @@ -668,7 +668,7 @@ public class LogsdbIndexModeSettingsProviderTests extends ESTestCase { List.of() ); assertThat(result.size(), equalTo(1)); - assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertEquals(SourceFieldMapper.Mode.STORED, IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); assertThat(newMapperServiceCounter.get(), equalTo(2)); result = provider.getAdditionalIndexSettings( @@ -681,7 +681,7 @@ public class LogsdbIndexModeSettingsProviderTests extends 
ESTestCase { List.of() ); assertThat(result.size(), equalTo(1)); - assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertEquals(SourceFieldMapper.Mode.STORED, IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); assertThat(newMapperServiceCounter.get(), equalTo(3)); result = provider.getAdditionalIndexSettings( @@ -694,7 +694,7 @@ public class LogsdbIndexModeSettingsProviderTests extends ESTestCase { List.of() ); assertThat(result.size(), equalTo(3)); - assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertEquals(SourceFieldMapper.Mode.STORED, IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); assertTrue(IndexSettings.LOGSDB_SORT_ON_HOST_NAME.get(result)); assertTrue(IndexSettings.LOGSDB_ADD_HOST_NAME_FIELD.get(result)); assertThat(newMapperServiceCounter.get(), equalTo(4)); @@ -749,7 +749,7 @@ public class LogsdbIndexModeSettingsProviderTests extends ESTestCase { List.of() ); assertThat(result.size(), equalTo(4)); - assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertEquals(SourceFieldMapper.Mode.STORED, IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); assertEquals(IndexMode.LOGSDB, IndexSettings.MODE.get(result)); assertTrue(IndexSettings.LOGSDB_SORT_ON_HOST_NAME.get(result)); assertTrue(IndexSettings.LOGSDB_ADD_HOST_NAME_FIELD.get(result)); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java index 492134427884..7fa2f11880f4 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java +++ 
b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexSettingsProviderLegacyLicenseTests.java @@ -10,9 +10,9 @@ package org.elasticsearch.xpack.logsdb; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; @@ -58,16 +58,16 @@ public class LogsdbIndexSettingsProviderLegacyLicenseTests extends ESTestCase { } public void testGetAdditionalIndexSettingsDefault() { - Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); String dataStreamName = "metrics-my-app"; String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0); var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, null, null, null, settings, List.of()); assertThat(result.size(), equalTo(1)); - assertThat(result.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo("STORED")); + assertThat(result.get(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo("STORED")); } public void testGetAdditionalIndexSettingsApm() throws IOException { - Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); String dataStreamName = "metrics-apm.app.test"; String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 
0); var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, null, null, null, settings, List.of()); @@ -75,7 +75,7 @@ public class LogsdbIndexSettingsProviderLegacyLicenseTests extends ESTestCase { } public void testGetAdditionalIndexSettingsProfiling() throws IOException { - Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); for (String dataStreamName : new String[] { "profiling-metrics", "profiling-events" }) { String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0); var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, null, null, null, settings, List.of()); @@ -89,7 +89,7 @@ public class LogsdbIndexSettingsProviderLegacyLicenseTests extends ESTestCase { } public void testGetAdditionalIndexSettingsTsdb() throws IOException { - Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); String dataStreamName = "metrics-my-app"; String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0); var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, IndexMode.TIME_SERIES, null, null, settings, List.of()); @@ -118,11 +118,11 @@ public class LogsdbIndexSettingsProviderLegacyLicenseTests extends ESTestCase { true ); - Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build(); String dataStreamName = "metrics-my-app"; String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0); var result = 
provider.getAdditionalIndexSettings(indexName, dataStreamName, IndexMode.TIME_SERIES, null, null, settings, List.of()); assertThat(result.size(), equalTo(1)); - assertThat(result.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo("STORED")); + assertThat(result.get(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo("STORED")); } } diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml index 1fd10c2028fd..b49ada6e1809 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/30_logsdb_default_mapping.yml @@ -2,9 +2,6 @@ create logsdb data stream with host.name as keyword: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: cluster.put_component_template: @@ -38,9 +35,6 @@ create logsdb data stream with host.name as keyword: create logsdb data stream with host.name as keyword and timestamp as date: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: cluster.put_component_template: @@ -76,9 +70,6 @@ create logsdb data stream with host.name as keyword and timestamp as date: create logsdb data stream with host.name as integer and timestamp as date: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: cluster.put_component_template: @@ -107,16 +98,120 @@ create logsdb data stream with host.name as integer and timestamp as date: - do: 
indices.create_data_stream: name: "logsdb" - - is_true: acknowledged + - do: + indices.get_data_stream: + name: "logsdb" + expand_wildcards: hidden + - length: { data_streams: 1 } + - set: { data_streams.0.indices.0.index_name: backing_index } + + - do: + indices.get_settings: + index: $backing_index + - match: { .$backing_index.settings.index.mode: logsdb } + - is_false: .$backing_index.settings.index.logsdb.add_host_name_field + - match: { .$backing_index.settings.index.logsdb.sort_on_host_name: "true" } + +--- +create logsdb data stream with no host.name and timestamp as date: + - requires: + test_runner_features: [ "allowed_warnings" ] + + - do: + cluster.put_component_template: + name: "logsdb-mappings" + body: + template: + settings: + mode: "logsdb" + mappings: + properties: + "@timestamp": + type: "date" + + - do: + indices.put_index_template: + name: "logsdb-index-template" + body: + index_patterns: ["logsdb"] + data_stream: {} + composed_of: ["logsdb-mappings"] + allowed_warnings: + - "index template [logsdb-index-template] has index patterns [logsdb] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template] will take precedence during new index creation" + + - do: + indices.create_data_stream: + name: "logsdb" + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: "logsdb" + expand_wildcards: hidden + - length: { data_streams: 1 } + - set: { data_streams.0.indices.0.index_name: backing_index } + + - do: + indices.get_settings: + index: $backing_index + - match: { .$backing_index.settings.index.mode: logsdb } + - match: { .$backing_index.settings.index.logsdb.add_host_name_field: "true" } + - match: { .$backing_index.settings.index.logsdb.sort_on_host_name: "true" } + +--- +create logsdb data stream with host as keyword and timestamp as date: + - requires: + test_runner_features: [ "allowed_warnings" ] + + - do: + cluster.put_component_template: + name: "logsdb-mappings" + 
body: + template: + settings: + mode: "logsdb" + mappings: + properties: + host: + type: "keyword" + "@timestamp": + type: "date" + + - do: + indices.put_index_template: + name: "logsdb-index-template" + body: + index_patterns: ["logsdb"] + data_stream: {} + composed_of: ["logsdb-mappings"] + allowed_warnings: + - "index template [logsdb-index-template] has index patterns [logsdb] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logsdb-index-template] will take precedence during new index creation" + + - do: + indices.create_data_stream: + name: "logsdb" + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: "logsdb" + expand_wildcards: hidden + - length: { data_streams: 1 } + - set: { data_streams.0.indices.0.index_name: backing_index } + + - do: + indices.get_settings: + index: $backing_index + - match: { .$backing_index.settings.index.mode: logsdb } + - is_false: .$backing_index.settings.index.logsdb.add_host_name_field + - is_false: .$backing_index.settings.index.logsdb.sort_on_host_name + --- create logsdb data stream with host as keyword: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: cluster.put_component_template: @@ -150,9 +245,6 @@ create logsdb data stream with host as keyword: create logsdb data stream with host as text and multi fields: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: cluster.put_component_template: @@ -193,9 +285,6 @@ create logsdb data stream with host as text and multi fields: create logsdb data stream with host as text: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: ["mapper.keyword_normalizer_synthetic_source"] - reason: 
"Support for normalizer on keyword fields" - do: cluster.put_component_template: @@ -232,9 +321,6 @@ create logsdb data stream with host as text: create logsdb data stream with host as text and name as double: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: ["mapper.keyword_normalizer_synthetic_source"] - reason: "Support for normalizer on keyword fields" - do: cluster.put_component_template: @@ -274,9 +360,6 @@ create logsdb data stream with host as text and name as double: create logsdb data stream with timestamp object mapping: - requires: test_runner_features: [ "allowed_warnings" ] - - requires: - cluster_features: ["mapper.keyword_normalizer_synthetic_source"] - reason: "Support for normalizer on keyword fields" - do: cluster.put_component_template: @@ -315,9 +398,6 @@ create logsdb data stream with timestamp object mapping: create logsdb data stream with custom sorting without host.name: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -365,9 +445,6 @@ create logsdb data stream with custom sorting without host.name: create logsdb data stream with custom sorting and host object: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -424,9 +501,6 @@ create logsdb data stream with custom sorting and host object: create logsdb data stream with custom sorting and dynamically mapped host.name: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -489,9 +563,6 @@ create logsdb data stream with custom sorting and dynamically mapped host.name: create logsdb data stream with custom 
sorting and host.name object: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -545,9 +616,6 @@ create logsdb data stream with custom sorting and host.name object: create logsdb data stream with default sorting on malformed host.name: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -609,9 +677,6 @@ create logsdb data stream with default sorting on malformed host.name: create logsdb data stream with custom sorting and host.name date field: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -662,9 +727,6 @@ create logsdb data stream with custom sorting and host.name date field: create logsdb data stream with custom sorting and missing host.name field mapping: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -713,9 +775,6 @@ create logsdb data stream with custom sorting and missing host.name field mappin create logsdb data stream with custom sorting and host.name field without doc values: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -755,9 +814,6 @@ create logsdb data stream with custom sorting and host.name field without doc va create logsdb data stream with incompatible ignore_above on host.name: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ 
"mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -803,9 +859,6 @@ create logsdb data stream with incompatible ignore_above on host.name: create logsdb data stream with no sorting and host.name as text: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -846,9 +899,6 @@ create logsdb data stream with no sorting and host.name as text: create logsdb data stream without index sorting and ignore_above on host.name: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -892,9 +942,6 @@ create logsdb data stream without index sorting and ignore_above on host.name: create logsdb data stream with host.name as alias and sorting on it: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -930,9 +977,6 @@ create logsdb data stream with host.name as alias and sorting on it: create logsdb data stream with multi-fields on host.name: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -977,9 +1021,6 @@ create logsdb data stream with multi-fields on host.name: create logsdb data stream with multi-fields on host.name and no sorting: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -1021,9 +1062,6 @@ create logsdb data stream with multi-fields on host.name and no sorting: 
create logsdb data stream with custom empty sorting: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: @@ -1068,9 +1106,6 @@ create logsdb data stream with custom empty sorting: create logsdb data stream with custom sorting on timestamp: - skip: features: [ "allowed_warnings" ] - - requires: - cluster_features: [ "mapper.keyword_normalizer_synthetic_source" ] - reason: support for normalizer on keyword fields - do: allowed_warnings: diff --git a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/80_synthetic_source.yml b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/80_synthetic_source.yml index 22f69e30650f..eb7c1df91de6 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/80_synthetic_source.yml +++ b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/80_synthetic_source.yml @@ -5,10 +5,6 @@ setup: --- synthetic source: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires synthetic source support - - do: indices.create: index: synthetic_source_test @@ -51,10 +47,6 @@ synthetic source: --- synthetic source with copy_to: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: synthetic_source_test @@ -111,10 +103,6 @@ synthetic source with copy_to: --- synthetic source with disabled doc_values: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires disabled doc_values support in synthetic source - - do: indices.create: index: synthetic_source_test diff --git 
a/x-pack/plugin/mapper-version/src/yamlRestTest/resources/rest-api-spec/test/40_synthetic_source.yml b/x-pack/plugin/mapper-version/src/yamlRestTest/resources/rest-api-spec/test/40_synthetic_source.yml index 3095a19fa29d..0989a92f2ca0 100644 --- a/x-pack/plugin/mapper-version/src/yamlRestTest/resources/rest-api-spec/test/40_synthetic_source.yml +++ b/x-pack/plugin/mapper-version/src/yamlRestTest/resources/rest-api-spec/test/40_synthetic_source.yml @@ -69,10 +69,6 @@ script values: --- synthetic source with copy_to: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: synthetic_source_test diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java index b460c6abfeee..34bc650caec5 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java @@ -54,7 +54,7 @@ public class CreateIndexFromSourceActionIT extends ESIntegTestCase { ); try { - indicesAdmin().getIndex(new GetIndexRequest().indices(destIndex)).actionGet(); + indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex)).actionGet(); } catch (IndexNotFoundException e) { fail(); } @@ -110,7 +110,8 @@ public class CreateIndexFromSourceActionIT extends ESIntegTestCase { client().execute(CreateIndexFromSourceAction.INSTANCE, new CreateIndexFromSourceAction.Request(sourceIndex, destIndex)) ); - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var mappingsResponse = 
indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + .actionGet(); Map mappings = mappingsResponse.mappings(); var destMappings = mappings.get(destIndex).sourceAsMap(); var sourceMappings = mappings.get(sourceIndex).sourceAsMap(); @@ -224,7 +225,7 @@ public class CreateIndexFromSourceActionIT extends ESIntegTestCase { ) ); - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(destIndex)).actionGet(); + var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(destIndex)).actionGet(); Map mappings = mappingsResponse.mappings(); var destMappings = mappings.get(destIndex).sourceAsMap(); diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index 0902f6ce6468..b5eb1e584673 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.migrate.action; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -96,7 +97,7 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { assertHitCount(prepareSearch(destIndex).setSize(0), 0); } - public void testDestIndexNameSet() throws Exception { + public void testDestIndexNameSet_noDotPrefix() throws Exception { assumeTrue("requires the migration reindex feature flag", 
REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); @@ -110,6 +111,20 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { assertEquals(expectedDestIndexName, response.getDestIndex()); } + public void testDestIndexNameSet_withDotPrefix() throws Exception { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = "." + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + + // call reindex + var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + .actionGet(); + + var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); + assertEquals(expectedDestIndexName, response.getDestIndex()); + } + public void testDestIndexContainsDocs() throws Exception { assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); @@ -199,7 +214,8 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .actionGet() .getDestIndex(); - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + .actionGet(); Map mappings = mappingsResponse.mappings(); var destMappings = mappings.get(destIndex).sourceAsMap(); var sourceMappings = mappings.get(sourceIndex).sourceAsMap(); @@ -209,16 +225,46 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { assertEquals("text", XContentMapValues.extractValue("properties.foo1.type", destMappings)); } - public void testReadOnlyAddedBack() { + public void testFailIfMetadataBlockSet() { + assumeTrue("requires the migration reindex 
feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_METADATA, true).build(); + indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + + try { + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + } catch (ElasticsearchException e) { + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); + } + + cleanupMetadataBlocks(sourceIndex); + } + + public void testFailIfReadBlockSet() { + assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_READ, true).build(); + indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + + try { + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + } catch (ElasticsearchException e) { + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); + } + + cleanupMetadataBlocks(sourceIndex); + } + + public void testReadOnlyBlocksNotAddedBack() { assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - // Create source index with read-only and/or block-writes var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - boolean isReadOnly = randomBoolean(); - boolean isBlockWrites = randomBoolean(); var settings = Settings.builder() - .put(IndexMetadata.SETTING_READ_ONLY, isReadOnly) - .put(IndexMetadata.SETTING_BLOCKS_WRITE, isBlockWrites) + .put(IndexMetadata.SETTING_READ_ONLY, randomBoolean()) + 
.put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, randomBoolean()) + .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) .build(); indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); @@ -227,13 +273,13 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .actionGet() .getDestIndex(); - // assert read-only settings added back to dest index var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); - assertEquals(isReadOnly, Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); - assertEquals(isBlockWrites, Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); + assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); + assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); + assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); - removeReadOnly(sourceIndex); - removeReadOnly(destIndex); + cleanupMetadataBlocks(sourceIndex); + cleanupMetadataBlocks(destIndex); } public void testUpdateSettingsDefaultsRestored() { @@ -305,7 +351,8 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { // verify mappings from templates copied to dest index { - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + .actionGet(); var destMappings = mappingsResponse.mappings().get(destIndex).sourceAsMap(); var sourceMappings = mappingsResponse.mappings().get(sourceIndex).sourceAsMap(); assertEquals(sourceMappings, destMappings); @@ -377,7 +424,7 @@ public class ReindexDatastreamIndexTransportActionIT 
extends ESIntegTestCase { backingIndexName = indexResponse.getIndex(); } - var sourceSettings = indicesAdmin().getIndex(new GetIndexRequest().indices(backingIndexName)) + var sourceSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName)) .actionGet() .getSettings() .get(backingIndexName); @@ -399,7 +446,10 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { .actionGet() .getDestIndex(); - var destSettings = indicesAdmin().getIndex(new GetIndexRequest().indices(destIndex)).actionGet().getSettings().get(destIndex); + var destSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex)) + .actionGet() + .getSettings() + .get(destIndex); var destStart = IndexSettings.TIME_SERIES_START_TIME.get(destSettings); var destEnd = IndexSettings.TIME_SERIES_END_TIME.get(destSettings); @@ -412,10 +462,11 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { // TODO check other IndexMetadata fields that need to be fixed after the fact // TODO what happens if don't have necessary perms for a given index? 
- private static void removeReadOnly(String index) { + private static void cleanupMetadataBlocks(String index) { var settings = Settings.builder() - .put(IndexMetadata.SETTING_READ_ONLY, false) - .put(IndexMetadata.SETTING_BLOCKS_WRITE, false) + .putNull(IndexMetadata.SETTING_READ_ONLY) + .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) + .putNull(IndexMetadata.SETTING_BLOCKS_METADATA) .build(); assertAcked(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, index)).actionGet()); } @@ -440,10 +491,38 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { } private static String getIndexUUID(String index) { - return indicesAdmin().getIndex(new GetIndexRequest().indices(index)) + return indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)) .actionGet() .getSettings() .get(index) .get(IndexMetadata.SETTING_INDEX_UUID); } + + public void testGenerateDestIndexName_noDotPrefix() { + String sourceIndex = "sourceindex"; + String expectedDestIndex = "migrated-sourceindex"; + String actualDestIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); + assertEquals(expectedDestIndex, actualDestIndex); + } + + public void testGenerateDestIndexName_withDotPrefix() { + String sourceIndex = ".sourceindex"; + String expectedDestIndex = ".migrated-sourceindex"; + String actualDestIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); + assertEquals(expectedDestIndex, actualDestIndex); + } + + public void testGenerateDestIndexName_withHyphen() { + String sourceIndex = "source-index"; + String expectedDestIndex = "migrated-source-index"; + String actualDestIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); + assertEquals(expectedDestIndex, actualDestIndex); + } + + public void testGenerateDestIndexName_withUnderscore() { + String sourceIndex = "source_index"; + String expectedDestIndex = "migrated-source_index"; + String 
actualDestIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); + assertEquals(expectedDestIndex, actualDestIndex); + } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index 54698d583bc0..544e5764605c 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -40,7 +40,6 @@ import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.READ_ONLY; import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; public class ReindexDataStreamIndexTransportAction extends HandledTransportAction< @@ -94,13 +93,22 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio ); } + if (settingsBefore.getAsBoolean(IndexMetadata.SETTING_BLOCKS_READ, false)) { + var errorMessage = String.format(Locale.ROOT, "Cannot reindex index [%s] which has a read block.", destIndexName); + listener.onFailure(new ElasticsearchException(errorMessage)); + return; + } + if (settingsBefore.getAsBoolean(IndexMetadata.SETTING_BLOCKS_METADATA, false)) { + var errorMessage = String.format(Locale.ROOT, "Cannot reindex index [%s] which has a metadata block.", destIndexName); + listener.onFailure(new ElasticsearchException(errorMessage)); + return; + } + SubscribableListener.newForked(l -> setBlockWrites(sourceIndexName, l, taskId)) .andThen(l -> deleteDestIfExists(destIndexName, l, taskId)) .andThen(l -> createIndex(sourceIndex, destIndexName, l, taskId)) .andThen(l -> reindex(sourceIndexName, destIndexName, 
l, taskId)) .andThen(l -> copyOldSourceSettingsToDest(settingsBefore, destIndexName, l, taskId)) - .andThen(l -> addBlockIfFromSource(WRITE, settingsBefore, destIndexName, l, taskId)) - .andThen(l -> addBlockIfFromSource(READ_ONLY, settingsBefore, destIndexName, l, taskId)) .andThenApply(ignored -> new ReindexDataStreamIndexAction.Response(destIndexName)) .addListener(listener); } @@ -121,7 +129,8 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio @Override public void onFailure(Exception e) { if (e instanceof ClusterBlockException || e.getCause() instanceof ClusterBlockException) { - // It's fine if block-writes is already set + // Could fail with a cluster block exception if read-only or read-only-allow-delete is already set + // In this case, we can proceed listener.onResponse(null); } else { listener.onFailure(e); @@ -147,10 +156,12 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio ) { logger.debug("Creating destination index [{}] for source index [{}]", destIndexName, sourceIndex.getIndex().getName()); - // override read-only settings if they exist var removeReadOnlyOverride = Settings.builder() + // remove read-only settings if they exist .putNull(IndexMetadata.SETTING_READ_ONLY) + .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) .putNull(IndexMetadata.SETTING_BLOCKS_WRITE) + // settings to optimize reindex .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1) .build(); @@ -193,6 +204,18 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio } } + private void updateSettings( + String index, + Settings.Builder settings, + ActionListener listener, + TaskId parentTaskId + ) { + var updateSettingsRequest = new UpdateSettingsRequest(settings.build(), index); + updateSettingsRequest.setParentTask(parentTaskId); + var errorMessage = String.format(Locale.ROOT, "Could not update settings on index [%s]", 
index); + client.admin().indices().updateSettings(updateSettingsRequest, failIfNotAcknowledged(listener, errorMessage)); + } + private void copyOldSourceSettingsToDest( Settings settingsBefore, String destIndexName, @@ -200,15 +223,10 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio TaskId parentTaskId ) { logger.debug("Updating settings on destination index after reindex completes"); - var settings = Settings.builder(); copySettingOrUnset(settingsBefore, settings, IndexMetadata.SETTING_NUMBER_OF_REPLICAS); copySettingOrUnset(settingsBefore, settings, IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()); - - var updateSettingsRequest = new UpdateSettingsRequest(settings.build(), destIndexName); - updateSettingsRequest.setParentTask(parentTaskId); - var errorMessage = String.format(Locale.ROOT, "Could not update settings on index [%s]", destIndexName); - client.admin().indices().updateSettings(updateSettingsRequest, failIfNotAcknowledged(listener, errorMessage)); + updateSettings(destIndexName, settings, listener, parentTaskId); } private static void copySettingOrUnset(Settings settingsBefore, Settings.Builder builder, String setting) { @@ -222,8 +240,12 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio } } - public static String generateDestIndexName(String sourceIndex) { - return "migrated-" + sourceIndex; + static String generateDestIndexName(String sourceIndex) { + String prefix = "migrated-"; + if (sourceIndex.startsWith(".")) { + return "." 
+ prefix + sourceIndex.substring(1); + } + return prefix + sourceIndex; } private static ActionListener failIfNotAcknowledged( diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 706f64576bc1..a33c4bf86db1 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -11,11 +11,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.support.CountDownActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.DataStream; @@ -213,26 +216,29 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec reindexDataStreamTask.incrementInProgressIndicesCount(index.getName()); ReindexDataStreamIndexAction.Request reindexDataStreamIndexRequest = new ReindexDataStreamIndexAction.Request(index.getName()); reindexDataStreamIndexRequest.setParentTask(parentTaskId); - 
reindexClient.execute(ReindexDataStreamIndexAction.INSTANCE, reindexDataStreamIndexRequest, ActionListener.wrap(response1 -> { - updateDataStream(sourceDataStream, index.getName(), response1.getDestIndex(), ActionListener.wrap(unused -> { + + SubscribableListener.newForked( + l -> reindexClient.execute(ReindexDataStreamIndexAction.INSTANCE, reindexDataStreamIndexRequest, l) + ) + .andThen( + (l, result) -> updateDataStream(sourceDataStream, index.getName(), result.getDestIndex(), l, reindexClient, parentTaskId) + ) + .andThen(l -> deleteIndex(index.getName(), reindexClient, parentTaskId, l)) + .addListener(ActionListener.wrap(unused -> { reindexDataStreamTask.reindexSucceeded(index.getName()); listener.onResponse(null); maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, reindexClient, sourceDataStream, listener, parentTaskId); - }, exception -> { - reindexDataStreamTask.reindexFailed(index.getName(), exception); + }, e -> { + reindexDataStreamTask.reindexFailed(index.getName(), e); listener.onResponse(null); - }), reindexClient, parentTaskId); - }, exception -> { - reindexDataStreamTask.reindexFailed(index.getName(), exception); - listener.onResponse(null); - })); + })); } private void updateDataStream( String dataStream, String oldIndex, String newIndex, - ActionListener listener, + ActionListener listener, ExecuteWithHeadersClient reindexClient, TaskId parentTaskId ) { @@ -242,17 +248,18 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec List.of(DataStreamAction.removeBackingIndex(dataStream, oldIndex), DataStreamAction.addBackingIndex(dataStream, newIndex)) ); modifyDataStreamRequest.setParentTask(parentTaskId); - reindexClient.execute(ModifyDataStreamsAction.INSTANCE, modifyDataStreamRequest, new ActionListener<>() { - @Override - public void onResponse(AcknowledgedResponse response) { - listener.onResponse(null); - } + reindexClient.execute(ModifyDataStreamsAction.INSTANCE, modifyDataStreamRequest, listener); + } - 
@Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + private void deleteIndex( + String indexName, + ExecuteWithHeadersClient reindexClient, + TaskId parentTaskId, + ActionListener listener + ) { + DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName); + deleteIndexRequest.setParentTask(parentTaskId); + reindexClient.execute(TransportDeleteIndexAction.TYPE, deleteIndexRequest, listener); } private void completeSuccessfulPersistentTask( diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java index 5287d149fae3..367c1cee8b0e 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java @@ -11,6 +11,8 @@ import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.ReferenceDocs; @@ -80,6 +82,10 @@ public class DatafeedJobsIT extends MlNativeAutodetectIntegTestCase { public void cleanup() { updateClusterSettings(Settings.builder().putNull("logger.org.elasticsearch.xpack.ml.datafeed")); cleanUp(); + // Race conditions between closing and killing tasks in these tests, + // sometimes result in lingering persistent tasks (such as "_close"), + 
// which cause subsequent tests to fail. + client().execute(TransportCancelTasksAction.TYPE, new CancelTasksRequest()); } public void testLookbackOnly() throws Exception { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 39519dc7931d..50b9597bb032 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -133,7 +133,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { client().admin().indices().prepareCreate(".ml-state-000007").addAlias(new Alias(".ml-state-write").isHidden(true)).get(); refresh(); - GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(".ml-state*").get(); + GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(".ml-state*").get(); assertThat( Strings.toString(getIndexResponse), getIndexResponse.getIndices(), @@ -143,7 +143,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { client().execute(DeleteExpiredDataAction.INSTANCE, new DeleteExpiredDataAction.Request()).get(); refresh(); - getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(".ml-state*").get(); + getIndexResponse = client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(".ml-state*").get(); assertThat( Strings.toString(getIndexResponse), getIndexResponse.getIndices(), diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index 98ca92201382..4747adf4acb8 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -406,7 +406,7 @@ abstract class MlNativeDataFrameAnalyticsIntegTestCase extends MlNativeIntegTest } protected static void assertMlResultsFieldMappings(String index, String predictedClassField, String expectedType) { - Map mappings = client().execute(GetIndexAction.INSTANCE, new GetIndexRequest().indices(index)) + Map mappings = client().execute(GetIndexAction.INSTANCE, new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)) .actionGet() .mappings() .get(index) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java index d179a28aa989..57fbf44f2565 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java @@ -129,7 +129,9 @@ public class TestFeatureResetIT extends MlNativeAutodetectIntegTestCase { ); client().execute(ResetFeatureStateAction.INSTANCE, new ResetFeatureStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); assertBusy(() -> { - List indices = Arrays.asList(client().admin().indices().prepareGetIndex().addIndices(".ml*").get().indices()); + List indices = Arrays.asList( + 
client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".ml*").get().indices() + ); assertThat(indices.toString(), indices, is(empty())); }); assertThat(isResetMode(), is(false)); @@ -160,7 +162,9 @@ public class TestFeatureResetIT extends MlNativeAutodetectIntegTestCase { createModelDeployment(); client().execute(ResetFeatureStateAction.INSTANCE, new ResetFeatureStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); assertBusy(() -> { - List indices = Arrays.asList(client().admin().indices().prepareGetIndex().addIndices(".ml*").get().indices()); + List indices = Arrays.asList( + client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".ml*").get().indices() + ); assertThat(indices.toString(), indices, is(empty())); }); assertThat(isResetMode(), is(false)); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java index 6f5fa72a2b2f..b163036e9476 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java @@ -321,7 +321,7 @@ public class AnnotationIndexIT extends MlSingleNodeTestCase { } private boolean annotationsIndexExists(String expectedName) { - GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex() + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndices(AnnotationIndex.LATEST_INDEX_NAME) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .get(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java index ec95b979fdd6..40d9d97eddd1 100644 --- 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java @@ -60,7 +60,12 @@ public class IndexLayoutIT extends BaseMlIntegTestCase { OriginSettingClient client = new OriginSettingClient(client(), ML_ORIGIN); assertThat( - client.admin().indices().prepareGetIndex().addIndices(AnomalyDetectorsIndex.jobStateIndexPattern()).get().indices(), + client.admin() + .indices() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(AnomalyDetectorsIndex.jobStateIndexPattern()) + .get() + .indices(), arrayContaining(".ml-state-000001") ); assertThat( @@ -77,7 +82,7 @@ public class IndexLayoutIT extends BaseMlIntegTestCase { assertThat( client.admin() .indices() - .prepareGetIndex() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) .addIndices(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) .get() .indices().length, @@ -86,7 +91,7 @@ public class IndexLayoutIT extends BaseMlIntegTestCase { assertThat( client.admin() .indices() - .prepareGetIndex() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) .addIndices(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)) .get() .indices().length, @@ -137,7 +142,12 @@ public class IndexLayoutIT extends BaseMlIntegTestCase { OriginSettingClient client = new OriginSettingClient(client(), ML_ORIGIN); assertThat( - client.admin().indices().prepareGetIndex().addIndices(AnomalyDetectorsIndex.jobStateIndexPattern()).get().indices(), + client.admin() + .indices() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(AnomalyDetectorsIndex.jobStateIndexPattern()) + .get() + .indices(), arrayContaining(".ml-state-000001") ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index ff6c3c844478..7ce0fbe76064 100644 --- 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -255,7 +255,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase { // Assert that the mappings contain all the additional fields: field1, field2, field3, etc. String sharedResultsIndex = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; - GetMappingsRequest request = new GetMappingsRequest().indices(sharedResultsIndex); + GetMappingsRequest request = new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sharedResultsIndex); GetMappingsResponse response = client().execute(GetMappingsAction.INSTANCE, request).actionGet(); Map indexMappings = response.getMappings(); assertNotNull(indexMappings); @@ -506,7 +506,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase { } private Map getIndexMappingProperties(String index) { - GetMappingsRequest request = new GetMappingsRequest().indices(index); + GetMappingsRequest request = new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(index); GetMappingsResponse response = client().execute(GetMappingsAction.INSTANCE, request).actionGet(); Map indexMappings = response.getMappings(); assertNotNull(indexMappings); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java index 4493a680d25c..46ffa649e18c 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java @@ -195,7 +195,7 @@ public class JobStorageDeletionTaskIT extends BaseMlIntegTestCase { // Make 
sure dedicated index is gone assertThat( - indicesAdmin().prepareGetIndex() + indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndices(dedicatedIndex) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .get() diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java index 3a08b56ed38a..00f4748cdf97 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java @@ -97,7 +97,12 @@ public class TrainedModelCRUDIT extends MlSingleNodeTestCase { ).actionGet(); assertThat( - client().admin().indices().prepareGetIndex().addIndices(InferenceIndexConstants.nativeDefinitionStore()).get().indices().length, + client().admin() + .indices() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) + .addIndices(InferenceIndexConstants.nativeDefinitionStore()) + .get() + .indices().length, equalTo(1) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 759538b4cdc6..dea5e891bd8d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -223,7 +223,7 @@ public class TransportStartDataFrameAnalyticsAction extends TransportMasterNodeA ); // Get start context - getStartContext(request.getId(), task, startContextListener); + getStartContext(request.getId(), task, startContextListener, request.masterNodeTimeout()); } private void 
estimateMemoryUsageAndUpdateMemoryTracker(StartContext startContext, ActionListener listener) { @@ -264,7 +264,7 @@ public class TransportStartDataFrameAnalyticsAction extends TransportMasterNodeA } - private void getStartContext(String id, Task task, ActionListener finalListener) { + private void getStartContext(String id, Task task, ActionListener finalListener, TimeValue masterTimeout) { ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, task.getParentTaskId()); @@ -320,6 +320,7 @@ public class TransportStartDataFrameAnalyticsAction extends TransportMasterNodeA .andThen( (l, startContext) -> MappingsMerger.mergeMappings( parentTaskClient, + masterTimeout, startContext.config.getHeaders(), startContext.config.getSource(), l.map(ignored -> startContext) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java index ed12f54ab86b..ba69c8733dd1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.xpack.core.ml.dataframe.analyses.RequiredField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.MachineLearning; import java.time.Clock; import java.util.Arrays; @@ -147,6 +148,7 @@ public final class DestinationIndex { final Settings settings = settings(settingsResponse, destIndexAllowedSettings); MappingsMerger.mergeMappings( client, + MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT, config.getHeaders(), config.getSource(), delegate.delegateFailureAndWrap( diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java index 8fee0160cf92..817020d78855 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; @@ -34,6 +35,7 @@ public final class MappingsMerger { public static void mergeMappings( Client client, + TimeValue masterTimeout, Map headers, DataFrameAnalyticsSource source, ActionListener listener @@ -43,7 +45,7 @@ public final class MappingsMerger { listener::onFailure ); - GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); + GetMappingsRequest getMappingsRequest = new GetMappingsRequest(masterTimeout); getMappingsRequest.indices(source.getIndex()); ClientHelper.executeWithHeadersAsync(headers, ML_ORIGIN, client, GetMappingsAction.INSTANCE, getMappingsRequest, mappingsListener); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java index 2a6d6eb32950..83212578982a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java @@ -40,6 +40,7 @@ import 
org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsTask; import org.elasticsearch.xpack.ml.dataframe.DestinationIndex; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; @@ -225,7 +226,7 @@ public class ReindexingStep extends AbstractDataFrameAnalyticsStep { ML_ORIGIN, parentTaskClient, GetIndexAction.INSTANCE, - new GetIndexRequest().indices(config.getDest().getIndex()), + new GetIndexRequest(MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT).indices(config.getDest().getIndex()), destIndexListener ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 4170e386c7c6..372e16760caa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -126,6 +126,7 @@ import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.categorization.GrokPatternCreator; import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; @@ -392,7 +393,10 @@ public class JobResultsProvider { addTermsMapping(indexMappings, indexName, termFields, 
listener); }, listener::onFailure); - GetMappingsRequest getMappingsRequest = client.admin().indices().prepareGetMappings(indexName).request(); + GetMappingsRequest getMappingsRequest = client.admin() + .indices() + .prepareGetMappings(MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT, indexName) + .request(); executeAsyncWithOrigin( client.threadPool().getThreadContext(), ML_ORIGIN, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java index 337b4c397493..2ee493630f0e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.MachineLearning; import java.util.Objects; import java.util.Set; @@ -82,7 +83,9 @@ public class EmptyStateIndexRemover implements MlDataRemover { } private void getCurrentStateIndices(ActionListener> listener) { - GetIndexRequest getIndexRequest = new GetIndexRequest().indices(AnomalyDetectorsIndex.jobStateIndexWriteAlias()); + GetIndexRequest getIndexRequest = new GetIndexRequest(MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT).indices( + AnomalyDetectorsIndex.jobStateIndexWriteAlias() + ); getIndexRequest.setParentTask(parentTaskId); client.admin() .indices() diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index bab012afc310..ff1a1d19779d 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -27,6 +27,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.security.Security; @@ -86,6 +87,12 @@ public class LocalStateMachineLearning extends LocalStateCompositeXPackPlugin { } }); plugins.add(new MockedRollupPlugin()); + plugins.add(new InferencePlugin(settings) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + }); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index 8dce8a62eac2..372b07ed8c95 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -82,7 +82,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ilm.IndexLifecycle; -import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; @@ -161,8 +160,7 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase { DataStreamsPlugin.class, // To remove errors from parsing build in 
templates that contain scaled_float MapperExtrasPlugin.class, - Wildcard.class, - InferencePlugin.class + Wildcard.class ); } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java index 8935b9f450c1..fddcee25d636 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java @@ -527,7 +527,7 @@ public class TransportMonitoringMigrateAlertsActionTests extends MonitoringInteg private void assertWatchesExist(boolean exist) { // Check if watches index exists - if (client().admin().indices().prepareGetIndex().addIndices(".watches").get().getIndices().length == 0) { + if (client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".watches").get().getIndices().length == 0) { fail("Expected [.watches] index with cluster alerts present, but no [.watches] index was found"); } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index 4259fb532843..f8ac5f9032fe 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -257,7 +257,7 @@ public class LocalExporterResourceIntegTests extends LocalExporterIntegTestCase private void assertWatchesExist() { // Check if watches index exists - if 
(client().admin().indices().prepareGetIndex().addIndices(".watches").get().getIndices().length == 0) { + if (client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".watches").get().getIndices().length == 0) { fail("Expected [.watches] index with cluster alerts present, but no [.watches] index was found"); } @@ -284,7 +284,7 @@ public class LocalExporterResourceIntegTests extends LocalExporterIntegTestCase private void assertNoWatchesExist() { // Check if watches index exists - if (client().admin().indices().prepareGetIndex().addIndices(".watches").get().getIndices().length == 0) { + if (client().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(".watches").get().getIndices().length == 0) { fail("Expected [.watches] index with cluster alerts present, but no [.watches] index was found"); } diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml index 1aafe3765813..c8330a44cd7c 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml @@ -148,9 +148,6 @@ setup: - match: { .$idx0name.mappings.properties.attributes.properties.foo.type: "keyword" } --- IP dimensions: - - requires: - cluster_features: ["routing.multi_value_routing_path"] - reason: support for multi-value dimensions - do: bulk: index: metrics-generic.otel-default diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java index c2dd51acfd76..6ec1f9a7aa7e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; @@ -142,9 +141,4 @@ public class GetStackTracesResponse extends ActionResponse implements ChunkedToX public int hashCode() { return Objects.hash(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, samplingRate); } - - @Override - public String toString() { - return Strings.toString(this, true, true); - } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java index b50ca4d92804..e91094583083 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java @@ -263,7 +263,7 @@ public class IndexResolver { ) { if (retrieveIndices || retrieveFrozenIndices) { if (clusterIsLocal(clusterWildcard)) { // resolve local indices - GetIndexRequest indexRequest = new GetIndexRequest().local(true) + GetIndexRequest indexRequest = new GetIndexRequest(MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT).local(true) .indices(indexWildcards) .features(Feature.SETTINGS) .includeDefaults(false) diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java index bb61fa951948..494eaa508c14 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java +++ 
b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java @@ -13,18 +13,12 @@ import org.elasticsearch.features.NodeFeature; import java.util.Set; import static org.elasticsearch.search.retriever.CompoundRetrieverBuilder.INNER_RETRIEVERS_FILTER_SUPPORT; -import static org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilder.RRF_RETRIEVER_COMPOSITION_SUPPORTED; /** * A set of features specifically for the rrf plugin. */ public class RRFFeatures implements FeatureSpecification { - @Override - public Set getFeatures() { - return Set.of(RRFRetrieverBuilder.RRF_RETRIEVER_SUPPORTED, RRF_RETRIEVER_COMPOSITION_SUPPORTED); - } - @Override public Set getTestFeatures() { return Set.of(INNER_RETRIEVERS_FILTER_SUPPORT); diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java index b5bca5747868..91bc19a3e090 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java @@ -194,9 +194,6 @@ public class RRFRankBuilder extends RankBuilder { @Override public RetrieverBuilder toRetriever(SearchSourceBuilder source, Predicate clusterSupportsFeature) { - if (false == clusterSupportsFeature.test(RRFRetrieverBuilder.RRF_RETRIEVER_COMPOSITION_SUPPORTED)) { - return null; - } int totalQueries = source.subSearches().size() + source.knnSearch().size(); if (totalQueries < 2) { throw new IllegalArgumentException("[rrf] requires at least 2 sub-queries to be defined"); diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 186febfda18f..a749a7c402c3 100644 --- 
a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -8,9 +8,7 @@ package org.elasticsearch.xpack.rank.rrf; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.search.rank.RankBuilder; @@ -44,8 +42,6 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr public final class RRFRetrieverBuilder extends CompoundRetrieverBuilder { public static final String NAME = "rrf"; - public static final NodeFeature RRF_RETRIEVER_SUPPORTED = new NodeFeature("rrf_retriever_supported", true); - public static final NodeFeature RRF_RETRIEVER_COMPOSITION_SUPPORTED = new NodeFeature("rrf_retriever_composition_supported", true); public static final ParseField RETRIEVERS_FIELD = new ParseField("retrievers"); public static final ParseField RANK_CONSTANT_FIELD = new ParseField("rank_constant"); @@ -79,12 +75,6 @@ public final class RRFRetrieverBuilder extends CompoundRetrieverBuilder ssb.parseXContent(parser, true, nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED) - ); - assertEquals("unknown retriever [rrf]", iae.getMessage()); - } - } - /** Tests extraction errors related to compound retrievers. These tests require a compound retriever which is why they are here. 
*/ public void testRetrieverExtractionErrors() throws IOException { try ( diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml index 258ab70cd09b..ac328967d9fc 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml @@ -1,8 +1,4 @@ setup: - - requires: - cluster_features: 'rrf_retriever_supported' - reason: 'test requires rrf retriever implementation' - - do: indices.create: index: test diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml index d5d7a5de1dc7..e7a4bf8a5b34 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml @@ -4,10 +4,6 @@ setup: - close_to - contains - - requires: - cluster_features: 'rrf_retriever_composition_supported' - reason: 'test requires rrf retriever composition support' - - do: indices.create: index: test diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml index bbc1087b05cc..4dea4c356873 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml @@ -2,10 +2,6 @@ setup: - skip: features: close_to - - requires: - cluster_features: 'rrf_retriever_supported' - reason: 
'test requires rrf retriever implementation' - - do: indices.create: index: test diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml index 24259e3aa2a8..b1c5ec1664c8 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml @@ -1,7 +1,5 @@ setup: - requires: - cluster_features: 'rrf_retriever_composition_supported' - reason: 'test requires rrf retriever composition support' test_runner_features: close_to - do: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml index cb30542d8000..01d645fbfb4f 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml @@ -1,9 +1,6 @@ setup: - skip: features: close_to - - requires: - cluster_features: 'rrf_retriever_composition_supported' - reason: 'test requires rrf retriever composition support' - do: indices.create: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml index d9db1fe38762..98c4ae9e642d 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml +++ 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml @@ -5,8 +5,6 @@ setup: - contains - requires: - cluster_features: ['rrf_retriever_composition_supported', 'text_similarity_reranker_retriever_supported'] - reason: need to have support for rrf and semantic reranking composition test_runner_features: "close_to" - do: diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index 71311acab14c..1fced696ad60 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -199,7 +199,7 @@ public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNode ActionListener.wrap(createIndexResponse -> startPersistentTask(job, listener, persistentTasksService), e -> { if (e instanceof ResourceAlreadyExistsException) { logger.debug("Rolled index already exists for rollup job [" + job.getConfig().getId() + "], updating metadata."); - updateMapping(job, listener, persistentTasksService, client, logger); + updateMapping(job, listener, persistentTasksService, client, logger, request.masterNodeTimeout()); } else { String msg = "Could not create index for rollup job [" + job.getConfig().getId() + "]"; logger.error(msg); @@ -249,7 +249,8 @@ public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNode ActionListener listener, PersistentTasksService persistentTasksService, Client client, - Logger logger + Logger logger, + TimeValue masterTimeout ) { final String indexName = job.getConfig().getRollupIndex(); @@ -304,7 +305,7 @@ public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNode ); }; - GetMappingsRequest request = 
new GetMappingsRequest(); + GetMappingsRequest request = new GetMappingsRequest(masterTimeout); client.execute(GetMappingsAction.INSTANCE, request, ActionListener.wrap(getMappingResponseHandler, e -> { String msg = "Could not update mappings for rollup job [" + job.getConfig().getId() + "]"; logger.error(msg); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java index ee8b4c79d189..fed2439e513c 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java @@ -166,7 +166,14 @@ public class PutJobStateMachineTests extends ESTestCase { return null; }).when(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), requestCaptor.capture()); - TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + TransportPutRollupJobAction.updateMapping( + job, + testListener, + mock(PersistentTasksService.class), + client, + logger, + TEST_REQUEST_TIMEOUT + ); verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); } @@ -201,7 +208,14 @@ public class PutJobStateMachineTests extends ESTestCase { return null; }).when(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), requestCaptor.capture()); - TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + TransportPutRollupJobAction.updateMapping( + job, + testListener, + mock(PersistentTasksService.class), + client, + logger, + TEST_REQUEST_TIMEOUT + ); verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); } @@ -238,7 +252,14 @@ public class PutJobStateMachineTests extends 
ESTestCase { return null; }).when(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), requestCaptor.capture()); - TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + TransportPutRollupJobAction.updateMapping( + job, + testListener, + mock(PersistentTasksService.class), + client, + logger, + TEST_REQUEST_TIMEOUT + ); verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); } @@ -272,7 +293,14 @@ public class PutJobStateMachineTests extends ESTestCase { return null; }).when(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), requestCaptor.capture()); - TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + TransportPutRollupJobAction.updateMapping( + job, + testListener, + mock(PersistentTasksService.class), + client, + logger, + TEST_REQUEST_TIMEOUT + ); verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); } @@ -318,7 +346,14 @@ public class PutJobStateMachineTests extends ESTestCase { return null; }).when(client).execute(eq(TransportPutMappingAction.TYPE), any(PutMappingRequest.class), requestCaptor2.capture()); - TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + TransportPutRollupJobAction.updateMapping( + job, + testListener, + mock(PersistentTasksService.class), + client, + logger, + TEST_REQUEST_TIMEOUT + ); verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); verify(client).execute(eq(TransportPutMappingAction.TYPE), any(PutMappingRequest.class), any()); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java index cdc42b877404..8deadef105ba 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java @@ -158,7 +158,7 @@ public class SearchableSnapshotsBlobStoreCacheIntegTests extends BaseFrozenSearc expectThrows( IndexNotFoundException.class, ".snapshot-blob-cache system index should not be created yet", - () -> systemClient().admin().indices().prepareGetIndex().addIndices(SNAPSHOT_BLOB_CACHE_INDEX).get() + () -> systemClient().admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).addIndices(SNAPSHOT_BLOB_CACHE_INDEX).get() ); final Storage storage1 = randomFrom(Storage.values()); diff --git a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java index ab5be0f48f5f..057ebdece5c6 100644 --- a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java +++ b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.frozen.FrozenIndices; import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.ilm.IndexLifecycle; -import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.profiling.ProfilingPlugin; import org.elasticsearch.xpack.rollup.Rollup; import 
org.elasticsearch.xpack.search.AsyncSearch; @@ -89,7 +88,6 @@ public class CrossClusterShardTests extends ESSingleNodeTestCase { FrozenIndices.class, Graph.class, IndexLifecycle.class, - InferencePlugin.class, IngestCommonPlugin.class, IngestTestPlugin.class, MustachePlugin.class, diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java index 47c7ac8241fc..19d742e32aa0 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java @@ -24,11 +24,13 @@ import java.util.ArrayList; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; public class CrossClusterEsqlRCS1EnrichUnavailableRemotesIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicBoolean SSL_ENABLED_REF = new AtomicBoolean(); @@ -185,8 +187,7 @@ public class CrossClusterEsqlRCS1EnrichUnavailableRemotesIT extends AbstractRemo Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); - assertThat(reason.get("type").toString(), equalTo("connect_transport_exception")); - assertThat(reason.get("reason").toString(), containsString("Unable to connect to [my_remote_cluster]")); + 
assertThat(reason.get("type").toString(), oneOf("node_disconnected_exception", "connect_transport_exception")); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); @@ -202,7 +203,11 @@ public class CrossClusterEsqlRCS1EnrichUnavailableRemotesIT extends AbstractRemo String query = "FROM to-be-enr*,my_remote_cluster:to-be-enr* | ENRICH " + randomFrom(modes) + ":employees-policy | LIMIT 10"; ResponseException ex = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(query))); - assertThat(ex.getMessage(), containsString("connect_transport_exception")); + + assertThat( + ex.getMessage(), + anyOf(containsString("connect_transport_exception"), containsString("node_disconnected_exception")) + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java index da59e3c77273..3f068cf3a04c 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java @@ -26,11 +26,13 @@ import java.util.ArrayList; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; public class CrossClusterEsqlRCS2EnrichUnavailableRemotesIT extends 
AbstractRemoteClusterSecurityTestCase { private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); @@ -205,8 +207,7 @@ public class CrossClusterEsqlRCS2EnrichUnavailableRemotesIT extends AbstractRemo Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); - assertThat(reason.get("type").toString(), equalTo("connect_transport_exception")); - assertThat(reason.get("reason").toString(), containsString("Unable to connect to [my_remote_cluster]")); + assertThat(reason.get("type").toString(), oneOf("node_disconnected_exception", "connect_transport_exception")); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); @@ -222,7 +223,10 @@ public class CrossClusterEsqlRCS2EnrichUnavailableRemotesIT extends AbstractRemo String query = "FROM to-be-enr*,my_remote_cluster:to-be-enr* | ENRICH " + randomFrom(modes) + ":employees-policy | LIMIT 10"; ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); - assertThat(ex.getMessage(), containsString("connect_transport_exception")); + assertThat( + ex.getMessage(), + anyOf(containsString("connect_transport_exception"), containsString("node_disconnected_exception")) + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java index 57d18abaf1a9..f051289d6d7c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java @@ -287,28 +287,28 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { { 
GetMappingsResponse getMappingsResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).admin().indices().prepareGetMappings("test").get(); + ).admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertExpectedMetadataFields(getMappingsResponse.getMappings(), "field1"); } { GetMappingsResponse getMappingsResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) - ).admin().indices().prepareGetMappings("test").get(); + ).admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertExpectedMetadataFields(getMappingsResponse.getMappings(), "field2"); } { GetMappingsResponse getMappingsResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD)) - ).admin().indices().prepareGetMappings("test").get(); + ).admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertExpectedMetadataFields(getMappingsResponse.getMappings(), "field1"); } { GetMappingsResponse getMappingsResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD)) - ).admin().indices().prepareGetMappings("test").get(); + ).admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "test").get(); assertExpectedMetadataFields(getMappingsResponse.getMappings(), "field1", "field2"); } } @@ -321,25 +321,25 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { { GetIndexResponse getIndexResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).admin().indices().prepareGetIndex().setIndices("test").get(); + ).admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("test").get(); assertExpectedMetadataFields(getIndexResponse.getMappings(), "field1"); } { GetIndexResponse 
getIndexResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) - ).admin().indices().prepareGetIndex().setIndices("test").get(); + ).admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("test").get(); assertExpectedMetadataFields(getIndexResponse.getMappings(), "field2"); } { GetIndexResponse getIndexResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD)) - ).admin().indices().prepareGetIndex().setIndices("test").get(); + ).admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("test").get(); assertExpectedMetadataFields(getIndexResponse.getMappings(), "field1"); } { GetIndexResponse getIndexResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD)) - ).admin().indices().prepareGetIndex().setIndices("test").get(); + ).admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("test").get(); assertExpectedMetadataFields(getIndexResponse.getMappings(), "field1", "field2"); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index bc730b5695c1..7f262817b941 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -149,12 +149,12 @@ public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase { final String field = "foo"; indexRandom(true, prepareIndex(index).setSource(field, "bar")); - GetIndexResponse response = indicesAdmin().prepareGetIndex().setIndices(index).get(); + GetIndexResponse response = 
indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(index).get(); assertThat(response.getIndices(), arrayContaining(index)); response = client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) - ).admin().indices().prepareGetIndex().setIndices(index).get(); + ).admin().indices().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(index).get(); assertThat(response.getIndices(), arrayContaining(index)); } @@ -166,7 +166,7 @@ public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase { GetMappingsResponse response = client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) - ).admin().indices().prepareGetMappings("logstash-*").get(); + ).admin().indices().prepareGetMappings(TEST_REQUEST_TIMEOUT, "logstash-*").get(); Map mappingsMap = response.getMappings(); assertNotNull(mappingsMap); assertNotNull(mappingsMap.get(index)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/action/filter/DestructiveOperationsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/action/filter/DestructiveOperationsTests.java index 846314534af7..0a0e6731c90d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/action/filter/DestructiveOperationsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/action/filter/DestructiveOperationsTests.java @@ -29,7 +29,7 @@ public class DestructiveOperationsTests extends SecurityIntegTestCase { () -> indicesAdmin().prepareDelete("*").get() ); assertEquals("Wildcard expressions or all indices are not allowed", illegalArgumentException.getMessage()); - String[] indices = indicesAdmin().prepareGetIndex().setIndices("index1").get().getIndices(); + String[] indices = 
indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("index1").get().getIndices(); assertEquals(1, indices.length); assertEquals("index1", indices[0]); } @@ -39,7 +39,7 @@ public class DestructiveOperationsTests extends SecurityIntegTestCase { () -> indicesAdmin().prepareDelete("*", "-index1").get() ); assertEquals("Wildcard expressions or all indices are not allowed", illegalArgumentException.getMessage()); - String[] indices = indicesAdmin().prepareGetIndex().setIndices("index1").get().getIndices(); + String[] indices = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("index1").get().getIndices(); assertEquals(1, indices.length); assertEquals("index1", indices[0]); } @@ -49,7 +49,7 @@ public class DestructiveOperationsTests extends SecurityIntegTestCase { () -> indicesAdmin().prepareDelete("_all").get() ); assertEquals("Wildcard expressions or all indices are not allowed", illegalArgumentException.getMessage()); - String[] indices = indicesAdmin().prepareGetIndex().setIndices("index1").get().getIndices(); + String[] indices = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices("index1").get().getIndices(); assertEquals(1, indices.length); assertEquals("index1", indices[0]); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java index dfb9b677ba04..5c2d74912161 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java @@ -100,7 +100,7 @@ public class ReservedRealmElasticAutoconfigIntegTests extends SecuritySingleNode 
assertAcked(clusterAdmin().updateSettings(updateSettingsRequest).actionGet()); // delete the security index, if it exist - GetIndexRequest getIndexRequest = new GetIndexRequest(); + GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT); getIndexRequest.indices(SECURITY_MAIN_ALIAS); getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); GetIndexResponse getIndexResponse = client().admin().indices().getIndex(getIndexRequest).actionGet(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java index 3c639471f80b..eea5466f2380 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java @@ -77,7 +77,7 @@ public class SnapshotUserRoleIntegTests extends NativeRealmIntegTestCase { // view all indices, including restricted ones final GetIndexResponse getIndexResponse = client.admin() .indices() - .prepareGetIndex() + .prepareGetIndex(TEST_REQUEST_TIMEOUT) .setIndices(randomFrom("_all", "*")) .setIndicesOptions(IndicesOptions.strictExpandHidden()) .get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 3b55295c1efc..65f6f4f1a5b0 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -872,7 +872,7 @@ public class ProfileIntegTests extends 
AbstractProfileIntegTestCase { } private GetIndexResponse getProfileIndexResponse() { - final GetIndexRequest getIndexRequest = new GetIndexRequest(); + final GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT); getIndexRequest.indices(".*"); return client().execute(GetIndexAction.INSTANCE, getIndexRequest).actionGet(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java index e578c13b5d64..d24bb2923030 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java @@ -291,7 +291,7 @@ public class SecurityIndexManagerIntegTests extends SecurityIntegTestCase { public void testSecurityIndexSettingsCannotBeChanged() throws Exception { // make sure the security index is not auto-created - GetIndexRequest getIndexRequest = new GetIndexRequest(); + GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT); getIndexRequest.indices(SECURITY_MAIN_ALIAS); getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); GetIndexResponse getIndexResponse = client().admin().indices().getIndex(getIndexRequest).actionGet(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java index 84749d895a44..409ab62ae3e7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java @@ -13,19 +13,11 @@ import 
org.elasticsearch.features.NodeFeature; import java.util.Set; import static org.elasticsearch.xpack.security.support.QueryableBuiltInRolesSynchronizer.QUERYABLE_BUILT_IN_ROLES_FEATURE; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLE_MAPPING_CLEANUP; public class SecurityFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( - SECURITY_ROLE_MAPPING_CLEANUP, - SECURITY_ROLES_METADATA_FLATTENED, - SECURITY_MIGRATION_FRAMEWORK, - QUERYABLE_BUILT_IN_ROLES_FEATURE - ); + return Set.of(QUERYABLE_BUILT_IN_ROLES_FEATURE); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 0ead164490be..f4202502cef8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -97,7 +97,6 @@ import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Avai import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityMigrations.ROLE_METADATA_FLATTENED_MIGRATION_VERSION; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; /** * NativeRolesStore is a {@code RolesStore} that, instead of reading from a @@ -656,9 +655,7 @@ public class NativeRolesStore implements BiConsumer, 
ActionListener< XContentBuilder builder = jsonBuilder().startObject(); role.innerToXContent(builder, ToXContent.EMPTY_PARAMS, true); - if (featureService.clusterHasFeature(clusterService.state(), SECURITY_ROLES_METADATA_FLATTENED)) { - builder.field(RoleDescriptor.Fields.METADATA_FLATTENED.getPreferredName(), role.getMetadata()); - } + builder.field(RoleDescriptor.Fields.METADATA_FLATTENED.getPreferredName(), role.getMetadata()); // When role descriptor XContent is generated for the security index all empty fields need to have default values to make sure // existing values are overwritten if not present since the request to update could be an UpdateRequest diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 9ec6c47bf741..fe6d8e3bc4ff 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -75,7 +75,6 @@ import static org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_DATA_KEY; import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_KEY; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK; /** * Manages the lifecycle, mapping and data upgrades/migrations of the {@code RestrictedIndicesNames#SECURITY_MAIN_ALIAS} @@ -583,7 +582,6 @@ public class SecurityIndexManager implements ClusterStateListener { && this.indexAvailableForSearch && this.isIndexUpToDate && this.indexExists() - && 
this.securityFeatures.contains(SECURITY_MIGRATION_FRAMEWORK) && isEligibleSecurityMigration(securityMigration); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index ce030bf5101c..2830a755cde6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -143,7 +143,7 @@ public class SecurityMigrations { @Override public Set nodeFeaturesRequired() { - return Set.of(SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED); + return Set.of(); } @Override @@ -223,7 +223,7 @@ public class SecurityMigrations { @Override public Set nodeFeaturesRequired() { - return Set.of(SecuritySystemIndices.SECURITY_ROLE_MAPPING_CLEANUP); + return Set.of(); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 25b8ad825ee1..24cf355818e3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.VersionId; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.ExecutorNames; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -57,9 +56,6 @@ public class SecuritySystemIndices { public static final String 
INTERNAL_SECURITY_PROFILE_INDEX_8 = ".security-profile-8"; public static final String SECURITY_PROFILE_ALIAS = ".security-profile"; - public static final NodeFeature SECURITY_MIGRATION_FRAMEWORK = new NodeFeature("security.migration_framework", true); - public static final NodeFeature SECURITY_ROLES_METADATA_FLATTENED = new NodeFeature("security.roles_metadata_flattened", true); - public static final NodeFeature SECURITY_ROLE_MAPPING_CLEANUP = new NodeFeature("security.role_mapping_cleanup", true); /** * Security managed index mappings used to be updated based on the product version. They are now updated based on per-index mappings diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 58b8613221d7..aaf23f55d5e0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -416,7 +416,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { ) ) ); - GetIndexRequest getIndexRequest = new GetIndexRequest(); + GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT); getIndexRequest.indices(SECURITY_MAIN_ALIAS); getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); GetIndexResponse getIndexResponse = client.admin().indices().getIndex(getIndexRequest).actionGet(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index c4b57feb5a3f..79e64713ac73 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java 
@@ -1416,7 +1416,7 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testAuthorizeIndicesFailures() { - TransportRequest request = new GetIndexRequest().indices("b"); + TransportRequest request = new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("b"); ClusterState state = mockEmptyMetadata(); RoleDescriptor role = new RoleDescriptor( "a_all", @@ -1751,7 +1751,7 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testDenialForAnonymousUser() { - TransportRequest request = new GetIndexRequest().indices("b"); + TransportRequest request = new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("b"); ClusterState state = mockEmptyMetadata(); Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "a_all").build(); final AnonymousUser anonymousUser = new AnonymousUser(settings); @@ -1798,7 +1798,7 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testDenialForAnonymousUserAuthorizationExceptionDisabled() { - TransportRequest request = new GetIndexRequest().indices("b"); + TransportRequest request = new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("b"); ClusterState state = mockEmptyMetadata(); Settings settings = Settings.builder() .put(AnonymousUser.ROLES_SETTING.getKey(), "a_all") @@ -1852,7 +1852,7 @@ public class AuthorizationServiceTests extends ESTestCase { public void testAuditTrailIsRecordedWhenIndexWildcardThrowsError() { IndicesOptions options = IndicesOptions.fromOptions(false, false, true, true); - TransportRequest request = new GetIndexRequest().indices("not-an-index-*").indicesOptions(options); + TransportRequest request = new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("not-an-index-*").indicesOptions(options); ClusterState state = mockEmptyMetadata(); RoleDescriptor role = new RoleDescriptor( "a_all", @@ -1956,7 +1956,7 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testRunAsRequestWithRunAsUserWithoutPermission() { - 
TransportRequest request = new GetIndexRequest().indices("a"); + TransportRequest request = new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("a"); User authenticatedUser = new User("test user", "can run as"); final Authentication authentication = createAuthentication(new User("run as me", "b"), authenticatedUser); final RoleDescriptor runAsRole = new RoleDescriptor( @@ -2016,7 +2016,7 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testRunAsRequestWithValidPermissions() { - TransportRequest request = new GetIndexRequest().indices("b"); + TransportRequest request = new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices("b"); User authenticatedUser = new User("test user", "can run as"); final Authentication authentication = createAuthentication(new User("run as me", "b"), authenticatedUser); final RoleDescriptor runAsRole = new RoleDescriptor( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index e381663d4174..b984295155c1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -1016,6 +1016,7 @@ public class SimpleSecurityNetty4ServerTransportTests extends AbstractSimpleTran static class TestSecurityNetty4ServerTransport extends SecurityNetty4ServerTransport { private final boolean doHandshake; + private final TransportVersion version; TestSecurityNetty4ServerTransport( Settings settings, @@ -1043,6 +1044,7 @@ public class SimpleSecurityNetty4ServerTransportTests extends AbstractSimpleTran sharedGroupFactory, mock(CrossClusterAccessAuthenticationService.class) ); + this.version = 
version; this.doHandshake = doHandshake; } @@ -1056,7 +1058,7 @@ public class SimpleSecurityNetty4ServerTransportTests extends AbstractSimpleTran if (doHandshake) { super.executeHandshake(node, channel, profile, listener); } else { - assert getVersion().equals(TransportVersion.current()); + assert version.equals(TransportVersion.current()); listener.onResponse(TransportVersions.MINIMUM_COMPATIBLE); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java index 479646b26d21..820170b03662 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java @@ -238,7 +238,7 @@ public class SnapshotLifecycle extends Plugin implements ActionPlugin, HealthPlu } List> reservedClusterStateHandlers() { - return List.of(new ReservedSnapshotAction(featureService.get())); + return List.of(new ReservedSnapshotAction()); } @Override diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java index 274dec75865a..ca4a632f7a76 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java @@ -15,6 +15,6 @@ import java.util.Set; public class SnapshotLifecycleFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE); + return Set.of(); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleService.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleService.java index 5b7d3d42e902..985141347108 100644 --- 
a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleService.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleService.java @@ -20,8 +20,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import org.elasticsearch.xpack.core.ilm.OperationMode; import org.elasticsearch.xpack.core.ilm.OperationModeUpdateTask; @@ -46,7 +44,6 @@ import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.curren * task according to the policy's schedule. */ public class SnapshotLifecycleService implements Closeable, ClusterStateListener { - public static final NodeFeature INTERVAL_SCHEDULE = new NodeFeature("slm.interval_schedule", true); private static final Logger logger = LogManager.getLogger(SnapshotLifecycleService.class); private static final String JOB_PATTERN_SUFFIX = "-\\d+$"; @@ -261,18 +258,6 @@ public class SnapshotLifecycleService implements Closeable, ClusterStateListener } } - /** - * Validate that interval schedule feature is not supported by all nodes - * @throws IllegalArgumentException if is interval expression but interval schedule not supported - */ - public static void validateIntervalScheduleSupport(String schedule, FeatureService featureService, ClusterState state) { - if (SnapshotLifecyclePolicy.isIntervalSchedule(schedule) && featureService.clusterHasFeature(state, INTERVAL_SCHEDULE) == false) { - throw new IllegalArgumentException( - "Unable to use slm interval schedules in mixed-clusters with nodes that do not support feature " + INTERVAL_SCHEDULE.id() - ); - } - } - @Override public void close() { if (this.running.compareAndSet(true, false)) { diff --git 
a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotAction.java index d1395ac5a49a..f2f657d8b490 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.slm.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; @@ -41,12 +40,6 @@ public class ReservedSnapshotAction implements ReservedClusterStateHandler { private static final Logger logger = LogManager.getLogger(TransportPutSnapshotLifecycleAction.class); - private final FeatureService featureService; @Inject public TransportPutSnapshotLifecycleAction( @@ -58,8 +56,7 @@ public class TransportPutSnapshotLifecycleAction extends TransportMasterNodeActi ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - FeatureService featureService + IndexNameExpressionResolver indexNameExpressionResolver ) { super( PutSnapshotLifecycleAction.NAME, @@ -72,7 +69,6 @@ public class TransportPutSnapshotLifecycleAction extends TransportMasterNodeActi AcknowledgedResponse::readFrom, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.featureService = featureService; } @Override @@ -82,7 +78,6 @@ public class TransportPutSnapshotLifecycleAction extends TransportMasterNodeActi final ClusterState state, final ActionListener listener ) { - SnapshotLifecycleService.validateIntervalScheduleSupport(request.getLifecycle().getSchedule(), featureService, state); 
SnapshotLifecycleService.validateRepositoryExists(request.getLifecycle().getRepository(), state); SnapshotLifecycleService.validateMinimumInterval(request.getLifecycle(), state); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java index 9955fe4cf0f9..b48a3be0728e 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -50,7 +49,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -525,44 +523,6 @@ public class SnapshotLifecycleServiceTests extends ESTestCase { } } - public void testValidateIntervalScheduleSupport() { - var featureService = new FeatureService(List.of(new SnapshotLifecycleFeatures())); - { - ClusterState state = ClusterState.builder(new ClusterName("cluster")) - .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("a")).add(DiscoveryNodeUtils.create("b"))) - .nodeFeatures(Map.of("a", Set.of(), "b", Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE.id()))) - .build(); - - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> 
SnapshotLifecycleService.validateIntervalScheduleSupport("30d", featureService, state) - ); - assertThat(e.getMessage(), containsString("Unable to use slm interval schedules")); - } - { - ClusterState state = ClusterState.builder(new ClusterName("cluster")) - .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("a"))) - .nodeFeatures(Map.of("a", Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE.id()))) - .build(); - try { - SnapshotLifecycleService.validateIntervalScheduleSupport("30d", featureService, state); - } catch (Exception e) { - fail("interval schedule is supported by version and should not fail"); - } - } - { - ClusterState state = ClusterState.builder(new ClusterName("cluster")) - .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("a")).add(DiscoveryNodeUtils.create("b"))) - .nodeFeatures(Map.of("a", Set.of(), "b", Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE.id()))) - .build(); - try { - SnapshotLifecycleService.validateIntervalScheduleSupport("*/1 * * * * ?", featureService, state); - } catch (Exception e) { - fail("cron schedule does not need feature check and should not fail"); - } - } - } - class FakeSnapshotTask extends SnapshotLifecycleTask { private final Consumer onTriggered; diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java index cd143b95bcc6..f378ae0262da 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.core.Releasable; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; @@ -82,7 +81,7 @@ public class ReservedSnapshotLifecycleStateServiceTests extends ESTestCase { } public void testDependencies() { - var action = new ReservedSnapshotAction(mock(FeatureService.class)); + var action = new ReservedSnapshotAction(); assertThat(action.optionalDependencies(), contains(ReservedRepositoryAction.NAME)); } @@ -92,8 +91,8 @@ public class ReservedSnapshotLifecycleStateServiceTests extends ESTestCase { final ClusterName clusterName = new ClusterName("elasticsearch"); ClusterState state = ClusterState.builder(clusterName).build(); - ReservedSnapshotAction action = new ReservedSnapshotAction(mock(FeatureService.class)); - TransformState prevState = new TransformState<>(state, Set.of()); + ReservedSnapshotAction action = new ReservedSnapshotAction(); + TransformState prevState = new TransformState<>(state, Set.of()); String badPolicyJSON = """ { @@ -120,56 +119,6 @@ public class ReservedSnapshotLifecycleStateServiceTests extends ESTestCase { ); } - public void testIntervalScheduleSupportValidation() { - Client client = mock(Client.class); - when(client.settings()).thenReturn(Settings.EMPTY); - final ClusterName clusterName = new ClusterName("elasticsearch"); - List repositoriesMetadata = List.of(new RepositoryMetadata("repo", "fs", Settings.EMPTY)); - Metadata.Builder mdBuilder = Metadata.builder(); - mdBuilder.putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(repositoriesMetadata)); - ClusterState state = ClusterState.builder(clusterName).metadata(mdBuilder).build(); - TransformState prevState = new TransformState<>(state, Set.of()); - String goodPolicyJSON = """ - { - "daily-snapshots": { - "schedule": "30d", - "name": "", - "repository": "repo", - "config": { - 
"indices": ["foo-*", "important"], - "ignore_unavailable": true, - "include_global_state": false - }, - "retention": { - "expire_after": "30d", - "min_count": 1, - "max_count": 50 - } - } - } - """; - - { - FeatureService featureService = mock(FeatureService.class); - when(featureService.clusterHasFeature(any(), any())).thenReturn(false); - ReservedSnapshotAction action = new ReservedSnapshotAction(featureService); - assertThat( - expectThrows(IllegalArgumentException.class, () -> processJSON(action, prevState, goodPolicyJSON)).getMessage(), - is("Error on validating SLM requests") - ); - } - { - FeatureService featureService = mock(FeatureService.class); - when(featureService.clusterHasFeature(any(), any())).thenReturn(true); - ReservedSnapshotAction action = new ReservedSnapshotAction(featureService); - try { - processJSON(action, prevState, goodPolicyJSON); - } catch (Exception e) { - fail("interval schedule with interval feature should pass validation"); - } - } - } - public void testActionAddRemove() throws Exception { Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); @@ -181,7 +130,7 @@ public class ReservedSnapshotLifecycleStateServiceTests extends ESTestCase { mdBuilder.putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(repositoriesMetadata)); ClusterState state = ClusterState.builder(clusterName).metadata(mdBuilder).build(); - ReservedSnapshotAction action = new ReservedSnapshotAction(mock(FeatureService.class)); + ReservedSnapshotAction action = new ReservedSnapshotAction(); String emptyJSON = ""; @@ -417,7 +366,7 @@ public class ReservedSnapshotLifecycleStateServiceTests extends ESTestCase { null, List.of( new ReservedClusterSettingsAction(clusterSettings), - new ReservedSnapshotAction(mock(FeatureService.class)), + new ReservedSnapshotAction(), new ReservedRepositoryAction(repositoriesService) ), List.of() @@ -452,8 +401,7 @@ public class ReservedSnapshotLifecycleStateServiceTests extends ESTestCase { 
mock(ClusterService.class), threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), - mock(FeatureService.class) + mock(IndexNameExpressionResolver.class) ); assertThat(putAction.reservedStateHandlerName().get(), equalTo(ReservedSnapshotAction.NAME)); diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml index be6929a15ff4..18707ea2d1d8 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml @@ -1,9 +1,5 @@ --- setup: - - requires: - cluster_features: "snapshot.repository_verify_integrity" - reason: "required feature" - - do: snapshot.create_repository: repository: test_repo diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java index 70385cdc4cf0..e2c3df252ae2 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java @@ -17,8 +17,4 @@ module org.elasticsearch.repositories.blobstore.testkit { exports org.elasticsearch.repositories.blobstore.testkit.analyze; exports org.elasticsearch.repositories.blobstore.testkit.integrity; - - provides org.elasticsearch.features.FeatureSpecification - with - org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKitFeatures; } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java deleted 
file mode 100644 index cc513a948519..000000000000 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.repositories.blobstore.testkit; - -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Set; - -import static org.elasticsearch.repositories.blobstore.testkit.integrity.RestRepositoryVerifyIntegrityAction.REPOSITORY_VERIFY_INTEGRITY_FEATURE; - -public class SnapshotRepositoryTestKitFeatures implements FeatureSpecification { - @Override - public Set getFeatures() { - return Set.of(REPOSITORY_VERIFY_INTEGRITY_FEATURE); - } -} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java index b07358ab861a..53db8efde9ea 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.repositories.blobstore.testkit.integrity; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.Scope; @@ -22,8 +21,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; @ServerlessScope(Scope.INTERNAL) public class RestRepositoryVerifyIntegrityAction extends BaseRestHandler { - public static final NodeFeature REPOSITORY_VERIFY_INTEGRITY_FEATURE = new NodeFeature("snapshot.repository_verify_integrity", true); - @Override public List routes() { return List.of(new Route(POST, "/_snapshot/{repository}/_verify_integrity")); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index ae11c3bb39d0..000000000000 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. 
-# - -org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKitFeatures diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 5999a3ff1e15..394edc5df5ea 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -9,8 +9,14 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.geometry.utils.WellKnownText; @@ -27,19 +33,28 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; +import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType; import org.elasticsearch.test.index.IndexVersionUtils; -import org.elasticsearch.xcontent.ToXContent; +import 
org.elasticsearch.xcontent.ToXContent.MapParams; +import org.elasticsearch.xcontent.ToXContent.Params; +import org.elasticsearch.xpack.spatial.index.mapper.GeometricShapeSyntheticSourceSupport.FieldType; import org.junit.AssumptionViolatedException; import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Function; +import static org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper.DEPRECATED_PARAMETERS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class GeoShapeWithDocValuesFieldMapperTests extends GeoFieldMapperTests { @@ -289,6 +304,75 @@ public class GeoShapeWithDocValuesFieldMapperTests extends GeoFieldMapperTests { ); } + /** + * Test that we can parse legacy v7 "geo_shape" parameters for BWC with read-only N-2 indices + */ + public void testGeoShapeLegacyParametersParsing() throws Exception { + // deprecated parameters needed for bwc with read-only version 7 indices + assertEquals(Set.of("strategy", "tree", "tree_levels", "precision", "distance_error_pct", "points_only"), DEPRECATED_PARAMETERS); + + for (String deprecatedParam : DEPRECATED_PARAMETERS) { + Object value = switch (deprecatedParam) { + case "tree" -> randomFrom("quadtree", "geohash"); + case "tree_levels" -> 6; + case "distance_error_pct" -> "0.01"; + case "points_only" -> true; + case "strategy" -> "recursive"; + case "precision" -> "50m"; + default -> throw new IllegalStateException("Unexpected value: " + deprecatedParam); + }; + + // indices created before 8 should allow parameters but issue a warning + IndexVersion pre8version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + MapperService m = createMapperService( + pre8version, + fieldMapping(b 
-> b.field("type", getFieldName()).field(deprecatedParam, value)) + ); + + // check mapper + Mapper mapper = m.mappingLookup().getMapper("field"); + assertThat(mapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + // check document parsing + MultiPoint multiPoint = GeometryTestUtils.randomMultiPoint(false); + assertNotNull(m.documentMapper().parse(source(b -> { + b.field("field"); + GeoJson.toXContent(multiPoint, b, null); + }))); + + // check for correct field type and that a query can be created + MappedFieldType fieldType = m.fieldType("field"); + assertThat(fieldType, instanceOf(GeoShapeFieldType.class)); + GeoShapeFieldType ft = (GeoShapeFieldType) fieldType; + + SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); + when(searchExecutionContext.allowExpensiveQueries()).thenReturn(true); + assertNotNull( + ft.geoShapeQuery(searchExecutionContext, "location", SpatialStrategy.TERM, ShapeRelation.INTERSECTS, new Point(-10, 10)) + ); + if (deprecatedParam.equals("strategy") == false) { + assertFieldWarnings(deprecatedParam, "strategy"); + } else { + assertFieldWarnings(deprecatedParam); + } + + // indices created after 8 should throw an error + IndexVersion post8version = IndexVersionUtils.randomCompatibleWriteVersion(random()); + Exception ex = expectThrows( + MapperParsingException.class, + () -> createMapperService(post8version, fieldMapping(b -> b.field("type", getFieldName()).field(deprecatedParam, value))) + ); + assertThat( + ex.getMessage(), + containsString( + "Failed to parse mapping: using deprecated parameters [" + + deprecatedParam + + "] in mapper [field] of type [geo_shape] is no longer allowed" + ) + ); + } + } + public void testGeoShapeLegacyMerge() throws Exception { IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MapperService m = createMapperService(version, fieldMapping(b -> b.field("type", getFieldName()))); @@ -409,7 +493,7 @@ public class 
GeoShapeWithDocValuesFieldMapperTests extends GeoFieldMapperTests { public String toXContentString(AbstractShapeGeometryFieldMapper mapper, boolean includeDefaults) { if (includeDefaults) { - ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")); + Params params = new MapParams(Collections.singletonMap("include_defaults", "true")); return Strings.toString(mapper, params); } else { return Strings.toString(mapper); @@ -428,7 +512,7 @@ public class GeoShapeWithDocValuesFieldMapperTests extends GeoFieldMapperTests { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - return new GeometricShapeSyntheticSourceSupport(GeometricShapeSyntheticSourceSupport.FieldType.GEO_SHAPE, ignoreMalformed); + return new GeometricShapeSyntheticSourceSupport(FieldType.GEO_SHAPE, ignoreMalformed); } @Override diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java deleted file mode 100644 index f26dba2f3398..000000000000 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.spatial.index.query; - -import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.GeoJson; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.geo.GeometryTestUtils; -import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.geometry.MultiPoint; -import org.elasticsearch.geometry.Point; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.index.mapper.DocumentParsingException; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.geo.GeoShapeQueryTestCase; -import org.elasticsearch.test.index.IndexVersionUtils; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.geoIntersectionQuery; -import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.containsString; - -@UpdateForV9(owner = UpdateForV9.Owner.SEARCH_ANALYTICS) -@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") -public class LegacyGeoShapeWithDocValuesQueryTests extends GeoShapeQueryTestCase { 
- - @SuppressWarnings("deprecation") - private static final String[] PREFIX_TREES = new String[] { - LegacyGeoShapeFieldMapper.PrefixTrees.GEOHASH, - LegacyGeoShapeFieldMapper.PrefixTrees.QUADTREE }; - - @Override - protected Collection> getPlugins() { - return Collections.singleton(LocalStateSpatialPlugin.class); - } - - @Override - protected void createMapping(String indexName, String fieldName, Settings settings) throws Exception { - final XContentBuilder xcb = XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject(fieldName) - .field("type", "geo_shape") - .field("tree", randomFrom(PREFIX_TREES)) - .endObject() - .endObject() - .endObject(); - - final Settings finalSetting; - MapperParsingException ex = expectThrows( - MapperParsingException.class, - () -> indicesAdmin().prepareCreate(indexName).setMapping(xcb).setSettings(settings).get() - ); - assertThat( - ex.getMessage(), - containsString("using deprecated parameters [tree] in mapper [" + fieldName + "] of type [geo_shape] is no longer allowed") - ); - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - finalSetting = settings(version).put(settings).build(); - indicesAdmin().prepareCreate(indexName).setMapping(xcb).setSettings(finalSetting).get(); - } - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - public void testPointsOnlyExplicit() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject(defaultFieldName) - .field("type", "geo_shape") - .field("tree", randomBoolean() ? 
"quadtree" : "geohash") - .field("tree_levels", "6") - .field("distance_error_pct", "0.01") - .field("points_only", true) - .endObject() - .endObject() - .endObject() - ); - - MapperParsingException ex = expectThrows( - MapperParsingException.class, - () -> indicesAdmin().prepareCreate("geo_points_only").setMapping(mapping).get() - ); - assertThat( - ex.getMessage(), - containsString( - "using deprecated parameters [points_only, tree, distance_error_pct, tree_levels] " - + "in mapper [geo] of type [geo_shape] is no longer allowed" - ) - ); - - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - Settings settings = settings(version).build(); - indicesAdmin().prepareCreate("geo_points_only").setMapping(mapping).setSettings(settings).get(); - ensureGreen(); - - // MULTIPOINT - MultiPoint multiPoint = GeometryTestUtils.randomMultiPoint(false); - prepareIndex("geo_points_only").setId("1") - .setSource(GeoJson.toXContent(multiPoint, jsonBuilder().startObject().field(defaultFieldName), null).endObject()) - .setRefreshPolicy(IMMEDIATE) - .get(); - - // POINT - Point point = GeometryTestUtils.randomPoint(false); - prepareIndex("geo_points_only").setId("2") - .setSource(GeoJson.toXContent(point, jsonBuilder().startObject().field(defaultFieldName), null).endObject()) - .setRefreshPolicy(IMMEDIATE) - .get(); - - // test that point was inserted - assertHitCount(client().prepareSearch("geo_points_only").setQuery(matchAllQuery()), 2L); - } - - public void testPointsOnly() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject(defaultFieldName) - .field("type", "geo_shape") - .field("tree", randomBoolean() ? 
"quadtree" : "geohash") - .field("tree_levels", "6") - .field("distance_error_pct", "0.01") - .field("points_only", true) - .endObject() - .endObject() - .endObject() - ); - - MapperParsingException ex = expectThrows( - MapperParsingException.class, - () -> indicesAdmin().prepareCreate("geo_points_only").setMapping(mapping).get() - ); - assertThat( - ex.getMessage(), - containsString( - "using deprecated parameters [points_only, tree, distance_error_pct, tree_levels] " - + "in mapper [geo] of type [geo_shape] is no longer allowed" - ) - ); - - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - Settings settings = settings(version).build(); - indicesAdmin().prepareCreate("geo_points_only").setMapping(mapping).setSettings(settings).get(); - ensureGreen(); - - Geometry geometry = GeometryTestUtils.randomGeometry(false); - try { - prepareIndex("geo_points_only").setId("1") - .setSource(GeoJson.toXContent(geometry, jsonBuilder().startObject().field(defaultFieldName), null).endObject()) - .setRefreshPolicy(IMMEDIATE) - .get(); - } catch (DocumentParsingException e) { - // Random geometry generator created something other than a POINT type, verify the correct exception is thrown - assertThat(e.getMessage(), containsString("is configured for points only")); - return; - } - - // test that point was inserted - assertHitCount(client().prepareSearch("geo_points_only").setQuery(geoIntersectionQuery(defaultFieldName, geometry)), 1L); - } - - public void testFieldAlias() throws IOException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject(defaultFieldName) - .field("type", "geo_shape") - .field("tree", randomBoolean() ? 
"quadtree" : "geohash") - .endObject() - .startObject("alias") - .field("type", "alias") - .field("path", defaultFieldName) - .endObject() - .endObject() - .endObject() - ); - - MapperParsingException ex = expectThrows( - MapperParsingException.class, - () -> indicesAdmin().prepareCreate(defaultIndexName).setMapping(mapping).get() - ); - assertThat( - ex.getMessage(), - containsString("using deprecated parameters [tree] in mapper [geo] of type [geo_shape] is no longer allowed") - ); - - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - Settings settings = settings(version).build(); - indicesAdmin().prepareCreate(defaultIndexName).setMapping(mapping).setSettings(settings).get(); - ensureGreen(); - - MultiPoint multiPoint = GeometryTestUtils.randomMultiPoint(false); - prepareIndex(defaultIndexName).setId("1") - .setSource(GeoJson.toXContent(multiPoint, jsonBuilder().startObject().field(defaultFieldName), null).endObject()) - .setRefreshPolicy(IMMEDIATE) - .get(); - - assertHitCount(client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", multiPoint)), 1L); - } - - protected boolean ignoreLons(double[] lons) { - return Arrays.stream(lons).anyMatch(v -> v == 180); - } -} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml index 2576a51e8b80..3723f6cd4d45 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml @@ -61,10 +61,6 @@ aggregate_metric_double: --- aggregate_metric_double with ignore_malformed: - - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: requires tracking ignored source - - do: indices.create: index: test diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index cffc161b1153..a77a5a775442 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -257,9 +257,6 @@ --- "mv_dedupe from index #104745": - - requires: - cluster_features: ["esql.mv_ordering_sorted_ascending"] - reason: "fixed by introducing a sorted, non-deduplicated MvOrdering" - do: indices.create: index: idx_with_multivalues diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml index 1567b6b556bd..e7cda3389614 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [join_lookup_v10] + capabilities: [join_lookup_v11] reason: "uses LOOKUP JOIN" - do: indices.create: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index ebf464ba667d..b9415bce62ea 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -1,7 +1,5 @@ setup: - requires: - cluster_features: ["esql.metrics_counter_fields"] - reason: "require metrics counter fields" test_runner_features: allowed_warnings_regex - do: indices.create: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 3a989d2c87bf..bfb49bdc7e5c 
100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -574,10 +574,6 @@ setup: --- "values function": - - requires: - cluster_features: esql.agg_values - reason: "values is available in 8.14+" - - do: esql.query: body: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml index 731082378fe1..b7fdd16111db 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml @@ -1,8 +1,5 @@ --- logsdb usage: - - requires: - cluster_features: ["logsdb_telemetry_stats"] - reason: "requires stats" - do: indices.create: index: test1 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml index 5e9faa84ee08..10556a49bcb4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml @@ -5,10 +5,6 @@ setup: --- "geo_shape": - - requires: - cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test @@ -72,10 +68,6 @@ setup: --- "geo_shape with ignore_malformed": - - requires: - cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test @@ -156,10 +148,6 @@ setup: --- "shape": - - requires: - cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test @@ -223,10 +211,6 @@ setup: --- "shape with ignore_malformed": - - requires: - 
cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test @@ -424,8 +408,6 @@ setup: --- "geo_point with ignore_malformed": - requires: - cluster_features: ["mapper.track_ignored_source"] - reason: introduced in 8.15.0 test_runner_features: close_to - do: @@ -504,10 +486,6 @@ setup: --- "point": - - requires: - cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test @@ -601,10 +579,6 @@ setup: --- "point with ignore_malformed": - - requires: - cluster_features: ["mapper.source.synthetic_source_fallback"] - reason: introduced in 8.15.0 - - do: indices.create: index: test diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/20_ignore_above_stored_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/20_ignore_above_stored_source.yml index 252bafbdbe15..12c2a9858a16 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/20_ignore_above_stored_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/20_ignore_above_stored_source.yml @@ -1,8 +1,5 @@ --- wildcard field type ignore_above: - - requires: - cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml index 26beb3aa1907..5076e7556d91 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml @@ -5,9 +5,6 @@ setup: --- wildcard field type ignore_above: - - requires: - 
cluster_features: [ "mapper.ignore_above_index_level_setting" ] - reason: introduce ignore_above index level setting - do: indices.create: index: test diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java index d414d9be8d17..38171a7249bd 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingI import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition; import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; +import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.checkpoint.RemoteClusterResolver.ResolvedIndices; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; @@ -281,7 +282,7 @@ class DefaultCheckpointProvider implements CheckpointProvider { ActionListener> listener ) { // 1st get index to see the indexes the user has access to - GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indices) + GetIndexRequest getIndexRequest = new GetIndexRequest(Transform.HARD_CODED_TRANSFORM_MASTER_NODE_TIMEOUT).indices(indices) .features(new GetIndexRequest.Feature[0]) .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java index 
626816262597..f087357921c6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.transform.transforms.DestAlias; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettings; import org.elasticsearch.xpack.core.transform.transforms.TransformEffectiveSettings; +import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; import java.time.Clock; @@ -66,7 +67,7 @@ public final class TransformIndex { * Returns {@code true} if the given index was created by the transform and {@code false} otherwise. */ public static void isDestinationIndexCreatedByTransform(Client client, String destIndex, ActionListener listener) { - GetIndexRequest getIndexRequest = new GetIndexRequest().indices(destIndex) + GetIndexRequest getIndexRequest = new GetIndexRequest(Transform.HARD_CODED_TRANSFORM_MASTER_NODE_TIMEOUT).indices(destIndex) // We only need mappings, more specifically its "_meta" part .features(GetIndexRequest.Feature.MAPPINGS); executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, GetIndexAction.INSTANCE, getIndexRequest, ActionListener.wrap(getIndexResponse -> { diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index ffac36846414..ea9d76fb643f 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -172,7 +172,8 @@ public class HistoryTemplateHttpMappingsTests extends AbstractWatcherIntegration // ensure that enabled is set to false List indexed = new ArrayList<>(); - GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings(HistoryStoreField.INDEX_PREFIX + "*").get(); + GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, HistoryStoreField.INDEX_PREFIX + "*") + .get(); for (MappingMetadata mapping : mappingsResponse.getMappings().values()) { Map docMapping = mapping.getSourceAsMap(); if (abortAtInput) { diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java index c78315383df2..34d38dfde064 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java @@ -46,7 +46,7 @@ public class HistoryTemplateTimeMappingsTests extends AbstractWatcherIntegration assertWatchWithMinimumActionsCount("_id", ExecutionState.EXECUTED, 1); assertBusy(() -> { - GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings() + GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT) .setIndicesOptions(IndicesOptions.strictExpandHidden()) .get(); assertThat(mappingsResponse, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 626e1e1e9e9d..957a23e68a5d 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -266,7 +266,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase } public void replaceWatcherIndexWithRandomlyNamedIndex(String originalIndexOrAlias, String to) { - GetIndexResponse index = indicesAdmin().prepareGetIndex().setIndices(originalIndexOrAlias).get(); + GetIndexResponse index = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(originalIndexOrAlias).get(); MappingMetadata mapping = index.getMappings().get(index.getIndices()[0]); Settings settings = index.getSettings().get(index.getIndices()[0]); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java index 1bcdd060994c..732b0800b431 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java @@ -120,7 +120,7 @@ public class HistoryIntegrationTests extends AbstractWatcherIntegrationTestCase assertBusy(() -> { assertHitCount(getWatchHistory(), 1); }); // as fields with dots are allowed in 5.0 again, the mapping must be checked in addition - GetMappingsResponse response = indicesAdmin().prepareGetMappings(".watcher-history*").get(); + GetMappingsResponse response = 
indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, ".watcher-history*").get(); XContentSource source = new XContentSource( response.getMappings().values().iterator().next().source().uncompressed(), XContentType.JSON @@ -162,7 +162,7 @@ public class HistoryIntegrationTests extends AbstractWatcherIntegrationTestCase assertBusy(() -> { assertHitCount(getWatchHistory(), 1); }); // as fields with dots are allowed in 5.0 again, the mapping must be checked in addition - GetMappingsResponse response = indicesAdmin().prepareGetMappings(".watcher-history*").get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, ".watcher-history*").get(); XContentSource source = new XContentSource( response.getMappings().values().iterator().next().source().uncompressed(), XContentType.JSON @@ -225,7 +225,7 @@ public class HistoryIntegrationTests extends AbstractWatcherIntegrationTestCase assertBusy(() -> { // also ensure that the status field is disabled in the watch history - GetMappingsResponse response = indicesAdmin().prepareGetMappings(".watcher-history*").get(); + GetMappingsResponse response = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, ".watcher-history*").get(); XContentSource mappingSource = new XContentSource( response.getMappings().values().iterator().next().source().uncompressed(), XContentType.JSON diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java index 3f4b51b266a6..7a0bb7387073 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java @@ -62,7 +62,7 @@ public class GetWatchTests extends 
AbstractWatcherIntegrationTestCase { // if the watches index is an alias, remove the alias randomly, otherwise the index if (randomBoolean()) { try { - GetIndexResponse indexResponse = indicesAdmin().prepareGetIndex().setIndices(Watch.INDEX).get(); + GetIndexResponse indexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT).setIndices(Watch.INDEX).get(); boolean isWatchIndexAlias = Watch.INDEX.equals(indexResponse.indices()[0]) == false; if (isWatchIndexAlias) { assertAcked(indicesAdmin().prepareAliases().removeAlias(indexResponse.indices()[0], Watch.INDEX)); diff --git a/x-pack/plugin/wildcard/src/yamlRestTest/resources/rest-api-spec/test/30_synthetic_source.yml b/x-pack/plugin/wildcard/src/yamlRestTest/resources/rest-api-spec/test/30_synthetic_source.yml index 14e4a6f5aaef..4eaf89d0bf67 100644 --- a/x-pack/plugin/wildcard/src/yamlRestTest/resources/rest-api-spec/test/30_synthetic_source.yml +++ b/x-pack/plugin/wildcard/src/yamlRestTest/resources/rest-api-spec/test/30_synthetic_source.yml @@ -5,10 +5,6 @@ setup: --- synthetic source: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires synthetic source support - - do: indices.create: index: synthetic_source_test @@ -47,10 +43,6 @@ synthetic source: --- synthetic source with copy_to: - - requires: - cluster_features: ["mapper.source.synthetic_source_with_copy_to_and_doc_values_false"] - reason: requires copy_to support in synthetic source - - do: indices.create: index: synthetic_source_test diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml index 1b435c551fbe..41bc0a96e737 100644 --- 
a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml @@ -92,8 +92,6 @@ teardown: - do: catch: bad_request - allowed_warnings: - - "Line 1:21: Square brackets '[]' need to be removed in FROM METADATA declaration" headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } esql.query: body: @@ -104,12 +102,10 @@ teardown: - do: catch: bad_request - allowed_warnings: - - "Line 1:21: Square brackets '[]' need to be removed in FROM METADATA declaration" headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } esql.query: body: - query: 'FROM *:esql*,esql_* [METADATA _index] | sort cost | KEEP _index, tag, cost | LIMIT 10' + query: 'FROM *:esql*,esql_* METADATA _index | sort cost | KEEP _index, tag, cost | LIMIT 10' filter: range: since: diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 094ca9304a69..846c3c47a271 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -26,7 +26,9 @@ import java.nio.charset.StandardCharsets; import java.time.Instant; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static org.elasticsearch.upgrades.IndexingIT.assertCount; import static org.hamcrest.Matchers.equalTo; @@ -256,6 +258,7 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { } private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster) throws Exception { + Set indicesNeedingUpgrade = 
getDataStreamIndices(dataStreamName); final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2); for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) { rollover(dataStreamName); @@ -292,10 +295,9 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { } else { // The number of rollovers that will have happened when we call reindex: final int rolloversPerformedByReindex = explicitRolloverOnNewClusterCount == 0 ? 1 : 0; - assertThat( - statusResponseMap.get("total_indices_in_data_stream"), - equalTo(originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex) - ); + final int expectedTotalIndicesInDataStream = originalWriteIndex + numRolloversOnOldCluster + + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex; + assertThat(statusResponseMap.get("total_indices_in_data_stream"), equalTo(expectedTotalIndicesInDataStream)); /* * total_indices_requiring_upgrade is made up of: (the original write index) + numRolloversOnOldCluster. The number of * rollovers on the upgraded cluster is irrelevant since those will not be reindexed. 
@@ -305,6 +307,11 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { equalTo(originalWriteIndex + numRolloversOnOldCluster) ); assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1)); + // We expect all the original indices to have been deleted + for (String oldIndex : indicesNeedingUpgrade) { + assertThat(indexExists(oldIndex), equalTo(false)); + } + assertThat(getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream)); } }, 60, TimeUnit.SECONDS); Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel"); @@ -312,6 +319,16 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { assertOK(cancelResponse); } + @SuppressWarnings("unchecked") + private Set getDataStreamIndices(String dataStreamName) throws IOException { + Response response = client().performRequest(new Request("GET", "_data_stream/" + dataStreamName)); + Map responseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false); + List> dataStreams = (List>) responseMap.get("data_streams"); + Map dataStream = dataStreams.get(0); + List> indices = (List>) dataStream.get("indices"); + return indices.stream().map(index -> index.get("index_name").toString()).collect(Collectors.toSet()); + } + /* * Similar to isOriginalClusterCurrent, but returns true if the major versions of the clusters are the same. So true * for 8.6 and 8.17, but false for 7.17 and 8.18. diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java deleted file mode 100644 index 915122c97d3f..000000000000 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.upgrades; - -import org.elasticsearch.Version; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import static org.elasticsearch.TransportVersions.V_8_15_0; -import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.containsInAnyOrder; - -public class SecurityIndexRoleMappingCleanupIT extends AbstractUpgradeTestCase { - private static final Version UPGRADE_FROM_VERSION = Version.fromString(System.getProperty("tests.upgrade_from_version")); - - public void testCleanupDuplicateMappings() throws Exception { - // see build.gradle where we set operator/settings.json for more details on this skip - assumeTrue( - "Cluster requires version higher than since operator/settings.json is only set then: " + Version.V_8_7_0, - UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_7_0) - ); - if (CLUSTER_TYPE == ClusterType.OLD) { - // If we're in a state where the same operator-defined role mappings can exist both in cluster state and the native store - // (V_8_15_0 transport added to security.role_mapping_cleanup feature added), create a state - // where the native store will need to be cleaned up - assumeTrue( - "Cleanup only needed before security.role_mapping_cleanup feature available in cluster", - clusterHasFeature("security.role_mapping_cleanup") == false 
- ); - assumeTrue( - "If role mappings are in cluster state but cleanup has not been performed yet, create duplicated role mappings", - minimumTransportVersion().onOrAfter(V_8_15_0) - ); - // Since the old cluster has role mappings in cluster state, but doesn't check duplicates, create duplicates - createNativeRoleMapping("operator_role_mapping_1", Map.of("meta", "test"), true); - createNativeRoleMapping("operator_role_mapping_2", Map.of("meta", "test"), true); - } else if (CLUSTER_TYPE == ClusterType.MIXED) { - // Create a native role mapping that doesn't conflict with anything before the migration run - createNativeRoleMapping("no_name_conflict", Map.of("meta", "test")); - } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { - waitForSecurityMigrationCompletion(adminClient(), 2); - assertAllRoleMappings( - client(), - "operator_role_mapping_1" + ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_SUFFIX, - "operator_role_mapping_2" + ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_SUFFIX, - "no_name_conflict" - ); - // In the old cluster we might have created these (depending on the node features), so make sure they were removed - assertFalse(roleMappingExistsInSecurityIndex("operator_role_mapping_1")); - assertFalse(roleMappingExistsInSecurityIndex("operator_role_mapping_2")); - assertTrue(roleMappingExistsInSecurityIndex("no_name_conflict")); - // Make sure we can create and delete a conflicting role mapping again - createNativeRoleMapping("operator_role_mapping_1", Map.of("meta", "test"), true); - deleteNativeRoleMapping("operator_role_mapping_1", true); - } - } - - @SuppressWarnings("unchecked") - private boolean roleMappingExistsInSecurityIndex(String mappingName) throws IOException { - final Request request = new Request("POST", "/.security/_search"); - request.setJsonEntity(String.format(Locale.ROOT, """ - {"query":{"bool":{"must":[{"term":{"_id":"%s_%s"}}]}}}""", "role-mapping", mappingName)); - - request.setOptions( - expectWarnings( - "this request accesses system 
indices: [.security-7]," - + " but in a future major version, direct access to system indices will be prevented by default" - ) - ); - - Response response = adminClient().performRequest(request); - assertOK(response); - final Map responseMap = responseAsMap(response); - - Map hits = ((Map) responseMap.get("hits")); - return ((List) hits.get("hits")).isEmpty() == false; - } - - private void createNativeRoleMapping(String roleMappingName, Map metadata) throws IOException { - createNativeRoleMapping(roleMappingName, metadata, false); - } - - private void createNativeRoleMapping(String roleMappingName, Map metadata, boolean expectWarning) throws IOException { - final Request request = new Request("POST", "/_security/role_mapping/" + roleMappingName); - if (expectWarning) { - request.setOptions( - expectWarnings( - "A read-only role mapping with the same name [" - + roleMappingName - + "] has been previously defined in a configuration file. " - + "Both role mappings will be used to determine role assignments." - ) - ); - } - - BytesReference source = BytesReference.bytes( - jsonBuilder().map( - Map.of( - ExpressionRoleMapping.Fields.ROLES.getPreferredName(), - List.of("superuser"), - ExpressionRoleMapping.Fields.ENABLED.getPreferredName(), - true, - ExpressionRoleMapping.Fields.RULES.getPreferredName(), - Map.of("field", Map.of("username", "role-mapping-test-user")), - RoleDescriptor.Fields.METADATA.getPreferredName(), - metadata - ) - ) - ); - request.setJsonEntity(source.utf8ToString()); - assertOK(client().performRequest(request)); - } - - private void deleteNativeRoleMapping(String roleMappingName, boolean expectWarning) throws IOException { - final Request request = new Request("DELETE", "/_security/role_mapping/" + roleMappingName); - if (expectWarning) { - request.setOptions( - expectWarnings( - "A read-only role mapping with the same name [" - + roleMappingName - + "] has previously been defined in a configuration file. 
" - + "The native role mapping was deleted, but the read-only mapping will remain active " - + "and will be used to determine role assignments." - ) - ); - } - assertOK(client().performRequest(request)); - } - - private void assertAllRoleMappings(RestClient client, String... roleNames) throws IOException { - Request request = new Request("GET", "/_security/role_mapping"); - Response response = client.performRequest(request); - assertOK(response); - Map responseMap = responseAsMap(response); - - assertThat(responseMap.keySet(), containsInAnyOrder(roleNames)); - assertThat(responseMap.size(), is(roleNames.length)); - } -} diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java index 6c34e68297aa..df8290327ee5 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java @@ -39,10 +39,6 @@ public class SecurityIndexRolesMetadataMigrationIT extends AbstractUpgradeTestCa if (CLUSTER_TYPE == ClusterType.OLD) { createRoleWithMetadata(oldTestRole, Map.of("meta", "test")); assertDocInSecurityIndex(oldTestRole); - if (canRolesBeMigrated() == false) { - assertNoMigration(adminClient()); - assertCannotQueryRolesByMetadata(client()); - } } else if (CLUSTER_TYPE == ClusterType.MIXED) { if (FIRST_MIXED_ROUND) { createRoleWithMetadata(mixed1TestRole, Map.of("meta", "test")); @@ -51,13 +47,8 @@ public class SecurityIndexRolesMetadataMigrationIT extends AbstractUpgradeTestCa createRoleWithMetadata(mixed2TestRole, Map.of("meta", "test")); assertDocInSecurityIndex(mixed2TestRole); } - if (canRolesBeMigrated() == false) { - assertNoMigration(adminClient()); - assertCannotQueryRolesByMetadata(client()); - } } else if (CLUSTER_TYPE == 
ClusterType.UPGRADED) { createRoleWithMetadata(upgradedTestRole, Map.of("meta", "test")); - assertTrue(canRolesBeMigrated()); waitForSecurityMigrationCompletion(adminClient(), 1); assertMigratedDocInSecurityIndex(oldTestRole, "meta", "test"); assertMigratedDocInSecurityIndex(mixed1TestRole, "meta", "test"); @@ -196,9 +187,4 @@ public class SecurityIndexRolesMetadataMigrationIT extends AbstractUpgradeTestCa assertThat(roles.get(i).get("name"), equalTo(roleNames[i])); } } - - private boolean canRolesBeMigrated() { - return clusterHasFeature("security.migration_framework") != false - && clusterHasFeature("security.roles_metadata_flattened") != false; - } }