diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/LogsDbDocumentParsingBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/LogsDbDocumentParsingBenchmark.java new file mode 100644 index 000000000000..8924f84fdc90 --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/LogsDbDocumentParsingBenchmark.java @@ -0,0 +1,400 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.benchmark.index.mapper; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.io.IOException; +import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; + +@Fork(value = 1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@BenchmarkMode(Mode.Throughput) +@OutputTimeUnit(TimeUnit.SECONDS) +@State(Scope.Benchmark) +public class LogsDbDocumentParsingBenchmark { + private Random random; + private MapperService mapperServiceEnabled; + private MapperService mapperServiceEnabledWithStoreArrays; + private MapperService mapperServiceDisabled; + private SourceToParse[] documents; + + static { + LogConfigurator.configureESLogging(); // doc values implementations need logging + } + + private static String SAMPLE_LOGS_MAPPING_ENABLED = """ + { + "_source": { + "mode": "synthetic" + }, + "properties": { + "kafka": { + "properties": { + "log": { + "properties": { + "component": { + "ignore_above": 1024, + "type": "keyword" + }, + "trace": { + "properties": { + "message": { + "type": "text" + }, + "class": { + "ignore_above": 1024, + "type": "keyword" + } + } + }, + "thread": { + "ignore_above": 1024, + "type": "keyword" + }, + "class": { + "ignore_above": 1024, + "type": "keyword" + } + } + } + } + }, + "host": { + "properties": { + "hostname": { + "ignore_above": 1024, + "type": "keyword" + }, + "os": { + "properties": { + "build": { + "ignore_above": 1024, + "type": "keyword" + }, + "kernel": { + "ignore_above": 1024, + "type": "keyword" + }, + "codename": { + "ignore_above": 1024, + "type": "keyword" + }, + "name": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "text": { + "type": "text" + } + } + }, + "family": { + "ignore_above": 1024, + "type": "keyword" + }, + "version": { + "ignore_above": 1024, + "type": "keyword" + }, + 
"platform": { + "ignore_above": 1024, + "type": "keyword" + } + } + }, + "domain": { + "ignore_above": 1024, + "type": "keyword" + }, + "ip": { + "type": "ip" + }, + "containerized": { + "type": "boolean" + }, + "name": { + "ignore_above": 1024, + "type": "keyword" + }, + "id": { + "ignore_above": 1024, + "type": "keyword" + }, + "type": { + "ignore_above": 1024, + "type": "keyword" + }, + "mac": { + "ignore_above": 1024, + "type": "keyword" + }, + "architecture": { + "ignore_above": 1024, + "type": "keyword" + } + } + } + } + } + """; + + private static String SAMPLE_LOGS_MAPPING_ENABLED_WITH_STORE_ARRAYS = """ + { + "_source": { + "mode": "synthetic" + }, + "properties": { + "kafka": { + "properties": { + "log": { + "properties": { + "component": { + "ignore_above": 1024, + "type": "keyword" + }, + "trace": { + "synthetic_source_keep": "arrays", + "properties": { + "message": { + "type": "text" + }, + "class": { + "ignore_above": 1024, + "type": "keyword" + } + } + }, + "thread": { + "ignore_above": 1024, + "type": "keyword" + }, + "class": { + "ignore_above": 1024, + "type": "keyword" + } + } + } + } + }, + "host": { + "properties": { + "hostname": { + "ignore_above": 1024, + "type": "keyword" + }, + "os": { + "properties": { + "build": { + "ignore_above": 1024, + "type": "keyword" + }, + "kernel": { + "ignore_above": 1024, + "type": "keyword" + }, + "codename": { + "ignore_above": 1024, + "type": "keyword" + }, + "name": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "text": { + "type": "text" + } + } + }, + "family": { + "ignore_above": 1024, + "type": "keyword" + }, + "version": { + "ignore_above": 1024, + "type": "keyword" + }, + "platform": { + "ignore_above": 1024, + "type": "keyword" + } + } + }, + "domain": { + "ignore_above": 1024, + "type": "keyword" + }, + "ip": { + "type": "ip" + }, + "containerized": { + "type": "boolean" + }, + "name": { + "ignore_above": 1024, + "type": "keyword" + }, + "id": { + "ignore_above": 1024, + "type": "keyword" + }, + "type": { + "ignore_above": 1024, + "type": "keyword" + }, + "mac": { + "ignore_above": 1024, + "type": "keyword" + }, + "architecture": { + "ignore_above": 1024, + "type": "keyword" + } + } + } + } + } + """; + + private static String SAMPLE_LOGS_MAPPING_DISABLED = """ + { + "_source": { + "mode": "synthetic" + }, + "enabled": false + } + """; + + @Setup + public void setUp() throws IOException { + this.random = new Random(); + this.mapperServiceEnabled = MapperServiceFactory.create(SAMPLE_LOGS_MAPPING_ENABLED); + this.mapperServiceEnabledWithStoreArrays = MapperServiceFactory.create(SAMPLE_LOGS_MAPPING_ENABLED_WITH_STORE_ARRAYS); + this.mapperServiceDisabled = MapperServiceFactory.create(SAMPLE_LOGS_MAPPING_DISABLED); + this.documents = generateRandomDocuments(10_000); + } + + @Benchmark + public List benchmarkEnabledObject() { + return mapperServiceEnabled.documentMapper().parse(randomFrom(documents)).docs(); + } + + @Benchmark + public List benchmarkEnabledObjectWithStoreArrays() { + return mapperServiceEnabledWithStoreArrays.documentMapper().parse(randomFrom(documents)).docs(); + } + + @Benchmark + public List benchmarkDisabledObject() { + return mapperServiceDisabled.documentMapper().parse(randomFrom(documents)).docs(); + } + + @SafeVarargs + @SuppressWarnings("varargs") + private T randomFrom(T... 
items) { + return items[random.nextInt(items.length)]; + } + + private SourceToParse[] generateRandomDocuments(int count) throws IOException { + var docs = new SourceToParse[count]; + for (int i = 0; i < count; i++) { + docs[i] = generateRandomDocument(); + } + return docs; + } + + private SourceToParse generateRandomDocument() throws IOException { + var builder = XContentBuilder.builder(XContentType.JSON.xContent()); + + builder.startObject(); + + builder.startObject("kafka"); + { + builder.startObject("log"); + { + builder.field("component", randomString(10)); + builder.startArray("trace"); + { + builder.startObject(); + { + builder.field("message", randomString(50)); + builder.field("class", randomString(10)); + } + builder.endObject(); + builder.startObject(); + { + builder.field("message", randomString(50)); + builder.field("class", randomString(10)); + } + builder.endObject(); + } + builder.endArray(); + builder.field("thread", randomString(10)); + builder.field("class", randomString(10)); + + } + builder.endObject(); + } + builder.endObject(); + + builder.startObject("host"); + { + builder.field("hostname", randomString(10)); + builder.startObject("os"); + { + builder.field("name", randomString(10)); + } + builder.endObject(); + + builder.field("domain", randomString(10)); + builder.field("ip", randomIp()); + builder.field("name", randomString(10)); + } + + builder.endObject(); + + builder.endObject(); + + return new SourceToParse(UUIDs.randomBase64UUID(), BytesReference.bytes(builder), XContentType.JSON); + } + + private String randomIp() { + return "" + random.nextInt(255) + '.' + random.nextInt(255) + '.' + random.nextInt(255) + '.' + random.nextInt(255); + } + + private String randomString(int maxLength) { + var length = random.nextInt(maxLength); + var builder = new StringBuilder(length); + for (int i = 0; i < length; i++) { + builder.append((byte) (32 + random.nextInt(94))); + } + return builder.toString(); + } +} diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java index 538008d1dcac..6f64edfc8c0d 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java @@ -14,7 +14,6 @@ import org.elasticsearch.gradle.internal.conventions.info.GitInfoValueSource; import org.elasticsearch.gradle.internal.conventions.util.Util; import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; @@ -35,11 +34,19 @@ public abstract class GitInfoPlugin implements Plugin { @Override public void apply(Project project) { - File rootDir = Util.locateElasticsearchWorkspace(project.getGradle()); + File rootDir = getGitRootDir(project); getGitInfo().convention(factory.of(GitInfoValueSource.class, spec -> { spec.getParameters().getPath().set(rootDir); })); revision = getGitInfo().map(info -> info.getRevision() == null ? 
info.getRevision() : "main"); } + private static File getGitRootDir(Project project) { + File rootDir = project.getRootDir(); + if (new File(rootDir, ".git").exists()) { + return rootDir; + } + return Util.locateElasticsearchWorkspace(project.getGradle()); + } + public abstract Property getGitInfo(); public Provider getRevision() { diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/util/Util.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/util/Util.java index a21606156b09..16e8af5832c5 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/util/Util.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/util/Util.java @@ -14,18 +14,19 @@ import org.gradle.api.GradleException; import org.gradle.api.Project; import org.gradle.api.file.FileTree; import org.gradle.api.initialization.IncludedBuild; +import org.gradle.api.internal.GradleInternal; import org.gradle.api.invocation.Gradle; import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.util.PatternFilterable; -import javax.annotation.Nullable; import java.io.File; -import java.util.Collection; import java.util.Optional; import java.util.function.Supplier; +import javax.annotation.Nullable; + public class Util { public static boolean getBooleanProperty(String property, boolean defaultValue) { @@ -120,6 +121,14 @@ public class Util { return project.getExtensions().getByType(JavaPluginExtension.class).getSourceSets(); } + public static File getRootFolder(Gradle gradle) { + Gradle parent = gradle.getParent(); + if (parent == null) { + return gradle.getRootProject().getRootDir(); + } + return getRootFolder(parent); + } + public static File locateElasticsearchWorkspace(Gradle gradle) { if (gradle.getRootProject().getName().startsWith("build-tools")) { File buildToolsParent = gradle.getRootProject().getRootDir().getParentFile(); diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 22286c90de3d..e712035eabc7 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip +distributionSha256Sum=7ebdac923867a3cec0098302416d1e3c6c0c729fc4e2e05c10637a8af33a76c5 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy index 18b681ef5a9e..cc551057cd60 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy @@ -285,7 +285,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { esplugin { name = 'hello-world-plugin' - classname 'org.acme.HelloWorldPlugin' + classname = 
'org.acme.HelloWorldPlugin' description = "shadowed es plugin" } @@ -375,7 +375,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { esplugin { name = 'hello-world-plugin' - classname 'org.acme.HelloWorldPlugin' + classname = 'org.acme.HelloWorldPlugin' description = "custom project description" } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 847eda7a355c..797dc8bd0641 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -98,8 +98,8 @@ develocity { link 'Source', "${prBaseUrl}/tree/${System.getenv('BUILDKITE_COMMIT')}" link 'Pull Request', "https://github.com/${repository}/pull/${prId}" } else { - value 'Git Commit ID', gitRevision - link 'Source', "https://github.com/${repository}/tree/${gitRevision}" + value 'Git Commit ID', gitRevision.get() + link 'Source', "https://github.com/${repository}/tree/${gitRevision.get()}" } buildFinished { result -> diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index 0e8dbb7fce26..c38ea5b4f085 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -131,11 +131,13 @@ public class InternalDistributionBwcSetupPlugin implements Plugin { // We don't use a normal `Copy` task here as snapshotting the entire gradle user home is very expensive. 
This task is cheap, so // up-to-date checking doesn't buy us much project.getTasks().register("setupGradleUserHome", task -> { + File gradleUserHome = project.getGradle().getGradleUserHomeDir(); + String projectName = project.getName(); task.doLast(t -> { fileSystemOperations.copy(copy -> { - String gradleUserHome = project.getGradle().getGradleUserHomeDir().getAbsolutePath(); - copy.into(gradleUserHome + "-" + project.getName()); - copy.from(gradleUserHome, copySpec -> { + String absoluteGradleUserHomePath = gradleUserHome.getAbsolutePath(); + copy.into(absoluteGradleUserHomePath + "-" + projectName); + copy.from(absoluteGradleUserHomePath, copySpec -> { copySpec.include("gradle.properties"); copySpec.include("init.d/*"); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java index 28776f03d17e..c44b1212e648 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java @@ -14,14 +14,14 @@ import org.elasticsearch.gradle.ElasticsearchDistributionType; import java.util.List; public class InternalElasticsearchDistributionTypes { - public static ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType(); - public static ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType(); + public static final ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType(); + public static final ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType(); + public static final ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType(); + public static final ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType(); + public static final ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType(); + public static final ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType(); - public static List ALL_INTERNAL = List.of( + public static final List ALL_INTERNAL = List.of( DEB, RPM, DOCKER, diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java index f0b28f5381b9..80e64ad81323 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java @@ -172,6 +172,11 @@ public class ErrorReportingTestListener implements TestOutputListener, TestListe return Destination.StdErr; } + @Override + public long getLogTime() { + return result.getEndTime(); 
+ } + @Override public String getMessage() { return message; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rerun/TestRerunTaskExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rerun/TestRerunTaskExtension.java index f08d3dadc1bb..8e4519b55e15 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rerun/TestRerunTaskExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rerun/TestRerunTaskExtension.java @@ -30,7 +30,7 @@ public class TestRerunTaskExtension { /** * The name of the extension added to each test task. */ - public static String NAME = "rerun"; + public static final String NAME = "rerun"; private final Property maxReruns; diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index 876e3136ea81..9c57ca327c7b 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.11.1 \ No newline at end of file +8.12 \ No newline at end of file diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ede1b392b8a4..57882fa842b4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -19,6 +19,7 @@ commons_lang3 = 3.9 google_oauth_client = 1.34.1 awsv1sdk = 1.12.270 awsv2sdk = 2.28.13 +reactive_streams = 1.0.4 antlr4 = 4.13.1 # bouncy castle version for non-fips. fips jars use a different version diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy index 2575f80ad9da..bbc47bedeffc 100644 --- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy +++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy @@ -135,7 +135,7 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest { esplugin { name = 'test-$pluginType' - classname 'org.acme.TestModule' + classname = 'org.acme.TestModule' description = "test $pluginType description" } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/distribution/ElasticsearchDistributionTypes.java b/build-tools/src/main/java/org/elasticsearch/gradle/distribution/ElasticsearchDistributionTypes.java index 8a53d56e3c4c..68eef25df5be 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/distribution/ElasticsearchDistributionTypes.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/distribution/ElasticsearchDistributionTypes.java @@ -12,6 +12,6 @@ package org.elasticsearch.gradle.distribution; import org.elasticsearch.gradle.ElasticsearchDistributionType; public class ElasticsearchDistributionTypes { - public static ElasticsearchDistributionType ARCHIVE = new ArchiveElasticsearchDistributionType(); - public static ElasticsearchDistributionType INTEG_TEST_ZIP = new IntegTestZipElasticsearchDistributionType(); + public static final ElasticsearchDistributionType ARCHIVE = new ArchiveElasticsearchDistributionType(); + public static final ElasticsearchDistributionType INTEG_TEST_ZIP = new IntegTestZipElasticsearchDistributionType(); } diff --git a/build.gradle b/build.gradle index b95e34640cb5..b85742be9632 100644 --- a/build.gradle +++ b/build.gradle @@ -221,25 +221,11 @@ 
tasks.register("verifyVersions") { } } -/* - * When adding backcompat behavior that spans major versions, temporarily - * disabling the backcompat tests is necessary. This flag controls - * the enabled state of every bwc task. It should be set back to true - * after the backport of the backcompat code is complete. - */ - +// TODO: This flag existed as a mechanism to disable bwc tests during a backport. It is no +// longer used for that purpose, but instead a way to run only functional tests. We should +// rework the functionalTests task to be more explicit about which tasks it wants to run +// so that that this flag is no longer needed. boolean bwc_tests_enabled = true -// place a PR link here when committing bwc changes: -String bwc_tests_disabled_issue = "" -if (bwc_tests_enabled == false) { - if (bwc_tests_disabled_issue.isEmpty()) { - throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") - } - println "========================= WARNING =========================" - println " Backwards compatibility tests are disabled!" - println "See ${bwc_tests_disabled_issue}" - println "===========================================================" -} if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || it == 'functionalTests' }) { // Disable BWC tests for checkPart* tasks and platform support tests as it's expected that this will run on it's own check bwc_tests_enabled = false @@ -378,24 +364,24 @@ tasks.register("verifyBwcTestsEnabled") { } tasks.register("branchConsistency") { - description 'Ensures this branch is internally consistent. For example, that versions constants match released versions.' + description = 'Ensures this branch is internally consistent. For example, that versions constants match released versions.' group 'Verification' dependsOn ":verifyVersions", ":verifyBwcTestsEnabled" } tasks.named("wrapper").configure { distributionType = 'ALL' + def minimumGradleVersionFile = project.file('build-tools-internal/src/main/resources/minimumGradleVersion') doLast { // copy wrapper properties file to build-tools-internal to allow seamless idea integration def file = new File("build-tools-internal/gradle/wrapper/gradle-wrapper.properties") - Files.copy(wrapper.getPropertiesFile().toPath(), file.toPath(), REPLACE_EXISTING) + Files.copy(getPropertiesFile().toPath(), file.toPath(), REPLACE_EXISTING) // copy wrapper properties file to plugins/examples to allow seamless idea integration def examplePluginsWrapperProperties = new File("plugins/examples/gradle/wrapper/gradle-wrapper.properties") - Files.copy(wrapper.getPropertiesFile().toPath(), examplePluginsWrapperProperties.toPath(), REPLACE_EXISTING) - + Files.copy(getPropertiesFile().toPath(), examplePluginsWrapperProperties.toPath(), REPLACE_EXISTING) // Update build-tools to reflect the Gradle upgrade // TODO: we can remove this once we have tests to make sure older versions work. 
- project.file('build-tools-internal/src/main/resources/minimumGradleVersion').text = gradleVersion + minimumGradleVersionFile.text = gradleVersion println "Updated minimum Gradle Version" } } diff --git a/client/client-benchmark-noop-api-plugin/build.gradle b/client/client-benchmark-noop-api-plugin/build.gradle index 7f03450c406d..9a93cb38b288 100644 --- a/client/client-benchmark-noop-api-plugin/build.gradle +++ b/client/client-benchmark-noop-api-plugin/build.gradle @@ -12,9 +12,9 @@ group = 'org.elasticsearch.plugin' apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'client-benchmark-noop-api' - description 'Stubbed out Elasticsearch actions that can be used for client-side benchmarking' - classname 'org.elasticsearch.plugin.noop.NoopPlugin' + name = 'client-benchmark-noop-api' + description = 'Stubbed out Elasticsearch actions that can be used for client-side benchmarking' + classname = 'org.elasticsearch.plugin.noop.NoopPlugin' } // Not published so no need to assemble diff --git a/distribution/build.gradle b/distribution/build.gradle index e65d07dcfc2b..e0302a081ce6 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -647,8 +647,8 @@ subprojects { // in the final log4j2.properties configuration, as it appears in the // archive distribution. artifacts.add('log4jConfig', file("${defaultOutputs}/log4j2.properties")) { - type 'file' - name 'log4j2.properties' + type = 'file' + name = 'log4j2.properties' builtBy 'buildDefaultLog4jConfig' } diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index f5b94fb9dfd9..db7797ca23ec 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -28,7 +28,7 @@ repositories { // other Docker variants, the need for the main image to be rebuildable by Docker Hub // means that the Dockerfile itself has to fetch the binary. 
ivy { - url 'https://github.com/' + url = 'https://github.com/' patternLayout { artifact '/[organisation]/[module]/releases/download/v[revision]/[module]-[classifier]' } @@ -50,7 +50,7 @@ if (useDra == false) { artifact '/[organisation]/[module]-[revision]-[classifier].[ext]' } } else { - url "https://artifacts-snapshot.elastic.co/" + url = "https://artifacts-snapshot.elastic.co/" patternLayout { if (VersionProperties.isElasticsearchSnapshot()) { artifact '/[organization]/[revision]/downloads/[organization]/[module]/[module]-[revision]-[classifier].[ext]' @@ -583,8 +583,8 @@ subprojects { Project subProject -> } artifacts.add('default', file(tarFile)) { - type 'tar' - name artifactName + type = 'tar' + name = artifactName builtBy exportTaskName } } diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 5f45b4b72974..b7ba4e32edae 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -43,7 +43,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.10.0" + id "com.netflix.nebula.ospackage-base" version "11.10.1" } ['deb', 'rpm'].each { type -> @@ -87,21 +87,21 @@ def commonPackageConfig(String type, String architecture) { OS.current().equals(OS.WINDOWS) == false } dependsOn "process${type.capitalize()}Files" - packageName "elasticsearch" + packageName = "elasticsearch" if (type == 'deb') { if (architecture == 'x64') { - arch('amd64') + arch = 'amd64' } else { assert architecture == 'aarch64': architecture - arch('arm64') + arch = 'arm64' } } else { assert type == 'rpm': type if (architecture == 'x64') { - arch('X86_64') + arch = 'X86_64' } else { assert architecture == 'aarch64': architecture - arch('aarch64') + arch = 'aarch64' } } // Follow elasticsearch's file naming convention @@ -200,7 +200,7 @@ def commonPackageConfig(String type, String architecture) { into('/etc') permissionGroup 'elasticsearch' setgid true - includeEmptyDirs true + includeEmptyDirs = true createDirectoryEntry true include("elasticsearch") // empty dir, just to add directory entry include("elasticsearch/jvm.options.d") // empty dir, just to add directory entry @@ -215,7 +215,7 @@ def commonPackageConfig(String type, String architecture) { unix(0660) } permissionGroup 'elasticsearch' - includeEmptyDirs true + includeEmptyDirs = true createDirectoryEntry true fileType CONFIG | NOREPLACE } @@ -265,7 +265,7 @@ def commonPackageConfig(String type, String architecture) { into(file.parent) { from "${packagingFiles}/${file.parent}" include file.name - includeEmptyDirs true + includeEmptyDirs = true createDirectoryEntry true user u permissionGroup g @@ -289,15 +289,15 @@ def commonPackageConfig(String type, String architecture) { // this is package independent configuration ospackage { - maintainer 'Elasticsearch Team ' - summary 'Distributed RESTful search engine built for the cloud' - packageDescription ''' + maintainer = 'Elasticsearch Team ' + summary = 'Distributed RESTful search engine built for the cloud' + packageDescription = ''' Reference documentation can be found at https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html and the 'Elasticsearch: The Definitive Guide' book can be found at https://www.elastic.co/guide/en/elasticsearch/guide/current/index.html '''.stripIndent().trim() - url 'https://www.elastic.co/' + url = 'https://www.elastic.co/' // signing setup if (project.hasProperty('signing.password') && buildParams.isSnapshotBuild() == false) { @@ -311,10 +311,10 @@ 
ospackage { // version found on oldest supported distro, centos-6 requires('coreutils', '8.4', GREATER | EQUAL) - fileMode 0644 - dirMode 0755 - user 'root' - permissionGroup 'root' + fileMode = 0644 + dirMode = 0755 + user = 'root' + permissionGroup = 'root' into '/usr/share/elasticsearch' } @@ -330,7 +330,7 @@ Closure commonDebConfig(String architecture) { customFields['License'] = 'Elastic-License' archiveVersion = project.version.replace('-', '~') - packageGroup 'web' + packageGroup = 'web' // versions found on oldest supported distro, centos-6 requires('bash', '4.1', GREATER | EQUAL) @@ -358,24 +358,24 @@ Closure commonRpmConfig(String architecture) { return { configure(commonPackageConfig('rpm', architecture)) - license 'Elastic License' + license = 'Elastic License' - packageGroup 'Application/Internet' + packageGroup = 'Application/Internet' requires '/bin/bash' obsoletes packageName, '7.0.0', Flags.LESS prefix '/usr' - packager 'Elasticsearch' + packager = 'Elasticsearch' archiveVersion = project.version.replace('-', '_') release = '1' - os 'LINUX' - distribution 'Elasticsearch' - vendor 'Elasticsearch' + os = 'LINUX' + distribution = 'Elasticsearch' + vendor = 'Elasticsearch' // TODO ospackage doesn't support icon but we used to have one // without this the rpm will have parent dirs of any files we copy in, eg /etc/elasticsearch - addParentDirs false + addParentDirs = false } } diff --git a/docs/changelog/117858.yaml b/docs/changelog/117858.yaml new file mode 100644 index 000000000000..70f12dc40027 --- /dev/null +++ b/docs/changelog/117858.yaml @@ -0,0 +1,5 @@ +pr: 117858 +summary: Create upgrade mode +area: Transform +type: enhancement +issues: [] diff --git a/docs/changelog/118958.yaml b/docs/changelog/118958.yaml new file mode 100644 index 000000000000..fb0fd6388ab6 --- /dev/null +++ b/docs/changelog/118958.yaml @@ -0,0 +1,5 @@ +pr: 118958 +summary: Add missing timeouts to rest-api-spec SLM APIs +area: ILM+SLM +type: bug +issues: [] diff --git a/docs/changelog/119265.yaml b/docs/changelog/119265.yaml new file mode 100644 index 000000000000..296106b9c01c --- /dev/null +++ b/docs/changelog/119265.yaml @@ -0,0 +1,6 @@ +pr: 119265 +summary: Fix `AbstractShapeGeometryFieldMapperTests` +area: "ES|QL" +type: bug +issues: + - 119201 diff --git a/docs/changelog/119291.yaml b/docs/changelog/119291.yaml new file mode 100644 index 000000000000..89a6b6118049 --- /dev/null +++ b/docs/changelog/119291.yaml @@ -0,0 +1,5 @@ +pr: 119291 +summary: Register mustache size limit setting +area: Infra/Scripting +type: bug +issues: [] diff --git a/docs/changelog/119296.yaml b/docs/changelog/119296.yaml new file mode 100644 index 000000000000..f1a92cffb513 --- /dev/null +++ b/docs/changelog/119296.yaml @@ -0,0 +1,6 @@ +pr: 119296 +summary: Fix writing for LOOKUP status +area: ES|QL +type: bug +issues: + - 119086 diff --git a/docs/changelog/119310.yaml b/docs/changelog/119310.yaml new file mode 100644 index 000000000000..4e09e1a5c39d --- /dev/null +++ b/docs/changelog/119310.yaml @@ -0,0 +1,6 @@ +pr: 119310 +summary: Remove ChunkedToXContentBuilder +area: "Network" +type: bug +issues: + - 118647 diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index f8d925945401..28933eb75050 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -28,7 +28,7 @@ You can pass an `X-Opaque-Id` HTTP header to track the origin of a request in * Response of any request that includes the header * <<_identifying_running_tasks,Task 
management API>> response -* <<_identifying_search_slow_log_origin,Slow logs>> +* <> * <> For the deprecation logs, {es} also uses the `X-Opaque-Id` value to throttle @@ -52,7 +52,7 @@ safely generate a unique `traceparent` header for each request. If provided, {es} surfaces the header's `trace-id` value as `trace.id` in the: * <> -* <<_identifying_search_slow_log_origin,Slow logs>> +* <> * <> For example, the following `traceparent` value would produce the following diff --git a/docs/reference/autoscaling/apis/autoscaling-apis.asciidoc b/docs/reference/autoscaling/apis/autoscaling-apis.asciidoc index e4da2c45ee97..87de3818bfaf 100644 --- a/docs/reference/autoscaling/apis/autoscaling-apis.asciidoc +++ b/docs/reference/autoscaling/apis/autoscaling-apis.asciidoc @@ -4,6 +4,13 @@ NOTE: {cloud-only} + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-autoscaling[Autoscaling APIs]. +-- + You can use the following APIs to perform {cloud}/ec-autoscaling.html[autoscaling operations]. [discrete] diff --git a/docs/reference/autoscaling/apis/delete-autoscaling-policy.asciidoc b/docs/reference/autoscaling/apis/delete-autoscaling-policy.asciidoc index 190428485a00..349e40aab054 100644 --- a/docs/reference/autoscaling/apis/delete-autoscaling-policy.asciidoc +++ b/docs/reference/autoscaling/apis/delete-autoscaling-policy.asciidoc @@ -7,6 +7,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-autoscaling[Autoscaling APIs]. +-- + Delete {cloud}/ec-autoscaling.html[autoscaling] policy. [[autoscaling-delete-autoscaling-policy-request]] diff --git a/docs/reference/autoscaling/apis/get-autoscaling-capacity.asciidoc b/docs/reference/autoscaling/apis/get-autoscaling-capacity.asciidoc index d635d8c8f7bd..d45f7cbacc24 100644 --- a/docs/reference/autoscaling/apis/get-autoscaling-capacity.asciidoc +++ b/docs/reference/autoscaling/apis/get-autoscaling-capacity.asciidoc @@ -7,6 +7,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-autoscaling[Autoscaling APIs]. +-- + Get {cloud}/ec-autoscaling.html[autoscaling] capacity. [[autoscaling-get-autoscaling-capacity-request]] diff --git a/docs/reference/autoscaling/apis/get-autoscaling-policy.asciidoc b/docs/reference/autoscaling/apis/get-autoscaling-policy.asciidoc index 973eedcb361c..9962b266fb66 100644 --- a/docs/reference/autoscaling/apis/get-autoscaling-policy.asciidoc +++ b/docs/reference/autoscaling/apis/get-autoscaling-policy.asciidoc @@ -7,6 +7,13 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-autoscaling[Autoscaling APIs]. +-- + + Get {cloud}/ec-autoscaling.html[autoscaling] policy. [[autoscaling-get-autoscaling-policy-request]] diff --git a/docs/reference/autoscaling/apis/put-autoscaling-policy.asciidoc b/docs/reference/autoscaling/apis/put-autoscaling-policy.asciidoc index e564f83411eb..97c6a54fab03 100644 --- a/docs/reference/autoscaling/apis/put-autoscaling-policy.asciidoc +++ b/docs/reference/autoscaling/apis/put-autoscaling-policy.asciidoc @@ -7,6 +7,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-autoscaling[Autoscaling APIs]. +-- + Creates or updates an {cloud}/ec-autoscaling.html[autoscaling] policy. 
[[autoscaling-put-autoscaling-policy-request]] diff --git a/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc b/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc index a6894a933b46..19c1b5437ef0 100644 --- a/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc +++ b/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc @@ -8,6 +8,12 @@ beta::[] Delete Analytics Collection ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. +-- + //// [source,console] ---- diff --git a/docs/reference/behavioral-analytics/apis/index.asciidoc b/docs/reference/behavioral-analytics/apis/index.asciidoc index 692d3374f89f..6dc12599c229 100644 --- a/docs/reference/behavioral-analytics/apis/index.asciidoc +++ b/docs/reference/behavioral-analytics/apis/index.asciidoc @@ -9,6 +9,12 @@ beta::[] --- +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. +-- + Use the following APIs to manage tasks and resources related to <>: * <> diff --git a/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc b/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc index 14511a125827..46ee8296f3eb 100644 --- a/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc +++ b/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc @@ -8,6 +8,12 @@ beta::[] List Analytics Collections ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. +-- + //// [source,console] ---- diff --git a/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc b/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc index f82717e22ed3..60985cd50d3d 100644 --- a/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc +++ b/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc @@ -8,6 +8,12 @@ beta::[] Post Analytics Collection Event ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. +-- + //// [source,console] ---- diff --git a/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc b/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc index cbbab2ae3e26..412277afa207 100644 --- a/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc +++ b/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc @@ -8,6 +8,12 @@ beta::[] Put Analytics Collection ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. +-- + //// [source,console] ---- diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index 328970e1d9dc..61ec9f7680f7 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -1,6 +1,12 @@ [[cat]] == Compact and aligned text (CAT) APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. 
+-- + ["float",id="intro"] === Introduction diff --git a/docs/reference/cat/alias.asciidoc b/docs/reference/cat/alias.asciidoc index aab0c9df25ed..8be7ae60ff7e 100644 --- a/docs/reference/cat/alias.asciidoc +++ b/docs/reference/cat/alias.asciidoc @@ -4,6 +4,12 @@ cat aliases ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs].. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or the diff --git a/docs/reference/cat/allocation.asciidoc b/docs/reference/cat/allocation.asciidoc index bbd044c4b8e5..07735335c91a 100644 --- a/docs/reference/cat/allocation.asciidoc +++ b/docs/reference/cat/allocation.asciidoc @@ -4,6 +4,12 @@ cat allocation ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs].. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/anomaly-detectors.asciidoc b/docs/reference/cat/anomaly-detectors.asciidoc index 68d952d2a853..589ff6487226 100644 --- a/docs/reference/cat/anomaly-detectors.asciidoc +++ b/docs/reference/cat/anomaly-detectors.asciidoc @@ -5,6 +5,12 @@ cat anomaly detectors ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs].. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/component-templates.asciidoc b/docs/reference/cat/component-templates.asciidoc index 596c86befd1b..c111fd8c2fe0 100644 --- a/docs/reference/cat/component-templates.asciidoc +++ b/docs/reference/cat/component-templates.asciidoc @@ -4,6 +4,12 @@ cat component templates ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs].. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/count.asciidoc b/docs/reference/cat/count.asciidoc index 37e602c75902..39c5b5b798d8 100644 --- a/docs/reference/cat/count.asciidoc +++ b/docs/reference/cat/count.asciidoc @@ -4,6 +4,12 @@ cat count ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs].. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/datafeeds.asciidoc b/docs/reference/cat/datafeeds.asciidoc index 506812fedaba..fa40d3aff4af 100644 --- a/docs/reference/cat/datafeeds.asciidoc +++ b/docs/reference/cat/datafeeds.asciidoc @@ -5,6 +5,12 @@ cat {dfeeds} ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs].. 
+-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/dataframeanalytics.asciidoc b/docs/reference/cat/dataframeanalytics.asciidoc index ed0f697c36d5..a164ac7c460e 100644 --- a/docs/reference/cat/dataframeanalytics.asciidoc +++ b/docs/reference/cat/dataframeanalytics.asciidoc @@ -5,6 +5,12 @@ cat {dfanalytics} ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/fielddata.asciidoc b/docs/reference/cat/fielddata.asciidoc index 376ef1d97057..22c0d532f212 100644 --- a/docs/reference/cat/fielddata.asciidoc +++ b/docs/reference/cat/fielddata.asciidoc @@ -4,6 +4,12 @@ cat fielddata ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/health.asciidoc b/docs/reference/cat/health.asciidoc index ad39ace31080..70e7960b0357 100644 --- a/docs/reference/cat/health.asciidoc +++ b/docs/reference/cat/health.asciidoc @@ -4,6 +4,12 @@ cat health ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/indices.asciidoc b/docs/reference/cat/indices.asciidoc index b8dda01c2eae..22d240e8d674 100644 --- a/docs/reference/cat/indices.asciidoc +++ b/docs/reference/cat/indices.asciidoc @@ -4,6 +4,12 @@ cat indices ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/master.asciidoc b/docs/reference/cat/master.asciidoc index bcf2b876e450..d2ce0c4fdbdc 100644 --- a/docs/reference/cat/master.asciidoc +++ b/docs/reference/cat/master.asciidoc @@ -4,6 +4,12 @@ cat master ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/nodeattrs.asciidoc b/docs/reference/cat/nodeattrs.asciidoc index ff37b430956a..e8b1128e0c21 100644 --- a/docs/reference/cat/nodeattrs.asciidoc +++ b/docs/reference/cat/nodeattrs.asciidoc @@ -4,6 +4,12 @@ cat nodeattrs ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/nodes.asciidoc b/docs/reference/cat/nodes.asciidoc index 651c4ef3c7c2..c099449ceae6 100644 --- a/docs/reference/cat/nodes.asciidoc +++ b/docs/reference/cat/nodes.asciidoc @@ -5,6 +5,12 @@ cat nodes ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. 
+-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/pending_tasks.asciidoc b/docs/reference/cat/pending_tasks.asciidoc index f772cdb66d88..634fec4ca44d 100644 --- a/docs/reference/cat/pending_tasks.asciidoc +++ b/docs/reference/cat/pending_tasks.asciidoc @@ -4,6 +4,12 @@ cat pending tasks ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc index ae360862a0ed..ed06a8df2a55 100644 --- a/docs/reference/cat/plugins.asciidoc +++ b/docs/reference/cat/plugins.asciidoc @@ -4,6 +4,11 @@ cat plugins ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- [IMPORTANT] ==== diff --git a/docs/reference/cat/recovery.asciidoc b/docs/reference/cat/recovery.asciidoc index 7393a8b71908..9ac84e4528fb 100644 --- a/docs/reference/cat/recovery.asciidoc +++ b/docs/reference/cat/recovery.asciidoc @@ -4,6 +4,12 @@ cat recovery ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/repositories.asciidoc b/docs/reference/cat/repositories.asciidoc index ec2f243c27be..140bcf8ee702 100644 --- a/docs/reference/cat/repositories.asciidoc +++ b/docs/reference/cat/repositories.asciidoc @@ -4,6 +4,12 @@ cat repositories ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/segments.asciidoc b/docs/reference/cat/segments.asciidoc index 82dc7298a078..0dc74ea84989 100644 --- a/docs/reference/cat/segments.asciidoc +++ b/docs/reference/cat/segments.asciidoc @@ -4,6 +4,12 @@ cat segments ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/shards.asciidoc b/docs/reference/cat/shards.asciidoc index 87dcb01838bf..4c20b6a241dd 100644 --- a/docs/reference/cat/shards.asciidoc +++ b/docs/reference/cat/shards.asciidoc @@ -5,6 +5,12 @@ cat shards ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/snapshots.asciidoc b/docs/reference/cat/snapshots.asciidoc index 820c4b56c783..27fdc6eb486d 100644 --- a/docs/reference/cat/snapshots.asciidoc +++ b/docs/reference/cat/snapshots.asciidoc @@ -4,6 +4,12 @@ cat snapshots ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. 
+-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/tasks.asciidoc b/docs/reference/cat/tasks.asciidoc index 91d67baa72d7..6745381f6bab 100644 --- a/docs/reference/cat/tasks.asciidoc +++ b/docs/reference/cat/tasks.asciidoc @@ -6,6 +6,12 @@ beta::["The cat task management API is new and should still be considered a beta feature. The API may change in ways that are not backwards compatible.",{es-issue}51628] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/templates.asciidoc b/docs/reference/cat/templates.asciidoc index bcc8e9e4f5dc..624ae244d6f0 100644 --- a/docs/reference/cat/templates.asciidoc +++ b/docs/reference/cat/templates.asciidoc @@ -4,6 +4,12 @@ cat templates ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc index 948ed9a1a7a3..7e7d20b77fc7 100644 --- a/docs/reference/cat/thread_pool.asciidoc +++ b/docs/reference/cat/thread_pool.asciidoc @@ -4,6 +4,12 @@ cat thread pool ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/trainedmodel.asciidoc b/docs/reference/cat/trainedmodel.asciidoc index 5b20a0b6e842..ec571af4b9eb 100644 --- a/docs/reference/cat/trainedmodel.asciidoc +++ b/docs/reference/cat/trainedmodel.asciidoc @@ -5,6 +5,12 @@ cat trained model ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/cat/transforms.asciidoc b/docs/reference/cat/transforms.asciidoc index 53f22f02fbdb..475c09bcf984 100644 --- a/docs/reference/cat/transforms.asciidoc +++ b/docs/reference/cat/transforms.asciidoc @@ -5,6 +5,12 @@ cat transforms ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cat[Compact and aligned text (CAT) APIs]. +-- + [IMPORTANT] ==== cat APIs are only intended for human consumption using the command line or {kib} diff --git a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc index b510163bab50..18cf723901b5 100644 --- a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc @@ -5,6 +5,12 @@ Delete auto-follow pattern ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Delete {ccr} <>. 
[[ccr-delete-auto-follow-pattern-request]] diff --git a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc index a2969e993ddf..b4fd82008836 100644 --- a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc @@ -5,6 +5,12 @@ Get auto-follow pattern ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Get {ccr} <>. [[ccr-get-auto-follow-pattern-request]] diff --git a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc index c5ae5a7b4af9..2d70b8da92b4 100644 --- a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc @@ -5,6 +5,12 @@ Pause auto-follow pattern ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Pauses a {ccr} <>. [[ccr-pause-auto-follow-pattern-request]] diff --git a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc index 6769f21ca5ce..9b25bf174659 100644 --- a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc @@ -5,6 +5,12 @@ Create auto-follow pattern ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Creates a {ccr} <>. [[ccr-put-auto-follow-pattern-request]] diff --git a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc index a580bb3838f9..345b5e764547 100644 --- a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc @@ -5,6 +5,12 @@ Resume auto-follow pattern ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Resumes a {ccr} <>. [[ccr-resume-auto-follow-pattern-request]] diff --git a/docs/reference/ccr/apis/ccr-apis.asciidoc b/docs/reference/ccr/apis/ccr-apis.asciidoc index ae94e1931af8..21e859a0bff9 100644 --- a/docs/reference/ccr/apis/ccr-apis.asciidoc +++ b/docs/reference/ccr/apis/ccr-apis.asciidoc @@ -2,6 +2,12 @@ [[ccr-apis]] == {ccr-cap} APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + You can use the following APIs to perform <> operations. [discrete] diff --git a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc index 6c049d9c92b5..a913e2e2bd15 100644 --- a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc @@ -5,6 +5,12 @@ Get follower info ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Retrieves information about all <> follower indices. 
[[ccr-get-follow-info-request]] diff --git a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc index 4892f86b3523..ff0a72a8b234 100644 --- a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc @@ -5,6 +5,12 @@ Get follower stats ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Get <> follower stats. [[ccr-get-follow-stats-request]] diff --git a/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc b/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc index 1917c08d6640..b6f397647817 100644 --- a/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc +++ b/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc @@ -5,6 +5,12 @@ Forget follower ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Removes the <> follower retention leases from the leader. [[ccr-post-forget-follower-request]] diff --git a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc index 6d4730d10efe..f5ce00cbfccf 100644 --- a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc @@ -5,6 +5,12 @@ Pause follower ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Pauses a <> follower index. [[ccr-post-pause-follow-request]] diff --git a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc index b023a8cb5cb7..7bca84827be1 100644 --- a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc @@ -5,6 +5,12 @@ Resume follower ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Resumes a <> follower index. [[ccr-post-resume-follow-request]] diff --git a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc index dab11ef9e7a5..933e3c567446 100644 --- a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc @@ -5,6 +5,12 @@ Unfollow ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Converts a <> follower index to a regular index. [[ccr-post-unfollow-request]] diff --git a/docs/reference/ccr/apis/follow/put-follow.asciidoc b/docs/reference/ccr/apis/follow/put-follow.asciidoc index b7ae9ac98747..97a4948c43a7 100644 --- a/docs/reference/ccr/apis/follow/put-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/put-follow.asciidoc @@ -5,6 +5,12 @@ Create follower ++++ +..New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Creates a <> follower index. 
[[ccr-put-follow-request]] diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index 92e6bae0bdce..69dae2ca6966 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -6,6 +6,12 @@ Get {ccr-init} stats ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ccr[Cross-cluster replication APIs]. +-- + Get <> stats. [[ccr-get-stats-request]] diff --git a/docs/reference/cluster.asciidoc b/docs/reference/cluster.asciidoc index 3de386d3288c..b4359c2bf4fb 100644 --- a/docs/reference/cluster.asciidoc +++ b/docs/reference/cluster.asciidoc @@ -1,6 +1,12 @@ [[cluster]] == Cluster APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + ["float",id="cluster-nodes"] === Node specification diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index e640fa77c71e..aa7f091c8670 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -4,6 +4,12 @@ Cluster allocation explain ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Provides an explanation for a shard's current <>. [source,console] diff --git a/docs/reference/cluster/cluster-info.asciidoc b/docs/reference/cluster/cluster-info.asciidoc index 7d67f1602aea..2106fcfc6975 100644 --- a/docs/reference/cluster/cluster-info.asciidoc +++ b/docs/reference/cluster/cluster-info.asciidoc @@ -7,6 +7,12 @@ experimental::[] Cluster Info ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns cluster information. [[cluster-info-api-request]] diff --git a/docs/reference/cluster/delete-desired-balance.asciidoc b/docs/reference/cluster/delete-desired-balance.asciidoc index c67834269e50..49b555fd5005 100644 --- a/docs/reference/cluster/delete-desired-balance.asciidoc +++ b/docs/reference/cluster/delete-desired-balance.asciidoc @@ -6,6 +6,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Discards the current <> and computes a new desired balance starting from the current allocation of shards. This can sometimes help {es} find a desired balance which needs fewer shard movements to achieve, especially if the cluster has experienced changes so substantial that the current desired balance is no longer optimal without {es} having diff --git a/docs/reference/cluster/delete-desired-nodes.asciidoc b/docs/reference/cluster/delete-desired-nodes.asciidoc index a58d19e2dfa3..93e8f6484895 100644 --- a/docs/reference/cluster/delete-desired-nodes.asciidoc +++ b/docs/reference/cluster/delete-desired-nodes.asciidoc @@ -6,6 +6,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Delete desired nodes.
[[delete-desired-nodes-request]] diff --git a/docs/reference/cluster/get-desired-balance.asciidoc b/docs/reference/cluster/get-desired-balance.asciidoc index 74afdaa52daf..381f263a5ee7 100644 --- a/docs/reference/cluster/get-desired-balance.asciidoc +++ b/docs/reference/cluster/get-desired-balance.asciidoc @@ -6,6 +6,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Exposes: * the <> computation and reconciliation stats diff --git a/docs/reference/cluster/get-desired-nodes.asciidoc b/docs/reference/cluster/get-desired-nodes.asciidoc index de27bd657b3f..c4182a37b2d0 100644 --- a/docs/reference/cluster/get-desired-nodes.asciidoc +++ b/docs/reference/cluster/get-desired-nodes.asciidoc @@ -6,6 +6,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Get desired nodes. [[get-desired-nodes-request]] diff --git a/docs/reference/cluster/get-settings.asciidoc b/docs/reference/cluster/get-settings.asciidoc index 32c186e4ef24..0b577356aa76 100644 --- a/docs/reference/cluster/get-settings.asciidoc +++ b/docs/reference/cluster/get-settings.asciidoc @@ -4,6 +4,12 @@ Cluster get settings ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns cluster-wide settings. [source,console] diff --git a/docs/reference/cluster/health.asciidoc b/docs/reference/cluster/health.asciidoc index 94eb80a03d12..b24b408964d3 100644 --- a/docs/reference/cluster/health.asciidoc +++ b/docs/reference/cluster/health.asciidoc @@ -4,6 +4,12 @@ Cluster health ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns the health status of a cluster. [[cluster-health-api-request]] diff --git a/docs/reference/cluster/nodes-hot-threads.asciidoc b/docs/reference/cluster/nodes-hot-threads.asciidoc index 446a8fede69e..f78719772d5a 100644 --- a/docs/reference/cluster/nodes-hot-threads.asciidoc +++ b/docs/reference/cluster/nodes-hot-threads.asciidoc @@ -4,6 +4,12 @@ Nodes hot threads ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns the hot threads on each selected node in the cluster. [[cluster-nodes-hot-threads-api-request]] diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index 6f1d769e696c..32959eda3176 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -4,8 +4,13 @@ Nodes info ++++ -Returns cluster nodes information. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- +Returns cluster nodes information. [[cluster-nodes-info-api-request]] ==== {api-request-title} diff --git a/docs/reference/cluster/nodes-reload-secure-settings.asciidoc b/docs/reference/cluster/nodes-reload-secure-settings.asciidoc index f86304d2e9ba..0db189a4a396 100644 --- a/docs/reference/cluster/nodes-reload-secure-settings.asciidoc +++ b/docs/reference/cluster/nodes-reload-secure-settings.asciidoc @@ -4,6 +4,12 @@ Nodes reload secure settings ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs].
+-- + Reloads the keystore on nodes in the cluster. [[cluster-nodes-reload-secure-settings-api-request]] diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index adf8229712ec..b1c6d3ab466a 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -5,6 +5,12 @@ Nodes stats ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns cluster nodes statistics. [[cluster-nodes-stats-api-request]] diff --git a/docs/reference/cluster/nodes-usage.asciidoc b/docs/reference/cluster/nodes-usage.asciidoc index 486edf67bba8..58815b86a881 100644 --- a/docs/reference/cluster/nodes-usage.asciidoc +++ b/docs/reference/cluster/nodes-usage.asciidoc @@ -4,8 +4,13 @@ Nodes feature usage ++++ -Returns information on the usage of features. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- +Returns information on the usage of features. [[cluster-nodes-usage-api-request]] ==== {api-request-title} diff --git a/docs/reference/cluster/pending.asciidoc b/docs/reference/cluster/pending.asciidoc index 3e87234c7d26..dd5146d0f04d 100644 --- a/docs/reference/cluster/pending.asciidoc +++ b/docs/reference/cluster/pending.asciidoc @@ -4,6 +4,12 @@ Pending cluster tasks ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns cluster-level changes that have not yet been executed. diff --git a/docs/reference/cluster/prevalidate-node-removal.asciidoc b/docs/reference/cluster/prevalidate-node-removal.asciidoc index 16bf28c58668..d2c2fe562d56 100644 --- a/docs/reference/cluster/prevalidate-node-removal.asciidoc +++ b/docs/reference/cluster/prevalidate-node-removal.asciidoc @@ -6,6 +6,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Prevalidate node removal. [[prevalidate-node-removal-api-request]] diff --git a/docs/reference/cluster/remote-info.asciidoc b/docs/reference/cluster/remote-info.asciidoc index 8f2923846df0..bbfc44b6b555 100644 --- a/docs/reference/cluster/remote-info.asciidoc +++ b/docs/reference/cluster/remote-info.asciidoc @@ -4,8 +4,13 @@ Remote cluster info ++++ -Returns configured remote cluster information. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- +Returns configured remote cluster information. [[cluster-remote-info-api-request]] ==== {api-request-title} diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc index 429070f80b9b..65ecb47dc7cb 100644 --- a/docs/reference/cluster/reroute.asciidoc +++ b/docs/reference/cluster/reroute.asciidoc @@ -4,8 +4,13 @@ Cluster reroute ++++ -Changes the allocation of shards in a cluster. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- +Changes the allocation of shards in a cluster.
[[cluster-reroute-api-request]] ==== {api-request-title} diff --git a/docs/reference/cluster/state.asciidoc b/docs/reference/cluster/state.asciidoc index fcb2f5f2f5dc..cda375c514c3 100644 --- a/docs/reference/cluster/state.asciidoc +++ b/docs/reference/cluster/state.asciidoc @@ -4,6 +4,12 @@ Cluster state ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns an internal representation of the cluster state for debugging or diagnostic purposes. diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index d875417bde51..278ac8ec7821 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -5,6 +5,12 @@ Cluster stats ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Returns cluster statistics. [[cluster-stats-api-request]] diff --git a/docs/reference/cluster/tasks.asciidoc b/docs/reference/cluster/tasks.asciidoc index 4b32d5f1b903..4d8703900337 100644 --- a/docs/reference/cluster/tasks.asciidoc +++ b/docs/reference/cluster/tasks.asciidoc @@ -6,6 +6,12 @@ beta::["The task management API is new and should still be considered a beta feature. The API may change in ways that are not backwards compatible.",{es-issue}51628] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-tasks[task management APIs]. +-- + Returns information about the tasks currently executing in the cluster. [[tasks-api-request]] diff --git a/docs/reference/cluster/update-desired-nodes.asciidoc b/docs/reference/cluster/update-desired-nodes.asciidoc index c72a2b53208e..edcaeb745e7a 100644 --- a/docs/reference/cluster/update-desired-nodes.asciidoc +++ b/docs/reference/cluster/update-desired-nodes.asciidoc @@ -6,6 +6,12 @@ NOTE: {cloud-only} +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Creates or updates the desired nodes. [[update-desired-nodes-request]] diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 3d8bdcca07e2..20dfef1bea76 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -4,8 +4,13 @@ Cluster update settings ++++ -Configures <>. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- +Configures <>. [[cluster-update-settings-api-request]] ==== {api-request-title} diff --git a/docs/reference/cluster/voting-exclusions.asciidoc b/docs/reference/cluster/voting-exclusions.asciidoc index 55587a7010f8..f1aa03c6550b 100644 --- a/docs/reference/cluster/voting-exclusions.asciidoc +++ b/docs/reference/cluster/voting-exclusions.asciidoc @@ -4,6 +4,12 @@ Voting configuration exclusions ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-cluster[Cluster APIs]. +-- + Adds or removes master-eligible nodes from the <>.
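As a quick orientation to the two endpoints that page documents, here is a minimal, non-normative sketch of the round trip; the `node_names` value `node-1` is a placeholder and not part of this patch:

[source,console]
--------------------------------------------------
# Exclude a master-eligible node from the voting configuration
POST /_cluster/voting_config_exclusions?node_names=node-1

# Clear all voting configuration exclusions once the topology change is done
DELETE /_cluster/voting_config_exclusions
--------------------------------------------------

The `POST` call bars the named node from the voting configuration before you take it offline; the `DELETE` call removes all exclusions afterwards.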
diff --git a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc index 8cd8f71dc7ae..0c3b7350d0aa 100644 --- a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Cancels a connector sync job. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/check-in-connector-api.asciidoc b/docs/reference/connector/apis/check-in-connector-api.asciidoc index 15e65b10074d..808f835a7484 100644 --- a/docs/reference/connector/apis/check-in-connector-api.asciidoc +++ b/docs/reference/connector/apis/check-in-connector-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `last_seen` field of a connector with current timestamp. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc index 8d7d0a36ad88..1aa5762e8e6f 100644 --- a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Checks in a connector sync job (updates `last_seen` to the current time). To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/claim-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/claim-connector-sync-job-api.asciidoc index 62491582ce75..c8de1efa07f2 100644 --- a/docs/reference/connector/apis/claim-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/claim-connector-sync-job-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Claims a connector sync job. The `_claim` endpoint is not intended for direct connector management by users. It is there to support the implementation of services that utilize the https://github.com/elastic/connectors/blob/main/docs/CONNECTOR_PROTOCOL.md[Connector Protocol] to communicate with {es}. diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index 15ce31a60598..33b58abd193b 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -1,6 +1,12 @@ [[connector-apis]] == Connector APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + beta::[] The connector and sync jobs APIs provide a convenient way to create and manage Elastic <>.
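To ground the per-endpoint hunks that follow, here is a hedged sketch of the most basic call in this family, creating a connector; the connector ID `my-connector`, the index name, and the `service_type` value are illustrative placeholders, not part of the patch:

[source,console]
--------------------------------------------------
PUT _connector/my-connector
{
  "index_name": "search-my-data",
  "name": "My example connector",
  "service_type": "google_drive"
}
--------------------------------------------------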
diff --git a/docs/reference/connector/apis/create-connector-api.asciidoc b/docs/reference/connector/apis/create-connector-api.asciidoc index 3ecef6d30273..3b7ffd4b4255 100644 --- a/docs/reference/connector/apis/create-connector-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Creates an Elastic connector. Connectors are {es} integrations that bring content from third-party data sources, which can be deployed on {ecloud} or hosted on your own infrastructure: diff --git a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc index 240ab696954f..58608bfd7fbd 100644 --- a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc @@ -6,6 +6,11 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- Creates a connector sync job. diff --git a/docs/reference/connector/apis/delete-connector-api.asciidoc b/docs/reference/connector/apis/delete-connector-api.asciidoc index 76621d7f1843..ae2c2ac3b424 100644 --- a/docs/reference/connector/apis/delete-connector-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Removes a connector and associated sync jobs. This is a destructive action that is not recoverable. diff --git a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc index eeea40f430ab..67666392ad19 100644 --- a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Removes a connector sync job and its associated data. This is a destructive action that is not recoverable. diff --git a/docs/reference/connector/apis/get-connector-api.asciidoc b/docs/reference/connector/apis/get-connector-api.asciidoc index 302773e0af83..50ec84617b9d 100644 --- a/docs/reference/connector/apis/get-connector-api.asciidoc +++ b/docs/reference/connector/apis/get-connector-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Retrieves the details about a connector. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc index a524c1291c26..e30b0e4ec98d 100644 --- a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Retrieves the details about a connector sync job. To get started with Connector APIs, check out <>.
diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc index 4a4fa5a22dcc..9bcf9fbb4b8e 100644 --- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc +++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc @@ -7,6 +7,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Returns information about all stored connector sync jobs ordered by their creation date in ascending order. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/list-connectors-api.asciidoc b/docs/reference/connector/apis/list-connectors-api.asciidoc index 4a93ecf2b010..9e7ea17bfaa3 100644 --- a/docs/reference/connector/apis/list-connectors-api.asciidoc +++ b/docs/reference/connector/apis/list-connectors-api.asciidoc @@ -7,6 +7,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Returns information about all created connectors. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc index e6ad9e8cc93d..2a29cd9ba54f 100644 --- a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc +++ b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Sets a connector sync job error. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc index 7e22f657ba6b..63c4a0ee152b 100644 --- a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc +++ b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Sets connector sync job stats. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc b/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc index fbd3f887758f..440b8025138f 100644 --- a/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `api_key_id` and/or `api_key_secret_id` field(s) of a connector, specifying: .
The ID of the API key used for authorization diff --git a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc index 4b25f9e71ae4..a987b03c7674 100644 --- a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates a connector's `configuration`, allowing for config value updates within a registered configuration schema. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/update-connector-error-api.asciidoc b/docs/reference/connector/apis/update-connector-error-api.asciidoc index 29358b243041..278f9377b91d 100644 --- a/docs/reference/connector/apis/update-connector-error-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-error-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `error` field of a connector. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/update-connector-features-api.asciidoc b/docs/reference/connector/apis/update-connector-features-api.asciidoc index 77571fcd7d5a..d9da7b767e47 100644 --- a/docs/reference/connector/apis/update-connector-features-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-features-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Manages the `features` of a connector. This endpoint can be used to control the following aspects of a connector: * document-level security diff --git a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc index 4820fa151901..3cebee37b533 100644 --- a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the draft `filtering` configuration of a connector and marks the draft validation state as `edited`. The filtering draft is activated once validated by the running Elastic connector service. The filtering property is used to configure sync rules (both basic and advanced) for a connector. Learn more in the <>. diff --git a/docs/reference/connector/apis/update-connector-index-name-api.asciidoc b/docs/reference/connector/apis/update-connector-index-name-api.asciidoc index 6222baf6a6ca..7a7e9fc535ef 100644 --- a/docs/reference/connector/apis/update-connector-index-name-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-index-name-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `index_name` field of a connector, specifying the index where the data ingested by the connector is stored. To get started with Connector APIs, check out <>.
diff --git a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc index 17f892d852f4..9fa3aeaa3a56 100644 --- a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the fields related to the last sync of a connector. This action is used for analytics and monitoring. diff --git a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc index 384cec2c73e2..0a0a63567706 100644 --- a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc @@ -6,6 +6,11 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- Updates the `name` and `description` fields of a connector. diff --git a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc index e54b01ec47d0..666897a5f9a8 100644 --- a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `pipeline` configuration of a connector. When you create a new connector, the configuration of an <> is populated with default settings. diff --git a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc index 64302c26a723..af3b353eaf94 100644 --- a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `scheduling` configuration of a connector. To get started with Connector APIs, check out <>. diff --git a/docs/reference/connector/apis/update-connector-service-type-api.asciidoc b/docs/reference/connector/apis/update-connector-service-type-api.asciidoc index c02967d03e2d..e92f8cd96b4c 100644 --- a/docs/reference/connector/apis/update-connector-service-type-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-service-type-api.asciidoc @@ -6,6 +6,12 @@ beta::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `service_type` of a connector. To get started with Connector APIs, check out <>.
diff --git a/docs/reference/connector/apis/update-connector-status-api.asciidoc b/docs/reference/connector/apis/update-connector-status-api.asciidoc index dadd93fe5f9c..910cc32605db 100644 --- a/docs/reference/connector/apis/update-connector-status-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-status-api.asciidoc @@ -6,6 +6,12 @@ preview::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-connector[Connector APIs]. +-- + Updates the `status` of a connector. To get started with Connector APIs, check out <>. diff --git a/docs/reference/data-streams/data-stream-apis.asciidoc b/docs/reference/data-streams/data-stream-apis.asciidoc index c13703ab2a6e..66662ef4a218 100644 --- a/docs/reference/data-streams/data-stream-apis.asciidoc +++ b/docs/reference/data-streams/data-stream-apis.asciidoc @@ -2,6 +2,12 @@ [[data-stream-apis]] == Data stream APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + The following APIs are available for managing <>: * <> diff --git a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc index 315f7fa85e45..70e901bb43b9 100644 --- a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc @@ -4,6 +4,12 @@ Delete Data Stream Lifecycle ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Deletes the <> from a set of data streams. [[delete-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc index 2b15886ebe19..0a76114abfa0 100644 --- a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc @@ -4,6 +4,12 @@ Explain Data Stream Lifecycle ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Retrieves the current <> status for one or more data stream backing indices. [[explain-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc index f48fa1eb52da..ba33574b4977 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc @@ -4,6 +4,12 @@ Get Data Stream Lifecycle ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Gets stats about the execution of <>. [[get-lifecycle-stats-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc index 6323fac1eac2..32e68c0c3c31 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc @@ -4,6 +4,12 @@ Get Data Stream Lifecycle ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs].
+-- + Gets the <> of a set of <>. [[get-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc index c60c105e818a..e2eb52959c95 100644 --- a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc @@ -4,6 +4,12 @@ Put Data Stream Lifecycle ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Configures the data stream <> for the targeted <>. [[put-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/modify-data-streams-api.asciidoc b/docs/reference/data-streams/modify-data-streams-api.asciidoc index 2da869083df2..082034213467 100644 --- a/docs/reference/data-streams/modify-data-streams-api.asciidoc +++ b/docs/reference/data-streams/modify-data-streams-api.asciidoc @@ -4,6 +4,12 @@ Modify data streams ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Performs one or more <> modification actions in a single atomic operation. diff --git a/docs/reference/data-streams/promote-data-stream-api.asciidoc b/docs/reference/data-streams/promote-data-stream-api.asciidoc index 5ba9c4d9fad0..c8bb575e399f 100644 --- a/docs/reference/data-streams/promote-data-stream-api.asciidoc +++ b/docs/reference/data-streams/promote-data-stream-api.asciidoc @@ -5,6 +5,12 @@ Promote data stream ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + The purpose of the promote <> API is to turn a data stream that is replicated by CCR into a regular data stream. diff --git a/docs/reference/docs.asciidoc b/docs/reference/docs.asciidoc index ff2d823410a6..34662401842f 100644 --- a/docs/reference/docs.asciidoc +++ b/docs/reference/docs.asciidoc @@ -1,6 +1,12 @@ [[docs]] == Document APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + This section starts with a short introduction to {es}'s <>, followed by a detailed description of the following CRUD APIs: diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 6edccfcdb13f..e2093dbf4ff3 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -4,6 +4,12 @@ Bulk ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Performs multiple indexing or delete operations in a single API call. This reduces overhead and can greatly increase indexing speed. diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc index 452d7f7758bf..966561909663 100644 --- a/docs/reference/docs/delete.asciidoc +++ b/docs/reference/docs/delete.asciidoc @@ -4,6 +4,12 @@ Delete ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Removes a JSON document from the specified index.
[[docs-delete-api-request]] diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index c71215fff8d7..bdbcb08e186c 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -4,6 +4,12 @@ Get ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Retrieves the specified JSON document from an index. [source,console] diff --git a/docs/reference/docs/multi-get.asciidoc b/docs/reference/docs/multi-get.asciidoc index 7c3eafa9c79f..b27bd6a97d53 100644 --- a/docs/reference/docs/multi-get.asciidoc +++ b/docs/reference/docs/multi-get.asciidoc @@ -4,6 +4,12 @@ Multi get ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Retrieves multiple JSON documents by ID. [source,console] diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index dc27e40ecd90..26e4d1ab67c5 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -4,6 +4,12 @@ Reindex ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Copies documents from a source to a destination. The source can be any existing index, alias, or data stream. The destination diff --git a/docs/reference/docs/termvectors.asciidoc b/docs/reference/docs/termvectors.asciidoc index 31dfba1ac266..285db46fd4d2 100644 --- a/docs/reference/docs/termvectors.asciidoc +++ b/docs/reference/docs/termvectors.asciidoc @@ -4,6 +4,12 @@ Term vectors ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Retrieves information and statistics for terms in the fields of a particular document. [source,console] diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index d470080fc602..c5fccc86a1cc 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -4,6 +4,12 @@ Update by query ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Updates documents that match the specified query. If no query is specified, performs an update on every document in the data stream or index without modifying the source, which is useful for picking up mapping changes. diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index a212c4e152b0..619cc921c4d6 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -4,6 +4,12 @@ Update ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-document[Document APIs]. +-- + Updates a document using the specified script. [[docs-update-api-request]] diff --git a/docs/reference/eql/delete-async-eql-search-api.asciidoc b/docs/reference/eql/delete-async-eql-search-api.asciidoc index 2f52a1a9cb23..f3c516188c25 100644 --- a/docs/reference/eql/delete-async-eql-search-api.asciidoc +++ b/docs/reference/eql/delete-async-eql-search-api.asciidoc @@ -6,6 +6,12 @@ Delete async EQL search ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-eql[EQL APIs]. +-- + Deletes an <> or a <>.
The API also deletes results for the search. diff --git a/docs/reference/eql/eql-apis.asciidoc b/docs/reference/eql/eql-apis.asciidoc index e8cc2b21492a..0bf7462d8f4d 100644 --- a/docs/reference/eql/eql-apis.asciidoc +++ b/docs/reference/eql/eql-apis.asciidoc @@ -1,6 +1,12 @@ [[eql-apis]] == EQL APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-eql[EQL APIs]. +-- + <> is a query language for event-based time series data, such as logs, metrics, and traces. For an overview of EQL and related tutorials, see <>. diff --git a/docs/reference/eql/eql-search-api.asciidoc b/docs/reference/eql/eql-search-api.asciidoc index 0fd490609277..a4928ce82f56 100644 --- a/docs/reference/eql/eql-search-api.asciidoc +++ b/docs/reference/eql/eql-search-api.asciidoc @@ -6,6 +6,12 @@ EQL search ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-eql[EQL APIs]. +-- + Returns search results for an <> query. EQL assumes each document in a data stream or index corresponds to an diff --git a/docs/reference/eql/get-async-eql-search-api.asciidoc b/docs/reference/eql/get-async-eql-search-api.asciidoc index 2ac4271b5b98..36f18a7061bc 100644 --- a/docs/reference/eql/get-async-eql-search-api.asciidoc +++ b/docs/reference/eql/get-async-eql-search-api.asciidoc @@ -6,6 +6,12 @@ Get async EQL search ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-eql[EQL APIs]. +-- + Returns the current status and available results for an <> or a <>. diff --git a/docs/reference/eql/get-async-eql-status-api.asciidoc b/docs/reference/eql/get-async-eql-status-api.asciidoc index 908a65773f6a..fe188ec9112e 100644 --- a/docs/reference/eql/get-async-eql-status-api.asciidoc +++ b/docs/reference/eql/get-async-eql-status-api.asciidoc @@ -5,6 +5,13 @@ ++++ Get async EQL search status ++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-eql[EQL APIs]. +-- + Returns the current status for an <> or a <> without returning results. This is a more lightweight API than diff --git a/docs/reference/esql/esql-apis.asciidoc b/docs/reference/esql/esql-apis.asciidoc index 8586cd1ae6bc..57747d3e357e 100644 --- a/docs/reference/esql/esql-apis.asciidoc +++ b/docs/reference/esql/esql-apis.asciidoc @@ -1,6 +1,12 @@ [[esql-apis]] == {esql} APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + The <> provides a powerful way to filter, transform, and analyze data stored in {es}, and in the future in other runtimes. For an overview of {esql} and related tutorials, see <>. diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 6cd23fc524f9..76597822a3fa 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -4,6 +4,12 @@ {esql} async query API ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + Runs an async <>.
The async query API lets you asynchronously execute a query request, diff --git a/docs/reference/esql/esql-async-query-delete-api.asciidoc b/docs/reference/esql/esql-async-query-delete-api.asciidoc index 5cad566f7f9c..a3f21ad81e44 100644 --- a/docs/reference/esql/esql-async-query-delete-api.asciidoc +++ b/docs/reference/esql/esql-async-query-delete-api.asciidoc @@ -4,6 +4,12 @@ {esql} async query delete API ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + The <> async query delete API is used to manually delete an async query by ID. If the query is still running, the query will be cancelled. Otherwise, the stored results are deleted. diff --git a/docs/reference/esql/esql-async-query-get-api.asciidoc b/docs/reference/esql/esql-async-query-get-api.asciidoc index 82a6ae5b28b5..54d62ede608e 100644 --- a/docs/reference/esql/esql-async-query-get-api.asciidoc +++ b/docs/reference/esql/esql-async-query-get-api.asciidoc @@ -4,6 +4,12 @@ {esql} async query get API ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + Returns the current status and available results for an <> or a stored results. diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index 8e07a627567d..689a534facab 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -4,6 +4,12 @@ {esql} query API ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + Returns search results for an <> query. [source,console] diff --git a/docs/reference/features/apis/features-apis.asciidoc b/docs/reference/features/apis/features-apis.asciidoc index fe06471cff0d..94055a0a41d7 100644 --- a/docs/reference/features/apis/features-apis.asciidoc +++ b/docs/reference/features/apis/features-apis.asciidoc @@ -1,6 +1,12 @@ [[features-apis]] == Features APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-features[Features APIs]. +-- + You can use the following APIs to introspect and manage Features provided by Elasticsearch and Elasticsearch plugins. diff --git a/docs/reference/features/apis/get-features-api.asciidoc b/docs/reference/features/apis/get-features-api.asciidoc index 676ec3c41da2..3df7ccb53689 100644 --- a/docs/reference/features/apis/get-features-api.asciidoc +++ b/docs/reference/features/apis/get-features-api.asciidoc @@ -4,6 +4,12 @@ Get features ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-features[Features APIs]. +-- + Gets a list of features which can be included in snapshots using the <> when creating a snapshot. diff --git a/docs/reference/features/apis/reset-features-api.asciidoc b/docs/reference/features/apis/reset-features-api.asciidoc index 36ff12cf0fc3..5223315caa56 100644 --- a/docs/reference/features/apis/reset-features-api.asciidoc +++ b/docs/reference/features/apis/reset-features-api.asciidoc @@ -6,6 +6,12 @@ experimental::[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-features[Features APIs]. +-- + Clears all of the state information stored in system indices by {es} features, including the security and machine learning indices. WARNING: Intended for development and testing use only.
Do not reset features on a production cluster. diff --git a/docs/reference/fleet/fleet-multi-search.asciidoc b/docs/reference/fleet/fleet-multi-search.asciidoc index 3ee6b67b06ba..eaec76ebdd83 100644 --- a/docs/reference/fleet/fleet-multi-search.asciidoc +++ b/docs/reference/fleet/fleet-multi-search.asciidoc @@ -5,6 +5,12 @@ Fleet multi search ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-fleet[{fleet} APIs]. +-- + Executes several <> with a single API request. The API follows the same structure as the <> API. However, diff --git a/docs/reference/fleet/fleet-search.asciidoc b/docs/reference/fleet/fleet-search.asciidoc index 961846385969..10e77d9e6652 100644 --- a/docs/reference/fleet/fleet-search.asciidoc +++ b/docs/reference/fleet/fleet-search.asciidoc @@ -5,6 +5,12 @@ Fleet search ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-fleet[{fleet} APIs]. +-- + The purpose of the fleet search api is to provide a search api where the search will only be executed after provided checkpoint has been processed and is visible for searches inside of Elasticsearch. diff --git a/docs/reference/fleet/index.asciidoc b/docs/reference/fleet/index.asciidoc index b22609aff32e..bdffee227029 100644 --- a/docs/reference/fleet/index.asciidoc +++ b/docs/reference/fleet/index.asciidoc @@ -2,6 +2,12 @@ [[fleet-apis]] == Fleet APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-fleet[{fleet} APIs]. +-- + TIP: For the {kib} {fleet} APIs, see the {fleet-guide}/fleet-api-docs.html[Fleet API Documentation]. diff --git a/docs/reference/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc index 34ac367125ad..1f5faeda76ef 100644 --- a/docs/reference/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -2,6 +2,12 @@ [[graph-explore-api]] == Graph explore API +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-graph[Graph explore APIs]. +-- + The graph explore API enables you to extract and summarize information about the documents and terms in an {es} data stream or index. diff --git a/docs/reference/health/health.asciidoc b/docs/reference/health/health.asciidoc index 34714e80e1b1..7077cb28f894 100644 --- a/docs/reference/health/health.asciidoc +++ b/docs/reference/health/health.asciidoc @@ -4,6 +4,12 @@ Health ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-health_report[Cluster health APIs]. +-- + An API that reports the health status of an {es} cluster. [[health-api-request]] diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc index 0ef55d780887..2db03a19b353 100644 --- a/docs/reference/how-to/search-speed.asciidoc +++ b/docs/reference/how-to/search-speed.asciidoc @@ -567,3 +567,8 @@ also possible to update the client-side logic in order to route queries to the relevant indices based on filters. However `constant_keyword` makes it transparently and allows to decouple search requests from the index topology in exchange of very little overhead. + +[discrete] +=== Default search timeout + +By default, search requests don't time out. You can set a timeout using the <> setting.
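As a sketch of that new section in practice, assuming the elided cross-reference points at the dynamic `search.default_search_timeout` cluster setting (the `30s` value is an arbitrary example):

[source,console]
--------------------------------------------------
PUT /_cluster/settings
{
  "persistent": {
    "search.default_search_timeout": "30s"
  }
}
--------------------------------------------------

Individual search requests can still override the cluster-wide default by passing their own `timeout` parameter.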
diff --git a/docs/reference/ilm/apis/ilm-api.asciidoc b/docs/reference/ilm/apis/ilm-api.asciidoc index 149ba2a6b449..3a6c8430bb4c 100644 --- a/docs/reference/ilm/apis/ilm-api.asciidoc +++ b/docs/reference/ilm/apis/ilm-api.asciidoc @@ -1,6 +1,12 @@ [[index-lifecycle-management-api]] == {ilm-cap} APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-ilm[{ilm-cap} APIs]. +-- + You use the following APIs to set up policies to automatically manage the index lifecycle. For more information about {ilm} ({ilm-init}), see <>. diff --git a/docs/reference/index-modules/slowlog.asciidoc b/docs/reference/index-modules/slowlog.asciidoc index c29296b59ad4..e848668c1a66 100644 --- a/docs/reference/index-modules/slowlog.asciidoc +++ b/docs/reference/index-modules/slowlog.asciidoc @@ -1,15 +1,118 @@ [[index-modules-slowlog]] -== Slow Log +== Slow log + +The slow log records database searching and indexing events that have execution durations above specified thresholds. You can use these logs to investigate, analyze, or troubleshoot your cluster's historical search and indexing performance. + +Slow logs report task duration at the shard level for searches, and at the index level +for indexing, but might not encompass the full task execution time observed on the client. For example, slow logs don't surface HTTP network delays or the impact of <>. + +Events that meet the specified threshold are emitted into <> under the `fileset.name` of `slowlog`. These logs can be viewed in the following locations: + +* If <> is enabled, from +{kibana-ref}/xpack-monitoring.html[Stack Monitoring]. Slow log events have a `logger` value of `index.search.slowlog` or `index.indexing.slowlog`. + +* From the local {es} service logs directory. Slow log files have a suffix of `_index_search_slowlog.json` or `_index_indexing_slowlog.json`. + +[discrete] +[[slow-log-format]] +=== Slow log format + +The following is an example of a search event in the slow log: + +TIP: If a call was initiated with an `X-Opaque-ID` header, then the ID is automatically included in Search slow logs in the **elasticsearch.slowlog.id** field. See <> for details and best practices.
+ +[source,js] +--------------------------- +{ + "@timestamp": "2024-12-21T12:42:37.255Z", + "auth.type": "REALM", + "ecs.version": "1.2.0", + "elasticsearch.cluster.name": "distribution_run", + "elasticsearch.cluster.uuid": "Ui23kfF1SHKJwu_hI1iPPQ", + "elasticsearch.node.id": "JK-jn-XpQ3OsDUsq5ZtfGg", + "elasticsearch.node.name": "node-0", + "elasticsearch.slowlog.id": "tomcat-123", + "elasticsearch.slowlog.message": "[index6][0]", + "elasticsearch.slowlog.search_type": "QUERY_THEN_FETCH", + "elasticsearch.slowlog.source": "{\"query\":{\"match_all\":{\"boost\":1.0}}}", + "elasticsearch.slowlog.stats": "[]", + "elasticsearch.slowlog.took": "747.3micros", + "elasticsearch.slowlog.took_millis": 0, + "elasticsearch.slowlog.total_hits": "1 hits", + "elasticsearch.slowlog.total_shards": 1, + "event.dataset": "elasticsearch.index_search_slowlog", + "fileset.name" : "slowlog", + "log.level": "WARN", + "log.logger": "index.search.slowlog.query", + "process.thread.name": "elasticsearch[runTask-0][search][T#5]", + "service.name": "ES_ECS", + "user.name": "elastic", + "user.realm": "reserved" +} + +--------------------------- +// NOTCONSOLE + + +The following is an example of an indexing event in the slow log: + +[source,js] +--------------------------- +{ + "@timestamp" : "2024-12-11T22:34:22.613Z", + "auth.type": "REALM", + "ecs.version": "1.2.0", + "elasticsearch.cluster.name" : "41bd111609d849fc9bf9d25b5df9ce96", + "elasticsearch.cluster.uuid" : "BZTn4I9URXSK26imlia0QA", + "elasticsearch.index.id" : "3VfGR7wRRRKmMCEn7Ii58g", + "elasticsearch.index.name": "my-index-000001", + "elasticsearch.node.id" : "GGiBgg21S3eqPDHzQiCMvQ", + "elasticsearch.node.name" : "instance-0000000001", + "elasticsearch.slowlog.id" : "RCHbt5MBT0oSsCOu54AJ", + "elasticsearch.slowlog.source": "{\"key\":\"value\"}", + "elasticsearch.slowlog.took" : "0.01ms", + "event.dataset": "elasticsearch.index_indexing_slowlog", + "fileset.name" : "slowlog", + "log.level" : "TRACE", + "log.logger" : "index.indexing.slowlog.index", + "service.name" : "ES_ECS", + "user.name": "elastic", + "user.realm": "reserved" +} + +--------------------------- +// NOTCONSOLE + +[discrete] +[[enable-slow-log]] +=== Enable slow logging + +You can enable slow logging at two levels: + +* For all indices under the <>. This method requires a node restart. +* At the index level, using the <>. + +By default, all thresholds are set to `-1`, which results in no events being logged. + +Slow log thresholds can be enabled for the four logging levels: `trace`, `debug`, `info`, and `warn`. You can mimic setting log level thresholds by disabling more verbose levels. + +To view the current slow log settings, use the <>: + +[source,console] +-------------------------------------------------- +GET _all/_settings?expand_wildcards=all&filter_path=*.settings.index.*.slowlog +-------------------------------------------------- [discrete] [[search-slow-log]] -=== Search Slow Log +==== Enable slow logging for search events -Shard level slow search log allows to log slow search (query and fetch -phases) into a dedicated log file. +Search slow logs emit per shard. They must be enabled separately for the shard's link:https://www.elastic.co/blog/understanding-query-then-fetch-vs-dfs-query-then-fetch[query and fetch search phases]. -Thresholds can be set for both the query phase of the execution, and -fetch phase, here is a sample: +You can use the `index.search.slowlog.include.user` setting to append `user.*` and `auth.type` fields to slow log entries.
These fields contain information about the user who triggered the request. + +The following snippet adjusts all available search slow log settings across all indices using the +<>: [source,yaml] -------------------------------------------------- @@ -22,10 +125,11 @@ index.search.slowlog.threshold.fetch.warn: 1s index.search.slowlog.threshold.fetch.info: 800ms index.search.slowlog.threshold.fetch.debug: 500ms index.search.slowlog.threshold.fetch.trace: 200ms + +index.search.slowlog.include.user: true -------------------------------------------------- -All of the above settings are _dynamic_ and can be set for each index using the -<> API. For example: +The following snippet adjusts the same settings for a single index using the <>: [source,console] -------------------------------------------------- @@ -38,84 +142,23 @@ PUT /my-index-000001/_settings "index.search.slowlog.threshold.fetch.warn": "1s", "index.search.slowlog.threshold.fetch.info": "800ms", "index.search.slowlog.threshold.fetch.debug": "500ms", - "index.search.slowlog.threshold.fetch.trace": "200ms" -} --------------------------------------------------- -// TEST[setup:my_index] - -By default thresholds are disabled (set to `-1`). - -The logging is done on the shard level scope, meaning the execution of a -search request within a specific shard. It does not encompass the whole -search request, which can be broadcast to several shards in order to -execute. Some of the benefits of shard level logging is the association -of the actual execution on the specific machine, compared with request -level. - - -The search slow log file is configured in the `log4j2.properties` file. - -[discrete] -==== Identifying search slow log origin - -It is often useful to identify what triggered a slow running query. -To include information about the user that triggered a slow search, -use the `index.search.slowlog.include.user` setting. - -[source,console] --------------------------------------------------- -PUT /my-index-000001/_settings -{ + "index.search.slowlog.threshold.fetch.trace": "200ms", "index.search.slowlog.include.user": true } -------------------------------------------------- // TEST[setup:my_index] -This will result in user information being included in the slow log. - -[source,js] ---------------------------- -{ - "@timestamp": "2024-02-21T12:42:37.255Z", - "log.level": "WARN", - "auth.type": "REALM", - "elasticsearch.slowlog.id": "tomcat-123", - "elasticsearch.slowlog.message": "[index6][0]", - "elasticsearch.slowlog.search_type": "QUERY_THEN_FETCH", - "elasticsearch.slowlog.source": "{\"query\":{\"match_all\":{\"boost\":1.0}}}", - "elasticsearch.slowlog.stats": "[]", - "elasticsearch.slowlog.took": "747.3micros", - "elasticsearch.slowlog.took_millis": 0, - "elasticsearch.slowlog.total_hits": "1 hits", - "elasticsearch.slowlog.total_shards": 1, - "user.name": "elastic", - "user.realm": "reserved", - "ecs.version": "1.2.0", - "service.name": "ES_ECS", - "event.dataset": "elasticsearch.index_search_slowlog", - "process.thread.name": "elasticsearch[runTask-0][search][T#5]", - "log.logger": "index.search.slowlog.query", - "elasticsearch.cluster.uuid": "Ui23kfF1SHKJwu_hI1iPPQ", - "elasticsearch.node.id": "JK-jn-XpQ3OsDUsq5ZtfGg", - "elasticsearch.node.name": "node-0", - "elasticsearch.cluster.name": "distribution_run" -} - ---------------------------- -// NOTCONSOLE - -If a call was initiated with an `X-Opaque-ID` header, then the ID is included -in Search Slow logs in the **elasticsearch.slowlog.id** field. 
See -<> for details and best practices. [discrete] [[index-slow-log]] -=== Index Slow log +==== Enable slow logging for indexing events -The indexing slow log, similar in functionality to the search slow -log. The log file name ends with `_index_indexing_slowlog.json`. Log and -the thresholds are configured in the same way as the search slowlog. -Index slowlog sample: +Indexing slow logs are emitted per indexed document. + +You can use the `index.indexing.slowlog.include.user` setting to append `user.*` and `auth.type` fields to slow log entries. These fields contain information about the user who triggered the request. + +The following snippet adjusts all available indexing slow log settings across all indices using the +<>: [source,yaml] -------------------------------------------------- @@ -123,11 +166,15 @@ index.indexing.slowlog.threshold.index.warn: 10s index.indexing.slowlog.threshold.index.info: 5s index.indexing.slowlog.threshold.index.debug: 2s index.indexing.slowlog.threshold.index.trace: 500ms + index.indexing.slowlog.source: 1000 +index.indexing.slowlog.reformat: true + +index.indexing.slowlog.include.user: true -------------------------------------------------- -All of the above settings are _dynamic_ and can be set for each index using the -<> API. For example: + +The following snippet adjusts the same settings for a single index using the <>: [source,console] -------------------------------------------------- @@ -137,39 +184,67 @@ PUT /my-index-000001/_settings "index.indexing.slowlog.threshold.index.info": "5s", "index.indexing.slowlog.threshold.index.debug": "2s", "index.indexing.slowlog.threshold.index.trace": "500ms", - "index.indexing.slowlog.source": "1000" -} --------------------------------------------------- -// TEST[setup:my_index] - -To include information about the user that triggered a slow indexing event, -use the `index.indexing.slowlog.include.user` setting. - -[source,console] --------------------------------------------------- -PUT /my-index-000001/_settings -{ + "index.indexing.slowlog.source": "1000", + "index.indexing.slowlog.reformat": true, "index.indexing.slowlog.include.user": true } -------------------------------------------------- // TEST[setup:my_index] -By default Elasticsearch will log the first 1000 characters of the _source in -the slowlog. You can change that with `index.indexing.slowlog.source`. Setting -it to `false` or `0` will skip logging the source entirely, while setting it to -`true` will log the entire source regardless of size. The original `_source` is -reformatted by default to make sure that it fits on a single log line. If preserving -the original document format is important, you can turn off reformatting by setting -`index.indexing.slowlog.reformat` to `false`, which will cause the source to be -logged "as is" and can potentially span multiple log lines. +[discrete] +===== Logging the `_source` field -The index slow log file is configured in the `log4j2.properties` file. +By default, {es} logs the first 1000 characters of the `_source` in the slow log. You can adjust how `_source` is logged using the `index.indexing.slowlog.source` setting. Set `index.indexing.slowlog.source` to `false` or `0` to skip logging the source entirely. Set `index.indexing.slowlog.source` to `true` to log the entire source regardless of size.
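+
+For example, the following request is a minimal sketch of capping the logged `_source` for a single index; the 500-character limit is an illustrative value, not a recommendation:
+
+[source,console]
+--------------------------------------------------
+PUT /my-index-000001/_settings
+{
+  "index.indexing.slowlog.source": "500"
+}
+--------------------------------------------------
+// TEST[setup:my_index]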
+
+The original `_source` is reformatted by default to make sure that it fits on a single log line. If preserving the original document format is important, then you can turn off reformatting by setting `index.indexing.slowlog.reformat` to `false`. This causes the source to be logged with its original formatting intact, potentially spanning multiple log lines. [discrete] -=== Slow log levels +[[slow-log-fields]] -You can mimic the search or indexing slow log level by setting appropriate -threshold making "more verbose" loggers to be switched off. -If for instance we want to simulate `index.indexing.slowlog.level: INFO` -then all we need to do is to set -`index.indexing.slowlog.threshold.index.debug` and `index.indexing.slowlog.threshold.index.trace` to `-1`. +[discrete] +[[troubleshoot-slow-log]] +=== Best practices for slow logging + +Logging slow requests can be resource intensive for your {es} cluster, depending on the volume of qualifying traffic. For example, emitted logs might increase the index disk usage of your <> cluster. To reduce the impact of slow logs, consider the following: + +* Enable slow logs against specific indices rather than across all indices. +* Set high thresholds to reduce the number of logged events. +* Enable slow logs only when troubleshooting. + +If you aren't sure how to start investigating traffic issues, consider setting the `warn` threshold to a high `30s` value at the index level using the <>: + +* Enable for search requests: ++ +[source,console] +-------------------------------------------------- +PUT /*/_settings +{ + "index.search.slowlog.include.user": true, + "index.search.slowlog.threshold.fetch.warn": "30s", + "index.search.slowlog.threshold.query.warn": "30s" +} +-------------------------------------------------- +// TEST[setup:my_index] + +* Enable for indexing requests: ++ +[source,console] +-------------------------------------------------- +PUT /*/_settings +{ + "index.indexing.slowlog.include.user": true, + "index.indexing.slowlog.threshold.index.warn": "30s" +} +-------------------------------------------------- +// TEST[setup:my_index] + +A met slow log threshold does not guarantee a cluster performance issue. If you notice performance symptoms, however, slow logs can provide helpful data for diagnosing upstream traffic patterns or sources and for resolving client-side issues. For example, you can use data included in `X-Opaque-ID`, the `_source` request body, or `user.*` fields to identify the source of your issue. This is similar to troubleshooting <>. + +If you're experiencing search performance issues, then you might also consider investigating searches flagged for their query durations using the <>; a profile request is sketched at the end of this page. You can then use the profiled query to investigate optimization options using the link:{kibana-ref}/xpack-profiler.html[query profiler]. This type of investigation should usually take place in a non-production environment. + +Slow logging checks each event against the reporting threshold when the event is complete. This means that it can't report events that trigger <> and never complete. If you suspect circuit breaker errors, then you should also consider enabling <>, which logs events before they are executed. + +[discrete] +=== Learn more + +To learn about other ways to optimize your search and indexing requests, refer to <> and <>.
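+
+For example, the following request is a minimal sketch of profiling a query that slow logs have flagged; the index name and query text are placeholders, and the per-component timings in the response can be compared against the logged `took` values:
+
+[source,console]
+--------------------------------------------------
+GET /my-index-000001/_search
+{
+  "profile": true,
+  "query": {
+    "match": { "message": "some slow query text" }
+  }
+}
+--------------------------------------------------
+// TEST[setup:my_index]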
\ No newline at end of file diff --git a/docs/reference/indices.asciidoc b/docs/reference/indices.asciidoc index eaef54a1effb..ca7de396147a 100644 --- a/docs/reference/indices.asciidoc +++ b/docs/reference/indices.asciidoc @@ -1,6 +1,12 @@ [[indices]] == Index APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Index APIs are used to manage individual indices, index settings, aliases, mappings, and index templates. diff --git a/docs/reference/indices/add-alias.asciidoc b/docs/reference/indices/add-alias.asciidoc index e14af6a64a2e..b8f6362f974c 100644 --- a/docs/reference/indices/add-alias.asciidoc +++ b/docs/reference/indices/add-alias.asciidoc @@ -4,6 +4,12 @@ Create or update alias ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Adds a data stream or index to an <>. [source,console] diff --git a/docs/reference/indices/alias-exists.asciidoc b/docs/reference/indices/alias-exists.asciidoc index a514d36a1bfe..1c40779323de 100644 --- a/docs/reference/indices/alias-exists.asciidoc +++ b/docs/reference/indices/alias-exists.asciidoc @@ -4,6 +4,12 @@ Alias exists ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Checks if an <> exists. [source,console] diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 1df9e0a4883b..cff331d88af9 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -4,6 +4,12 @@ Aliases ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Performs one or more <> actions in a single atomic operation. [source,console] diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc index eb4d877b463c..49e1c9de749f 100644 --- a/docs/reference/indices/analyze.asciidoc +++ b/docs/reference/indices/analyze.asciidoc @@ -4,6 +4,12 @@ Analyze ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Performs <> on a text string and returns the resulting tokens. diff --git a/docs/reference/indices/apis/unfreeze.asciidoc b/docs/reference/indices/apis/unfreeze.asciidoc index 450efc03c492..ece1f37c7784 100644 --- a/docs/reference/indices/apis/unfreeze.asciidoc +++ b/docs/reference/indices/apis/unfreeze.asciidoc @@ -17,6 +17,12 @@ You can use this API to unfreeze indices that were frozen in 7.x. Frozen indices are not related to the frozen data tier. ==== +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Unfreezes an index. [[unfreeze-index-api-request]] diff --git a/docs/reference/indices/clearcache.asciidoc b/docs/reference/indices/clearcache.asciidoc index a3150ec6f72e..19b2d204356a 100644 --- a/docs/reference/indices/clearcache.asciidoc +++ b/docs/reference/indices/clearcache.asciidoc @@ -4,6 +4,12 @@ Clear cache ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Clears the caches of one or more indices. For data streams, the API clears the caches of the stream's backing indices.
diff --git a/docs/reference/indices/clone-index.asciidoc b/docs/reference/indices/clone-index.asciidoc index c8e5d2e200f2..192cf7035b84 100644 --- a/docs/reference/indices/clone-index.asciidoc +++ b/docs/reference/indices/clone-index.asciidoc @@ -4,6 +4,12 @@ Clone index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Clones an existing index. [source,console] diff --git a/docs/reference/indices/close.asciidoc b/docs/reference/indices/close.asciidoc index a4bf1742fdea..a3014f811a1b 100644 --- a/docs/reference/indices/close.asciidoc +++ b/docs/reference/indices/close.asciidoc @@ -4,6 +4,12 @@ Close index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Closes an index. [source,console] diff --git a/docs/reference/indices/create-data-stream.asciidoc b/docs/reference/indices/create-data-stream.asciidoc index e97e9973f106..278670047d4c 100644 --- a/docs/reference/indices/create-data-stream.asciidoc +++ b/docs/reference/indices/create-data-stream.asciidoc @@ -5,6 +5,12 @@ Create data stream ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Creates a new <>. //// diff --git a/docs/reference/indices/create-index.asciidoc b/docs/reference/indices/create-index.asciidoc index 2e66f3d6030c..9a3934efa718 100644 --- a/docs/reference/indices/create-index.asciidoc +++ b/docs/reference/indices/create-index.asciidoc @@ -4,6 +4,12 @@ Create index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Creates a new index. [source,console] diff --git a/docs/reference/indices/dangling-index-delete.asciidoc b/docs/reference/indices/dangling-index-delete.asciidoc index 6af35031e9e6..0afe9e0204bc 100644 --- a/docs/reference/indices/dangling-index-delete.asciidoc +++ b/docs/reference/indices/dangling-index-delete.asciidoc @@ -4,6 +4,12 @@ Delete dangling index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Deletes a dangling index. [[dangling-index-delete-api-request]] diff --git a/docs/reference/indices/dangling-index-import.asciidoc b/docs/reference/indices/dangling-index-import.asciidoc index 44cde56de8c9..2b5922a2c0cf 100644 --- a/docs/reference/indices/dangling-index-import.asciidoc +++ b/docs/reference/indices/dangling-index-import.asciidoc @@ -4,6 +4,12 @@ Import dangling index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Imports a dangling index. [[dangling-index-import-api-request]] diff --git a/docs/reference/indices/dangling-indices-list.asciidoc b/docs/reference/indices/dangling-indices-list.asciidoc index b7774843e36d..b098cb696e5f 100644 --- a/docs/reference/indices/dangling-indices-list.asciidoc +++ b/docs/reference/indices/dangling-indices-list.asciidoc @@ -4,6 +4,12 @@ List dangling indices ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Lists dangling indices.
[[dangling-indices-list-api-request]] diff --git a/docs/reference/indices/data-stream-stats.asciidoc b/docs/reference/indices/data-stream-stats.asciidoc index 3ed285abc035..1d664e85628e 100644 --- a/docs/reference/indices/data-stream-stats.asciidoc +++ b/docs/reference/indices/data-stream-stats.asciidoc @@ -5,6 +5,12 @@ Data stream stats ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Retrieves statistics for one or more <>. //// diff --git a/docs/reference/indices/delete-alias.asciidoc b/docs/reference/indices/delete-alias.asciidoc index 748862df0610..5a8baa262d99 100644 --- a/docs/reference/indices/delete-alias.asciidoc +++ b/docs/reference/indices/delete-alias.asciidoc @@ -4,6 +4,12 @@ Delete alias ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Removes a data stream or index from an <>. [source,console] diff --git a/docs/reference/indices/delete-component-template.asciidoc b/docs/reference/indices/delete-component-template.asciidoc index 065a4adb9002..0affd9e4d424 100644 --- a/docs/reference/indices/delete-component-template.asciidoc +++ b/docs/reference/indices/delete-component-template.asciidoc @@ -4,6 +4,12 @@ Delete component template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Deletes an existing component template. //// diff --git a/docs/reference/indices/delete-data-stream.asciidoc b/docs/reference/indices/delete-data-stream.asciidoc index 38e7a00d451d..b8339624c837 100644 --- a/docs/reference/indices/delete-data-stream.asciidoc +++ b/docs/reference/indices/delete-data-stream.asciidoc @@ -5,6 +5,12 @@ Delete data stream ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Deletes one or more <> and their backing indices. See <>. diff --git a/docs/reference/indices/delete-index-template-v1.asciidoc b/docs/reference/indices/delete-index-template-v1.asciidoc index 98b1e2fb255f..e8d5e2e355a5 100644 --- a/docs/reference/indices/delete-index-template-v1.asciidoc +++ b/docs/reference/indices/delete-index-template-v1.asciidoc @@ -9,6 +9,12 @@ templates>>, which are deprecated and will be replaced by the composable templates introduced in {es} 7.8. For information about composable templates, see <>. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Deletes a legacy index template. //// diff --git a/docs/reference/indices/delete-index-template.asciidoc b/docs/reference/indices/delete-index-template.asciidoc index b828e4a536b7..430a18470da4 100644 --- a/docs/reference/indices/delete-index-template.asciidoc +++ b/docs/reference/indices/delete-index-template.asciidoc @@ -4,6 +4,12 @@ Delete index template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Deletes an <>.
//// diff --git a/docs/reference/indices/delete-index.asciidoc b/docs/reference/indices/delete-index.asciidoc index d5d168154e44..ec147fb10ff3 100644 --- a/docs/reference/indices/delete-index.asciidoc +++ b/docs/reference/indices/delete-index.asciidoc @@ -4,6 +4,12 @@ Delete index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Deletes one or more indices. [source,console] diff --git a/docs/reference/indices/diskusage.asciidoc b/docs/reference/indices/diskusage.asciidoc index 3510ba346e5a..85d84ea9ad43 100644 --- a/docs/reference/indices/diskusage.asciidoc +++ b/docs/reference/indices/diskusage.asciidoc @@ -6,6 +6,12 @@ experimental[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Analyzes the disk usage of each field of an index or data stream. This API might not support indices created in previous {es} versions. The result of a small index can be inaccurate as some parts of an index diff --git a/docs/reference/indices/downsample-data-stream.asciidoc b/docs/reference/indices/downsample-data-stream.asciidoc index 5ace4e03dfb6..ecff35033e2d 100644 --- a/docs/reference/indices/downsample-data-stream.asciidoc +++ b/docs/reference/indices/downsample-data-stream.asciidoc @@ -5,6 +5,12 @@ Downsample ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Aggregates a time series (TSDS) index and stores pre-computed statistical summaries (`min`, `max`, `sum`, `value_count` and `avg`) for each metric field grouped by a configured time interval. For example, diff --git a/docs/reference/indices/field-usage-stats.asciidoc b/docs/reference/indices/field-usage-stats.asciidoc index a4856092834e..f50c97ceddf0 100644 --- a/docs/reference/indices/field-usage-stats.asciidoc +++ b/docs/reference/indices/field-usage-stats.asciidoc @@ -6,6 +6,12 @@ experimental[] +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns field usage information for each shard and field of an index. Field usage statistics are automatically captured when diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index 61c44f157da9..03fd2c175ba5 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -4,6 +4,12 @@ Flush ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Flushes one or more data streams or indices. [source,console] diff --git a/docs/reference/indices/forcemerge.asciidoc b/docs/reference/indices/forcemerge.asciidoc index e1581a3cfa63..13579740c966 100644 --- a/docs/reference/indices/forcemerge.asciidoc +++ b/docs/reference/indices/forcemerge.asciidoc @@ -4,6 +4,12 @@ Force merge ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Forces a <> on the shards of one or more indices. For data streams, the API forces a merge on the shards of the stream's backing indices.
diff --git a/docs/reference/indices/get-alias.asciidoc b/docs/reference/indices/get-alias.asciidoc index d4c5b9211694..0877eb4aea7a 100644 --- a/docs/reference/indices/get-alias.asciidoc +++ b/docs/reference/indices/get-alias.asciidoc @@ -4,6 +4,12 @@ Get alias ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Retrieves information for one or more <>. [source,console] diff --git a/docs/reference/indices/get-component-template.asciidoc b/docs/reference/indices/get-component-template.asciidoc index 7e16457730f4..a83871d927c9 100644 --- a/docs/reference/indices/get-component-template.asciidoc +++ b/docs/reference/indices/get-component-template.asciidoc @@ -4,6 +4,12 @@ Get component template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Retrieves information about one or more component templates. ////////////////////////// diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc index 6bf150897aca..88f136d0726f 100644 --- a/docs/reference/indices/get-data-stream.asciidoc +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -5,6 +5,12 @@ Get data stream ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Retrieves information about one or more <>. See <>. diff --git a/docs/reference/indices/get-field-mapping.asciidoc b/docs/reference/indices/get-field-mapping.asciidoc index ac5895872fbb..aac0ae9801f4 100644 --- a/docs/reference/indices/get-field-mapping.asciidoc +++ b/docs/reference/indices/get-field-mapping.asciidoc @@ -4,6 +4,12 @@ Get field mapping ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Retrieves <> for one or more fields. For data streams, the API retrieves field mappings for the stream's backing indices. diff --git a/docs/reference/indices/get-index-template-v1.asciidoc b/docs/reference/indices/get-index-template-v1.asciidoc index 602ca2fe454a..03f7e2873903 100644 --- a/docs/reference/indices/get-index-template-v1.asciidoc +++ b/docs/reference/indices/get-index-template-v1.asciidoc @@ -8,6 +8,12 @@ IMPORTANT: This documentation is about legacy index templates, which are deprecated and will be replaced by the composable templates introduced in {es} 7.8. For information about composable templates, see <>. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Retrieves information about one or more index templates. //// diff --git a/docs/reference/indices/get-index-template.asciidoc b/docs/reference/indices/get-index-template.asciidoc index 2cde5adc8ae2..a1fb974b2bab 100644 --- a/docs/reference/indices/get-index-template.asciidoc +++ b/docs/reference/indices/get-index-template.asciidoc @@ -4,6 +4,12 @@ Get index template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns information about one or more index templates.
//// diff --git a/docs/reference/indices/get-index.asciidoc b/docs/reference/indices/get-index.asciidoc index 2551d25801d7..691493e078c4 100644 --- a/docs/reference/indices/get-index.asciidoc +++ b/docs/reference/indices/get-index.asciidoc @@ -4,6 +4,12 @@ Get index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns information about one or more indices. For data streams, the API returns information about the stream's backing indices. diff --git a/docs/reference/indices/get-mapping.asciidoc b/docs/reference/indices/get-mapping.asciidoc index 16dc8c66d071..9fd02e6fba89 100644 --- a/docs/reference/indices/get-mapping.asciidoc +++ b/docs/reference/indices/get-mapping.asciidoc @@ -4,6 +4,12 @@ Get mapping ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Retrieves <> for one or more indices. For data streams, the API retrieves mappings for the stream's backing indices. diff --git a/docs/reference/indices/get-settings.asciidoc b/docs/reference/indices/get-settings.asciidoc index b6cb6d292638..4b3929664bf1 100644 --- a/docs/reference/indices/get-settings.asciidoc +++ b/docs/reference/indices/get-settings.asciidoc @@ -4,6 +4,12 @@ Get index settings ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns setting information for one or more indices. For data streams, the API returns setting information for the stream's backing indices. diff --git a/docs/reference/indices/index-template-exists-v1.asciidoc b/docs/reference/indices/index-template-exists-v1.asciidoc index 2358f0b1a376..d9aec6f4b86c 100644 --- a/docs/reference/indices/index-template-exists-v1.asciidoc +++ b/docs/reference/indices/index-template-exists-v1.asciidoc @@ -9,6 +9,12 @@ templates>>, which are deprecated and will be replaced by the composable templates introduced in {es} 7.8. For information about composable templates, see <>. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Checks if an <> exists. diff --git a/docs/reference/indices/indices-exists.asciidoc b/docs/reference/indices/indices-exists.asciidoc index d699d36add03..3594619c22fc 100644 --- a/docs/reference/indices/indices-exists.asciidoc +++ b/docs/reference/indices/indices-exists.asciidoc @@ -4,6 +4,12 @@ Exists ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Checks if a data stream, index, or alias exists. [source,console] diff --git a/docs/reference/indices/migrate-to-data-stream.asciidoc b/docs/reference/indices/migrate-to-data-stream.asciidoc index 48302761c7dc..8c5a8d7b1376 100644 --- a/docs/reference/indices/migrate-to-data-stream.asciidoc +++ b/docs/reference/indices/migrate-to-data-stream.asciidoc @@ -5,6 +5,12 @@ Migrate to data stream ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + Converts an <> to a <>.
//// diff --git a/docs/reference/indices/open-close.asciidoc b/docs/reference/indices/open-close.asciidoc index a077c4d19fd5..e9502b3f8328 100644 --- a/docs/reference/indices/open-close.asciidoc +++ b/docs/reference/indices/open-close.asciidoc @@ -4,6 +4,12 @@ Open index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Opens a closed index. For data streams, the API opens any closed backing indices. diff --git a/docs/reference/indices/put-component-template.asciidoc b/docs/reference/indices/put-component-template.asciidoc index d880edfe42b8..9104008d66c5 100644 --- a/docs/reference/indices/put-component-template.asciidoc +++ b/docs/reference/indices/put-component-template.asciidoc @@ -4,6 +4,12 @@ Create or update component template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Creates or updates a component template. Component templates are building blocks for constructing <> that specify index <>, <>, and diff --git a/docs/reference/indices/put-index-template-v1.asciidoc b/docs/reference/indices/put-index-template-v1.asciidoc index 86a8a54edd97..e23e8995cca6 100644 --- a/docs/reference/indices/put-index-template-v1.asciidoc +++ b/docs/reference/indices/put-index-template-v1.asciidoc @@ -8,6 +8,12 @@ IMPORTANT: This documentation is about legacy index templates, which are deprecated and will be replaced by the composable templates introduced in {es} 7.8. For information about composable templates, see <>. +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Creates or updates an index template. [source,console] diff --git a/docs/reference/indices/put-index-template.asciidoc b/docs/reference/indices/put-index-template.asciidoc index 9a3103754679..17035daeca15 100644 --- a/docs/reference/indices/put-index-template.asciidoc +++ b/docs/reference/indices/put-index-template.asciidoc @@ -4,6 +4,12 @@ Create or update index template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Creates or updates an index template. Index templates define <>, <>, and <> that can be applied automatically to new indices. diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc index dc6dbff1df42..acd5df00cb77 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -4,6 +4,12 @@ Update mapping ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Adds new fields to an existing data stream or index. You can also use this API to change the search settings of existing fields. diff --git a/docs/reference/indices/recovery.asciidoc b/docs/reference/indices/recovery.asciidoc index 06b4d9d92e49..44094eea73f6 100644 --- a/docs/reference/indices/recovery.asciidoc +++ b/docs/reference/indices/recovery.asciidoc @@ -4,6 +4,12 @@ Index recovery ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns information about ongoing and completed shard recoveries for one or more indices. For data streams, the API returns information for the stream's backing indices.
diff --git a/docs/reference/indices/refresh.asciidoc b/docs/reference/indices/refresh.asciidoc index bd8e821ff56b..60774ffd6400 100644 --- a/docs/reference/indices/refresh.asciidoc +++ b/docs/reference/indices/refresh.asciidoc @@ -4,6 +4,12 @@ Refresh ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + A refresh makes recent operations performed on one or more indices available for search. For data streams, the API runs the refresh operation on the stream's backing indices. For more information about the refresh operation, see diff --git a/docs/reference/indices/resolve-cluster.asciidoc b/docs/reference/indices/resolve-cluster.asciidoc index 48e6bfac4af1..fc0301916e68 100644 --- a/docs/reference/indices/resolve-cluster.asciidoc +++ b/docs/reference/indices/resolve-cluster.asciidoc @@ -4,6 +4,12 @@ Resolve cluster ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Resolves the specified index expressions to return information about each cluster, including the local cluster, if included. diff --git a/docs/reference/indices/resolve.asciidoc b/docs/reference/indices/resolve.asciidoc index 856546b037fe..fb6c97d9ecf4 100644 --- a/docs/reference/indices/resolve.asciidoc +++ b/docs/reference/indices/resolve.asciidoc @@ -4,6 +4,12 @@ Resolve index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Resolves the specified name(s) and/or index patterns for indices, aliases, and data streams. Multiple patterns and remote clusters are supported. diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 2a47d28e5358..b15444977ba8 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -4,6 +4,12 @@ Rollover ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Creates a new index for a <> or <>. [source,console] diff --git a/docs/reference/indices/segments.asciidoc b/docs/reference/indices/segments.asciidoc index 54f0b6c0cd6d..df999ec4c897 100644 --- a/docs/reference/indices/segments.asciidoc +++ b/docs/reference/indices/segments.asciidoc @@ -4,6 +4,12 @@ Index segments ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns low-level information about the https://lucene.apache.org/core/[Lucene] segments in index shards. For data streams, the API returns information about the stream's backing indices. diff --git a/docs/reference/indices/shard-stores.asciidoc b/docs/reference/indices/shard-stores.asciidoc index 1b001a3175b8..b6f2aca78be0 100644 --- a/docs/reference/indices/shard-stores.asciidoc +++ b/docs/reference/indices/shard-stores.asciidoc @@ -4,6 +4,12 @@ Index shard stores ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Retrieves store information about replica shards in one or more indices.
For data streams, the API retrieves store diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc index 244733282d46..1f4e849abff1 100644 --- a/docs/reference/indices/shrink-index.asciidoc +++ b/docs/reference/indices/shrink-index.asciidoc @@ -4,6 +4,12 @@ Shrink index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Shrinks an existing index into a new index with fewer primary shards. diff --git a/docs/reference/indices/simulate-index.asciidoc b/docs/reference/indices/simulate-index.asciidoc index 5e5709a2d82f..598e4a12acb2 100644 --- a/docs/reference/indices/simulate-index.asciidoc +++ b/docs/reference/indices/simulate-index.asciidoc @@ -4,6 +4,12 @@ Simulate index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns the index configuration that would be applied to the specified index from an existing <>. diff --git a/docs/reference/indices/simulate-template.asciidoc b/docs/reference/indices/simulate-template.asciidoc index c7397ace9788..cb59fd811ce4 100644 --- a/docs/reference/indices/simulate-template.asciidoc +++ b/docs/reference/indices/simulate-template.asciidoc @@ -4,6 +4,12 @@ Simulate template ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns the index configuration that would be applied by a particular <>. diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc index 0c93b572639d..a4ee3faa9dc2 100644 --- a/docs/reference/indices/split-index.asciidoc +++ b/docs/reference/indices/split-index.asciidoc @@ -4,6 +4,12 @@ Split index ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Splits an existing index into a new index with more primary shards. [source,console] diff --git a/docs/reference/indices/stats.asciidoc b/docs/reference/indices/stats.asciidoc index 088d65c37ec6..486b13e47827 100644 --- a/docs/reference/indices/stats.asciidoc +++ b/docs/reference/indices/stats.asciidoc @@ -4,6 +4,12 @@ Index stats ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Returns statistics for one or more indices. For data streams, the API retrieves statistics for the stream's backing indices. diff --git a/docs/reference/indices/update-settings.asciidoc b/docs/reference/indices/update-settings.asciidoc index 3b29946d5ed7..ad500943a042 100644 --- a/docs/reference/indices/update-settings.asciidoc +++ b/docs/reference/indices/update-settings.asciidoc @@ -4,6 +4,12 @@ Update index settings ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + Changes a <> in real time.
For data streams, index setting changes are applied to all backing indices by diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 16e38d2aa148..726facd383c1 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -78,12 +78,17 @@ The API returns the following response: { "inference_id": "my-elser-model", "task_type": "sparse_embedding", - "service": "elser", + "service": "elasticsearch", "service_settings": { "num_allocations": 1, - "num_threads": 1 + "num_threads": 1, + "model_id": ".elser_model_2" }, - "task_settings": {} + "chunking_settings": { + "strategy": "sentence", + "max_chunk_size": 250, + "sentence_overlap": 1 + } } ------------------------------------------------------------ // NOTCONSOLE diff --git a/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc index 1d4b7e770325..d190322fbfe9 100644 --- a/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc @@ -5,6 +5,12 @@ Delete enrich policy ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-enrich[Enrich APIs]. +-- + Deletes an existing <> and its <>. diff --git a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc index ebad9f09250d..312cb2593f1f 100644 --- a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc @@ -5,6 +5,12 @@ Execute enrich policy ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-enrich[Enrich APIs]. +-- + Executes an existing <>. //// diff --git a/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc index 88cf2cfa7cf6..48448f592504 100644 --- a/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc @@ -5,6 +5,12 @@ Get enrich policy ++++ +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-enrich[Enrich APIs]. +-- + Returns information about an <>. //// diff --git a/docs/reference/ingest/apis/enrich/index.asciidoc b/docs/reference/ingest/apis/enrich/index.asciidoc index a17c8179af1b..0488d535000e 100644 --- a/docs/reference/ingest/apis/enrich/index.asciidoc +++ b/docs/reference/ingest/apis/enrich/index.asciidoc @@ -2,6 +2,12 @@ [[enrich-apis]] == Enrich APIs +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-enrich[Enrich APIs]. +-- + The following enrich APIs are available for managing <>: diff --git a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc index ee33b0b32090..80223cb0ec71 100644 --- a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc @@ -5,6 +5,11 @@ Create enrich policy ++++ +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-enrich[Enrich APIs]. +-- + Creates an enrich policy.
//// diff --git a/docs/reference/ingest/enrich.asciidoc b/docs/reference/ingest/enrich.asciidoc index 4bd50641149c..3c08384eb23d 100644 --- a/docs/reference/ingest/enrich.asciidoc +++ b/docs/reference/ingest/enrich.asciidoc @@ -98,7 +98,8 @@ and <>. [IMPORTANT] ==== The enrich processor performs several operations and may impact the speed of -your ingest pipeline. +your ingest pipeline. We recommend <> co-locating +ingest and data roles to minimize remote search operations. We strongly recommend testing and benchmarking your enrich processors before deploying them in production. diff --git a/docs/reference/query-dsl/semantic-query.asciidoc b/docs/reference/query-dsl/semantic-query.asciidoc index 11e19d635608..914f4429f7f9 100644 --- a/docs/reference/query-dsl/semantic-query.asciidoc +++ b/docs/reference/query-dsl/semantic-query.asciidoc @@ -117,79 +117,3 @@ GET my-index/_search } ------------------------------------------------------------ // TEST[skip: Requires inference endpoints] - - -[discrete] -[[advanced-search]] -==== Advanced search on `semantic_text` fields - -The `semantic` query uses default settings for searching on `semantic_text` fields for ease of use. -If you want to fine-tune a search on a `semantic_text` field, you need to know the task type used by the `inference_id` configured in `semantic_text`. -You can find the task type using the <>, and check the `task_type` associated with the {infer} service. -Depending on the `task_type`, use either the <> or the <> query for greater flexibility and customization. - -NOTE: While it is possible to use the `sparse_vector` query or the `knn` query -on a `semantic_text` field, it is not supported to use the `semantic_query` on a -`sparse_vector` or `dense_vector` field type. - - -[discrete] -[[search-sparse-inference]] -===== Search with `sparse_embedding` inference - -When the {infer} endpoint uses a `sparse_embedding` model, you can use a <> on a <> field in the following way: - -[source,console] ------------------------------------------------------------- -GET test-index/_search -{ - "query": { - "nested": { - "path": "inference_field.inference.chunks", - "query": { - "sparse_vector": { - "field": "inference_field.inference.chunks.embeddings", - "inference_id": "my-inference-id", - "query": "mountain lake" - } - } - } - } -} ------------------------------------------------------------- -// TEST[skip: Requires inference endpoints] - -You can customize the `sparse_vector` query to include specific settings, like <>. - - -[discrete] -[[search-text-inferece]] -===== Search with `text_embedding` inference - -When the {infer} endpoint uses a `text_embedding` model, you can use a <> on a `semantic_text` field in the following way: - -[source,console] ------------------------------------------------------------- -GET test-index/_search -{ - "query": { - "nested": { - "path": "inference_field.inference.chunks", - "query": { - "knn": { - "field": "inference_field.inference.chunks.embeddings", - "query_vector_builder": { - "text_embedding": { - "model_id": "my_inference_id", - "model_text": "mountain lake" - } - } - } - } - } - } -} ------------------------------------------------------------- -// TEST[skip: Requires inference endpoints] - -You can customize the `knn` query to include specific settings, like `num_candidates` and `k`. 
diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 50c9f96ad81b..bfab15ed012c 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -6,9 +6,9 @@ {es} exposes REST APIs that are used by the UI components and can be called directly to configure and access {es} features. -[NOTE] -We are working on including more {es} APIs in this section. Some content might -not be included yet. +.New API reference +[sidebar] +For the most up-to-date API details, refer to {api-es}[{es} APIs]. * <> * <> diff --git a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc index 3448940b6fad..987a24140b6b 100644 --- a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc @@ -157,89 +157,7 @@ GET semantic-embeddings/_search <2> The query text. As a result, you receive the top 10 documents that are closest in meaning to the -query from the `semantic-embedding` index: - -[source,console-result] ------------------------------------------------------------- -"hits": [ - { - "_index": "semantic-embeddings", - "_id": "Jy5065EBBFPLbFsdh_f9", - "_score": 21.487484, - "_source": { - "id": 8836652, - "content": { - "text": "There are a few foods and food groups that will help to fight inflammation and delayed onset muscle soreness (both things that are inevitable after a long, hard workout) when you incorporate them into your postworkout eats, whether immediately after your run or at a meal later in the day. Advertisement. Advertisement.", - "inference": { - "inference_id": "my-elser-endpoint", - "model_settings": { - "task_type": "sparse_embedding" - }, - "chunks": [ - { - "text": "There are a few foods and food groups that will help to fight inflammation and delayed onset muscle soreness (both things that are inevitable after a long, hard workout) when you incorporate them into your postworkout eats, whether immediately after your run or at a meal later in the day. Advertisement. Advertisement.", - "embeddings": { - (...) - } - } - ] - } - } - } - }, - { - "_index": "semantic-embeddings", - "_id": "Ji5065EBBFPLbFsdh_f9", - "_score": 18.211695, - "_source": { - "id": 8836651, - "content": { - "text": "During Your Workout. There are a few things you can do during your workout to help prevent muscle injury and soreness. According to personal trainer and writer for Iron Magazine, Marc David, doing warm-ups and cool-downs between sets can help keep muscle soreness to a minimum.", - "inference": { - "inference_id": "my-elser-endpoint", - "model_settings": { - "task_type": "sparse_embedding" - }, - "chunks": [ - { - "text": "During Your Workout. There are a few things you can do during your workout to help prevent muscle injury and soreness. According to personal trainer and writer for Iron Magazine, Marc David, doing warm-ups and cool-downs between sets can help keep muscle soreness to a minimum.", - "embeddings": { - (...) - } - } - ] - } - } - } - }, - { - "_index": "semantic-embeddings", - "_id": "Wi5065EBBFPLbFsdh_b9", - "_score": 13.089405, - "_source": { - "id": 8800197, - "content": { - "text": "This is especially important if the soreness is due to a weightlifting routine.
For this time period, do not exert more than around 50% of the level of effort (weight, distance and speed) that caused the muscle groups to be sore.", - "inference": { - "inference_id": "my-elser-endpoint", - "model_settings": { - "task_type": "sparse_embedding" - }, - "chunks": [ - { - "text": "This is especially important if the soreness is due to a weightlifting routine. For this time period, do not exert more than around 50% of the level of effort (weight, distance and speed) that caused the muscle groups to be sore.", - "embeddings": { - (...) - } - } - ] - } - } - } - } -] ------------------------------------------------------------- -// NOTCONSOLE +query from the `semantic-embedding` index. [discrete] [[semantic-text-further-examples]] diff --git a/docs/reference/shutdown/apis/shutdown-get.asciidoc b/docs/reference/shutdown/apis/shutdown-get.asciidoc index 5feac28353ab..97755b3263d7 100644 --- a/docs/reference/shutdown/apis/shutdown-get.asciidoc +++ b/docs/reference/shutdown/apis/shutdown-get.asciidoc @@ -72,6 +72,7 @@ including the status of shard migration, task migration, and plugin cleanup: "nodes": [ { "node_id": "USpTGYaBSIKbgSUJR2Z9lg", + "node_ephemeral_id": null, "type": "RESTART", "reason": "Demonstrating how the node shutdown API works", "shutdown_startedmillis": 1624406108685, diff --git a/docs/reference/troubleshooting/common-issues/circuit-breaker-errors.asciidoc b/docs/reference/troubleshooting/common-issues/circuit-breaker-errors.asciidoc index fbc6fe7b42a7..74793f8a625b 100644 --- a/docs/reference/troubleshooting/common-issues/circuit-breaker-errors.asciidoc +++ b/docs/reference/troubleshooting/common-issues/circuit-breaker-errors.asciidoc @@ -9,6 +9,9 @@ By default, the <> triggers at 95% JVM memory usage. To prevent errors, we recommend taking steps to reduce memory pressure if usage consistently exceeds 85%. +See https://www.youtube.com/watch?v=k3wYlRVbMSw[this video] for a walkthrough +of diagnosing circuit breaker errors. + [discrete] [[diagnose-circuit-breaker-errors]] ==== Diagnose circuit breaker errors diff --git a/docs/reference/troubleshooting/common-issues/rejected-requests.asciidoc b/docs/reference/troubleshooting/common-issues/rejected-requests.asciidoc index c863709775fc..34ef388f2e3c 100644 --- a/docs/reference/troubleshooting/common-issues/rejected-requests.asciidoc +++ b/docs/reference/troubleshooting/common-issues/rejected-requests.asciidoc @@ -32,6 +32,9 @@ correlating log as `EsRejectedExecutionException` with either These errors are often related to <>. +See https://www.youtube.com/watch?v=auZJRXoAVpI[this video] for a walkthrough +of troubleshooting threadpool rejections. + [discrete] [[check-circuit-breakers]] ==== Check circuit breakers @@ -47,6 +50,9 @@ GET /_nodes/stats/breaker These statistics are cumulative from node startup. For more information, see <>. +See https://www.youtube.com/watch?v=k3wYlRVbMSw[this video] for a walkthrough +of diagnosing circuit breaker errors. + [discrete] [[check-indexing-pressure]] ==== Check indexing pressure @@ -69,6 +75,9 @@ These errors are often related to <>, <> sizing, or the ingest target's <>. +See https://www.youtube.com/watch?v=QuV8QqSfc0c[this video] for a walkthrough +of diagnosing indexing pressure rejections. 
+ [discrete] [[prevent-rejected-requests]] ==== Prevent rejected requests diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 33addef8aedd..2c46c4642e56 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -949,14 +949,9 @@ - - - - - - - - + + + diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 22286c90de3d..e712035eabc7 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip +distributionSha256Sum=7ebdac923867a3cec0098302416d1e3c6c0c729fc4e2e05c10637a8af33a76c5 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index f5feea6d6b11..f3b75f3b0d4f 100755 --- a/gradlew +++ b/gradlew @@ -86,8 +86,7 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s -' "$PWD" ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 25f4e97bd12e..d549c0d8906a 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -32,7 +32,7 @@ public interface EntitlementChecker { void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory); // Process creation - void check$$start(Class<?> callerClass, ProcessBuilder that, ProcessBuilder.Redirect[] redirects); + void check$$start(Class<?> callerClass, ProcessBuilder that); void check$java_lang_ProcessBuilder$startPipeline(Class<?> callerClass, List<ProcessBuilder> builders); diff --git a/libs/entitlement/qa/entitlement-allowed-nonmodular/build.gradle b/libs/entitlement/qa/entitlement-allowed-nonmodular/build.gradle index 7b3015a5ab83..316b61e15707 100644 --- a/libs/entitlement/qa/entitlement-allowed-nonmodular/build.gradle +++ b/libs/entitlement/qa/entitlement-allowed-nonmodular/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name 'entitlement-allowed-nonmodular' - description 'A non-modular test module that invokes entitlement checks that are supposed to be granted' - classname 'org.elasticsearch.entitlement.qa.nonmodular.EntitlementAllowedNonModularPlugin' + name = 'entitlement-allowed-nonmodular' + description = 'A non-modular test module that invokes entitlement checks that are supposed to be granted' + classname = 'org.elasticsearch.entitlement.qa.nonmodular.EntitlementAllowedNonModularPlugin' } dependencies { diff --git a/libs/entitlement/qa/entitlement-allowed/build.gradle b/libs/entitlement/qa/entitlement-allowed/build.gradle index 6090d658d208..b9518f8d65fb 100644
--- a/libs/entitlement/qa/entitlement-allowed/build.gradle +++ b/libs/entitlement/qa/entitlement-allowed/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name 'entitlement-allowed' - description 'A test module that invokes entitlement checks that are supposed to be granted' - classname 'org.elasticsearch.entitlement.qa.EntitlementAllowedPlugin' + name = 'entitlement-allowed' + description = 'A test module that invokes entitlement checks that are supposed to be granted' + classname = 'org.elasticsearch.entitlement.qa.EntitlementAllowedPlugin' } dependencies { diff --git a/libs/entitlement/qa/entitlement-denied-nonmodular/build.gradle b/libs/entitlement/qa/entitlement-denied-nonmodular/build.gradle index bddd6c83c7cc..6a88dd66eaf7 100644 --- a/libs/entitlement/qa/entitlement-denied-nonmodular/build.gradle +++ b/libs/entitlement/qa/entitlement-denied-nonmodular/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name 'entitlement-denied-nonmodular' - description 'A non-modular test module that invokes non-granted entitlement and triggers exceptions' - classname 'org.elasticsearch.entitlement.qa.nonmodular.EntitlementDeniedNonModularPlugin' + name = 'entitlement-denied-nonmodular' + description = 'A non-modular test module that invokes non-granted entitlement and triggers exceptions' + classname = 'org.elasticsearch.entitlement.qa.nonmodular.EntitlementDeniedNonModularPlugin' } dependencies { diff --git a/libs/entitlement/qa/entitlement-denied/build.gradle b/libs/entitlement/qa/entitlement-denied/build.gradle index cc269135c5bf..9d1872563b8c 100644 --- a/libs/entitlement/qa/entitlement-denied/build.gradle +++ b/libs/entitlement/qa/entitlement-denied/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name 'entitlement-denied' - description 'A test module that invokes non-granted entitlement and triggers exceptions' - classname 'org.elasticsearch.entitlement.qa.EntitlementDeniedPlugin' + name = 'entitlement-denied' + description = 'A test module that invokes non-granted entitlement and triggers exceptions' + classname = 'org.elasticsearch.entitlement.qa.EntitlementDeniedPlugin' } dependencies { diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java index 9f55a7c9e894..7f2bf792ac06 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java @@ -31,6 +31,8 @@ public class EntitlementsDeniedIT extends ESRestTestCase { .plugin("entitlement-denied-nonmodular") .systemProperty("es.entitlements.enabled", "true") .setting("xpack.security.enabled", "false") + // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsDeniedIT.xml + // .setting("logger.org.elasticsearch.entitlement", "TRACE") .build(); @Override diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 75365fbb74d6..7c72e0335a3e 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java 
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -70,7 +70,7 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { } @Override - public void check$$start(Class<?> callerClass, ProcessBuilder processBuilder, ProcessBuilder.Redirect[] redirects) { + public void check$$start(Class<?> callerClass, ProcessBuilder processBuilder) { policyManager.checkStartProcess(callerClass); }
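Note on the check$$start change above: the checker entry point now takes only the caller class and the receiving ProcessBuilder; per the unchanged method body, the policy decision depends only on the caller, so the redirects array was never consulted. As a rough, self-contained sketch of the call shape (everything here except check$$start and checkStartProcess, which appear in this diff, is a hypothetical name invented for illustration):

import java.io.IOException;

// Hypothetical sketch, not the real bytecode instrumentation: an instrumented
// ProcessBuilder.start() first routes through the checker, which may throw
// NotEntitledException, and only then runs the real method body.
class InstrumentedStartSketch {
    interface Checker {
        void check$$start(Class<?> callerClass, ProcessBuilder processBuilder);
    }

    static Process guardedStart(Checker checker, Class<?> callerClass, ProcessBuilder pb) throws IOException {
        checker.check$$start(callerClass, pb); // delegates to policyManager.checkStartProcess(callerClass)
        return pb.start();                     // reached only if the check did not throw
    }
}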
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index e06f7768eb8b..527a9472a7ce 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -10,7 +10,6 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.core.Strings; -import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -32,10 +31,9 @@ import java.util.stream.Stream; import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; import static java.util.Objects.requireNonNull; -import static java.util.function.Predicate.not; public class PolicyManager { - private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + private static final Logger logger = LogManager.getLogger(PolicyManager.class); static class ModuleEntitlements { public static final ModuleEntitlements NONE = new ModuleEntitlements(List.of()); @@ -68,18 +66,12 @@ public class PolicyManager { private static final Set<Module> systemModules = findSystemModules(); - /** - * Frames originating from this module are ignored in the permission logic. - */ - private final Module entitlementsModule; - private static Set<Module> findSystemModules() { var systemModulesDescriptors = ModuleFinder.ofSystem() .findAll() .stream() .map(ModuleReference::descriptor) .collect(Collectors.toUnmodifiableSet()); - return ModuleLayer.boot() .modules() .stream() @@ -87,6 +79,11 @@ public class PolicyManager { .collect(Collectors.toUnmodifiableSet()); } + /** + * Frames originating from this module are ignored in the permission logic. + */ + private final Module entitlementsModule; + public PolicyManager( Policy defaultPolicy, Map<String, Policy> pluginPolicies, @@ -227,12 +224,12 @@ public class PolicyManager { * this is a fast-path check that can avoid the stack walk * in cases where the caller class is available. * @return the requesting module, or {@code null} if the entire call stack - * comes from modules that are trusted. + * comes from the entitlement library itself. */ Module requestingModule(Class<?> callerClass) { if (callerClass != null) { - Module callerModule = callerClass.getModule(); - if (systemModules.contains(callerModule) == false) { + var callerModule = callerClass.getModule(); + if (callerModule != null && entitlementsModule.equals(callerModule) == false) { // fast path return callerModule; } @@ -251,8 +248,8 @@ Optional<Module> findRequestingModule(Stream<Class<?>> classes) { return classes.map(Objects::requireNonNull) .map(PolicyManager::moduleOf) - .filter(m -> m != entitlementsModule) // Ignore the entitlements library itself - .filter(not(systemModules::contains)) // Skip trusted JDK modules + .filter(m -> m != entitlementsModule) // Ignore the entitlements library itself entirely + .skip(1) // Skip the sensitive method itself .findFirst(); } @@ -266,8 +263,15 @@ } private static boolean isTriviallyAllowed(Module requestingModule) { + if (logger.isTraceEnabled()) { + logger.trace("Stack trace for upcoming trivially-allowed check", new Exception()); + } if (requestingModule == null) { - logger.debug("Entitlement trivially allowed: entire call stack is in composed of classes in system modules"); + logger.debug("Entitlement trivially allowed: no caller frames outside the entitlement library"); + return true; + } + if (systemModules.contains(requestingModule)) { + logger.debug("Entitlement trivially allowed from system module [{}]", requestingModule.getName()); return true; } logger.trace("Entitlement not trivially allowed");
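The PolicyManager change above rewires how the requesting module is found: frames from the entitlements library are ignored outright, the next frame is taken to be the instrumented (sensitive) method itself and skipped, and system-module trust is now decided afterwards in isTriviallyAllowed rather than during the walk. A minimal sketch of that selection rule, assuming the stack has already been mapped to modules (the real code walks a Stream<Class<?>> and applies moduleOf first):

import java.util.Optional;
import java.util.stream.Stream;

class FrameSelectionSketch {
    // Mirrors the new filter/skip/findFirst pipeline in findRequestingModule:
    // drop every frame of the entitlements library, skip the sensitive method's
    // own frame, and the next module is the requester.
    static Optional<Module> findRequestingModule(Stream<Module> frameModules, Module entitlementsModule) {
        return frameModules.filter(m -> m != entitlementsModule) // ignore the entitlements library entirely
            .skip(1) // skip the sensitive (instrumented) method itself
            .findFirst();
    }
}

With the frame order used by the test below (entitlementsClass, instrumentedClass, requestingClass, ignorableClass), this rule yields requestingClass's module, matching the assertions.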
diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 0789fcc8dc77..31e3e62f56bf 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; +import org.junit.BeforeClass; import java.io.IOException; import java.lang.module.Configuration; @@ -37,8 +38,22 @@ import static org.hamcrest.Matchers.sameInstance; @ESTestCase.WithoutSecurityManager public class PolicyManagerTests extends ESTestCase { + /** + * A module you can use for test cases that don't actually care about the + * entitlements module. + */ + private static Module NO_ENTITLEMENTS_MODULE; - private static final Module NO_ENTITLEMENTS_MODULE = null; + @BeforeClass + public static void beforeClass() { + try { + // Any old module will do for tests using NO_ENTITLEMENTS_MODULE + NO_ENTITLEMENTS_MODULE = makeClassInItsOwnModule().getModule(); + } catch (Exception e) { + throw new IllegalStateException(e); + } + + } public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { var policyManager = new PolicyManager( @@ -210,53 +225,31 @@ public class PolicyManagerTests extends ESTestCase { } public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoundException { - var requestingClass = makeClassInItsOwnModule(); - var runtimeClass = makeClassInItsOwnModule(); // A class in the entitlements library itself + var entitlementsClass = makeClassInItsOwnModule(); // A class in the entitlements library itself + var requestingClass = makeClassInItsOwnModule(); // This guy is always the right answer + var instrumentedClass = makeClassInItsOwnModule(); // The class that called the check method var ignorableClass = makeClassInItsOwnModule(); - var systemClass = Object.class; - var policyManager = policyManagerWithEntitlementsModule(runtimeClass.getModule()); + var policyManager = policyManagerWithEntitlementsModule(entitlementsClass.getModule()); var requestingModule = requestingClass.getModule(); assertEquals( - "Skip one system frame", + "Skip entitlement library and the instrumented method", requestingModule, - policyManager.findRequestingModule(Stream.of(systemClass, requestingClass, ignorableClass)).orElse(null) - ); - assertEquals( - "Skip multiple system frames", - requestingModule, - policyManager.findRequestingModule(Stream.of(systemClass, systemClass, systemClass, requestingClass, ignorableClass)) + policyManager.findRequestingModule(Stream.of(entitlementsClass, instrumentedClass, requestingClass, ignorableClass)) .orElse(null) ); assertEquals( - "Skip system frame between runtime frames", + "Skip multiple library frames", requestingModule, - policyManager.findRequestingModule(Stream.of(runtimeClass, systemClass, runtimeClass, requestingClass, ignorableClass)) - .orElse(null) - ); - assertEquals( - "Skip runtime frame between system frames", - requestingModule, - policyManager.findRequestingModule(Stream.of(systemClass, runtimeClass, systemClass, requestingClass, ignorableClass)) - .orElse(null) - ); - assertEquals( - "No system frames", - requestingModule, - policyManager.findRequestingModule(Stream.of(requestingClass, ignorableClass)).orElse(null) - ); - assertEquals( - "Skip runtime frames up to the first system frame", - requestingModule, - policyManager.findRequestingModule(Stream.of(runtimeClass, runtimeClass, systemClass, requestingClass, ignorableClass)) + policyManager.findRequestingModule(Stream.of(entitlementsClass, entitlementsClass, instrumentedClass, requestingClass)) .orElse(null) ); assertThrows( "Non-modular caller frames are not supported", NullPointerException.class, - () -> policyManager.findRequestingModule(Stream.of(systemClass, null)) + () -> policyManager.findRequestingModule(Stream.of(entitlementsClass, null)) ); } diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/PatternBank.java b/libs/grok/src/main/java/org/elasticsearch/grok/PatternBank.java index e6f8af9b566d..cf8edf07f280 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/PatternBank.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/PatternBank.java @@ -23,7 +23,7 @@ import java.util.Set; public
class PatternBank { - public static PatternBank EMPTY = new PatternBank(Map.of()); + public static final PatternBank EMPTY = new PatternBank(Map.of()); private final Map<String, String> bank; diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java b/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java index 3b3f760c0534..fa52ff6ee56c 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java @@ -29,7 +29,7 @@ final class Constants { /** * sqrt(3) / 2.0 */ - public static double M_SQRT3_2 = 0.8660254037844386467637231707529361834714; + public static final double M_SQRT3_2 = 0.8660254037844386467637231707529361834714; /** * 2.0 * PI */ @@ -37,19 +37,19 @@ final class Constants { /** * The number of H3 base cells */ - public static int NUM_BASE_CELLS = 122; + public static final int NUM_BASE_CELLS = 122; /** * The number of vertices in a hexagon */ - public static int NUM_HEX_VERTS = 6; + public static final int NUM_HEX_VERTS = 6; /** * The number of vertices in a pentagon */ - public static int NUM_PENT_VERTS = 5; + public static final int NUM_PENT_VERTS = 5; /** * H3 index modes */ - public static int H3_CELL_MODE = 1; + public static final int H3_CELL_MODE = 1; /** * square root of 7 */ @@ -64,14 +64,14 @@ final class Constants { * (or distance between adjacent cell center points * on the plane) to gnomonic unit length. */ - public static double RES0_U_GNOMONIC = 0.38196601125010500003; + public static final double RES0_U_GNOMONIC = 0.38196601125010500003; /** * rotation angle between Class II and Class III resolution axes * (asin(sqrt(3.0 / 28.0))) */ - public static double M_AP7_ROT_RADS = 0.333473172251832115336090755351601070065900389; + public static final double M_AP7_ROT_RADS = 0.333473172251832115336090755351601070065900389; /** * threshold epsilon */ - public static double EPSILON = 0.0000000000000001; + public static final double EPSILON = 0.0000000000000001; } diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/H3.java b/libs/h3/src/main/java/org/elasticsearch/h3/H3.java index 08031088728b..ac35fe6670c1 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/H3.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/H3.java @@ -33,7 +33,7 @@ public final class H3 { /** * max H3 resolution; H3 version 1 has 16 resolutions, numbered 0 through 15 */ - public static int MAX_H3_RES = 15; + public static final int MAX_H3_RES = 15; private static final long[] NORTH = new long[MAX_H3_RES + 1]; private static final long[] SOUTH = new long[MAX_H3_RES + 1]; diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java b/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java index 2b1b9cade21a..88261dd458fe 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java @@ -41,22 +41,22 @@ final class H3Index { return BaseCells.isBaseCellPentagon(H3Index.H3_get_base_cell(h3)) && H3Index.h3LeadingNonZeroDigit(h3) == 0; } - public static long H3_INIT = 35184372088831L; + public static final long H3_INIT = 35184372088831L; /** * The bit offset of the mode in an H3 index. */ - public static int H3_MODE_OFFSET = 59; + public static final int H3_MODE_OFFSET = 59; /** * 1's in the 4 mode bits, 0's everywhere else. */ - public static long H3_MODE_MASK = 15L << H3_MODE_OFFSET; + public static final long H3_MODE_MASK = 15L << H3_MODE_OFFSET; /** * 0's in the 4 mode bits, 1's everywhere else.
*/ - public static long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK; + public static final long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK; public static long H3_set_mode(long h3, long mode) { return (h3 & H3_MODE_MASK_NEGATIVE) | (mode << H3_MODE_OFFSET); @@ -65,16 +65,16 @@ final class H3Index { /** * The bit offset of the base cell in an H3 index. */ - public static int H3_BC_OFFSET = 45; + public static final int H3_BC_OFFSET = 45; /** * 1's in the 7 base cell bits, 0's everywhere else. */ - public static long H3_BC_MASK = 127L << H3_BC_OFFSET; + public static final long H3_BC_MASK = 127L << H3_BC_OFFSET; /** * 0's in the 7 base cell bits, 1's everywhere else. */ - public static long H3_BC_MASK_NEGATIVE = ~H3_BC_MASK; + public static final long H3_BC_MASK_NEGATIVE = ~H3_BC_MASK; /** * Sets the integer base cell of h3 to bc. @@ -83,26 +83,26 @@ final class H3Index { return (h3 & H3_BC_MASK_NEGATIVE) | (bc << H3_BC_OFFSET); } - public static int H3_RES_OFFSET = 52; + public static final int H3_RES_OFFSET = 52; /** * 1's in the 4 resolution bits, 0's everywhere else. */ - public static long H3_RES_MASK = 15L << H3_RES_OFFSET; + public static final long H3_RES_MASK = 15L << H3_RES_OFFSET; /** * 0's in the 4 resolution bits, 1's everywhere else. */ - public static long H3_RES_MASK_NEGATIVE = ~H3_RES_MASK; + public static final long H3_RES_MASK_NEGATIVE = ~H3_RES_MASK; /** * The bit offset of the max resolution digit in an H3 index. */ - public static int H3_MAX_OFFSET = 63; + public static final int H3_MAX_OFFSET = 63; /** * 1 in the highest bit, 0's everywhere else. */ - public static long H3_HIGH_BIT_MASK = (1L << H3_MAX_OFFSET); + public static final long H3_HIGH_BIT_MASK = (1L << H3_MAX_OFFSET); /** * Gets the highest bit of the H3 index. @@ -121,12 +121,12 @@ final class H3Index { /** * The bit offset of the reserved bits in an H3 index. */ - public static int H3_RESERVED_OFFSET = 56; + public static final int H3_RESERVED_OFFSET = 56; /** * 1's in the 3 reserved bits, 0's everywhere else. */ - public static long H3_RESERVED_MASK = (7L << H3_RESERVED_OFFSET); + public static final long H3_RESERVED_MASK = (7L << H3_RESERVED_OFFSET); /** * Gets a value in the reserved space. Should always be zero for valid indexes. @@ -149,12 +149,12 @@ final class H3Index { /** * The number of bits in a single H3 resolution digit. */ - public static int H3_PER_DIGIT_OFFSET = 3; + public static final int H3_PER_DIGIT_OFFSET = 3; /** * 1's in the 3 bits of res 15 digit bits, 0's everywhere else. */ - public static long H3_DIGIT_MASK = 7L; + public static final long H3_DIGIT_MASK = 7L; /** * Gets the resolution res integer digit (0-7) of h3. 
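Beyond the hygiene of making these public statics final (a non-final public static like H3_MODE_MASK could otherwise be reassigned by any code at runtime), the constants describe a fixed bit layout: each H3 field occupies a known range of the 64-bit index, so a field write is clear-then-OR and a field read is mask-then-shift. A small self-contained sketch using only the mode constants quoted above (getMode is an illustrative accessor, not part of this diff):

class H3ModeBitsSketch {
    static final int H3_MODE_OFFSET = 59;
    static final long H3_MODE_MASK = 15L << H3_MODE_OFFSET;  // 1's in the 4 mode bits
    static final long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK; // 0's in the 4 mode bits

    // Same shape as H3_set_mode above: clear the 4 mode bits, then OR in the new value.
    static long setMode(long h3, long mode) {
        return (h3 & H3_MODE_MASK_NEGATIVE) | (mode << H3_MODE_OFFSET);
    }

    // Illustrative read path: mask the field, then shift it down to the low bits.
    static long getMode(long h3) {
        return (h3 & H3_MODE_MASK) >>> H3_MODE_OFFSET;
    }

    public static void main(String[] args) {
        long h3 = setMode(0L, 1L);       // 1 == H3_CELL_MODE
        System.out.println(getMode(h3)); // prints 1
    }
}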
diff --git a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/IngestDocumentBridge.java b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/IngestDocumentBridge.java index da97be39d97c..1df089c0427d 100644 --- a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/IngestDocumentBridge.java +++ b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/IngestDocumentBridge.java @@ -20,8 +20,6 @@ import java.util.function.BiConsumer; public class IngestDocumentBridge extends StableBridgeAPI.Proxy<IngestDocument> { - public static String INGEST_KEY = IngestDocument.INGEST_KEY; - public static IngestDocumentBridge wrap(final IngestDocument ingestDocument) { if (ingestDocument == null) { return null; diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index c6684052334c..ed37d4a70931 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -25,7 +25,7 @@ repositories { exclusiveContent { forRepository { maven { - url "https://artifactory.elastic.dev/artifactory/elasticsearch-native" + url = "https://artifactory.elastic.dev/artifactory/elasticsearch-native" metadataSources { artifact() } diff --git a/libs/simdvec/output.txt b/libs/simdvec/output.txt new file mode 100644 index 000000000000..acccaea19b76 --- /dev/null +++ b/libs/simdvec/output.txt @@ -0,0 +1,3809 @@ +Using gradle at '/Users/rene/dev/elastic/elasticsearch/gradlew' to run buildfile '/Users/rene/dev/elastic/elasticsearch/libs/simdvec/build.gradle': + +Initialized native services in: /Users/rene/.gradle/native +Initialized jansi services in: /Users/rene/.gradle/native +Found daemon DaemonInfo{pid=51106, address=[aba1969e-035e-4d64-b357-459afffcd02b port:60032, addresses:[/127.0.0.1]], state=Idle, lastBusy=1734939945982, context=DefaultDaemonContext[uid=2770ac86-e90f-46e0-aca4-eb484b46539b,javaHome=/Users/rene/.gradle-idea2/jdks/oracle_corporation-22-aarch64-os_x/jdk-22.jdk/Contents/Home,javaVersion=22,javaVendor=Oracle Corporation,daemonRegistryDir=/Users/rene/.gradle/daemon,pid=51106,idleTimeout=10800000,priority=NORMAL,applyInstrumentationAgent=true,nativeServicesMode=ENABLED,daemonOpts=-XX:+HeapDumpOnOutOfMemoryError,-Xss2m,--add-exports,jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED,--add-opens,java.base/java.time=ALL-UNNAMED,-Dfile.encoding=UTF-8,-Duser.country=DE,-Duser.language=en,-Duser.variant]} however its context does not match the desired criteria. +JVM is incompatible.
+Wanted: DaemonRequestContext{jvmCriteria=/Users/rene/.sdkman/candidates/java/21.0.5-oracle (no JDK specified, using current Java home), daemonOpts=[-XX:+HeapDumpOnOutOfMemoryError, -Xss2m, --add-exports, jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED, --add-exports, jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED, --add-exports, jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED, --add-exports, jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED, --add-exports, jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED, --add-opens, java.base/java.time=ALL-UNNAMED, -Dfile.encoding=UTF-8, -Duser.country=DE, -Duser.language=en, -Duser.variant], applyInstrumentationAgent=true, nativeServicesMode=ENABLED, priority=NORMAL} +Actual: DefaultDaemonContext[uid=2770ac86-e90f-46e0-aca4-eb484b46539b,javaHome=/Users/rene/.gradle-idea2/jdks/oracle_corporation-22-aarch64-os_x/jdk-22.jdk/Contents/Home,javaVersion=22,javaVendor=Oracle Corporation,daemonRegistryDir=/Users/rene/.gradle/daemon,pid=51106,idleTimeout=10800000,priority=NORMAL,applyInstrumentationAgent=true,nativeServicesMode=ENABLED,daemonOpts=-XX:+HeapDumpOnOutOfMemoryError,-Xss2m,--add-exports,jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED,--add-exports,jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED,--add-opens,java.base/java.time=ALL-UNNAMED,-Dfile.encoding=UTF-8,-Duser.country=DE,-Duser.language=en,-Duser.variant] + + Looking for a different daemon... +The client will now receive all logging from the daemon (pid: 49343). The daemon log file: /Users/rene/.gradle/daemon/8.11.1/daemon-49343.out.log +Starting 7th build in daemon [uptime: 28 mins 11.835 secs, performance: 98%, GC rate: 0.00/s, heap usage: 0% of 8 GiB] +Using 12 worker leases. +Now considering [/Users/rene/dev/elastic/elasticsearch, /Users/rene/dev/elastic/elasticsearch/build-tools-internal, /Users/rene/dev/elastic/elasticsearch/build-tools, /Users/rene/dev/elastic/elasticsearch/build-conventions] as hierarchies to watch +Now considering [/Users/rene/dev/elastic/elasticsearch/build-conventions, /Users/rene/dev/elastic/elasticsearch, /Users/rene/dev/elastic/elasticsearch/build-tools-internal, /Users/rene/dev/elastic/elasticsearch/build-tools] as hierarchies to watch +Now considering [/Users/rene/dev/elastic/elasticsearch/build-tools, /Users/rene/dev/elastic/elasticsearch/build-conventions, /Users/rene/dev/elastic/elasticsearch, /Users/rene/dev/elastic/elasticsearch/build-tools-internal] as hierarchies to watch +Now considering [/Users/rene/dev/elastic/elasticsearch/build-tools-internal, /Users/rene/dev/elastic/elasticsearch/build-tools, /Users/rene/dev/elastic/elasticsearch/build-conventions, /Users/rene/dev/elastic/elasticsearch] as hierarchies to watch +Calculating task graph as configuration cache cannot be reused because directory 'libs/simdvec' has changed. 
+Now considering [/Users/rene/dev/elastic/elasticsearch, /Users/rene/dev/elastic/elasticsearch/build-tools-internal, /Users/rene/dev/elastic/elasticsearch/build-tools, /Users/rene/dev/elastic/elasticsearch/build-conventions] as hierarchies to watch +Watching the file system is configured to be enabled if available +File system watching is active +Starting Build +Now considering [/Users/rene/dev/elastic/elasticsearch/build-conventions, /Users/rene/dev/elastic/elasticsearch, /Users/rene/dev/elastic/elasticsearch/build-tools-internal, /Users/rene/dev/elastic/elasticsearch/build-tools] as hierarchies to watch +Now considering [/Users/rene/dev/elastic/elasticsearch/build-tools, /Users/rene/dev/elastic/elasticsearch/build-conventions, /Users/rene/dev/elastic/elasticsearch, /Users/rene/dev/elastic/elasticsearch/build-tools-internal] as hierarchies to watch +Now considering [/Users/rene/dev/elastic/elasticsearch/build-tools-internal, /Users/rene/dev/elastic/elasticsearch/build-tools, /Users/rene/dev/elastic/elasticsearch/build-conventions, /Users/rene/dev/elastic/elasticsearch] as hierarchies to watch + +> Configure project :build-conventions +Evaluating project ':build-conventions' using build file '/Users/rene/dev/elastic/elasticsearch/build-conventions/build.gradle'. +Registering project ':build-conventions' in composite build. Will substitute for module 'org.elasticsearch:build-conventions'. +Resolve mutations for :build-conventions:compileJava (Thread[#1322,Execution worker,5,main]) started. +:build-conventions:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :build-conventions:compileJava UP-TO-DATE +Caching disabled for task ':build-conventions:compileJava' because: + Build cache is disabled +Skipping task ':build-conventions:compileJava' as it is up-to-date. +Resolve mutations for :build-conventions:pluginDescriptors (Thread[#1329,Execution worker Thread 8,5,main]) started. +:build-conventions:pluginDescriptors (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :build-conventions:pluginDescriptors UP-TO-DATE +Caching disabled for task ':build-conventions:pluginDescriptors' because: + Build cache is disabled + Not worth caching +Skipping task ':build-conventions:pluginDescriptors' as it is up-to-date. +Resolve mutations for :build-conventions:processResources (Thread[#1329,Execution worker Thread 8,5,main]) started. +:build-conventions:processResources (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :build-conventions:processResources UP-TO-DATE +Caching disabled for task ':build-conventions:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':build-conventions:processResources' as it is up-to-date. +Resolve mutations for :build-conventions:classes (Thread[#1329,Execution worker Thread 8,5,main]) started. +:build-conventions:classes (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :build-conventions:classes UP-TO-DATE +Skipping task ':build-conventions:classes' as it has no actions. +Resolve mutations for :build-conventions:jar (Thread[#1329,Execution worker Thread 8,5,main]) started. +:build-conventions:jar (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :build-conventions:jar UP-TO-DATE +Caching disabled for task ':build-conventions:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':build-conventions:jar' as it is up-to-date. 
+ +> Configure project :build-tools +Evaluating project ':build-tools' using build file '/Users/rene/dev/elastic/elasticsearch/build-tools/build.gradle'. +Transforming shadow-gradle-plugin-8.3.5.jar (com.gradleup.shadow:shadow-gradle-plugin:8.3.5) with InstrumentationAnalysisTransform +Transforming log4j-core-2.24.1.jar (org.apache.logging.log4j:log4j-core:2.24.1) with InstrumentationAnalysisTransform +Transforming maven-model-3.6.2.jar (org.apache.maven:maven-model:3.6.2) with InstrumentationAnalysisTransform +Transforming apache-rat-0.11.jar (org.apache.rat:apache-rat:0.11) with InstrumentationAnalysisTransform +Transforming spotless-plugin-gradle-6.25.0.jar (com.diffplug.spotless:spotless-plugin-gradle:6.25.0) with InstrumentationAnalysisTransform +Transforming spotless-lib-extra-2.45.0.jar (com.diffplug.spotless:spotless-lib-extra:2.45.0) with InstrumentationAnalysisTransform +Transforming solstice-1.7.5.jar (dev.equo.ide:solstice:1.7.5) with InstrumentationAnalysisTransform +Transforming org.eclipse.osgi-3.18.300.jar (org.eclipse.platform:org.eclipse.osgi:3.18.300) with InstrumentationAnalysisTransform +Transforming plexus-utils-4.0.2.jar (org.codehaus.plexus:plexus-utils:4.0.2) with InstrumentationAnalysisTransform +Transforming jdom2-2.0.6.1.jar (org.jdom:jdom2:2.0.6.1) with InstrumentationAnalysisTransform +Transforming asm-commons-9.7.1.jar (org.ow2.asm:asm-commons:9.7.1) with InstrumentationAnalysisTransform +Transforming apache-rat-tasks-0.11.jar (org.apache.rat:apache-rat-tasks:0.11) with InstrumentationAnalysisTransform +Transforming apache-rat-core-0.11.jar (org.apache.rat:apache-rat-core:0.11) with InstrumentationAnalysisTransform +Transforming commons-io-2.17.0.jar (commons-io:commons-io:2.17.0) with InstrumentationAnalysisTransform +Transforming ant-1.10.15.jar (org.apache.ant:ant:1.10.15) with InstrumentationAnalysisTransform +Transforming plexus-xml-4.0.4.jar (org.codehaus.plexus:plexus-xml:4.0.4) with InstrumentationAnalysisTransform +Transforming jdependency-2.11.jar (org.vafer:jdependency:2.11) with InstrumentationAnalysisTransform +Transforming durian-io-1.2.0.jar (com.diffplug.durian:durian-io:1.2.0) with InstrumentationAnalysisTransform +Transforming durian-collect-1.2.0.jar (com.diffplug.durian:durian-collect:1.2.0) with InstrumentationAnalysisTransform +Transforming durian-core-1.2.0.jar (com.diffplug.durian:durian-core:1.2.0) with InstrumentationAnalysisTransform +Transforming org.eclipse.jgit-6.7.0.202309050840-r.jar (org.eclipse.jgit:org.eclipse.jgit:6.7.0.202309050840-r) with InstrumentationAnalysisTransform +Transforming spotless-lib-2.45.0.jar (com.diffplug.spotless:spotless-lib:2.45.0) with InstrumentationAnalysisTransform +Transforming asm-tree-9.7.1.jar (org.ow2.asm:asm-tree:9.7.1) with InstrumentationAnalysisTransform +Transforming asm-9.7.1.jar (org.ow2.asm:asm:9.7.1) with InstrumentationAnalysisTransform +Transforming ant-launcher-1.10.15.jar (org.apache.ant:ant-launcher:1.10.15) with InstrumentationAnalysisTransform +Transforming maven-xml-impl-4.0.0-alpha-9.jar (org.apache.maven:maven-xml-impl:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming log4j-api-2.24.1.jar (org.apache.logging.log4j:log4j-api:2.24.1) with InstrumentationAnalysisTransform +Transforming commons-collections-3.2.1.jar (commons-collections:commons-collections:3.2.1) with InstrumentationAnalysisTransform +Transforming commons-lang-2.6.jar (commons-lang:commons-lang:2.6) with InstrumentationAnalysisTransform +Transforming commons-compress-1.5.jar 
(org.apache.commons:commons-compress:1.5) with InstrumentationAnalysisTransform +Transforming commons-cli-1.2.jar (commons-cli:commons-cli:1.2) with InstrumentationAnalysisTransform +Transforming JavaEWAH-1.2.3.jar (com.googlecode.javaewah:JavaEWAH:1.2.3) with InstrumentationAnalysisTransform +Transforming slf4j-api-1.7.36.jar (org.slf4j:slf4j-api:1.7.36) with InstrumentationAnalysisTransform +Transforming commons-codec-1.16.0.jar (commons-codec:commons-codec:1.16.0) with InstrumentationAnalysisTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with InstrumentationAnalysisTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with InstrumentationAnalysisTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with InstrumentationAnalysisTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with InstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with InstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with InstrumentationAnalysisTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with InstrumentationAnalysisTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with InstrumentationAnalysisTransform +Transforming main (project :build-conventions) with InstrumentationAnalysisTransform +Transforming shadow-gradle-plugin-8.3.5.jar (com.gradleup.shadow:shadow-gradle-plugin:8.3.5) with InstrumentationAnalysisTransform +Transforming shadow-gradle-plugin-8.3.5.jar (com.gradleup.shadow:shadow-gradle-plugin:8.3.5) with MergeInstrumentationAnalysisTransform +Transforming log4j-core-2.24.1.jar (org.apache.logging.log4j:log4j-core:2.24.1) with InstrumentationAnalysisTransform +Transforming log4j-core-2.24.1.jar (org.apache.logging.log4j:log4j-core:2.24.1) with MergeInstrumentationAnalysisTransform +Transforming shadow-gradle-plugin-8.3.5.jar (com.gradleup.shadow:shadow-gradle-plugin:8.3.5) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-model-3.6.2.jar (org.apache.maven:maven-model:3.6.2) with InstrumentationAnalysisTransform +Transforming log4j-core-2.24.1.jar (org.apache.logging.log4j:log4j-core:2.24.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-model-3.6.2.jar (org.apache.maven:maven-model:3.6.2) with MergeInstrumentationAnalysisTransform +Transforming apache-rat-0.11.jar (org.apache.rat:apache-rat:0.11) with InstrumentationAnalysisTransform +Transforming maven-model-3.6.2.jar (org.apache.maven:maven-model:3.6.2) with ExternalDependencyInstrumentingArtifactTransform +Transforming 
apache-rat-0.11.jar (org.apache.rat:apache-rat:0.11) with MergeInstrumentationAnalysisTransform +Transforming spotless-plugin-gradle-6.25.0.jar (com.diffplug.spotless:spotless-plugin-gradle:6.25.0) with InstrumentationAnalysisTransform +Transforming spotless-plugin-gradle-6.25.0.jar (com.diffplug.spotless:spotless-plugin-gradle:6.25.0) with MergeInstrumentationAnalysisTransform +Transforming apache-rat-0.11.jar (org.apache.rat:apache-rat:0.11) with ExternalDependencyInstrumentingArtifactTransform +Transforming spotless-lib-extra-2.45.0.jar (com.diffplug.spotless:spotless-lib-extra:2.45.0) with InstrumentationAnalysisTransform +Transforming spotless-lib-extra-2.45.0.jar (com.diffplug.spotless:spotless-lib-extra:2.45.0) with MergeInstrumentationAnalysisTransform +Transforming spotless-plugin-gradle-6.25.0.jar (com.diffplug.spotless:spotless-plugin-gradle:6.25.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming solstice-1.7.5.jar (dev.equo.ide:solstice:1.7.5) with InstrumentationAnalysisTransform +Transforming solstice-1.7.5.jar (dev.equo.ide:solstice:1.7.5) with MergeInstrumentationAnalysisTransform +Transforming spotless-lib-extra-2.45.0.jar (com.diffplug.spotless:spotless-lib-extra:2.45.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming org.eclipse.osgi-3.18.300.jar (org.eclipse.platform:org.eclipse.osgi:3.18.300) with InstrumentationAnalysisTransform +Transforming org.eclipse.osgi-3.18.300.jar (org.eclipse.platform:org.eclipse.osgi:3.18.300) with MergeInstrumentationAnalysisTransform +Transforming solstice-1.7.5.jar (dev.equo.ide:solstice:1.7.5) with ExternalDependencyInstrumentingArtifactTransform +Transforming plexus-utils-4.0.2.jar (org.codehaus.plexus:plexus-utils:4.0.2) with InstrumentationAnalysisTransform +Transforming plexus-utils-4.0.2.jar (org.codehaus.plexus:plexus-utils:4.0.2) with MergeInstrumentationAnalysisTransform +Transforming org.eclipse.osgi-3.18.300.jar (org.eclipse.platform:org.eclipse.osgi:3.18.300) with ExternalDependencyInstrumentingArtifactTransform +Transforming jdom2-2.0.6.1.jar (org.jdom:jdom2:2.0.6.1) with InstrumentationAnalysisTransform +Transforming jdom2-2.0.6.1.jar (org.jdom:jdom2:2.0.6.1) with MergeInstrumentationAnalysisTransform +Transforming plexus-utils-4.0.2.jar (org.codehaus.plexus:plexus-utils:4.0.2) with ExternalDependencyInstrumentingArtifactTransform +Transforming asm-commons-9.7.1.jar (org.ow2.asm:asm-commons:9.7.1) with InstrumentationAnalysisTransform +Transforming jdom2-2.0.6.1.jar (org.jdom:jdom2:2.0.6.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming asm-commons-9.7.1.jar (org.ow2.asm:asm-commons:9.7.1) with MergeInstrumentationAnalysisTransform +Transforming apache-rat-tasks-0.11.jar (org.apache.rat:apache-rat-tasks:0.11) with InstrumentationAnalysisTransform +Transforming asm-commons-9.7.1.jar (org.ow2.asm:asm-commons:9.7.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming apache-rat-tasks-0.11.jar (org.apache.rat:apache-rat-tasks:0.11) with MergeInstrumentationAnalysisTransform +Transforming apache-rat-core-0.11.jar (org.apache.rat:apache-rat-core:0.11) with InstrumentationAnalysisTransform +Transforming apache-rat-core-0.11.jar (org.apache.rat:apache-rat-core:0.11) with MergeInstrumentationAnalysisTransform +Transforming apache-rat-tasks-0.11.jar (org.apache.rat:apache-rat-tasks:0.11) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-io-2.17.0.jar (commons-io:commons-io:2.17.0) with InstrumentationAnalysisTransform +Transforming 
apache-rat-core-0.11.jar (org.apache.rat:apache-rat-core:0.11) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-io-2.17.0.jar (commons-io:commons-io:2.17.0) with MergeInstrumentationAnalysisTransform +Transforming ant-1.10.15.jar (org.apache.ant:ant:1.10.15) with InstrumentationAnalysisTransform +Transforming commons-io-2.17.0.jar (commons-io:commons-io:2.17.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming ant-1.10.15.jar (org.apache.ant:ant:1.10.15) with MergeInstrumentationAnalysisTransform +Transforming plexus-xml-4.0.4.jar (org.codehaus.plexus:plexus-xml:4.0.4) with InstrumentationAnalysisTransform +Transforming plexus-xml-4.0.4.jar (org.codehaus.plexus:plexus-xml:4.0.4) with MergeInstrumentationAnalysisTransform +Transforming ant-1.10.15.jar (org.apache.ant:ant:1.10.15) with ExternalDependencyInstrumentingArtifactTransform +Transforming plexus-xml-4.0.4.jar (org.codehaus.plexus:plexus-xml:4.0.4) with ExternalDependencyInstrumentingArtifactTransform +Transforming jdependency-2.11.jar (org.vafer:jdependency:2.11) with InstrumentationAnalysisTransform +Transforming jdependency-2.11.jar (org.vafer:jdependency:2.11) with MergeInstrumentationAnalysisTransform +Transforming durian-io-1.2.0.jar (com.diffplug.durian:durian-io:1.2.0) with InstrumentationAnalysisTransform +Transforming durian-io-1.2.0.jar (com.diffplug.durian:durian-io:1.2.0) with MergeInstrumentationAnalysisTransform +Transforming jdependency-2.11.jar (org.vafer:jdependency:2.11) with ExternalDependencyInstrumentingArtifactTransform +Transforming durian-collect-1.2.0.jar (com.diffplug.durian:durian-collect:1.2.0) with InstrumentationAnalysisTransform +Transforming durian-io-1.2.0.jar (com.diffplug.durian:durian-io:1.2.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming durian-collect-1.2.0.jar (com.diffplug.durian:durian-collect:1.2.0) with MergeInstrumentationAnalysisTransform +Transforming durian-core-1.2.0.jar (com.diffplug.durian:durian-core:1.2.0) with InstrumentationAnalysisTransform +Transforming durian-core-1.2.0.jar (com.diffplug.durian:durian-core:1.2.0) with MergeInstrumentationAnalysisTransform +Transforming durian-collect-1.2.0.jar (com.diffplug.durian:durian-collect:1.2.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming org.eclipse.jgit-6.7.0.202309050840-r.jar (org.eclipse.jgit:org.eclipse.jgit:6.7.0.202309050840-r) with InstrumentationAnalysisTransform +Transforming durian-core-1.2.0.jar (com.diffplug.durian:durian-core:1.2.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming org.eclipse.jgit-6.7.0.202309050840-r.jar (org.eclipse.jgit:org.eclipse.jgit:6.7.0.202309050840-r) with MergeInstrumentationAnalysisTransform +Transforming spotless-lib-2.45.0.jar (com.diffplug.spotless:spotless-lib:2.45.0) with InstrumentationAnalysisTransform +Transforming spotless-lib-2.45.0.jar (com.diffplug.spotless:spotless-lib:2.45.0) with MergeInstrumentationAnalysisTransform +Transforming org.eclipse.jgit-6.7.0.202309050840-r.jar (org.eclipse.jgit:org.eclipse.jgit:6.7.0.202309050840-r) with ExternalDependencyInstrumentingArtifactTransform +Transforming asm-tree-9.7.1.jar (org.ow2.asm:asm-tree:9.7.1) with InstrumentationAnalysisTransform +Transforming asm-tree-9.7.1.jar (org.ow2.asm:asm-tree:9.7.1) with MergeInstrumentationAnalysisTransform +Transforming spotless-lib-2.45.0.jar (com.diffplug.spotless:spotless-lib:2.45.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming asm-9.7.1.jar (org.ow2.asm:asm:9.7.1) 
with InstrumentationAnalysisTransform +Transforming asm-tree-9.7.1.jar (org.ow2.asm:asm-tree:9.7.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming asm-9.7.1.jar (org.ow2.asm:asm:9.7.1) with MergeInstrumentationAnalysisTransform +Transforming ant-launcher-1.10.15.jar (org.apache.ant:ant-launcher:1.10.15) with InstrumentationAnalysisTransform +Transforming ant-launcher-1.10.15.jar (org.apache.ant:ant-launcher:1.10.15) with MergeInstrumentationAnalysisTransform +Transforming maven-xml-impl-4.0.0-alpha-9.jar (org.apache.maven:maven-xml-impl:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming asm-9.7.1.jar (org.ow2.asm:asm:9.7.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming ant-launcher-1.10.15.jar (org.apache.ant:ant-launcher:1.10.15) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-xml-impl-4.0.0-alpha-9.jar (org.apache.maven:maven-xml-impl:4.0.0-alpha-9) with MergeInstrumentationAnalysisTransform +Transforming log4j-api-2.24.1.jar (org.apache.logging.log4j:log4j-api:2.24.1) with InstrumentationAnalysisTransform +Transforming maven-xml-impl-4.0.0-alpha-9.jar (org.apache.maven:maven-xml-impl:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming log4j-api-2.24.1.jar (org.apache.logging.log4j:log4j-api:2.24.1) with MergeInstrumentationAnalysisTransform +Transforming commons-collections-3.2.1.jar (commons-collections:commons-collections:3.2.1) with InstrumentationAnalysisTransform +Transforming commons-collections-3.2.1.jar (commons-collections:commons-collections:3.2.1) with MergeInstrumentationAnalysisTransform +Transforming commons-lang-2.6.jar (commons-lang:commons-lang:2.6) with InstrumentationAnalysisTransform +Transforming log4j-api-2.24.1.jar (org.apache.logging.log4j:log4j-api:2.24.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-collections-3.2.1.jar (commons-collections:commons-collections:3.2.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-lang-2.6.jar (commons-lang:commons-lang:2.6) with MergeInstrumentationAnalysisTransform +Transforming commons-compress-1.5.jar (org.apache.commons:commons-compress:1.5) with InstrumentationAnalysisTransform +Transforming commons-compress-1.5.jar (org.apache.commons:commons-compress:1.5) with MergeInstrumentationAnalysisTransform +Transforming commons-cli-1.2.jar (commons-cli:commons-cli:1.2) with InstrumentationAnalysisTransform +Transforming commons-lang-2.6.jar (commons-lang:commons-lang:2.6) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-cli-1.2.jar (commons-cli:commons-cli:1.2) with MergeInstrumentationAnalysisTransform +Transforming JavaEWAH-1.2.3.jar (com.googlecode.javaewah:JavaEWAH:1.2.3) with InstrumentationAnalysisTransform +Transforming commons-compress-1.5.jar (org.apache.commons:commons-compress:1.5) with ExternalDependencyInstrumentingArtifactTransform +Transforming JavaEWAH-1.2.3.jar (com.googlecode.javaewah:JavaEWAH:1.2.3) with MergeInstrumentationAnalysisTransform +Transforming commons-cli-1.2.jar (commons-cli:commons-cli:1.2) with ExternalDependencyInstrumentingArtifactTransform +Transforming slf4j-api-1.7.36.jar (org.slf4j:slf4j-api:1.7.36) with InstrumentationAnalysisTransform +Transforming JavaEWAH-1.2.3.jar (com.googlecode.javaewah:JavaEWAH:1.2.3) with ExternalDependencyInstrumentingArtifactTransform +Transforming slf4j-api-1.7.36.jar (org.slf4j:slf4j-api:1.7.36) with MergeInstrumentationAnalysisTransform +Transforming 
commons-codec-1.16.0.jar (commons-codec:commons-codec:1.16.0) with InstrumentationAnalysisTransform +Transforming slf4j-api-1.7.36.jar (org.slf4j:slf4j-api:1.7.36) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-codec-1.16.0.jar (commons-codec:commons-codec:1.16.0) with MergeInstrumentationAnalysisTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with InstrumentationAnalysisTransform +Transforming commons-codec-1.16.0.jar (commons-codec:commons-codec:1.16.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with MergeInstrumentationAnalysisTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with MergeInstrumentationAnalysisTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with InstrumentationAnalysisTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with MergeInstrumentationAnalysisTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with InstrumentationAnalysisTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with MergeInstrumentationAnalysisTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with InstrumentationAnalysisTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with MergeInstrumentationAnalysisTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with ExternalDependencyInstrumentingArtifactTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with InstrumentationAnalysisTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with MergeInstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with MergeInstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with InstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with MergeInstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with InstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with 
ExternalDependencyInstrumentingArtifactTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with InstrumentationAnalysisTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with InstrumentationAnalysisTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming build-conventions.jar (project :build-conventions) with ProjectDependencyInstrumentingArtifactTransform + +> Configure project :build-tools:reaper +Evaluating project ':build-tools:reaper' using build file '/Users/rene/dev/elastic/elasticsearch/build-tools/reaper/build.gradle'. +Registering project ':build-tools' in composite build. Will substitute for module 'org.elasticsearch.gradle:build-tools'. +Registering project ':build-tools:reaper' in composite build. Will substitute for module 'org.elasticsearch.gradle:reaper'. + +> Configure project :build-tools-internal +Evaluating project ':build-tools-internal' using build file '/Users/rene/dev/elastic/elasticsearch/build-tools-internal/build.gradle'. 
commons-codec-1.16.0.jar (commons-codec:commons-codec:1.16.0) with MergeInstrumentationAnalysisTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with InstrumentationAnalysisTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with MergeInstrumentationAnalysisTransform +Transforming slf4j-api-1.7.36.jar (org.slf4j:slf4j-api:1.7.36) with ExternalDependencyInstrumentingArtifactTransform +Transforming commons-codec-1.16.0.jar (commons-codec:commons-codec:1.16.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming concurrent-trees-2.6.1.jar (com.googlecode.concurrent-trees:concurrent-trees:2.6.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with MergeInstrumentationAnalysisTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with InstrumentationAnalysisTransform +Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with MergeInstrumentationAnalysisTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with InstrumentationAnalysisTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with MergeInstrumentationAnalysisTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with ExternalDependencyInstrumentingArtifactTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with InstrumentationAnalysisTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with MergeInstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with InstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with MergeInstrumentationAnalysisTransform +Transforming okhttp-4.12.0.jar (com.squareup.okhttp3:okhttp:4.12.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with ExternalDependencyInstrumentingArtifactTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with MergeInstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with InstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with MergeInstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with InstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with 
MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with InstrumentationAnalysisTransform +Transforming okio-jvm-3.6.0.jar (com.squareup.okio:okio-jvm:3.6.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk8-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-jdk7-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with InstrumentationAnalysisTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming build-conventions.jar (project :build-conventions) with ProjectDependencyInstrumentingArtifactTransform +Found project 'project :build-conventions' as substitute for module 'org.elasticsearch:build-conventions'. +Found project 'project :build-tools' as substitute for module 'org.elasticsearch.gradle:build-tools'. +Found project 'project :build-tools:reaper' as substitute for module 'org.elasticsearch.gradle:reaper'. +Found project 'project :build-conventions' as substitute for module 'org.elasticsearch:build-conventions'. +Found project 'project :build-tools' as substitute for module 'org.elasticsearch.gradle:build-tools'. +Resolve mutations for :build-tools:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started. +Resolve mutations for :build-tools:reaper:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. +Resolve mutations for :build-tools-internal:extractPluginRequests (Thread[#1331,Execution worker Thread 10,5,main]) started. +work action resolve build-conventions.jar (project :build-conventions) (Thread[#1330,Execution worker Thread 9,5,main]) started. +:build-tools-internal:extractPluginRequests (Thread[#1331,Execution worker Thread 10,5,main]) started. +:build-tools:reaper:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. +:build-tools:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started. 
+
+> Task :build-tools-internal:extractPluginRequests UP-TO-DATE
+Caching disabled for task ':build-tools-internal:extractPluginRequests' because:
+  Build cache is disabled
+Skipping task ':build-tools-internal:extractPluginRequests' as it is up-to-date.
+Resolve mutations for :build-tools-internal:generatePluginAdapters (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :build-tools:reaper:compileJava UP-TO-DATE
+Caching disabled for task ':build-tools:reaper:compileJava' because:
+  Build cache is disabled
+Skipping task ':build-tools:reaper:compileJava' as it is up-to-date.
+No compile result for :build-tools:reaper:compileJava
+:build-tools-internal:generatePluginAdapters (Thread[#1325,Execution worker Thread 4,5,main]) started.
+No compile result for :build-tools:reaper:compileJava
+Resolve mutations for :build-tools:reaper:processResources (Thread[#1329,Execution worker Thread 8,5,main]) started.
+:build-tools:reaper:processResources (Thread[#1329,Execution worker Thread 8,5,main]) started.
+
+> Task :build-tools-internal:generatePluginAdapters UP-TO-DATE
+Caching disabled for task ':build-tools-internal:generatePluginAdapters' because:
+  Build cache is disabled
+Skipping task ':build-tools-internal:generatePluginAdapters' as it is up-to-date.
+
+> Task :build-tools:reaper:processResources NO-SOURCE
+Skipping task ':build-tools:reaper:processResources' as it has no source files and no previous output files.
+Resolve mutations for :build-tools:reaper:classes (Thread[#1329,Execution worker Thread 8,5,main]) started.
+:build-tools:reaper:classes (Thread[#1329,Execution worker Thread 8,5,main]) started.
+
+> Task :build-tools:reaper:classes UP-TO-DATE
+Skipping task ':build-tools:reaper:classes' as it has no actions.
+Resolve mutations for :build-tools-internal:pluginDescriptors (Thread[#1325,Execution worker Thread 4,5,main]) started.
+Resolve mutations for :build-tools:reaper:jar (Thread[#1330,Execution worker Thread 9,5,main]) started.
+:build-tools-internal:pluginDescriptors (Thread[#1325,Execution worker Thread 4,5,main]) started.
+:build-tools:reaper:jar (Thread[#1330,Execution worker Thread 9,5,main]) started.
+
+> Task :build-tools-internal:pluginDescriptors UP-TO-DATE
+Caching disabled for task ':build-tools-internal:pluginDescriptors' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools-internal:pluginDescriptors' as it is up-to-date.
+Resolve mutations for :build-tools-internal:processResources (Thread[#1325,Execution worker Thread 4,5,main]) started.
+:build-tools-internal:processResources (Thread[#1325,Execution worker Thread 4,5,main]) started.
+
+> Task :build-tools:reaper:jar UP-TO-DATE
+Caching disabled for task ':build-tools:reaper:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools:reaper:jar' as it is up-to-date.
+work action resolve reaper.jar (project :build-tools:reaper) (Thread[#1330,Execution worker Thread 9,5,main]) started.
+
+> Task :build-tools-internal:processResources UP-TO-DATE
+Caching disabled for task ':build-tools-internal:processResources' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools-internal:processResources' as it is up-to-date.
+
+> Task :build-tools:compileJava UP-TO-DATE
+Caching disabled for task ':build-tools:compileJava' because:
+  Build cache is disabled
+Skipping task ':build-tools:compileJava' as it is up-to-date.
+No compile result for :build-tools:compileJava
+No compile result for :build-tools:compileJava
+Resolve mutations for :build-tools:compileGroovy (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:build-tools:compileGroovy (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :build-tools:compileGroovy NO-SOURCE
+Skipping task ':build-tools:compileGroovy' as it has no source files and no previous output files.
+Resolve mutations for :build-tools:generateVersionProperties (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:build-tools:generateVersionProperties (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :build-tools:generateVersionProperties UP-TO-DATE
+Caching disabled for task ':build-tools:generateVersionProperties' because:
+  Build cache is disabled
+Skipping task ':build-tools:generateVersionProperties' as it is up-to-date.
+Resolve mutations for :build-tools:pluginDescriptors (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:build-tools:pluginDescriptors (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :build-tools:pluginDescriptors UP-TO-DATE
+Caching disabled for task ':build-tools:pluginDescriptors' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools:pluginDescriptors' as it is up-to-date.
+Resolve mutations for :build-tools:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:build-tools:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :build-tools:processResources UP-TO-DATE
+Caching disabled for task ':build-tools:processResources' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools:processResources' as it is up-to-date.
+Resolve mutations for :build-tools:classes (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:build-tools:classes (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :build-tools:classes UP-TO-DATE
+Skipping task ':build-tools:classes' as it has no actions.
+Resolve mutations for :build-tools:jar (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:build-tools:jar (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :build-tools:jar UP-TO-DATE
+Caching disabled for task ':build-tools:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools:jar' as it is up-to-date.
+other build task :build-tools:jar (Thread[#1328,Execution worker Thread 7,5,main]) started.
+work action resolve build-tools-9.0.0-SNAPSHOT.jar (project :build-tools) (Thread[#1328,Execution worker Thread 7,5,main]) started.
+Resolve mutations for :build-tools-internal:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:build-tools-internal:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :build-tools-internal:compileJava UP-TO-DATE
+Caching disabled for task ':build-tools-internal:compileJava' because:
+  Build cache is disabled
+Skipping task ':build-tools-internal:compileJava' as it is up-to-date.
+No compile result for :build-tools-internal:compileJava
+No compile result for :build-tools-internal:compileJava
+Resolve mutations for :build-tools-internal:compileGroovy (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:build-tools-internal:compileGroovy (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :build-tools-internal:compileGroovy UP-TO-DATE
+Caching disabled for task ':build-tools-internal:compileGroovy' because:
+  Build cache is disabled
+Skipping task ':build-tools-internal:compileGroovy' as it is up-to-date.
+Resolve mutations for :build-tools-internal:compileGroovyPlugins (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:build-tools-internal:compileGroovyPlugins (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :build-tools-internal:compileGroovyPlugins UP-TO-DATE
+Caching disabled for task ':build-tools-internal:compileGroovyPlugins' because:
+  Build cache is disabled
+Skipping task ':build-tools-internal:compileGroovyPlugins' as it is up-to-date.
+Resolve mutations for :build-tools-internal:classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:build-tools-internal:classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :build-tools-internal:classes UP-TO-DATE
+Skipping task ':build-tools-internal:classes' as it has no actions.
+Resolve mutations for :build-tools-internal:jar (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:build-tools-internal:jar (Thread[#1330,Execution worker Thread 9,5,main]) started.
+
+> Task :build-tools-internal:jar UP-TO-DATE
+Caching disabled for task ':build-tools-internal:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':build-tools-internal:jar' as it is up-to-date.
[... repetitive "Transforming ..." lines elided: the same three instrumentation transforms are applied to each dependency on the :build-tools-internal buildscript classpath (develocity-gradle-plugin, jackson, json-schema-validator, gradle-info-plugin, svnkit, jna, guava, and the rest), plus the project outputs of :build-conventions, :build-tools, and :build-tools:reaper ...]
+Transforming maven-api-xml-4.0.0-alpha-9.jar (org.apache.maven:maven-api-xml:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with InstrumentationAnalysisTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with MergeInstrumentationAnalysisTransform +Transforming woodstox-core-6.5.1.jar (com.fasterxml.woodstox:woodstox-core:6.5.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming xz-1.9.jar (org.tukaani:xz:1.9) with ExternalDependencyInstrumentingArtifactTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with InstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with MergeInstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with MergeInstrumentationAnalysisTransform +Transforming durian-swt.os-4.2.2.jar (com.diffplug.durian:durian-swt.os:4.2.2) with ExternalDependencyInstrumentingArtifactTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with InstrumentationAnalysisTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with MergeInstrumentationAnalysisTransform +Transforming platform-3.4.0.jar (net.java.dev.jna:platform:3.4.0) with InstrumentationAnalysisTransform +Transforming kotlin-stdlib-common-1.9.10.jar (org.jetbrains.kotlin:kotlin-stdlib-common:1.9.10) with ExternalDependencyInstrumentingArtifactTransform +Transforming annotations-13.0.jar (org.jetbrains:annotations:13.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming platform-3.4.0.jar (net.java.dev.jna:platform:3.4.0) with MergeInstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with InstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with MergeInstrumentationAnalysisTransform +Transforming platform-3.4.0.jar (net.java.dev.jna:platform:3.4.0) with ExternalDependencyInstrumentingArtifactTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with InstrumentationAnalysisTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with MergeInstrumentationAnalysisTransform +Transforming maven-api-meta-4.0.0-alpha-9.jar (org.apache.maven:maven-api-meta:4.0.0-alpha-9) with ExternalDependencyInstrumentingArtifactTransform +Transforming stax2-api-4.2.1.jar (org.codehaus.woodstox:stax2-api:4.2.1) with ExternalDependencyInstrumentingArtifactTransform +Transforming build-tools-internal-9.0.0-SNAPSHOT.jar (project :build-tools-internal) with ProjectDependencyInstrumentingArtifactTransform +Transforming build-conventions.jar (project :build-conventions) with ProjectDependencyInstrumentingArtifactTransform +Transforming build-tools-9.0.0-SNAPSHOT.jar (project :build-tools) with ProjectDependencyInstrumentingArtifactTransform +Transforming reaper.jar (project :build-tools:reaper) with ProjectDependencyInstrumentingArtifactTransform +Settings evaluated using settings file '/Users/rene/dev/elastic/elasticsearch/settings.gradle'. +Projects loaded. Root project using build file '/Users/rene/dev/elastic/elasticsearch/build.gradle'. 
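The interleaved "Transforming ... with ..." lines above are Gradle artifact transforms at work: before any build script runs, each external dependency jar is analyzed (InstrumentationAnalysisTransform), its analysis merged (MergeInstrumentationAnalysisTransform), and the jar itself rewritten (ExternalDependencyInstrumentingArtifactTransform), while project-built jars go through ProjectDependencyInstrumentingArtifactTransform instead. As a minimal sketch of the mechanism only, not Elasticsearch's actual transform code, and with a hypothetical class name, a transform is a TransformAction that maps an input artifact to output files:

import org.gradle.api.artifacts.transform.InputArtifact;
import org.gradle.api.artifacts.transform.TransformAction;
import org.gradle.api.artifacts.transform.TransformOutputs;
import org.gradle.api.artifacts.transform.TransformParameters;
import org.gradle.api.file.FileSystemLocation;
import org.gradle.api.provider.Provider;

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

// Hypothetical sketch, not the Elasticsearch build's implementation.
public abstract class MyInstrumentingTransform implements TransformAction<TransformParameters.None> {

    // Gradle injects the artifact being transformed (one jar per invocation).
    @InputArtifact
    public abstract Provider<FileSystemLocation> getInputArtifact();

    @Override
    public void transform(TransformOutputs outputs) {
        File inputJar = getInputArtifact().get().getAsFile();
        // A real instrumenting transform would rewrite bytecode here; this
        // sketch just copies the jar through to show the input/output contract.
        File outputJar = outputs.file("instrumented-" + inputJar.getName());
        try {
            Files.copy(inputJar.toPath(), outputJar.toPath(), StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

Gradle caches transform outputs keyed on the input artifact, so a warm build would skip most of the work logged above.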
+Included projects: [root project 'elasticsearch', project ':benchmarks', project ':client', project ':distribution', project ':docs', project ':libs', project ':modules', project ':plugins', project ':qa', project ':rest-api-spec', project ':server', project ':test', project ':x-pack', project ':client:benchmark', project ':client:client-benchmark-noop-api-plugin', project ':client:rest', project ':client:sniffer', project ':client:test', project ':distribution:archives', project ':distribution:bwc', project ':distribution:docker', project ':distribution:packages', project ':distribution:tools', project ':libs:cli', project ':libs:core', project ':libs:dissect', project ':libs:entitlement', project ':libs:geo', project ':libs:grok', project ':libs:h3', project ':libs:log4j', project ':libs:logging', project ':libs:logstash-bridge', project ':libs:lz4', project ':libs:native', project ':libs:plugin-analysis-api', project ':libs:plugin-api', project ':libs:plugin-scanner', project ':libs:secure-sm', project ':libs:simdvec', project ':libs:ssl-config', project ':libs:tdigest', project ':libs:x-content', project ':modules:aggregations', project ':modules:analysis-common', project ':modules:apm', project ':modules:data-streams', project ':modules:dot-prefix-validation', project ':modules:health-shards-availability', project ':modules:ingest-attachment', project ':modules:ingest-common', project ':modules:ingest-geoip', project ':modules:ingest-user-agent', project ':modules:kibana', project ':modules:lang-expression', project ':modules:lang-mustache', project ':modules:lang-painless', project ':modules:legacy-geo', project ':modules:mapper-extras', project ':modules:parent-join', project ':modules:percolator', project ':modules:rank-eval', project ':modules:reindex', project ':modules:repository-azure', project ':modules:repository-gcs', project ':modules:repository-s3', project ':modules:repository-url', project ':modules:rest-root', project ':modules:runtime-fields-common', project ':modules:systemd', project ':modules:transport-netty4', project ':plugins:analysis-icu', project ':plugins:analysis-kuromoji', project ':plugins:analysis-nori', project ':plugins:analysis-phonetic', project ':plugins:analysis-smartcn', project ':plugins:analysis-stempel', project ':plugins:analysis-ukrainian', project ':plugins:discovery-azure-classic', project ':plugins:discovery-ec2', project ':plugins:discovery-gce', project ':plugins:mapper-annotated-text', project ':plugins:mapper-murmur3', project ':plugins:mapper-size', project ':plugins:repository-hdfs', project ':plugins:store-smb', project ':qa:ccs-common-rest', project ':qa:ccs-rolling-upgrade-remote-cluster', project ':qa:ccs-unavailable-clusters', project ':qa:custom-rest-controller', project ':qa:evil-tests', project ':qa:full-cluster-restart', project ':qa:logging-config', project ':qa:logging-spi', project ':qa:lucene-index-compatibility', project ':qa:mixed-cluster', project ':qa:multi-cluster-search', project ':qa:no-bootstrap-tests', project ':qa:packaging', project ':qa:remote-clusters', project ':qa:repository-multi-version', project ':qa:restricted-loggers', project ':qa:rolling-upgrade', project ':qa:rolling-upgrade-legacy', project ':qa:smoke-test-http', project ':qa:smoke-test-ingest-disabled', project ':qa:smoke-test-ingest-with-all-dependencies', project ':qa:smoke-test-multinode', project ':qa:smoke-test-plugins', project ':qa:stable-api', project ':qa:system-indices', project ':qa:unconfigured-node-name', project 
':qa:verify-version-constants', project ':test:external-modules', project ':test:fixtures', project ':test:framework', project ':test:immutable-collections-patch', project ':test:logger-usage', project ':test:metadata-extractor', project ':test:test-clusters', project ':test:x-content', project ':test:yaml-rest-runner', project ':x-pack:libs', project ':x-pack:license-tools', project ':x-pack:plugin', project ':x-pack:qa', project ':x-pack:rest-resources-zip', project ':x-pack:test', project ':distribution:archives:darwin-aarch64-tar', project ':distribution:archives:darwin-tar', project ':distribution:archives:integ-test-zip', project ':distribution:archives:linux-aarch64-tar', project ':distribution:archives:linux-tar', project ':distribution:archives:windows-zip', project ':distribution:bwc:bugfix', project ':distribution:bwc:bugfix2', project ':distribution:bwc:maintenance', project ':distribution:bwc:minor', project ':distribution:bwc:staged', project ':distribution:docker:cloud-ess-docker-aarch64-export', project ':distribution:docker:cloud-ess-docker-export', project ':distribution:docker:docker-aarch64-export', project ':distribution:docker:docker-export', project ':distribution:docker:ironbank-docker-aarch64-export', project ':distribution:docker:ironbank-docker-export', project ':distribution:docker:wolfi-docker-aarch64-export', project ':distribution:docker:wolfi-docker-export', project ':distribution:packages:aarch64-deb', project ':distribution:packages:aarch64-rpm', project ':distribution:packages:deb', project ':distribution:packages:rpm', project ':distribution:tools:ansi-console', project ':distribution:tools:cli-launcher', project ':distribution:tools:geoip-cli', project ':distribution:tools:java-version-checker', project ':distribution:tools:keystore-cli', project ':distribution:tools:plugin-cli', project ':distribution:tools:server-cli', project ':distribution:tools:windows-service-cli', project ':libs:entitlement:agent', project ':libs:entitlement:asm-provider', project ':libs:entitlement:bridge', project ':libs:entitlement:qa', project ':libs:entitlement:tools', project ':libs:native:native-libraries', project ':libs:x-content:impl', project ':modules:ingest-geoip:qa', project ':modules:lang-painless:spi', project ':plugins:discovery-ec2:qa', project ':plugins:discovery-gce:qa', project ':plugins:repository-hdfs:hadoop-client-api', project ':qa:stable-api:logging', project ':qa:stable-api:plugin-analysis-api', project ':qa:stable-api:plugin-api', project ':test:external-modules:test-apm-integration', project ':test:external-modules:test-delayed-aggs', project ':test:external-modules:test-die-with-dignity', project ':test:external-modules:test-error-query', project ':test:external-modules:test-esql-heap-attack', project ':test:external-modules:test-jvm-crash', project ':test:external-modules:test-latency-simulating-directory', project ':test:fixtures:aws-sts-fixture', project ':test:fixtures:azure-fixture', project ':test:fixtures:ec2-imds-fixture', project ':test:fixtures:gcs-fixture', project ':test:fixtures:geoip-fixture', project ':test:fixtures:hdfs-fixture', project ':test:fixtures:krb5kdc-fixture', project ':test:fixtures:minio-fixture', project ':test:fixtures:old-elasticsearch', project ':test:fixtures:s3-fixture', project ':test:fixtures:testcontainer-utils', project ':test:fixtures:url-fixture', project ':x-pack:libs:es-opensaml-security-api', project ':x-pack:plugin:analytics', project ':x-pack:plugin:apm-data', project ':x-pack:plugin:async', project 
':x-pack:plugin:async-search', project ':x-pack:plugin:autoscaling', project ':x-pack:plugin:blob-cache', project ':x-pack:plugin:ccr', project ':x-pack:plugin:core', project ':x-pack:plugin:deprecation', project ':x-pack:plugin:downsample', project ':x-pack:plugin:enrich', project ':x-pack:plugin:ent-search', project ':x-pack:plugin:eql', project ':x-pack:plugin:esql', project ':x-pack:plugin:esql-core', project ':x-pack:plugin:fleet', project ':x-pack:plugin:frozen-indices', project ':x-pack:plugin:geoip-enterprise-downloader', project ':x-pack:plugin:graph', project ':x-pack:plugin:identity-provider', project ':x-pack:plugin:ilm', project ':x-pack:plugin:inference', project ':x-pack:plugin:kql', project ':x-pack:plugin:logsdb', project ':x-pack:plugin:logstash', project ':x-pack:plugin:mapper-aggregate-metric', project ':x-pack:plugin:mapper-constant-keyword', project ':x-pack:plugin:mapper-counted-keyword', project ':x-pack:plugin:mapper-unsigned-long', project ':x-pack:plugin:mapper-version', project ':x-pack:plugin:migrate', project ':x-pack:plugin:ml', project ':x-pack:plugin:ml-package-loader', project ':x-pack:plugin:monitoring', project ':x-pack:plugin:old-lucene-versions', project ':x-pack:plugin:otel-data', project ':x-pack:plugin:profiling', project ':x-pack:plugin:ql', project ':x-pack:plugin:rank-rrf', project ':x-pack:plugin:redact', project ':x-pack:plugin:repositories-metering-api', project ':x-pack:plugin:rollup', project ':x-pack:plugin:search-business-rules', project ':x-pack:plugin:searchable-snapshots', project ':x-pack:plugin:security', project ':x-pack:plugin:shutdown', project ':x-pack:plugin:slm', project ':x-pack:plugin:snapshot-based-recoveries', project ':x-pack:plugin:snapshot-repo-test-kit', project ':x-pack:plugin:spatial', project ':x-pack:plugin:sql', project ':x-pack:plugin:stack', project ':x-pack:plugin:text-structure', project ':x-pack:plugin:transform', project ':x-pack:plugin:vector-tile', project ':x-pack:plugin:voting-only-node', project ':x-pack:plugin:watcher', project ':x-pack:plugin:wildcard', project ':x-pack:plugin:write-load-forecaster', project ':x-pack:qa:core-rest-tests-with-security', project ':x-pack:qa:evil-tests', project ':x-pack:qa:freeze-plugin', project ':x-pack:qa:full-cluster-restart', project ':x-pack:qa:kerberos-tests', project ':x-pack:qa:mixed-tier-cluster', project ':x-pack:qa:multi-cluster-search-security', project ':x-pack:qa:multi-node', project ':x-pack:qa:oidc-op-tests', project ':x-pack:qa:openldap-tests', project ':x-pack:qa:password-protected-keystore', project ':x-pack:qa:reindex-tests-with-security', project ':x-pack:qa:repository-old-versions', project ':x-pack:qa:rolling-upgrade', project ':x-pack:qa:rolling-upgrade-basic', project ':x-pack:qa:rolling-upgrade-multi-cluster', project ':x-pack:qa:runtime-fields', project ':x-pack:qa:saml-idp-tests', project ':x-pack:qa:security-example-spi-extension', project ':x-pack:qa:security-setup-password-tests', project ':x-pack:qa:security-tools-tests', project ':x-pack:qa:smoke-test-plugins', project ':x-pack:qa:smoke-test-plugins-ssl', project ':x-pack:qa:smoke-test-security-with-mustache', project ':x-pack:qa:third-party', project ':x-pack:test:idp-fixture', project ':x-pack:test:smb-fixture', project ':libs:entitlement:qa:common', project ':libs:entitlement:qa:entitlement-allowed', project ':libs:entitlement:qa:entitlement-allowed-nonmodular', project ':libs:entitlement:qa:entitlement-denied', project ':libs:entitlement:qa:entitlement-denied-nonmodular', project 
':libs:entitlement:tools:common', project ':libs:entitlement:tools:public-callers-finder', project ':libs:entitlement:tools:securitymanager-scanner', project ':modules:ingest-geoip:qa:file-based-update', project ':modules:ingest-geoip:qa:full-cluster-restart', project ':plugins:discovery-ec2:qa:amazon-ec2', project ':plugins:discovery-gce:qa:gce', project ':x-pack:plugin:async-search:qa', project ':x-pack:plugin:autoscaling:qa', project ':x-pack:plugin:ccr:qa', project ':x-pack:plugin:core:template-resources', project ':x-pack:plugin:deprecation:qa', project ':x-pack:plugin:downsample:qa', project ':x-pack:plugin:enrich:qa', project ':x-pack:plugin:ent-search:qa', project ':x-pack:plugin:eql:qa', project ':x-pack:plugin:esql:arrow', project ':x-pack:plugin:esql:compute', project ':x-pack:plugin:esql:qa', project ':x-pack:plugin:esql-core:test-fixtures', project ':x-pack:plugin:fleet:qa', project ':x-pack:plugin:graph:qa', project ':x-pack:plugin:identity-provider:qa', project ':x-pack:plugin:ilm:qa', project ':x-pack:plugin:inference:qa', project ':x-pack:plugin:logsdb:qa', project ':x-pack:plugin:ml:qa', project ':x-pack:plugin:ql:test-fixtures', project ':x-pack:plugin:repositories-metering-api:qa', project ':x-pack:plugin:searchable-snapshots:qa', project ':x-pack:plugin:security:cli', project ':x-pack:plugin:security:lib', project ':x-pack:plugin:security:qa', project ':x-pack:plugin:shutdown:qa', project ':x-pack:plugin:slm:qa', project ':x-pack:plugin:snapshot-based-recoveries:qa', project ':x-pack:plugin:snapshot-repo-test-kit:qa', project ':x-pack:plugin:sql:jdbc', project ':x-pack:plugin:sql:qa', project ':x-pack:plugin:sql:sql-action', project ':x-pack:plugin:sql:sql-cli', project ':x-pack:plugin:sql:sql-client', project ':x-pack:plugin:sql:sql-proto', project ':x-pack:plugin:stack:qa', project ':x-pack:plugin:text-structure:qa', project ':x-pack:plugin:transform:qa', project ':x-pack:plugin:vector-tile:qa', project ':x-pack:plugin:watcher:qa', project ':x-pack:qa:multi-cluster-search-security:legacy-with-basic-license', project ':x-pack:qa:multi-cluster-search-security:legacy-with-full-license', project ':x-pack:qa:multi-cluster-search-security:legacy-with-restricted-trust', project ':x-pack:qa:runtime-fields:core-with-mapped', project ':x-pack:qa:runtime-fields:core-with-search', project ':x-pack:qa:runtime-fields:with-security', project ':x-pack:qa:third-party:active-directory', project ':x-pack:qa:third-party:jira', project ':x-pack:qa:third-party:pagerduty', project ':x-pack:qa:third-party:slack', project ':x-pack:plugin:async-search:qa:rest', project ':x-pack:plugin:async-search:qa:security', project ':x-pack:plugin:autoscaling:qa:rest', project ':x-pack:plugin:ccr:qa:downgrade-to-basic-license', project ':x-pack:plugin:ccr:qa:multi-cluster', project ':x-pack:plugin:ccr:qa:non-compliant-license', project ':x-pack:plugin:ccr:qa:rest', project ':x-pack:plugin:ccr:qa:restart', project ':x-pack:plugin:ccr:qa:security', project ':x-pack:plugin:deprecation:qa:common', project ':x-pack:plugin:deprecation:qa:early-deprecation-rest', project ':x-pack:plugin:deprecation:qa:rest', project ':x-pack:plugin:downsample:qa:mixed-cluster', project ':x-pack:plugin:downsample:qa:rest', project ':x-pack:plugin:downsample:qa:with-security', project ':x-pack:plugin:enrich:qa:common', project ':x-pack:plugin:enrich:qa:rest', project ':x-pack:plugin:enrich:qa:rest-with-advanced-security', project ':x-pack:plugin:enrich:qa:rest-with-security', project 
':x-pack:plugin:ent-search:qa:full-cluster-restart', project ':x-pack:plugin:ent-search:qa:rest', project ':x-pack:plugin:eql:qa:ccs-rolling-upgrade', project ':x-pack:plugin:eql:qa:common', project ':x-pack:plugin:eql:qa:correctness', project ':x-pack:plugin:eql:qa:mixed-node', project ':x-pack:plugin:eql:qa:multi-cluster-with-security', project ':x-pack:plugin:eql:qa:rest', project ':x-pack:plugin:eql:qa:security', project ':x-pack:plugin:esql:compute:ann', project ':x-pack:plugin:esql:compute:gen', project ':x-pack:plugin:esql:qa:action', project ':x-pack:plugin:esql:qa:security', project ':x-pack:plugin:esql:qa:server', project ':x-pack:plugin:esql:qa:testFixtures', project ':x-pack:plugin:fleet:qa:rest', project ':x-pack:plugin:graph:qa:with-security', project ':x-pack:plugin:identity-provider:qa:idp-rest-tests', project ':x-pack:plugin:ilm:qa:multi-cluster', project ':x-pack:plugin:ilm:qa:multi-node', project ':x-pack:plugin:ilm:qa:rest', project ':x-pack:plugin:ilm:qa:with-security', project ':x-pack:plugin:inference:qa:inference-service-tests', project ':x-pack:plugin:inference:qa:mixed-cluster', project ':x-pack:plugin:inference:qa:rolling-upgrade', project ':x-pack:plugin:inference:qa:test-service-plugin', project ':x-pack:plugin:logsdb:qa:with-basic', project ':x-pack:plugin:logsdb:qa:with-custom-cutoff', project ':x-pack:plugin:ml:qa:basic-multi-node', project ':x-pack:plugin:ml:qa:disabled', project ':x-pack:plugin:ml:qa:ml-inference-service-tests', project ':x-pack:plugin:ml:qa:ml-with-security', project ':x-pack:plugin:ml:qa:multi-cluster-tests-with-security', project ':x-pack:plugin:ml:qa:native-multi-node-tests', project ':x-pack:plugin:ml:qa:no-bootstrap-tests', project ':x-pack:plugin:ml:qa:single-node-tests', project ':x-pack:plugin:repositories-metering-api:qa:azure', project ':x-pack:plugin:repositories-metering-api:qa:gcs', project ':x-pack:plugin:repositories-metering-api:qa:s3', project ':x-pack:plugin:searchable-snapshots:qa:azure', project ':x-pack:plugin:searchable-snapshots:qa:gcs', project ':x-pack:plugin:searchable-snapshots:qa:hdfs', project ':x-pack:plugin:searchable-snapshots:qa:minio', project ':x-pack:plugin:searchable-snapshots:qa:rest', project ':x-pack:plugin:searchable-snapshots:qa:s3', project ':x-pack:plugin:searchable-snapshots:qa:url', project ':x-pack:plugin:security:lib:nimbus-jose-jwt-modified', project ':x-pack:plugin:security:lib:nimbus-jose-jwt-modified-part1', project ':x-pack:plugin:security:lib:nimbus-jose-jwt-modified-part2', project ':x-pack:plugin:security:qa:audit', project ':x-pack:plugin:security:qa:basic-enable-security', project ':x-pack:plugin:security:qa:consistency-checks', project ':x-pack:plugin:security:qa:jwt-realm', project ':x-pack:plugin:security:qa:multi-cluster', project ':x-pack:plugin:security:qa:operator-privileges-tests', project ':x-pack:plugin:security:qa:profile', project ':x-pack:plugin:security:qa:saml-rest-tests', project ':x-pack:plugin:security:qa:secondary-auth-actions', project ':x-pack:plugin:security:qa:security-basic', project ':x-pack:plugin:security:qa:security-disabled', project ':x-pack:plugin:security:qa:security-trial', project ':x-pack:plugin:security:qa:service-account', project ':x-pack:plugin:security:qa:smoke-test-all-realms', project ':x-pack:plugin:security:qa:tls-basic', project ':x-pack:plugin:shutdown:qa:full-cluster-restart', project ':x-pack:plugin:shutdown:qa:multi-node', project ':x-pack:plugin:shutdown:qa:rolling-upgrade', project ':x-pack:plugin:slm:qa:multi-node', project 
':x-pack:plugin:slm:qa:rest', project ':x-pack:plugin:slm:qa:with-security', project ':x-pack:plugin:snapshot-based-recoveries:qa:azure', project ':x-pack:plugin:snapshot-based-recoveries:qa:fs', project ':x-pack:plugin:snapshot-based-recoveries:qa:gcs', project ':x-pack:plugin:snapshot-based-recoveries:qa:license-enforcing', project ':x-pack:plugin:snapshot-based-recoveries:qa:s3', project ':x-pack:plugin:snapshot-repo-test-kit:qa:azure', project ':x-pack:plugin:snapshot-repo-test-kit:qa:gcs', project ':x-pack:plugin:snapshot-repo-test-kit:qa:hdfs', project ':x-pack:plugin:snapshot-repo-test-kit:qa:minio', project ':x-pack:plugin:snapshot-repo-test-kit:qa:rest', project ':x-pack:plugin:snapshot-repo-test-kit:qa:s3', project ':x-pack:plugin:sql:qa:jdbc', project ':x-pack:plugin:sql:qa:mixed-node', project ':x-pack:plugin:sql:qa:server', project ':x-pack:plugin:stack:qa:rest', project ':x-pack:plugin:text-structure:qa:text-structure-with-security', project ':x-pack:plugin:transform:qa:common', project ':x-pack:plugin:transform:qa:multi-cluster-tests-with-security', project ':x-pack:plugin:transform:qa:multi-node-tests', project ':x-pack:plugin:transform:qa:single-node-tests', project ':x-pack:plugin:vector-tile:qa:multi-cluster', project ':x-pack:plugin:watcher:qa:common', project ':x-pack:plugin:watcher:qa:rest', project ':x-pack:plugin:watcher:qa:with-security', project ':x-pack:plugin:esql:qa:server:mixed-cluster', project ':x-pack:plugin:esql:qa:server:multi-clusters', project ':x-pack:plugin:esql:qa:server:multi-node', project ':x-pack:plugin:esql:qa:server:single-node', project ':x-pack:plugin:sql:qa:jdbc:multi-node', project ':x-pack:plugin:sql:qa:jdbc:no-sql', project ':x-pack:plugin:sql:qa:jdbc:security', project ':x-pack:plugin:sql:qa:jdbc:single-node', project ':x-pack:plugin:sql:qa:server:multi-cluster-with-security', project ':x-pack:plugin:sql:qa:server:multi-node', project ':x-pack:plugin:sql:qa:server:security', project ':x-pack:plugin:sql:qa:server:single-node', project ':x-pack:plugin:sql:qa:jdbc:security:with-ssl', project ':x-pack:plugin:sql:qa:jdbc:security:without-ssl', project ':x-pack:plugin:sql:qa:server:security:with-ssl', project ':x-pack:plugin:sql:qa:server:security:without-ssl']
+
+> Configure project :
+Evaluating root project 'elasticsearch' using build file '/Users/rene/dev/elastic/elasticsearch/build.gradle'.
+
+> Configure project :benchmarks
+Evaluating project ':benchmarks' using build file '/Users/rene/dev/elastic/elasticsearch/benchmarks/build.gradle'.
+
+> Configure project :client
+Evaluating project ':client' using build file '/Users/rene/dev/elastic/elasticsearch/client/build.gradle'.
+
+> Configure project :distribution
+Evaluating project ':distribution' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/build.gradle'.
+
+> Configure project :docs
+Evaluating project ':docs' using build file '/Users/rene/dev/elastic/elasticsearch/docs/build.gradle'.
+
+> Configure project :libs
+Evaluating project ':libs' using build file '/Users/rene/dev/elastic/elasticsearch/libs/build.gradle'.
+
+> Configure project :modules
+Evaluating project ':modules' using build file '/Users/rene/dev/elastic/elasticsearch/modules/build.gradle'.
+
+> Configure project :plugins
+Evaluating project ':plugins' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/build.gradle'.
+
+> Configure project :qa
+Evaluating project ':qa' using build file '/Users/rene/dev/elastic/elasticsearch/qa/build.gradle'.
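The "Included projects" list above is the direct product of settings evaluation: every include(...) in settings.gradle registers one project path, and "Projects loaded" marks the end of that phase before per-project configuration begins. As a toy illustration only, with a hypothetical plugin name and a deliberately abbreviated project list, the same effect can be expressed through Gradle's public Settings API from Java:

import org.gradle.api.Plugin;
import org.gradle.api.initialization.Settings;

// Hypothetical sketch: the Java-plugin equivalent of writing
// `include ':benchmarks', ':server', ...` in settings.gradle.
public class IncludeProjectsPlugin implements Plugin<Settings> {
    @Override
    public void apply(Settings settings) {
        // Each path here becomes one entry in the "Included projects" list.
        settings.include(":benchmarks", ":server", ":x-pack:plugin:logsdb");
    }
}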
+
+> Configure project :rest-api-spec
+Evaluating project ':rest-api-spec' using build file '/Users/rene/dev/elastic/elasticsearch/rest-api-spec/build.gradle'.
+
+> Configure project :server
+Evaluating project ':server' using build file '/Users/rene/dev/elastic/elasticsearch/server/build.gradle'.
+
+> Configure project :test
+Evaluating project ':test' using build file '/Users/rene/dev/elastic/elasticsearch/test/build.gradle'.
+
+> Configure project :x-pack
+Evaluating project ':x-pack' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/build.gradle'.
+
+> Configure project :client:benchmark
+Evaluating project ':client:benchmark' using build file '/Users/rene/dev/elastic/elasticsearch/client/benchmark/build.gradle'.
+
+> Configure project :client:client-benchmark-noop-api-plugin
+Evaluating project ':client:client-benchmark-noop-api-plugin' using build file '/Users/rene/dev/elastic/elasticsearch/client/client-benchmark-noop-api-plugin/build.gradle'.
+
+> Configure project :client:rest
+Evaluating project ':client:rest' using build file '/Users/rene/dev/elastic/elasticsearch/client/rest/build.gradle'.
+
+> Configure project :client:sniffer
+Evaluating project ':client:sniffer' using build file '/Users/rene/dev/elastic/elasticsearch/client/sniffer/build.gradle'.
+
+> Configure project :client:test
+Evaluating project ':client:test' using build file '/Users/rene/dev/elastic/elasticsearch/client/test/build.gradle'.
+
+> Configure project :distribution:archives
+Evaluating project ':distribution:archives' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/build.gradle'.
+
+> Configure project :distribution:bwc
+Evaluating project ':distribution:bwc' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/bwc/build.gradle'.
+
+> Configure project :distribution:docker
+Evaluating project ':distribution:docker' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/build.gradle'.
+
+> Configure project :distribution:packages
+Evaluating project ':distribution:packages' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/packages/build.gradle'.
+Transforming gradle-ospackage-plugin-11.10.0.jar (com.netflix.nebula:gradle-ospackage-plugin:11.10.0) with InstrumentationAnalysisTransform
+Transforming commons-lang3-3.9.jar (org.apache.commons:commons-lang3:3.9) with InstrumentationAnalysisTransform
+Transforming gradle-docker-plugin-3.2.1.jar (com.bmuschko:gradle-docker-plugin:3.2.1) with InstrumentationAnalysisTransform
+Transforming redline-1.2.10.jar (org.redline-rpm:redline:1.2.10) with InstrumentationAnalysisTransform
+Transforming jdeb-1.10.jar (org.vafer:jdeb:1.10) with InstrumentationAnalysisTransform
+Transforming ant-1.10.12.jar (org.apache.ant:ant:1.10.12) with InstrumentationAnalysisTransform
+Transforming slf4j-api-1.7.5.jar (org.slf4j:slf4j-api:1.7.5) with InstrumentationAnalysisTransform
+Transforming maven-archiver-3.5.1.jar (org.apache.maven:maven-archiver:3.5.1) with InstrumentationAnalysisTransform
+Transforming plexus-archiver-4.2.3.jar (org.codehaus.plexus:plexus-archiver:4.2.3) with InstrumentationAnalysisTransform
+Transforming commons-compress-1.21.jar (org.apache.commons:commons-compress:1.21) with InstrumentationAnalysisTransform
+Transforming xz-1.8.jar (org.tukaani:xz:1.8) with InstrumentationAnalysisTransform
+Transforming bcpg-jdk15on-1.69.jar (org.bouncycastle:bcpg-jdk15on:1.69) with InstrumentationAnalysisTransform
+Transforming bcprov-jdk15on-1.69.jar (org.bouncycastle:bcprov-jdk15on:1.69) with InstrumentationAnalysisTransform
+Transforming ant-launcher-1.10.12.jar (org.apache.ant:ant-launcher:1.10.12) with InstrumentationAnalysisTransform
+Transforming maven-model-3.1.1.jar (org.apache.maven:maven-model:3.1.1) with InstrumentationAnalysisTransform
+Transforming maven-shared-utils-3.3.3.jar (org.apache.maven.shared:maven-shared-utils:3.3.3) with InstrumentationAnalysisTransform
+Transforming plexus-io-3.2.0.jar (org.codehaus.plexus:plexus-io:3.2.0) with InstrumentationAnalysisTransform
+Transforming commons-io-2.6.jar (commons-io:commons-io:2.6) with InstrumentationAnalysisTransform
+Transforming plexus-interpolation-1.26.jar (org.codehaus.plexus:plexus-interpolation:1.26) with InstrumentationAnalysisTransform
+Transforming snappy-0.4.jar (org.iq80.snappy:snappy:0.4) with InstrumentationAnalysisTransform
+Transforming gradle-ospackage-plugin-11.10.0.jar (com.netflix.nebula:gradle-ospackage-plugin:11.10.0) with InstrumentationAnalysisTransform
+Transforming gradle-ospackage-plugin-11.10.0.jar (com.netflix.nebula:gradle-ospackage-plugin:11.10.0) with MergeInstrumentationAnalysisTransform
+Transforming commons-lang3-3.9.jar (org.apache.commons:commons-lang3:3.9) with InstrumentationAnalysisTransform
+Transforming commons-lang3-3.9.jar (org.apache.commons:commons-lang3:3.9) with MergeInstrumentationAnalysisTransform
+Transforming gradle-ospackage-plugin-11.10.0.jar (com.netflix.nebula:gradle-ospackage-plugin:11.10.0) with ExternalDependencyInstrumentingArtifactTransform
+Transforming gradle-docker-plugin-3.2.1.jar (com.bmuschko:gradle-docker-plugin:3.2.1) with InstrumentationAnalysisTransform
+Transforming commons-lang3-3.9.jar (org.apache.commons:commons-lang3:3.9) with ExternalDependencyInstrumentingArtifactTransform
+Transforming gradle-docker-plugin-3.2.1.jar (com.bmuschko:gradle-docker-plugin:3.2.1) with MergeInstrumentationAnalysisTransform
+Transforming redline-1.2.10.jar (org.redline-rpm:redline:1.2.10) with InstrumentationAnalysisTransform
+Transforming redline-1.2.10.jar (org.redline-rpm:redline:1.2.10) with MergeInstrumentationAnalysisTransform
+Transforming gradle-docker-plugin-3.2.1.jar (com.bmuschko:gradle-docker-plugin:3.2.1) with ExternalDependencyInstrumentingArtifactTransform
+Transforming jdeb-1.10.jar (org.vafer:jdeb:1.10) with InstrumentationAnalysisTransform
+Transforming jdeb-1.10.jar (org.vafer:jdeb:1.10) with MergeInstrumentationAnalysisTransform
+Transforming redline-1.2.10.jar (org.redline-rpm:redline:1.2.10) with ExternalDependencyInstrumentingArtifactTransform
+Transforming ant-1.10.12.jar (org.apache.ant:ant:1.10.12) with InstrumentationAnalysisTransform
+Transforming jdeb-1.10.jar (org.vafer:jdeb:1.10) with ExternalDependencyInstrumentingArtifactTransform
+Transforming ant-1.10.12.jar (org.apache.ant:ant:1.10.12) with MergeInstrumentationAnalysisTransform
+Transforming slf4j-api-1.7.5.jar (org.slf4j:slf4j-api:1.7.5) with InstrumentationAnalysisTransform
+Transforming slf4j-api-1.7.5.jar (org.slf4j:slf4j-api:1.7.5) with MergeInstrumentationAnalysisTransform
+Transforming ant-1.10.12.jar (org.apache.ant:ant:1.10.12) with ExternalDependencyInstrumentingArtifactTransform
+Transforming maven-archiver-3.5.1.jar (org.apache.maven:maven-archiver:3.5.1) with InstrumentationAnalysisTransform
+Transforming slf4j-api-1.7.5.jar (org.slf4j:slf4j-api:1.7.5) with ExternalDependencyInstrumentingArtifactTransform
+Transforming maven-archiver-3.5.1.jar (org.apache.maven:maven-archiver:3.5.1) with MergeInstrumentationAnalysisTransform
+Transforming plexus-archiver-4.2.3.jar (org.codehaus.plexus:plexus-archiver:4.2.3) with InstrumentationAnalysisTransform
+Transforming plexus-archiver-4.2.3.jar (org.codehaus.plexus:plexus-archiver:4.2.3) with MergeInstrumentationAnalysisTransform
+Transforming maven-archiver-3.5.1.jar (org.apache.maven:maven-archiver:3.5.1) with ExternalDependencyInstrumentingArtifactTransform
+Transforming commons-compress-1.21.jar (org.apache.commons:commons-compress:1.21) with InstrumentationAnalysisTransform
+Transforming commons-compress-1.21.jar (org.apache.commons:commons-compress:1.21) with MergeInstrumentationAnalysisTransform
+Transforming plexus-archiver-4.2.3.jar (org.codehaus.plexus:plexus-archiver:4.2.3) with ExternalDependencyInstrumentingArtifactTransform
+Transforming xz-1.8.jar (org.tukaani:xz:1.8) with InstrumentationAnalysisTransform
+Transforming xz-1.8.jar (org.tukaani:xz:1.8) with MergeInstrumentationAnalysisTransform
+Transforming commons-compress-1.21.jar (org.apache.commons:commons-compress:1.21) with ExternalDependencyInstrumentingArtifactTransform
+Transforming bcpg-jdk15on-1.69.jar (org.bouncycastle:bcpg-jdk15on:1.69) with InstrumentationAnalysisTransform
+Transforming bcpg-jdk15on-1.69.jar (org.bouncycastle:bcpg-jdk15on:1.69) with MergeInstrumentationAnalysisTransform
+Transforming xz-1.8.jar (org.tukaani:xz:1.8) with ExternalDependencyInstrumentingArtifactTransform
+Transforming bcprov-jdk15on-1.69.jar (org.bouncycastle:bcprov-jdk15on:1.69) with InstrumentationAnalysisTransform
+Transforming bcprov-jdk15on-1.69.jar (org.bouncycastle:bcprov-jdk15on:1.69) with MergeInstrumentationAnalysisTransform
+Transforming ant-launcher-1.10.12.jar (org.apache.ant:ant-launcher:1.10.12) with InstrumentationAnalysisTransform
+Transforming bcpg-jdk15on-1.69.jar (org.bouncycastle:bcpg-jdk15on:1.69) with ExternalDependencyInstrumentingArtifactTransform
+Transforming bcprov-jdk15on-1.69.jar (org.bouncycastle:bcprov-jdk15on:1.69) with ExternalDependencyInstrumentingArtifactTransform
+Transforming ant-launcher-1.10.12.jar (org.apache.ant:ant-launcher:1.10.12) with MergeInstrumentationAnalysisTransform
+Transforming maven-model-3.1.1.jar (org.apache.maven:maven-model:3.1.1) with InstrumentationAnalysisTransform
+Transforming maven-model-3.1.1.jar (org.apache.maven:maven-model:3.1.1) with MergeInstrumentationAnalysisTransform
+Transforming maven-shared-utils-3.3.3.jar (org.apache.maven.shared:maven-shared-utils:3.3.3) with InstrumentationAnalysisTransform
+Transforming ant-launcher-1.10.12.jar (org.apache.ant:ant-launcher:1.10.12) with ExternalDependencyInstrumentingArtifactTransform
+Transforming maven-model-3.1.1.jar (org.apache.maven:maven-model:3.1.1) with ExternalDependencyInstrumentingArtifactTransform
+Transforming maven-shared-utils-3.3.3.jar (org.apache.maven.shared:maven-shared-utils:3.3.3) with MergeInstrumentationAnalysisTransform
+Transforming plexus-io-3.2.0.jar (org.codehaus.plexus:plexus-io:3.2.0) with InstrumentationAnalysisTransform
+Transforming plexus-io-3.2.0.jar (org.codehaus.plexus:plexus-io:3.2.0) with MergeInstrumentationAnalysisTransform
+Transforming commons-io-2.6.jar (commons-io:commons-io:2.6) with InstrumentationAnalysisTransform
+Transforming maven-shared-utils-3.3.3.jar (org.apache.maven.shared:maven-shared-utils:3.3.3) with ExternalDependencyInstrumentingArtifactTransform
+Transforming plexus-io-3.2.0.jar (org.codehaus.plexus:plexus-io:3.2.0) with ExternalDependencyInstrumentingArtifactTransform
+Transforming commons-io-2.6.jar (commons-io:commons-io:2.6) with MergeInstrumentationAnalysisTransform
+Transforming plexus-interpolation-1.26.jar (org.codehaus.plexus:plexus-interpolation:1.26) with InstrumentationAnalysisTransform
+Transforming plexus-interpolation-1.26.jar (org.codehaus.plexus:plexus-interpolation:1.26) with MergeInstrumentationAnalysisTransform
+Transforming snappy-0.4.jar (org.iq80.snappy:snappy:0.4) with InstrumentationAnalysisTransform
+Transforming commons-io-2.6.jar (commons-io:commons-io:2.6) with ExternalDependencyInstrumentingArtifactTransform
+Transforming plexus-interpolation-1.26.jar (org.codehaus.plexus:plexus-interpolation:1.26) with ExternalDependencyInstrumentingArtifactTransform
+Transforming snappy-0.4.jar (org.iq80.snappy:snappy:0.4) with MergeInstrumentationAnalysisTransform
+Transforming snappy-0.4.jar (org.iq80.snappy:snappy:0.4) with ExternalDependencyInstrumentingArtifactTransform
+
+> Configure project :distribution:tools
+Evaluating project ':distribution:tools' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/build.gradle'.
+
+> Configure project :libs:cli
+Evaluating project ':libs:cli' using build file '/Users/rene/dev/elastic/elasticsearch/libs/cli/build.gradle'.
+
+> Configure project :libs:core
+Evaluating project ':libs:core' using build file '/Users/rene/dev/elastic/elasticsearch/libs/core/build.gradle'.
+
+> Configure project :libs:dissect
+Evaluating project ':libs:dissect' using build file '/Users/rene/dev/elastic/elasticsearch/libs/dissect/build.gradle'.
+
+> Configure project :libs:entitlement
+Evaluating project ':libs:entitlement' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/build.gradle'.
+
+> Configure project :libs:geo
+Evaluating project ':libs:geo' using build file '/Users/rene/dev/elastic/elasticsearch/libs/geo/build.gradle'.
+
+> Configure project :libs:grok
+Evaluating project ':libs:grok' using build file '/Users/rene/dev/elastic/elasticsearch/libs/grok/build.gradle'.
+
+> Configure project :libs:h3
+Evaluating project ':libs:h3' using build file '/Users/rene/dev/elastic/elasticsearch/libs/h3/build.gradle'.
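This second wave of "Transforming" lines covers the buildscript classpath of :distribution:packages (the nebula ospackage, jdeb, and redline packaging toolchain), which is resolved only once that project is configured. For completeness, a hedged sketch of how such a transform gets wired up; the attribute and class names here are hypothetical, not the actual Elasticsearch build logic. A transform registration tells Gradle which attribute value it consumes and which it produces, so resolution inserts it automatically:

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.attributes.Attribute;

// Hypothetical sketch: registers MyInstrumentingTransform (from the earlier
// sketch) between two values of an "instrumented" attribute.
public class InstrumentationPlugin implements Plugin<Project> {
    private static final Attribute<Boolean> INSTRUMENTED =
        Attribute.of("instrumented", Boolean.class);

    @Override
    public void apply(Project project) {
        project.getDependencies().registerTransform(MyInstrumentingTransform.class, spec -> {
            // Gradle runs the transform whenever a consumer asks for
            // instrumented=true but the producer only offers instrumented=false.
            spec.getFrom().attribute(INSTRUMENTED, false);
            spec.getTo().attribute(INSTRUMENTED, true);
        });
    }
}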
+
+> Configure project :libs:log4j
+Evaluating project ':libs:log4j' using build file '/Users/rene/dev/elastic/elasticsearch/libs/log4j/build.gradle'.
+
+> Configure project :libs:logging
+Evaluating project ':libs:logging' using build file '/Users/rene/dev/elastic/elasticsearch/libs/logging/build.gradle'.
+
+> Configure project :libs:logstash-bridge
+Evaluating project ':libs:logstash-bridge' using build file '/Users/rene/dev/elastic/elasticsearch/libs/logstash-bridge/build.gradle'.
+
+> Configure project :libs:lz4
+Evaluating project ':libs:lz4' using build file '/Users/rene/dev/elastic/elasticsearch/libs/lz4/build.gradle'.
+
+> Configure project :libs:native
+Evaluating project ':libs:native' using build file '/Users/rene/dev/elastic/elasticsearch/libs/native/build.gradle'.
+
+> Configure project :libs:plugin-analysis-api
+Evaluating project ':libs:plugin-analysis-api' using build file '/Users/rene/dev/elastic/elasticsearch/libs/plugin-analysis-api/build.gradle'.
+
+> Configure project :libs:plugin-api
+Evaluating project ':libs:plugin-api' using build file '/Users/rene/dev/elastic/elasticsearch/libs/plugin-api/build.gradle'.
+
+> Configure project :libs:plugin-scanner
+Evaluating project ':libs:plugin-scanner' using build file '/Users/rene/dev/elastic/elasticsearch/libs/plugin-scanner/build.gradle'.
+
+> Configure project :libs:secure-sm
+Evaluating project ':libs:secure-sm' using build file '/Users/rene/dev/elastic/elasticsearch/libs/secure-sm/build.gradle'.
+
+> Configure project :libs:simdvec
+Evaluating project ':libs:simdvec' using build file '/Users/rene/dev/elastic/elasticsearch/libs/simdvec/build.gradle'.
+
+> Configure project :libs:ssl-config
+Evaluating project ':libs:ssl-config' using build file '/Users/rene/dev/elastic/elasticsearch/libs/ssl-config/build.gradle'.
+
+> Configure project :libs:tdigest
+Evaluating project ':libs:tdigest' using build file '/Users/rene/dev/elastic/elasticsearch/libs/tdigest/build.gradle'.
+
+> Configure project :libs:x-content
+Evaluating project ':libs:x-content' using build file '/Users/rene/dev/elastic/elasticsearch/libs/x-content/build.gradle'.
+
+> Configure project :modules:aggregations
+Evaluating project ':modules:aggregations' using build file '/Users/rene/dev/elastic/elasticsearch/modules/aggregations/build.gradle'.
+
+> Configure project :modules:analysis-common
+Evaluating project ':modules:analysis-common' using build file '/Users/rene/dev/elastic/elasticsearch/modules/analysis-common/build.gradle'.
+
+> Configure project :modules:apm
+Evaluating project ':modules:apm' using build file '/Users/rene/dev/elastic/elasticsearch/modules/apm/build.gradle'.
+
+> Configure project :modules:data-streams
+Evaluating project ':modules:data-streams' using build file '/Users/rene/dev/elastic/elasticsearch/modules/data-streams/build.gradle'.
+
+> Configure project :modules:dot-prefix-validation
+Evaluating project ':modules:dot-prefix-validation' using build file '/Users/rene/dev/elastic/elasticsearch/modules/dot-prefix-validation/build.gradle'.
+
+> Configure project :modules:health-shards-availability
+Evaluating project ':modules:health-shards-availability' using build file '/Users/rene/dev/elastic/elasticsearch/modules/health-shards-availability/build.gradle'.
+
+> Configure project :modules:ingest-attachment
+Evaluating project ':modules:ingest-attachment' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-attachment/build.gradle'.
+
+> Configure project :modules:ingest-common
+Evaluating project ':modules:ingest-common' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-common/build.gradle'.
+
+> Configure project :modules:ingest-geoip
+Evaluating project ':modules:ingest-geoip' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-geoip/build.gradle'.
+
+> Configure project :modules:ingest-user-agent
+Evaluating project ':modules:ingest-user-agent' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-user-agent/build.gradle'.
+
+> Configure project :modules:kibana
+Evaluating project ':modules:kibana' using build file '/Users/rene/dev/elastic/elasticsearch/modules/kibana/build.gradle'.
+
+> Configure project :modules:lang-expression
+Evaluating project ':modules:lang-expression' using build file '/Users/rene/dev/elastic/elasticsearch/modules/lang-expression/build.gradle'.
+
+> Configure project :modules:lang-mustache
+Evaluating project ':modules:lang-mustache' using build file '/Users/rene/dev/elastic/elasticsearch/modules/lang-mustache/build.gradle'.
+
+> Configure project :modules:lang-painless
+Evaluating project ':modules:lang-painless' using build file '/Users/rene/dev/elastic/elasticsearch/modules/lang-painless/build.gradle'.
+
+> Configure project :modules:legacy-geo
+Evaluating project ':modules:legacy-geo' using build file '/Users/rene/dev/elastic/elasticsearch/modules/legacy-geo/build.gradle'.
+
+> Configure project :modules:mapper-extras
+Evaluating project ':modules:mapper-extras' using build file '/Users/rene/dev/elastic/elasticsearch/modules/mapper-extras/build.gradle'.
+
+> Configure project :modules:parent-join
+Evaluating project ':modules:parent-join' using build file '/Users/rene/dev/elastic/elasticsearch/modules/parent-join/build.gradle'.
+
+> Configure project :modules:percolator
+Evaluating project ':modules:percolator' using build file '/Users/rene/dev/elastic/elasticsearch/modules/percolator/build.gradle'.
+
+> Configure project :modules:rank-eval
+Evaluating project ':modules:rank-eval' using build file '/Users/rene/dev/elastic/elasticsearch/modules/rank-eval/build.gradle'.
+
+> Configure project :modules:reindex
+Evaluating project ':modules:reindex' using build file '/Users/rene/dev/elastic/elasticsearch/modules/reindex/build.gradle'.
+
+> Configure project :modules:repository-azure
+Evaluating project ':modules:repository-azure' using build file '/Users/rene/dev/elastic/elasticsearch/modules/repository-azure/build.gradle'.
+
+> Configure project :modules:repository-gcs
+Evaluating project ':modules:repository-gcs' using build file '/Users/rene/dev/elastic/elasticsearch/modules/repository-gcs/build.gradle'.
+
+> Configure project :modules:repository-s3
+Evaluating project ':modules:repository-s3' using build file '/Users/rene/dev/elastic/elasticsearch/modules/repository-s3/build.gradle'.
+
+> Configure project :modules:repository-url
+Evaluating project ':modules:repository-url' using build file '/Users/rene/dev/elastic/elasticsearch/modules/repository-url/build.gradle'.
+
+> Configure project :modules:rest-root
+Evaluating project ':modules:rest-root' using build file '/Users/rene/dev/elastic/elasticsearch/modules/rest-root/build.gradle'.
+
+> Configure project :modules:runtime-fields-common
+Evaluating project ':modules:runtime-fields-common' using build file '/Users/rene/dev/elastic/elasticsearch/modules/runtime-fields-common/build.gradle'.
+
+> Configure project :modules:systemd
+Evaluating project ':modules:systemd' using build file '/Users/rene/dev/elastic/elasticsearch/modules/systemd/build.gradle'.
+
+> Configure project :modules:transport-netty4
+Evaluating project ':modules:transport-netty4' using build file '/Users/rene/dev/elastic/elasticsearch/modules/transport-netty4/build.gradle'.
+
+> Configure project :plugins:analysis-icu
+Evaluating project ':plugins:analysis-icu' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-icu/build.gradle'.
+
+> Configure project :plugins:analysis-kuromoji
+Evaluating project ':plugins:analysis-kuromoji' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-kuromoji/build.gradle'.
+
+> Configure project :plugins:analysis-nori
+Evaluating project ':plugins:analysis-nori' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-nori/build.gradle'.
+
+> Configure project :plugins:analysis-phonetic
+Evaluating project ':plugins:analysis-phonetic' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-phonetic/build.gradle'.
+
+> Configure project :plugins:analysis-smartcn
+Evaluating project ':plugins:analysis-smartcn' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-smartcn/build.gradle'.
+
+> Configure project :plugins:analysis-stempel
+Evaluating project ':plugins:analysis-stempel' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-stempel/build.gradle'.
+
+> Configure project :plugins:analysis-ukrainian
+Evaluating project ':plugins:analysis-ukrainian' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/analysis-ukrainian/build.gradle'.
+
+> Configure project :plugins:discovery-azure-classic
+Evaluating project ':plugins:discovery-azure-classic' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-azure-classic/build.gradle'.
+
+> Configure project :plugins:discovery-ec2
+Evaluating project ':plugins:discovery-ec2' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-ec2/build.gradle'.
+
+> Configure project :plugins:discovery-gce
+Evaluating project ':plugins:discovery-gce' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-gce/build.gradle'.
+
+> Configure project :plugins:mapper-annotated-text
+Evaluating project ':plugins:mapper-annotated-text' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/mapper-annotated-text/build.gradle'.
+
+> Configure project :plugins:mapper-murmur3
+Evaluating project ':plugins:mapper-murmur3' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/mapper-murmur3/build.gradle'.
+
+> Configure project :plugins:mapper-size
+Evaluating project ':plugins:mapper-size' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/mapper-size/build.gradle'.
+
+> Configure project :plugins:repository-hdfs
+Evaluating project ':plugins:repository-hdfs' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/repository-hdfs/build.gradle'.
+
+> Configure project :plugins:store-smb
+Evaluating project ':plugins:store-smb' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/store-smb/build.gradle'.
+
+> Configure project :qa:ccs-common-rest
+Evaluating project ':qa:ccs-common-rest' using build file '/Users/rene/dev/elastic/elasticsearch/qa/ccs-common-rest/build.gradle'.
+
+> Configure project :qa:ccs-rolling-upgrade-remote-cluster
+Evaluating project ':qa:ccs-rolling-upgrade-remote-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/qa/ccs-rolling-upgrade-remote-cluster/build.gradle'.
+
+> Configure project :qa:ccs-unavailable-clusters
+Evaluating project ':qa:ccs-unavailable-clusters' using build file '/Users/rene/dev/elastic/elasticsearch/qa/ccs-unavailable-clusters/build.gradle'.
+
+> Configure project :qa:custom-rest-controller
+Evaluating project ':qa:custom-rest-controller' using build file '/Users/rene/dev/elastic/elasticsearch/qa/custom-rest-controller/build.gradle'.
+
+> Configure project :qa:evil-tests
+Evaluating project ':qa:evil-tests' using build file '/Users/rene/dev/elastic/elasticsearch/qa/evil-tests/build.gradle'.
+
+> Configure project :qa:full-cluster-restart
+Evaluating project ':qa:full-cluster-restart' using build file '/Users/rene/dev/elastic/elasticsearch/qa/full-cluster-restart/build.gradle'.
+
+> Configure project :qa:logging-config
+Evaluating project ':qa:logging-config' using build file '/Users/rene/dev/elastic/elasticsearch/qa/logging-config/build.gradle'.
+
+> Configure project :qa:logging-spi
+Evaluating project ':qa:logging-spi' using build file '/Users/rene/dev/elastic/elasticsearch/qa/logging-spi/build.gradle'.
+
+> Configure project :qa:lucene-index-compatibility
+Evaluating project ':qa:lucene-index-compatibility' using build file '/Users/rene/dev/elastic/elasticsearch/qa/lucene-index-compatibility/build.gradle'.
+
+> Configure project :qa:mixed-cluster
+Evaluating project ':qa:mixed-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/qa/mixed-cluster/build.gradle'.
+
+> Configure project :qa:multi-cluster-search
+Evaluating project ':qa:multi-cluster-search' using build file '/Users/rene/dev/elastic/elasticsearch/qa/multi-cluster-search/build.gradle'.
+
+> Configure project :qa:no-bootstrap-tests
+Evaluating project ':qa:no-bootstrap-tests' using build file '/Users/rene/dev/elastic/elasticsearch/qa/no-bootstrap-tests/build.gradle'.
+
+> Configure project :qa:packaging
+Evaluating project ':qa:packaging' using build file '/Users/rene/dev/elastic/elasticsearch/qa/packaging/build.gradle'.
+
+> Configure project :qa:remote-clusters
+Evaluating project ':qa:remote-clusters' using build file '/Users/rene/dev/elastic/elasticsearch/qa/remote-clusters/build.gradle'.
+
+> Configure project :qa:repository-multi-version
+Evaluating project ':qa:repository-multi-version' using build file '/Users/rene/dev/elastic/elasticsearch/qa/repository-multi-version/build.gradle'.
+
+> Configure project :qa:restricted-loggers
+Evaluating project ':qa:restricted-loggers' using build file '/Users/rene/dev/elastic/elasticsearch/qa/restricted-loggers/build.gradle'.
+
+> Configure project :qa:rolling-upgrade
+Evaluating project ':qa:rolling-upgrade' using build file '/Users/rene/dev/elastic/elasticsearch/qa/rolling-upgrade/build.gradle'.
+
+> Configure project :qa:rolling-upgrade-legacy
+Evaluating project ':qa:rolling-upgrade-legacy' using build file '/Users/rene/dev/elastic/elasticsearch/qa/rolling-upgrade-legacy/build.gradle'.
+
+> Configure project :qa:smoke-test-http
+Evaluating project ':qa:smoke-test-http' using build file '/Users/rene/dev/elastic/elasticsearch/qa/smoke-test-http/build.gradle'.
+
+> Configure project :qa:smoke-test-ingest-disabled
+Evaluating project ':qa:smoke-test-ingest-disabled' using build file '/Users/rene/dev/elastic/elasticsearch/qa/smoke-test-ingest-disabled/build.gradle'.
+ +> Configure project :qa:smoke-test-ingest-with-all-dependencies +Evaluating project ':qa:smoke-test-ingest-with-all-dependencies' using build file '/Users/rene/dev/elastic/elasticsearch/qa/smoke-test-ingest-with-all-dependencies/build.gradle'. + +> Configure project :qa:smoke-test-multinode +Evaluating project ':qa:smoke-test-multinode' using build file '/Users/rene/dev/elastic/elasticsearch/qa/smoke-test-multinode/build.gradle'. + +> Configure project :qa:smoke-test-plugins +Evaluating project ':qa:smoke-test-plugins' using build file '/Users/rene/dev/elastic/elasticsearch/qa/smoke-test-plugins/build.gradle'. + +> Configure project :qa:stable-api +Evaluating project ':qa:stable-api' using build file '/Users/rene/dev/elastic/elasticsearch/qa/stable-api/build.gradle'. + +> Configure project :qa:system-indices +Evaluating project ':qa:system-indices' using build file '/Users/rene/dev/elastic/elasticsearch/qa/system-indices/build.gradle'. + +> Configure project :qa:unconfigured-node-name +Evaluating project ':qa:unconfigured-node-name' using build file '/Users/rene/dev/elastic/elasticsearch/qa/unconfigured-node-name/build.gradle'. + +> Configure project :qa:verify-version-constants +Evaluating project ':qa:verify-version-constants' using build file '/Users/rene/dev/elastic/elasticsearch/qa/verify-version-constants/build.gradle'. + +> Configure project :test:external-modules +Evaluating project ':test:external-modules' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/build.gradle'. + +> Configure project :test:fixtures +Evaluating project ':test:fixtures' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/build.gradle'. + +> Configure project :test:framework +Evaluating project ':test:framework' using build file '/Users/rene/dev/elastic/elasticsearch/test/framework/build.gradle'. + +> Configure project :test:immutable-collections-patch +Evaluating project ':test:immutable-collections-patch' using build file '/Users/rene/dev/elastic/elasticsearch/test/immutable-collections-patch/build.gradle'. + +> Configure project :test:logger-usage +Evaluating project ':test:logger-usage' using build file '/Users/rene/dev/elastic/elasticsearch/test/logger-usage/build.gradle'. + +> Configure project :test:metadata-extractor +Evaluating project ':test:metadata-extractor' using build file '/Users/rene/dev/elastic/elasticsearch/test/metadata-extractor/build.gradle'. + +> Configure project :test:test-clusters +Evaluating project ':test:test-clusters' using build file '/Users/rene/dev/elastic/elasticsearch/test/test-clusters/build.gradle'. + +> Configure project :test:x-content +Evaluating project ':test:x-content' using build file '/Users/rene/dev/elastic/elasticsearch/test/x-content/build.gradle'. + +> Configure project :test:yaml-rest-runner +Evaluating project ':test:yaml-rest-runner' using build file '/Users/rene/dev/elastic/elasticsearch/test/yaml-rest-runner/build.gradle'. + +> Configure project :x-pack:libs +Evaluating project ':x-pack:libs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/libs/build.gradle'. + +> Configure project :x-pack:license-tools +Evaluating project ':x-pack:license-tools' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/license-tools/build.gradle'. + +> Configure project :x-pack:plugin +Evaluating project ':x-pack:plugin' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/build.gradle'. 
+ +> Configure project :x-pack:qa +Evaluating project ':x-pack:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/build.gradle'. + +> Configure project :x-pack:rest-resources-zip +Evaluating project ':x-pack:rest-resources-zip' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/rest-resources-zip/build.gradle'. + +> Configure project :x-pack:test +Evaluating project ':x-pack:test' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/test/build.gradle'. + +> Configure project :distribution:archives:darwin-aarch64-tar +Evaluating project ':distribution:archives:darwin-aarch64-tar' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/darwin-aarch64-tar/build.gradle'. + +> Configure project :distribution:archives:darwin-tar +Evaluating project ':distribution:archives:darwin-tar' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/darwin-tar/build.gradle'. + +> Configure project :distribution:archives:integ-test-zip +Evaluating project ':distribution:archives:integ-test-zip' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/integ-test-zip/build.gradle'. + +> Configure project :distribution:archives:linux-aarch64-tar +Evaluating project ':distribution:archives:linux-aarch64-tar' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/linux-aarch64-tar/build.gradle'. + +> Configure project :distribution:archives:linux-tar +Evaluating project ':distribution:archives:linux-tar' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/linux-tar/build.gradle'. + +> Configure project :distribution:archives:windows-zip +Evaluating project ':distribution:archives:windows-zip' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/archives/windows-zip/build.gradle'. + +> Configure project :distribution:bwc:bugfix +Evaluating project ':distribution:bwc:bugfix' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/bwc/bugfix/build.gradle'. + +> Configure project :distribution:bwc:bugfix2 +Evaluating project ':distribution:bwc:bugfix2' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/bwc/bugfix2/build.gradle'. + +> Configure project :distribution:bwc:maintenance +Evaluating project ':distribution:bwc:maintenance' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/bwc/maintenance/build.gradle'. + +> Configure project :distribution:bwc:minor +Evaluating project ':distribution:bwc:minor' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/bwc/minor/build.gradle'. + +> Configure project :distribution:bwc:staged +Evaluating project ':distribution:bwc:staged' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/bwc/staged/build.gradle'. + +> Configure project :distribution:docker:cloud-ess-docker-aarch64-export +Evaluating project ':distribution:docker:cloud-ess-docker-aarch64-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/cloud-ess-docker-aarch64-export/build.gradle'. + +> Configure project :distribution:docker:cloud-ess-docker-export +Evaluating project ':distribution:docker:cloud-ess-docker-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/cloud-ess-docker-export/build.gradle'. 
+ +> Configure project :distribution:docker:docker-aarch64-export +Evaluating project ':distribution:docker:docker-aarch64-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/docker-aarch64-export/build.gradle'. + +> Configure project :distribution:docker:docker-export +Evaluating project ':distribution:docker:docker-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/docker-export/build.gradle'. + +> Configure project :distribution:docker:ironbank-docker-aarch64-export +Evaluating project ':distribution:docker:ironbank-docker-aarch64-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/ironbank-docker-aarch64-export/build.gradle'. + +> Configure project :distribution:docker:ironbank-docker-export +Evaluating project ':distribution:docker:ironbank-docker-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/ironbank-docker-export/build.gradle'. + +> Configure project :distribution:docker:wolfi-docker-aarch64-export +Evaluating project ':distribution:docker:wolfi-docker-aarch64-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/wolfi-docker-aarch64-export/build.gradle'. + +> Configure project :distribution:docker:wolfi-docker-export +Evaluating project ':distribution:docker:wolfi-docker-export' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/docker/wolfi-docker-export/build.gradle'. + +> Configure project :distribution:packages:aarch64-deb +Evaluating project ':distribution:packages:aarch64-deb' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/packages/aarch64-deb/build.gradle'. + +> Configure project :distribution:packages:aarch64-rpm +Evaluating project ':distribution:packages:aarch64-rpm' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/packages/aarch64-rpm/build.gradle'. + +> Configure project :distribution:packages:deb +Evaluating project ':distribution:packages:deb' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/packages/deb/build.gradle'. + +> Configure project :distribution:packages:rpm +Evaluating project ':distribution:packages:rpm' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/packages/rpm/build.gradle'. + +> Configure project :distribution:tools:ansi-console +Evaluating project ':distribution:tools:ansi-console' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/ansi-console/build.gradle'. + +> Configure project :distribution:tools:cli-launcher +Evaluating project ':distribution:tools:cli-launcher' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/cli-launcher/build.gradle'. + +> Configure project :distribution:tools:geoip-cli +Evaluating project ':distribution:tools:geoip-cli' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/geoip-cli/build.gradle'. + +> Configure project :distribution:tools:java-version-checker +Evaluating project ':distribution:tools:java-version-checker' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/java-version-checker/build.gradle'. + +> Configure project :distribution:tools:keystore-cli +Evaluating project ':distribution:tools:keystore-cli' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/keystore-cli/build.gradle'. 
+ +> Configure project :distribution:tools:plugin-cli +Evaluating project ':distribution:tools:plugin-cli' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/plugin-cli/build.gradle'. + +> Configure project :distribution:tools:server-cli +Evaluating project ':distribution:tools:server-cli' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/server-cli/build.gradle'. + +> Configure project :distribution:tools:windows-service-cli +Evaluating project ':distribution:tools:windows-service-cli' using build file '/Users/rene/dev/elastic/elasticsearch/distribution/tools/windows-service-cli/build.gradle'. + +> Configure project :libs:entitlement:agent +Evaluating project ':libs:entitlement:agent' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/agent/build.gradle'. + +> Configure project :libs:entitlement:asm-provider +Evaluating project ':libs:entitlement:asm-provider' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/asm-provider/build.gradle'. + +> Configure project :libs:entitlement:bridge +Evaluating project ':libs:entitlement:bridge' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/bridge/build.gradle'. + +> Configure project :libs:entitlement:qa +Evaluating project ':libs:entitlement:qa' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/qa/build.gradle'. + +> Configure project :libs:entitlement:tools +Evaluating project ':libs:entitlement:tools' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/tools/build.gradle'. + +> Configure project :libs:native:native-libraries +Evaluating project ':libs:native:native-libraries' using build file '/Users/rene/dev/elastic/elasticsearch/libs/native/libraries/build.gradle'. + +> Configure project :libs:x-content:impl +Evaluating project ':libs:x-content:impl' using build file '/Users/rene/dev/elastic/elasticsearch/libs/x-content/impl/build.gradle'. + +> Configure project :modules:ingest-geoip:qa +Evaluating project ':modules:ingest-geoip:qa' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-geoip/qa/build.gradle'. + +> Configure project :modules:lang-painless:spi +Evaluating project ':modules:lang-painless:spi' using build file '/Users/rene/dev/elastic/elasticsearch/modules/lang-painless/spi/build.gradle'. + +> Configure project :plugins:discovery-ec2:qa +Evaluating project ':plugins:discovery-ec2:qa' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-ec2/qa/build.gradle'. + +> Configure project :plugins:discovery-gce:qa +Evaluating project ':plugins:discovery-gce:qa' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-gce/qa/build.gradle'. + +> Configure project :plugins:repository-hdfs:hadoop-client-api +Evaluating project ':plugins:repository-hdfs:hadoop-client-api' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/repository-hdfs/hadoop-client-api/build.gradle'. + +> Configure project :qa:stable-api:logging +Evaluating project ':qa:stable-api:logging' using build file '/Users/rene/dev/elastic/elasticsearch/qa/stable-api/logging/build.gradle'. + +> Configure project :qa:stable-api:plugin-analysis-api +Evaluating project ':qa:stable-api:plugin-analysis-api' using build file '/Users/rene/dev/elastic/elasticsearch/qa/stable-api/plugin-analysis-api/build.gradle'. 
+ +> Configure project :qa:stable-api:plugin-api +Evaluating project ':qa:stable-api:plugin-api' using build file '/Users/rene/dev/elastic/elasticsearch/qa/stable-api/plugin-api/build.gradle'. + +> Configure project :test:external-modules:test-apm-integration +Evaluating project ':test:external-modules:test-apm-integration' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/apm-integration/build.gradle'. + +> Configure project :test:external-modules:test-delayed-aggs +Evaluating project ':test:external-modules:test-delayed-aggs' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/delayed-aggs/build.gradle'. + +> Configure project :test:external-modules:test-die-with-dignity +Evaluating project ':test:external-modules:test-die-with-dignity' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/die-with-dignity/build.gradle'. + +> Configure project :test:external-modules:test-error-query +Evaluating project ':test:external-modules:test-error-query' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/error-query/build.gradle'. + +> Configure project :test:external-modules:test-esql-heap-attack +Evaluating project ':test:external-modules:test-esql-heap-attack' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/esql-heap-attack/build.gradle'. + +> Configure project :test:external-modules:test-jvm-crash +Evaluating project ':test:external-modules:test-jvm-crash' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/jvm-crash/build.gradle'. + +> Configure project :test:external-modules:test-latency-simulating-directory +Evaluating project ':test:external-modules:test-latency-simulating-directory' using build file '/Users/rene/dev/elastic/elasticsearch/test/external-modules/latency-simulating-directory/build.gradle'. + +> Configure project :test:fixtures:aws-sts-fixture +Evaluating project ':test:fixtures:aws-sts-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/aws-sts-fixture/build.gradle'. + +> Configure project :test:fixtures:azure-fixture +Evaluating project ':test:fixtures:azure-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/azure-fixture/build.gradle'. + +> Configure project :test:fixtures:ec2-imds-fixture +Evaluating project ':test:fixtures:ec2-imds-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/ec2-imds-fixture/build.gradle'. + +> Configure project :test:fixtures:gcs-fixture +Evaluating project ':test:fixtures:gcs-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/gcs-fixture/build.gradle'. + +> Configure project :test:fixtures:geoip-fixture +Evaluating project ':test:fixtures:geoip-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/geoip-fixture/build.gradle'. + +> Configure project :test:fixtures:hdfs-fixture +Evaluating project ':test:fixtures:hdfs-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/hdfs-fixture/build.gradle'. + +> Configure project :test:fixtures:krb5kdc-fixture +Evaluating project ':test:fixtures:krb5kdc-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/krb5kdc-fixture/build.gradle'. + +> Configure project :test:fixtures:minio-fixture +Evaluating project ':test:fixtures:minio-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/minio-fixture/build.gradle'. 
+ +> Configure project :test:fixtures:old-elasticsearch +Evaluating project ':test:fixtures:old-elasticsearch' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/old-elasticsearch/build.gradle'. + +> Configure project :test:fixtures:s3-fixture +Evaluating project ':test:fixtures:s3-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/s3-fixture/build.gradle'. + +> Configure project :test:fixtures:testcontainer-utils +Evaluating project ':test:fixtures:testcontainer-utils' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/testcontainer-utils/build.gradle'. + +> Configure project :test:fixtures:url-fixture +Evaluating project ':test:fixtures:url-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/test/fixtures/url-fixture/build.gradle'. + +> Configure project :x-pack:libs:es-opensaml-security-api +Evaluating project ':x-pack:libs:es-opensaml-security-api' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/libs/es-opensaml-security-api/build.gradle'. + +> Configure project :x-pack:plugin:analytics +Evaluating project ':x-pack:plugin:analytics' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/analytics/build.gradle'. + +> Configure project :x-pack:plugin:apm-data +Evaluating project ':x-pack:plugin:apm-data' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/apm-data/build.gradle'. + +> Configure project :x-pack:plugin:async +Evaluating project ':x-pack:plugin:async' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/async/build.gradle'. + +> Configure project :x-pack:plugin:async-search +Evaluating project ':x-pack:plugin:async-search' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/async-search/build.gradle'. + +> Configure project :x-pack:plugin:autoscaling +Evaluating project ':x-pack:plugin:autoscaling' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/autoscaling/build.gradle'. + +> Configure project :x-pack:plugin:blob-cache +Evaluating project ':x-pack:plugin:blob-cache' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/blob-cache/build.gradle'. + +> Configure project :x-pack:plugin:ccr +Evaluating project ':x-pack:plugin:ccr' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/build.gradle'. + +> Configure project :x-pack:plugin:core +Evaluating project ':x-pack:plugin:core' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/core/build.gradle'. + +> Configure project :x-pack:plugin:deprecation +Evaluating project ':x-pack:plugin:deprecation' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/deprecation/build.gradle'. + +> Configure project :x-pack:plugin:downsample +Evaluating project ':x-pack:plugin:downsample' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/downsample/build.gradle'. + +> Configure project :x-pack:plugin:enrich +Evaluating project ':x-pack:plugin:enrich' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/enrich/build.gradle'. + +> Configure project :x-pack:plugin:ent-search +Evaluating project ':x-pack:plugin:ent-search' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ent-search/build.gradle'. + +> Configure project :x-pack:plugin:eql +Evaluating project ':x-pack:plugin:eql' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/build.gradle'. 
+ +> Configure project :x-pack:plugin:esql +Evaluating project ':x-pack:plugin:esql' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/build.gradle'. + +> Configure project :x-pack:plugin:esql-core +Evaluating project ':x-pack:plugin:esql-core' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql-core/build.gradle'. + +> Configure project :x-pack:plugin:fleet +Evaluating project ':x-pack:plugin:fleet' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/fleet/build.gradle'. + +> Configure project :x-pack:plugin:frozen-indices +Evaluating project ':x-pack:plugin:frozen-indices' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/frozen-indices/build.gradle'. + +> Configure project :x-pack:plugin:geoip-enterprise-downloader +Evaluating project ':x-pack:plugin:geoip-enterprise-downloader' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/geoip-enterprise-downloader/build.gradle'. + +> Configure project :x-pack:plugin:graph +Evaluating project ':x-pack:plugin:graph' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/graph/build.gradle'. + +> Configure project :x-pack:plugin:identity-provider +Evaluating project ':x-pack:plugin:identity-provider' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/identity-provider/build.gradle'. + +> Configure project :x-pack:plugin:ilm +Evaluating project ':x-pack:plugin:ilm' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ilm/build.gradle'. + +> Configure project :x-pack:plugin:inference +Evaluating project ':x-pack:plugin:inference' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/inference/build.gradle'. + +> Configure project :x-pack:plugin:kql +Evaluating project ':x-pack:plugin:kql' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/kql/build.gradle'. + +> Configure project :x-pack:plugin:logsdb +Evaluating project ':x-pack:plugin:logsdb' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/logsdb/build.gradle'. + +> Configure project :x-pack:plugin:logstash +Evaluating project ':x-pack:plugin:logstash' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/logstash/build.gradle'. + +> Configure project :x-pack:plugin:mapper-aggregate-metric +Evaluating project ':x-pack:plugin:mapper-aggregate-metric' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/mapper-aggregate-metric/build.gradle'. + +> Configure project :x-pack:plugin:mapper-constant-keyword +Evaluating project ':x-pack:plugin:mapper-constant-keyword' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/mapper-constant-keyword/build.gradle'. + +> Configure project :x-pack:plugin:mapper-counted-keyword +Evaluating project ':x-pack:plugin:mapper-counted-keyword' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/mapper-counted-keyword/build.gradle'. + +> Configure project :x-pack:plugin:mapper-unsigned-long +Evaluating project ':x-pack:plugin:mapper-unsigned-long' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/mapper-unsigned-long/build.gradle'. + +> Configure project :x-pack:plugin:mapper-version +Evaluating project ':x-pack:plugin:mapper-version' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/mapper-version/build.gradle'. 
+ +> Configure project :x-pack:plugin:migrate +Evaluating project ':x-pack:plugin:migrate' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/migrate/build.gradle'. + +> Configure project :x-pack:plugin:ml +Evaluating project ':x-pack:plugin:ml' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/build.gradle'. + +> Configure project :x-pack:plugin:ml-package-loader +Evaluating project ':x-pack:plugin:ml-package-loader' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml-package-loader/build.gradle'. + +> Configure project :x-pack:plugin:monitoring +Evaluating project ':x-pack:plugin:monitoring' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/monitoring/build.gradle'. + +> Configure project :x-pack:plugin:old-lucene-versions +Evaluating project ':x-pack:plugin:old-lucene-versions' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/old-lucene-versions/build.gradle'. + +> Configure project :x-pack:plugin:otel-data +Evaluating project ':x-pack:plugin:otel-data' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/otel-data/build.gradle'. + +> Configure project :x-pack:plugin:profiling +Evaluating project ':x-pack:plugin:profiling' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/profiling/build.gradle'. + +> Configure project :x-pack:plugin:ql +Evaluating project ':x-pack:plugin:ql' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ql/build.gradle'. + +> Configure project :x-pack:plugin:rank-rrf +Evaluating project ':x-pack:plugin:rank-rrf' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/rank-rrf/build.gradle'. + +> Configure project :x-pack:plugin:redact +Evaluating project ':x-pack:plugin:redact' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/redact/build.gradle'. + +> Configure project :x-pack:plugin:repositories-metering-api +Evaluating project ':x-pack:plugin:repositories-metering-api' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/repositories-metering-api/build.gradle'. + +> Configure project :x-pack:plugin:rollup +Evaluating project ':x-pack:plugin:rollup' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/rollup/build.gradle'. + +> Configure project :x-pack:plugin:search-business-rules +Evaluating project ':x-pack:plugin:search-business-rules' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/search-business-rules/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots +Evaluating project ':x-pack:plugin:searchable-snapshots' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/build.gradle'. + +> Configure project :x-pack:plugin:security +Evaluating project ':x-pack:plugin:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/build.gradle'. + +> Configure project :x-pack:plugin:shutdown +Evaluating project ':x-pack:plugin:shutdown' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/shutdown/build.gradle'. + +> Configure project :x-pack:plugin:slm +Evaluating project ':x-pack:plugin:slm' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/slm/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries +Evaluating project ':x-pack:plugin:snapshot-based-recoveries' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/build.gradle'. 
+ +> Configure project :x-pack:plugin:snapshot-repo-test-kit +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/build.gradle'. + +> Configure project :x-pack:plugin:spatial +Evaluating project ':x-pack:plugin:spatial' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/spatial/build.gradle'. + +> Configure project :x-pack:plugin:sql +Evaluating project ':x-pack:plugin:sql' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/build.gradle'. + +> Configure project :x-pack:plugin:stack +Evaluating project ':x-pack:plugin:stack' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/stack/build.gradle'. + +> Configure project :x-pack:plugin:text-structure +Evaluating project ':x-pack:plugin:text-structure' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/text-structure/build.gradle'. + +> Configure project :x-pack:plugin:transform +Evaluating project ':x-pack:plugin:transform' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/transform/build.gradle'. + +> Configure project :x-pack:plugin:vector-tile +Evaluating project ':x-pack:plugin:vector-tile' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/vector-tile/build.gradle'. + +> Configure project :x-pack:plugin:voting-only-node +Evaluating project ':x-pack:plugin:voting-only-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/voting-only-node/build.gradle'. + +> Configure project :x-pack:plugin:watcher +Evaluating project ':x-pack:plugin:watcher' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/watcher/build.gradle'. + +> Configure project :x-pack:plugin:wildcard +Evaluating project ':x-pack:plugin:wildcard' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/wildcard/build.gradle'. + +> Configure project :x-pack:plugin:write-load-forecaster +Evaluating project ':x-pack:plugin:write-load-forecaster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/write-load-forecaster/build.gradle'. + +> Configure project :x-pack:qa:core-rest-tests-with-security +Evaluating project ':x-pack:qa:core-rest-tests-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/core-rest-tests-with-security/build.gradle'. + +> Configure project :x-pack:qa:evil-tests +Evaluating project ':x-pack:qa:evil-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/evil-tests/build.gradle'. + +> Configure project :x-pack:qa:freeze-plugin +Evaluating project ':x-pack:qa:freeze-plugin' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/freeze-plugin/build.gradle'. + +> Configure project :x-pack:qa:full-cluster-restart +Evaluating project ':x-pack:qa:full-cluster-restart' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/full-cluster-restart/build.gradle'. + +> Configure project :x-pack:qa:kerberos-tests +Evaluating project ':x-pack:qa:kerberos-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/kerberos-tests/build.gradle'. + +> Configure project :x-pack:qa:mixed-tier-cluster +Evaluating project ':x-pack:qa:mixed-tier-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/mixed-tier-cluster/build.gradle'. 
+ +> Configure project :x-pack:qa:multi-cluster-search-security +Evaluating project ':x-pack:qa:multi-cluster-search-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/multi-cluster-search-security/build.gradle'. + +> Configure project :x-pack:qa:multi-node +Evaluating project ':x-pack:qa:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/multi-node/build.gradle'. + +> Configure project :x-pack:qa:oidc-op-tests +Evaluating project ':x-pack:qa:oidc-op-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/oidc-op-tests/build.gradle'. + +> Configure project :x-pack:qa:openldap-tests +Evaluating project ':x-pack:qa:openldap-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/openldap-tests/build.gradle'. + +> Configure project :x-pack:qa:password-protected-keystore +Evaluating project ':x-pack:qa:password-protected-keystore' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/password-protected-keystore/build.gradle'. + +> Configure project :x-pack:qa:reindex-tests-with-security +Evaluating project ':x-pack:qa:reindex-tests-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/reindex-tests-with-security/build.gradle'. + +> Configure project :x-pack:qa:repository-old-versions +Evaluating project ':x-pack:qa:repository-old-versions' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/repository-old-versions/build.gradle'. + +> Configure project :x-pack:qa:rolling-upgrade +Evaluating project ':x-pack:qa:rolling-upgrade' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/rolling-upgrade/build.gradle'. + +> Configure project :x-pack:qa:rolling-upgrade-basic +Evaluating project ':x-pack:qa:rolling-upgrade-basic' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/rolling-upgrade-basic/build.gradle'. + +> Configure project :x-pack:qa:rolling-upgrade-multi-cluster +Evaluating project ':x-pack:qa:rolling-upgrade-multi-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle'. + +> Configure project :x-pack:qa:runtime-fields +Evaluating project ':x-pack:qa:runtime-fields' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/runtime-fields/build.gradle'. + +> Configure project :x-pack:qa:saml-idp-tests +Evaluating project ':x-pack:qa:saml-idp-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/saml-idp-tests/build.gradle'. + +> Configure project :x-pack:qa:security-example-spi-extension +Evaluating project ':x-pack:qa:security-example-spi-extension' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/security-example-spi-extension/build.gradle'. + +> Configure project :x-pack:qa:security-setup-password-tests +Evaluating project ':x-pack:qa:security-setup-password-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/security-setup-password-tests/build.gradle'. + +> Configure project :x-pack:qa:security-tools-tests +Evaluating project ':x-pack:qa:security-tools-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/security-tools-tests/build.gradle'. + +> Configure project :x-pack:qa:smoke-test-plugins +Evaluating project ':x-pack:qa:smoke-test-plugins' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/smoke-test-plugins/build.gradle'. 
+ +> Configure project :x-pack:qa:smoke-test-plugins-ssl +Evaluating project ':x-pack:qa:smoke-test-plugins-ssl' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/smoke-test-plugins-ssl/build.gradle'. + +> Configure project :x-pack:qa:smoke-test-security-with-mustache +Evaluating project ':x-pack:qa:smoke-test-security-with-mustache' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/smoke-test-security-with-mustache/build.gradle'. + +> Configure project :x-pack:qa:third-party +Evaluating project ':x-pack:qa:third-party' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/third-party/build.gradle'. + +> Configure project :x-pack:test:idp-fixture +Evaluating project ':x-pack:test:idp-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/test/idp-fixture/build.gradle'. + +> Configure project :x-pack:test:smb-fixture +Evaluating project ':x-pack:test:smb-fixture' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/test/smb-fixture/build.gradle'. + +> Configure project :libs:entitlement:qa:common +Evaluating project ':libs:entitlement:qa:common' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/qa/common/build.gradle'. + +> Configure project :libs:entitlement:qa:entitlement-allowed +Evaluating project ':libs:entitlement:qa:entitlement-allowed' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/qa/entitlement-allowed/build.gradle'. + +> Configure project :libs:entitlement:qa:entitlement-allowed-nonmodular +Evaluating project ':libs:entitlement:qa:entitlement-allowed-nonmodular' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/qa/entitlement-allowed-nonmodular/build.gradle'. + +> Configure project :libs:entitlement:qa:entitlement-denied +Evaluating project ':libs:entitlement:qa:entitlement-denied' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/qa/entitlement-denied/build.gradle'. + +> Configure project :libs:entitlement:qa:entitlement-denied-nonmodular +Evaluating project ':libs:entitlement:qa:entitlement-denied-nonmodular' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/qa/entitlement-denied-nonmodular/build.gradle'. + +> Configure project :libs:entitlement:tools:common +Evaluating project ':libs:entitlement:tools:common' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/tools/common/build.gradle'. + +> Configure project :libs:entitlement:tools:public-callers-finder +Evaluating project ':libs:entitlement:tools:public-callers-finder' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/tools/public-callers-finder/build.gradle'. + +> Configure project :libs:entitlement:tools:securitymanager-scanner +Evaluating project ':libs:entitlement:tools:securitymanager-scanner' using build file '/Users/rene/dev/elastic/elasticsearch/libs/entitlement/tools/securitymanager-scanner/build.gradle'. + +> Configure project :modules:ingest-geoip:qa:file-based-update +Evaluating project ':modules:ingest-geoip:qa:file-based-update' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-geoip/qa/file-based-update/build.gradle'. + +> Configure project :modules:ingest-geoip:qa:full-cluster-restart +Evaluating project ':modules:ingest-geoip:qa:full-cluster-restart' using build file '/Users/rene/dev/elastic/elasticsearch/modules/ingest-geoip/qa/full-cluster-restart/build.gradle'. 
+ +> Configure project :plugins:discovery-ec2:qa:amazon-ec2 +Evaluating project ':plugins:discovery-ec2:qa:amazon-ec2' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-ec2/qa/amazon-ec2/build.gradle'. + +> Configure project :plugins:discovery-gce:qa:gce +Evaluating project ':plugins:discovery-gce:qa:gce' using build file '/Users/rene/dev/elastic/elasticsearch/plugins/discovery-gce/qa/gce/build.gradle'. + +> Configure project :x-pack:plugin:async-search:qa +Evaluating project ':x-pack:plugin:async-search:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/async-search/qa/build.gradle'. + +> Configure project :x-pack:plugin:autoscaling:qa +Evaluating project ':x-pack:plugin:autoscaling:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/autoscaling/qa/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa +Evaluating project ':x-pack:plugin:ccr:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/build.gradle'. + +> Configure project :x-pack:plugin:core:template-resources +Evaluating project ':x-pack:plugin:core:template-resources' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/core/template-resources/build.gradle'. + +> Configure project :x-pack:plugin:deprecation:qa +Evaluating project ':x-pack:plugin:deprecation:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/deprecation/qa/build.gradle'. + +> Configure project :x-pack:plugin:downsample:qa +Evaluating project ':x-pack:plugin:downsample:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/downsample/qa/build.gradle'. + +> Configure project :x-pack:plugin:enrich:qa +Evaluating project ':x-pack:plugin:enrich:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/enrich/qa/build.gradle'. + +> Configure project :x-pack:plugin:ent-search:qa +Evaluating project ':x-pack:plugin:ent-search:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ent-search/qa/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa +Evaluating project ':x-pack:plugin:eql:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/build.gradle'. + +> Configure project :x-pack:plugin:esql:arrow +Evaluating project ':x-pack:plugin:esql:arrow' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/arrow/build.gradle'. + +> Configure project :x-pack:plugin:esql:compute +Evaluating project ':x-pack:plugin:esql:compute' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/compute/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa +Evaluating project ':x-pack:plugin:esql:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/build.gradle'. + +> Configure project :x-pack:plugin:esql-core:test-fixtures +Evaluating project ':x-pack:plugin:esql-core:test-fixtures' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql-core/test-fixtures/build.gradle'. + +> Configure project :x-pack:plugin:fleet:qa +Evaluating project ':x-pack:plugin:fleet:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/fleet/qa/build.gradle'. + +> Configure project :x-pack:plugin:graph:qa +Evaluating project ':x-pack:plugin:graph:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/graph/qa/build.gradle'. 
+ +> Configure project :x-pack:plugin:identity-provider:qa +Evaluating project ':x-pack:plugin:identity-provider:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/identity-provider/qa/build.gradle'. + +> Configure project :x-pack:plugin:ilm:qa +Evaluating project ':x-pack:plugin:ilm:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ilm/qa/build.gradle'. + +> Configure project :x-pack:plugin:inference:qa +Evaluating project ':x-pack:plugin:inference:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/inference/qa/build.gradle'. + +> Configure project :x-pack:plugin:logsdb:qa +Evaluating project ':x-pack:plugin:logsdb:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/logsdb/qa/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa +Evaluating project ':x-pack:plugin:ml:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/build.gradle'. + +> Configure project :x-pack:plugin:ql:test-fixtures +Evaluating project ':x-pack:plugin:ql:test-fixtures' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ql/test-fixtures/build.gradle'. + +> Configure project :x-pack:plugin:repositories-metering-api:qa +Evaluating project ':x-pack:plugin:repositories-metering-api:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/repositories-metering-api/qa/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa +Evaluating project ':x-pack:plugin:searchable-snapshots:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/build.gradle'. + +> Configure project :x-pack:plugin:security:cli +Evaluating project ':x-pack:plugin:security:cli' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/cli/build.gradle'. + +> Configure project :x-pack:plugin:security:lib +Evaluating project ':x-pack:plugin:security:lib' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/lib/build.gradle'. + +> Configure project :x-pack:plugin:security:qa +Evaluating project ':x-pack:plugin:security:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/build.gradle'. + +> Configure project :x-pack:plugin:shutdown:qa +Evaluating project ':x-pack:plugin:shutdown:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/shutdown/qa/build.gradle'. + +> Configure project :x-pack:plugin:slm:qa +Evaluating project ':x-pack:plugin:slm:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/slm/qa/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries:qa +Evaluating project ':x-pack:plugin:snapshot-based-recoveries:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/qa/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/build.gradle'. + +> Configure project :x-pack:plugin:sql:jdbc +Evaluating project ':x-pack:plugin:sql:jdbc' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/jdbc/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa +Evaluating project ':x-pack:plugin:sql:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/build.gradle'. 
+ +> Configure project :x-pack:plugin:sql:sql-action +Evaluating project ':x-pack:plugin:sql:sql-action' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/sql-action/build.gradle'. + +> Configure project :x-pack:plugin:sql:sql-cli +Evaluating project ':x-pack:plugin:sql:sql-cli' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/sql-cli/build.gradle'. + +> Configure project :x-pack:plugin:sql:sql-client +Evaluating project ':x-pack:plugin:sql:sql-client' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/sql-client/build.gradle'. + +> Configure project :x-pack:plugin:sql:sql-proto +Evaluating project ':x-pack:plugin:sql:sql-proto' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/sql-proto/build.gradle'. + +> Configure project :x-pack:plugin:stack:qa +Evaluating project ':x-pack:plugin:stack:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/stack/qa/build.gradle'. + +> Configure project :x-pack:plugin:text-structure:qa +Evaluating project ':x-pack:plugin:text-structure:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/text-structure/qa/build.gradle'. + +> Configure project :x-pack:plugin:transform:qa +Evaluating project ':x-pack:plugin:transform:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/transform/qa/build.gradle'. + +> Configure project :x-pack:plugin:vector-tile:qa +Evaluating project ':x-pack:plugin:vector-tile:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/vector-tile/qa/build.gradle'. + +> Configure project :x-pack:plugin:watcher:qa +Evaluating project ':x-pack:plugin:watcher:qa' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/watcher/qa/build.gradle'. + +> Configure project :x-pack:qa:multi-cluster-search-security:legacy-with-basic-license +Evaluating project ':x-pack:qa:multi-cluster-search-security:legacy-with-basic-license' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle'. + +> Configure project :x-pack:qa:multi-cluster-search-security:legacy-with-full-license +Evaluating project ':x-pack:qa:multi-cluster-search-security:legacy-with-full-license' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle'. + +> Configure project :x-pack:qa:multi-cluster-search-security:legacy-with-restricted-trust +Evaluating project ':x-pack:qa:multi-cluster-search-security:legacy-with-restricted-trust' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle'. + +> Configure project :x-pack:qa:runtime-fields:core-with-mapped +Evaluating project ':x-pack:qa:runtime-fields:core-with-mapped' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/runtime-fields/core-with-mapped/build.gradle'. + +> Configure project :x-pack:qa:runtime-fields:core-with-search +Evaluating project ':x-pack:qa:runtime-fields:core-with-search' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/runtime-fields/core-with-search/build.gradle'. + +> Configure project :x-pack:qa:runtime-fields:with-security +Evaluating project ':x-pack:qa:runtime-fields:with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/runtime-fields/with-security/build.gradle'. 
+ +> Configure project :x-pack:qa:third-party:active-directory +Evaluating project ':x-pack:qa:third-party:active-directory' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/third-party/active-directory/build.gradle'. + +> Configure project :x-pack:qa:third-party:jira +Evaluating project ':x-pack:qa:third-party:jira' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/third-party/jira/build.gradle'. + +> Configure project :x-pack:qa:third-party:pagerduty +Evaluating project ':x-pack:qa:third-party:pagerduty' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/third-party/pagerduty/build.gradle'. + +> Configure project :x-pack:qa:third-party:slack +Evaluating project ':x-pack:qa:third-party:slack' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/qa/third-party/slack/build.gradle'. + +> Configure project :x-pack:plugin:async-search:qa:rest +Evaluating project ':x-pack:plugin:async-search:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/async-search/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:async-search:qa:security +Evaluating project ':x-pack:plugin:async-search:qa:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/async-search/qa/security/build.gradle'. + +> Configure project :x-pack:plugin:autoscaling:qa:rest +Evaluating project ':x-pack:plugin:autoscaling:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/autoscaling/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa:downgrade-to-basic-license +Evaluating project ':x-pack:plugin:ccr:qa:downgrade-to-basic-license' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa:multi-cluster +Evaluating project ':x-pack:plugin:ccr:qa:multi-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/multi-cluster/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa:non-compliant-license +Evaluating project ':x-pack:plugin:ccr:qa:non-compliant-license' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa:rest +Evaluating project ':x-pack:plugin:ccr:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa:restart +Evaluating project ':x-pack:plugin:ccr:qa:restart' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/restart/build.gradle'. + +> Configure project :x-pack:plugin:ccr:qa:security +Evaluating project ':x-pack:plugin:ccr:qa:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ccr/qa/security/build.gradle'. + +> Configure project :x-pack:plugin:deprecation:qa:common +Evaluating project ':x-pack:plugin:deprecation:qa:common' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/deprecation/qa/common/build.gradle'. + +> Configure project :x-pack:plugin:deprecation:qa:early-deprecation-rest +Evaluating project ':x-pack:plugin:deprecation:qa:early-deprecation-rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle'. 
+ +> Configure project :x-pack:plugin:deprecation:qa:rest +Evaluating project ':x-pack:plugin:deprecation:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/deprecation/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:downsample:qa:mixed-cluster +Evaluating project ':x-pack:plugin:downsample:qa:mixed-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle'. + +> Configure project :x-pack:plugin:downsample:qa:rest +Evaluating project ':x-pack:plugin:downsample:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/downsample/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:downsample:qa:with-security +Evaluating project ':x-pack:plugin:downsample:qa:with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/downsample/qa/with-security/build.gradle'. + +> Configure project :x-pack:plugin:enrich:qa:common +Evaluating project ':x-pack:plugin:enrich:qa:common' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/enrich/qa/common/build.gradle'. + +> Configure project :x-pack:plugin:enrich:qa:rest +Evaluating project ':x-pack:plugin:enrich:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/enrich/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:enrich:qa:rest-with-advanced-security +Evaluating project ':x-pack:plugin:enrich:qa:rest-with-advanced-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle'. + +> Configure project :x-pack:plugin:enrich:qa:rest-with-security +Evaluating project ':x-pack:plugin:enrich:qa:rest-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/enrich/qa/rest-with-security/build.gradle'. + +> Configure project :x-pack:plugin:ent-search:qa:full-cluster-restart +Evaluating project ':x-pack:plugin:ent-search:qa:full-cluster-restart' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle'. + +> Configure project :x-pack:plugin:ent-search:qa:rest +Evaluating project ':x-pack:plugin:ent-search:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ent-search/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa:ccs-rolling-upgrade +Evaluating project ':x-pack:plugin:eql:qa:ccs-rolling-upgrade' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa:common +Evaluating project ':x-pack:plugin:eql:qa:common' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/common/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa:correctness +Evaluating project ':x-pack:plugin:eql:qa:correctness' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/correctness/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa:mixed-node +Evaluating project ':x-pack:plugin:eql:qa:mixed-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/mixed-node/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa:multi-cluster-with-security +Evaluating project ':x-pack:plugin:eql:qa:multi-cluster-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle'. 
+ +> Configure project :x-pack:plugin:eql:qa:rest +Evaluating project ':x-pack:plugin:eql:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:eql:qa:security +Evaluating project ':x-pack:plugin:eql:qa:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/eql/qa/security/build.gradle'. + +> Configure project :x-pack:plugin:esql:compute:ann +Evaluating project ':x-pack:plugin:esql:compute:ann' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/compute/ann/build.gradle'. + +> Configure project :x-pack:plugin:esql:compute:gen +Evaluating project ':x-pack:plugin:esql:compute:gen' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/compute/gen/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:action +Evaluating project ':x-pack:plugin:esql:qa:action' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/action/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:security +Evaluating project ':x-pack:plugin:esql:qa:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/security/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:server +Evaluating project ':x-pack:plugin:esql:qa:server' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/server/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:testFixtures +Evaluating project ':x-pack:plugin:esql:qa:testFixtures' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/testFixtures/build.gradle'. + +> Configure project :x-pack:plugin:fleet:qa:rest +Evaluating project ':x-pack:plugin:fleet:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/fleet/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:graph:qa:with-security +Evaluating project ':x-pack:plugin:graph:qa:with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/graph/qa/with-security/build.gradle'. + +> Configure project :x-pack:plugin:identity-provider:qa:idp-rest-tests +Evaluating project ':x-pack:plugin:identity-provider:qa:idp-rest-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle'. + +> Configure project :x-pack:plugin:ilm:qa:multi-cluster +Evaluating project ':x-pack:plugin:ilm:qa:multi-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ilm/qa/multi-cluster/build.gradle'. + +> Configure project :x-pack:plugin:ilm:qa:multi-node +Evaluating project ':x-pack:plugin:ilm:qa:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ilm/qa/multi-node/build.gradle'. + +> Configure project :x-pack:plugin:ilm:qa:rest +Evaluating project ':x-pack:plugin:ilm:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ilm/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:ilm:qa:with-security +Evaluating project ':x-pack:plugin:ilm:qa:with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ilm/qa/with-security/build.gradle'. + +> Configure project :x-pack:plugin:inference:qa:inference-service-tests +Evaluating project ':x-pack:plugin:inference:qa:inference-service-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/inference/qa/inference-service-tests/build.gradle'. 
+ +> Configure project :x-pack:plugin:inference:qa:mixed-cluster +Evaluating project ':x-pack:plugin:inference:qa:mixed-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/inference/qa/mixed-cluster/build.gradle'. + +> Configure project :x-pack:plugin:inference:qa:rolling-upgrade +Evaluating project ':x-pack:plugin:inference:qa:rolling-upgrade' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle'. + +> Configure project :x-pack:plugin:inference:qa:test-service-plugin +Evaluating project ':x-pack:plugin:inference:qa:test-service-plugin' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/inference/qa/test-service-plugin/build.gradle'. + +> Configure project :x-pack:plugin:logsdb:qa:with-basic +Evaluating project ':x-pack:plugin:logsdb:qa:with-basic' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/logsdb/qa/with-basic/build.gradle'. + +> Configure project :x-pack:plugin:logsdb:qa:with-custom-cutoff +Evaluating project ':x-pack:plugin:logsdb:qa:with-custom-cutoff' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/logsdb/qa/with-custom-cutoff/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:basic-multi-node +Evaluating project ':x-pack:plugin:ml:qa:basic-multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/basic-multi-node/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:disabled +Evaluating project ':x-pack:plugin:ml:qa:disabled' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/disabled/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:ml-inference-service-tests +Evaluating project ':x-pack:plugin:ml:qa:ml-inference-service-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/ml-inference-service-tests/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:ml-with-security +Evaluating project ':x-pack:plugin:ml:qa:ml-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/ml-with-security/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:multi-cluster-tests-with-security +Evaluating project ':x-pack:plugin:ml:qa:multi-cluster-tests-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:native-multi-node-tests +Evaluating project ':x-pack:plugin:ml:qa:native-multi-node-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:no-bootstrap-tests +Evaluating project ':x-pack:plugin:ml:qa:no-bootstrap-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle'. + +> Configure project :x-pack:plugin:ml:qa:single-node-tests +Evaluating project ':x-pack:plugin:ml:qa:single-node-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/ml/qa/single-node-tests/build.gradle'. + +> Configure project :x-pack:plugin:repositories-metering-api:qa:azure +Evaluating project ':x-pack:plugin:repositories-metering-api:qa:azure' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle'. 
+ +> Configure project :x-pack:plugin:repositories-metering-api:qa:gcs +Evaluating project ':x-pack:plugin:repositories-metering-api:qa:gcs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle'. + +> Configure project :x-pack:plugin:repositories-metering-api:qa:s3 +Evaluating project ':x-pack:plugin:repositories-metering-api:qa:s3' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:azure +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:azure' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:gcs +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:gcs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:hdfs +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:hdfs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:minio +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:minio' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:rest +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:s3 +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:s3' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle'. + +> Configure project :x-pack:plugin:searchable-snapshots:qa:url +Evaluating project ':x-pack:plugin:searchable-snapshots:qa:url' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/searchable-snapshots/qa/url/build.gradle'. + +> Configure project :x-pack:plugin:security:lib:nimbus-jose-jwt-modified +Evaluating project ':x-pack:plugin:security:lib:nimbus-jose-jwt-modified' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle'. + +> Configure project :x-pack:plugin:security:lib:nimbus-jose-jwt-modified-part1 +Evaluating project ':x-pack:plugin:security:lib:nimbus-jose-jwt-modified-part1' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle'. + +> Configure project :x-pack:plugin:security:lib:nimbus-jose-jwt-modified-part2 +Evaluating project ':x-pack:plugin:security:lib:nimbus-jose-jwt-modified-part2' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:audit +Evaluating project ':x-pack:plugin:security:qa:audit' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/audit/build.gradle'. 
+ +> Configure project :x-pack:plugin:security:qa:basic-enable-security +Evaluating project ':x-pack:plugin:security:qa:basic-enable-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/basic-enable-security/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:consistency-checks +Evaluating project ':x-pack:plugin:security:qa:consistency-checks' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/consistency-checks/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:jwt-realm +Evaluating project ':x-pack:plugin:security:qa:jwt-realm' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/jwt-realm/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:multi-cluster +Evaluating project ':x-pack:plugin:security:qa:multi-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/multi-cluster/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:operator-privileges-tests +Evaluating project ':x-pack:plugin:security:qa:operator-privileges-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:profile +Evaluating project ':x-pack:plugin:security:qa:profile' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/profile/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:saml-rest-tests +Evaluating project ':x-pack:plugin:security:qa:saml-rest-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/saml-rest-tests/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:secondary-auth-actions +Evaluating project ':x-pack:plugin:security:qa:secondary-auth-actions' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:security-basic +Evaluating project ':x-pack:plugin:security:qa:security-basic' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/security-basic/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:security-disabled +Evaluating project ':x-pack:plugin:security:qa:security-disabled' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/security-disabled/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:security-trial +Evaluating project ':x-pack:plugin:security:qa:security-trial' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/security-trial/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:service-account +Evaluating project ':x-pack:plugin:security:qa:service-account' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/service-account/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:smoke-test-all-realms +Evaluating project ':x-pack:plugin:security:qa:smoke-test-all-realms' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/smoke-test-all-realms/build.gradle'. + +> Configure project :x-pack:plugin:security:qa:tls-basic +Evaluating project ':x-pack:plugin:security:qa:tls-basic' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/security/qa/tls-basic/build.gradle'. 
+ +> Configure project :x-pack:plugin:shutdown:qa:full-cluster-restart +Evaluating project ':x-pack:plugin:shutdown:qa:full-cluster-restart' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle'. + +> Configure project :x-pack:plugin:shutdown:qa:multi-node +Evaluating project ':x-pack:plugin:shutdown:qa:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/shutdown/qa/multi-node/build.gradle'. + +> Configure project :x-pack:plugin:shutdown:qa:rolling-upgrade +Evaluating project ':x-pack:plugin:shutdown:qa:rolling-upgrade' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle'. + +> Configure project :x-pack:plugin:slm:qa:multi-node +Evaluating project ':x-pack:plugin:slm:qa:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/slm/qa/multi-node/build.gradle'. + +> Configure project :x-pack:plugin:slm:qa:rest +Evaluating project ':x-pack:plugin:slm:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/slm/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:slm:qa:with-security +Evaluating project ':x-pack:plugin:slm:qa:with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/slm/qa/with-security/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries:qa:azure +Evaluating project ':x-pack:plugin:snapshot-based-recoveries:qa:azure' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries:qa:fs +Evaluating project ':x-pack:plugin:snapshot-based-recoveries:qa:fs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/qa/fs/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries:qa:gcs +Evaluating project ':x-pack:plugin:snapshot-based-recoveries:qa:gcs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries:qa:license-enforcing +Evaluating project ':x-pack:plugin:snapshot-based-recoveries:qa:license-enforcing' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/qa/license-enforcing/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-based-recoveries:qa:s3 +Evaluating project ':x-pack:plugin:snapshot-based-recoveries:qa:s3' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa:azure +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa:azure' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa:gcs +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa:gcs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa:hdfs +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa:hdfs' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle'. 
+ +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa:minio +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa:minio' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa:rest +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:snapshot-repo-test-kit:qa:s3 +Evaluating project ':x-pack:plugin:snapshot-repo-test-kit:qa:s3' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:jdbc +Evaluating project ':x-pack:plugin:sql:qa:jdbc' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:mixed-node +Evaluating project ':x-pack:plugin:sql:qa:mixed-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/mixed-node/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server +Evaluating project ':x-pack:plugin:sql:qa:server' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/build.gradle'. + +> Configure project :x-pack:plugin:stack:qa:rest +Evaluating project ':x-pack:plugin:stack:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/stack/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:text-structure:qa:text-structure-with-security +Evaluating project ':x-pack:plugin:text-structure:qa:text-structure-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/text-structure/qa/text-structure-with-security/build.gradle'. + +> Configure project :x-pack:plugin:transform:qa:common +Evaluating project ':x-pack:plugin:transform:qa:common' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/transform/qa/common/build.gradle'. + +> Configure project :x-pack:plugin:transform:qa:multi-cluster-tests-with-security +Evaluating project ':x-pack:plugin:transform:qa:multi-cluster-tests-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle'. + +> Configure project :x-pack:plugin:transform:qa:multi-node-tests +Evaluating project ':x-pack:plugin:transform:qa:multi-node-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/transform/qa/multi-node-tests/build.gradle'. + +> Configure project :x-pack:plugin:transform:qa:single-node-tests +Evaluating project ':x-pack:plugin:transform:qa:single-node-tests' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/transform/qa/single-node-tests/build.gradle'. + +> Configure project :x-pack:plugin:vector-tile:qa:multi-cluster +Evaluating project ':x-pack:plugin:vector-tile:qa:multi-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/vector-tile/qa/multi-cluster/build.gradle'. + +> Configure project :x-pack:plugin:watcher:qa:common +Evaluating project ':x-pack:plugin:watcher:qa:common' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/watcher/qa/common/build.gradle'. 
+ +> Configure project :x-pack:plugin:watcher:qa:rest +Evaluating project ':x-pack:plugin:watcher:qa:rest' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/watcher/qa/rest/build.gradle'. + +> Configure project :x-pack:plugin:watcher:qa:with-security +Evaluating project ':x-pack:plugin:watcher:qa:with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/watcher/qa/with-security/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:server:mixed-cluster +Evaluating project ':x-pack:plugin:esql:qa:server:mixed-cluster' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:server:multi-clusters +Evaluating project ':x-pack:plugin:esql:qa:server:multi-clusters' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:server:multi-node +Evaluating project ':x-pack:plugin:esql:qa:server:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/server/multi-node/build.gradle'. + +> Configure project :x-pack:plugin:esql:qa:server:single-node +Evaluating project ':x-pack:plugin:esql:qa:server:single-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/esql/qa/server/single-node/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:jdbc:multi-node +Evaluating project ':x-pack:plugin:sql:qa:jdbc:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/multi-node/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:jdbc:no-sql +Evaluating project ':x-pack:plugin:sql:qa:jdbc:no-sql' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/no-sql/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:jdbc:security +Evaluating project ':x-pack:plugin:sql:qa:jdbc:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/security/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:jdbc:single-node +Evaluating project ':x-pack:plugin:sql:qa:jdbc:single-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/single-node/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server:multi-cluster-with-security +Evaluating project ':x-pack:plugin:sql:qa:server:multi-cluster-with-security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server:multi-node +Evaluating project ':x-pack:plugin:sql:qa:server:multi-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/multi-node/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server:security +Evaluating project ':x-pack:plugin:sql:qa:server:security' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/security/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server:single-node +Evaluating project ':x-pack:plugin:sql:qa:server:single-node' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/single-node/build.gradle'. 
+ +> Configure project :x-pack:plugin:sql:qa:jdbc:security:with-ssl +Evaluating project ':x-pack:plugin:sql:qa:jdbc:security:with-ssl' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:jdbc:security:without-ssl +Evaluating project ':x-pack:plugin:sql:qa:jdbc:security:without-ssl' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/jdbc/security/without-ssl/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server:security:with-ssl +Evaluating project ':x-pack:plugin:sql:qa:server:security:with-ssl' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle'. + +> Configure project :x-pack:plugin:sql:qa:server:security:without-ssl +Evaluating project ':x-pack:plugin:sql:qa:server:security:without-ssl' using build file '/Users/rene/dev/elastic/elasticsearch/x-pack/plugin/sql/qa/server/security/without-ssl/build.gradle'. +All projects evaluated. +test#with21 +Task name matched 'cleanTest' +Task name matched 'test' +Selected primary task 'cleanTest' from project :libs:simdvec +Selected primary task 'test' from project :libs:simdvec +Tasks to be executed: [task ':libs:simdvec:cleanTest', task ':libs:logging:compileJava', task ':libs:core:compileJava', task ':libs:native:compileJava', task ':libs:simdvec:compileJava', task ':libs:simdvec:processResources', task ':libs:simdvec:classes', task ':libs:simdvec:compileMain21Java', task ':libs:simdvec:processMain21Resources', task ':libs:simdvec:main21Classes', task ':libs:simdvec:compileMain22Java', task ':libs:simdvec:processMain22Resources', task ':libs:simdvec:main22Classes', task ':libs:simdvec:jar', task ':libs:cli:compileJava', task ':libs:x-content:compileJava', task ':libs:entitlement:bridge:compileJava', task ':libs:entitlement:compileJava', task ':libs:geo:compileJava', task ':libs:grok:compileJava', task ':libs:lz4:compileJava', task ':libs:plugin-api:compileJava', task ':libs:plugin-analysis-api:compileJava', task ':libs:secure-sm:compileJava', task ':libs:tdigest:compileJava', task ':server:compileJava', task ':server:processResources', task ':server:classes', task ':server:generateModulesList', task ':server:generatePluginsList', task ':server:jar', task ':client:rest:compileJava', task ':client:rest:processResources', task ':client:rest:classes', task ':client:rest:jar', task ':libs:cli:processResources', task ':libs:cli:classes', task ':libs:cli:jar', task ':libs:core:processResources', task ':libs:core:classes', task ':libs:core:jar', task ':libs:entitlement:asm-provider:compileJava', task ':libs:entitlement:asm-provider:processResources', task ':libs:entitlement:asm-provider:classes', task ':libs:entitlement:asm-provider:jar', task ':libs:entitlement:generateAsm-providerProviderManifest', task ':libs:entitlement:generateAsm-providerProviderImpl', task ':libs:entitlement:processResources', task ':libs:entitlement:classes', task ':libs:entitlement:bridge:processResources', task ':libs:entitlement:bridge:classes', task ':libs:entitlement:bridge:compileMain23Java', task ':libs:entitlement:bridge:processMain23Resources', task ':libs:entitlement:bridge:main23Classes', task ':libs:entitlement:bridge:java23Jar', task ':libs:entitlement:compileMain23Java', task ':libs:entitlement:processMain23Resources', task ':libs:entitlement:main23Classes', task ':libs:entitlement:jar', task ':libs:geo:processResources', task ':libs:geo:classes', task 
':libs:geo:jar', task ':libs:grok:processResources', task ':libs:grok:classes', task ':libs:grok:jar', task ':libs:logging:processResources', task ':libs:logging:classes', task ':libs:logging:jar', task ':libs:lz4:processResources', task ':libs:lz4:classes', task ':libs:lz4:jar', task ':libs:native:processResources', task ':libs:native:classes', task ':libs:native:compileMain22Java', task ':libs:native:processMain22Resources', task ':libs:native:main22Classes', task ':libs:native:jar', task ':libs:plugin-analysis-api:processResources', task ':libs:plugin-analysis-api:classes', task ':libs:plugin-analysis-api:jar', task ':libs:plugin-api:processResources', task ':libs:plugin-api:classes', task ':libs:plugin-api:jar', task ':libs:secure-sm:processResources', task ':libs:secure-sm:classes', task ':libs:secure-sm:jar', task ':libs:ssl-config:compileJava', task ':modules:transport-netty4:compileJava', task ':test:framework:compileJava', task ':libs:simdvec:compileTestJava', task ':libs:simdvec:processTestResources', task ':libs:simdvec:testClasses', task ':libs:ssl-config:processResources', task ':libs:ssl-config:classes', task ':libs:ssl-config:jar', task ':libs:tdigest:processResources', task ':libs:tdigest:classes', task ':libs:tdigest:jar', task ':libs:x-content:impl:compileJava', task ':libs:x-content:impl:processResources', task ':libs:x-content:impl:classes', task ':libs:x-content:impl:jar', task ':libs:x-content:generateImplProviderManifest', task ':libs:x-content:generateImplProviderImpl', task ':libs:x-content:processResources', task ':libs:x-content:classes', task ':libs:x-content:jar', task ':modules:transport-netty4:processResources', task ':modules:transport-netty4:classes', task ':modules:transport-netty4:jar', task ':test:framework:processResources', task ':test:framework:classes', task ':test:framework:jar', task ':test:immutable-collections-patch:compileJava', task ':test:immutable-collections-patch:processResources', task ':test:immutable-collections-patch:classes', task ':test:immutable-collections-patch:generatePatch', task ':libs:native:native-libraries:extractLibs', task ':libs:simdvec:test'] +Tasks that were excluded: [] +======================================= +Elasticsearch Build Hamster says Hello! + Gradle Version : 8.11.1 + OS Info : Mac OS X 15.2 (aarch64) + JDK Version : 21.0.5+9-LTS-239 (Oracle) + JAVA_HOME : /Users/rene/.sdkman/candidates/java/21.0.5-oracle + Random Testing Seed : 7B469FBE8B6D0C65 + In FIPS 140 mode : false +======================================= +Ignoring listeners of task graph ready event, as this build (:) has already executed work. +Resolve mutations for :libs:simdvec:cleanTest (Thread[#1322,Execution worker,5,main]) started. +work action Unknown value (Thread[#1392,included builds Thread 2,5,main]) started. +work action Unknown value (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:simdvec:cleanTest (Thread[#1322,Execution worker,5,main]) started. +destroyer locations for task group 0 (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:logging:compileJava (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:simdvec:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +Resolve mutations for :libs:simdvec:processMain22Resources (Thread[#1329,Execution worker Thread 8,5,main]) started. +Resolve mutations for :libs:simdvec:processMain21Resources (Thread[#1324,Execution worker Thread 3,5,main]) started. 
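Everything up to "All projects evaluated." is Gradle's configuration phase: every project in the Elasticsearch build is evaluated, even though only the :libs:simdvec tasks were requested. Gradle then matches the requested task names ('cleanTest' and 'test') against :libs:simdvec and schedules the full dependency chain of compile, processResources, and jar tasks listed above. The banner confirms the environment: Gradle 8.11.1, JDK 21.0.5 on macOS/aarch64, and, importantly for reading the rest of the log, the build cache disabled. The exact command line is not echoed in the log, but given the "Task name matched" and "Selected primary task ... from project :libs:simdvec" messages, a plausible invocation (an assumption) is something like:

    ./gradlew :libs:simdvec:cleanTest :libs:simdvec:test --info

run with verbose logging; the per-worker "(Thread[#...]) started." scheduling lines suggest an even more verbose level such as --debug.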
+Resolve mutations for :libs:grok:compileJava (Thread[#1327,Execution worker Thread 6,5,main]) started. +:libs:simdvec:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +:libs:simdvec:processMain21Resources (Thread[#1324,Execution worker Thread 3,5,main]) started. +:libs:simdvec:processMain22Resources (Thread[#1329,Execution worker Thread 8,5,main]) started. +Resolve mutations for :libs:geo:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started. +Resolve mutations for :libs:secure-sm:compileJava (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:entitlement:bridge:compileJava (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :server:generateModulesList (Thread[#1392,included builds Thread 2,5,main]) started. +:libs:secure-sm:compileJava (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :server:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:logging:compileJava (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:plugin-api:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started. +:server:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +:server:generateModulesList (Thread[#1392,included builds Thread 2,5,main]) started. +:libs:entitlement:bridge:compileJava (Thread[#1325,Execution worker Thread 4,5,main]) started. +:libs:geo:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:plugin-api:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started. +:libs:grok:compileJava (Thread[#1327,Execution worker Thread 6,5,main]) started. + +> Task :libs:simdvec:processMain22Resources NO-SOURCE +Skipping task ':libs:simdvec:processMain22Resources' as it has no source files and no previous output files. + +> Task :libs:simdvec:processMain21Resources NO-SOURCE +Skipping task ':libs:simdvec:processMain21Resources' as it has no source files and no previous output files. + +> Task :server:generateModulesList UP-TO-DATE +Caching disabled for task ':server:generateModulesList' because: + Build cache is disabled + Gradle would require more information to cache this task +Skipping task ':server:generateModulesList' as it is up-to-date. + +> Task :libs:simdvec:processResources NO-SOURCE +Skipping task ':libs:simdvec:processResources' as it has no source files and no previous output files. +Resolve mutations for :client:rest:processResources (Thread[#1392,included builds Thread 2,5,main]) started. + +> Task :libs:grok:compileJava UP-TO-DATE +Caching disabled for task ':libs:grok:compileJava' because: + Build cache is disabled +Skipping task ':libs:grok:compileJava' as it is up-to-date. +:client:rest:processResources (Thread[#1392,included builds Thread 2,5,main]) started. +Resolve mutations for :libs:cli:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +Resolve mutations for :client:rest:compileJava (Thread[#1324,Execution worker Thread 3,5,main]) started. + +> Task :libs:plugin-api:compileJava UP-TO-DATE +Caching disabled for task ':libs:plugin-api:compileJava' because: + Build cache is disabled +Skipping task ':libs:plugin-api:compileJava' as it is up-to-date. +No compile result for :libs:plugin-api:compileJava +No compile result for :libs:plugin-api:compileJava +Resolve mutations for :server:generatePluginsList (Thread[#1329,Execution worker Thread 8,5,main]) started. 
+ +> Task :libs:secure-sm:compileJava UP-TO-DATE +Caching disabled for task ':libs:secure-sm:compileJava' because: + Build cache is disabled +Skipping task ':libs:secure-sm:compileJava' as it is up-to-date. +No compile result for :libs:secure-sm:compileJava +No compile result for :libs:secure-sm:compileJava +No compile result for :libs:plugin-api:compileJava +No compile result for :libs:plugin-api:compileJava +:libs:cli:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. + +> Task :libs:geo:compileJava UP-TO-DATE +Caching disabled for task ':libs:geo:compileJava' because: + Build cache is disabled +Skipping task ':libs:geo:compileJava' as it is up-to-date. + +> Task :libs:entitlement:bridge:compileJava UP-TO-DATE +Caching disabled for task ':libs:entitlement:bridge:compileJava' because: + Build cache is disabled +Skipping task ':libs:entitlement:bridge:compileJava' as it is up-to-date. +No compile result for :libs:geo:compileJava +No compile result for :libs:geo:compileJava +No compile result for :libs:grok:compileJava +No compile result for :libs:grok:compileJava + +> Task :server:processResources UP-TO-DATE +Caching disabled for task ':server:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':server:processResources' as it is up-to-date. +No compile result for :libs:grok:compileJava +work action resolve main (project :libs:plugin-api) (Thread[#1330,Execution worker Thread 9,5,main]) started. +No compile result for :libs:geo:compileJava +No compile result for :libs:geo:compileJava +No compile result for :libs:entitlement:bridge:compileJava +No compile result for :libs:entitlement:bridge:compileJava +No compile result for :libs:entitlement:bridge:compileJava +No compile result for :libs:entitlement:bridge:compileJava + +> Task :libs:logging:compileJava UP-TO-DATE +Caching disabled for task ':libs:logging:compileJava' because: + Build cache is disabled +Skipping task ':libs:logging:compileJava' as it is up-to-date. +work action resolve main (project :libs:entitlement:bridge) (Thread[#1325,Execution worker Thread 4,5,main]) started. +work action resolve main (project :libs:plugin-api) (Thread[#1331,Execution worker Thread 10,5,main]) started. +No compile result for :libs:secure-sm:compileJava +:server:generatePluginsList (Thread[#1329,Execution worker Thread 8,5,main]) started. +:client:rest:compileJava (Thread[#1324,Execution worker Thread 3,5,main]) started. +No compile result for :libs:secure-sm:compileJava +Resolve mutations for :libs:plugin-analysis-api:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started. +work action resolve main (project :libs:entitlement:bridge) (Thread[#1325,Execution worker Thread 4,5,main]) started. +No compile result for :libs:logging:compileJava +work action resolve main (project :libs:geo) (Thread[#1328,Execution worker Thread 7,5,main]) started. +Resolve mutations for :libs:core:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +No compile result for :libs:grok:compileJava +work action resolve main (project :libs:grok) (Thread[#1330,Execution worker Thread 9,5,main]) started. +work action resolve main (project :libs:grok) (Thread[#1327,Execution worker Thread 6,5,main]) started. +:libs:plugin-analysis-api:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started. +Resolve mutations for :libs:entitlement:asm-provider:processResources (Thread[#1327,Execution worker Thread 6,5,main]) started. 
+work action resolve main (project :libs:secure-sm) (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:entitlement:asm-provider:processResources (Thread[#1327,Execution worker Thread 6,5,main]) started. +No compile result for :libs:logging:compileJava +work action resolve main (project :libs:geo) (Thread[#1325,Execution worker Thread 4,5,main]) started. +work action resolve main (project :libs:secure-sm) (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:entitlement:bridge:processResources (Thread[#1325,Execution worker Thread 4,5,main]) started. +No compile result for :libs:logging:compileJava +No compile result for :libs:logging:compileJava +:libs:entitlement:bridge:processResources (Thread[#1325,Execution worker Thread 4,5,main]) started. +work action resolve main (project :libs:logging) (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:entitlement:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:entitlement:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:cli:processResources NO-SOURCE +Skipping task ':libs:cli:processResources' as it has no source files and no previous output files. +:libs:core:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +Resolve mutations for :libs:entitlement:processMain23Resources (Thread[#1326,Execution worker Thread 5,5,main]) started. + +> Task :server:generatePluginsList UP-TO-DATE +Caching disabled for task ':server:generatePluginsList' because: + Build cache is disabled + Gradle would require more information to cache this task +Skipping task ':server:generatePluginsList' as it is up-to-date. +Resolve mutations for :libs:geo:processResources (Thread[#1329,Execution worker Thread 8,5,main]) started. +:libs:entitlement:processMain23Resources (Thread[#1326,Execution worker Thread 5,5,main]) started. +work action resolve main (project :libs:logging) (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:core:compileJava (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:entitlement:bridge:processMain23Resources (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :libs:entitlement:processResources NO-SOURCE +Skipping task ':libs:entitlement:processResources' as it has no source files and no previous output files. +Resolve mutations for :libs:grok:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:entitlement:bridge:processResources NO-SOURCE +Skipping task ':libs:entitlement:bridge:processResources' as it has no source files and no previous output files. + +> Task :libs:plugin-analysis-api:compileJava UP-TO-DATE +Caching disabled for task ':libs:plugin-analysis-api:compileJava' because: + Build cache is disabled +Skipping task ':libs:plugin-analysis-api:compileJava' as it is up-to-date. +:libs:grok:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:entitlement:processMain23Resources NO-SOURCE +Skipping task ':libs:entitlement:processMain23Resources' as it has no source files and no previous output files. +:libs:geo:processResources (Thread[#1329,Execution worker Thread 8,5,main]) started. 
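Three task outcomes recur throughout this execution phase and are worth reading as a legend: NO-SOURCE means the task had no input files to process at all (most of these libs ship no resources, so their processResources tasks are skipped outright); UP-TO-DATE means Gradle's incremental checks found the task's declared inputs and outputs unchanged since the previous run; and a task with neither label actually executed. Because the build cache is disabled ("Caching disabled for task ... because: Build cache is disabled"), these up-to-date checks are the only thing preventing a full recompile here.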
+ +> Task :client:rest:processResources UP-TO-DATE +Caching disabled for task ':client:rest:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':client:rest:processResources' as it is up-to-date. +:libs:core:compileJava (Thread[#1323,Execution worker Thread 2,5,main]) started. + +> Task :client:rest:compileJava UP-TO-DATE +Caching disabled for task ':client:rest:compileJava' because: + Build cache is disabled +Skipping task ':client:rest:compileJava' as it is up-to-date. +No compile result for :client:rest:compileJava +No compile result for :libs:plugin-analysis-api:compileJava +Resolve mutations for :libs:entitlement:bridge:classes (Thread[#1325,Execution worker Thread 4,5,main]) started. +:libs:entitlement:bridge:processMain23Resources (Thread[#1332,Execution worker Thread 11,5,main]) started. +No compile result for :libs:plugin-analysis-api:compileJava +Resolve mutations for :libs:lz4:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +Resolve mutations for :libs:logging:processResources (Thread[#1392,included builds Thread 2,5,main]) started. +No compile result for :client:rest:compileJava +:libs:logging:processResources (Thread[#1392,included builds Thread 2,5,main]) started. + +> Task :libs:geo:processResources NO-SOURCE +Skipping task ':libs:geo:processResources' as it has no source files and no previous output files. + +> Task :libs:core:processResources NO-SOURCE +Skipping task ':libs:core:processResources' as it has no source files and no previous output files. +No compile result for :client:rest:compileJava +:libs:lz4:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +No compile result for :libs:plugin-analysis-api:compileJava + +> Task :libs:entitlement:asm-provider:processResources UP-TO-DATE +Caching disabled for task ':libs:entitlement:asm-provider:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:entitlement:asm-provider:processResources' as it is up-to-date. + +> Task :libs:grok:processResources UP-TO-DATE +Caching disabled for task ':libs:grok:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:grok:processResources' as it is up-to-date. +:libs:entitlement:bridge:classes (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :libs:native:processMain22Resources (Thread[#1327,Execution worker Thread 6,5,main]) started. +Resolve mutations for :libs:grok:classes (Thread[#1328,Execution worker Thread 7,5,main]) started. +No compile result for :libs:plugin-analysis-api:compileJava + +> Task :libs:entitlement:bridge:processMain23Resources NO-SOURCE +Skipping task ':libs:entitlement:bridge:processMain23Resources' as it has no source files and no previous output files. +:libs:grok:classes (Thread[#1328,Execution worker Thread 7,5,main]) started. +work action resolve main (project :libs:plugin-analysis-api) (Thread[#1331,Execution worker Thread 10,5,main]) started. +work action resolve main (project :libs:plugin-analysis-api) (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:plugin-analysis-api:processResources (Thread[#1331,Execution worker Thread 10,5,main]) started. +Resolve mutations for :libs:plugin-api:processResources (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :libs:logging:processResources NO-SOURCE +Skipping task ':libs:logging:processResources' as it has no source files and no previous output files. 
+No compile result for :client:rest:compileJava +:libs:plugin-analysis-api:processResources (Thread[#1331,Execution worker Thread 10,5,main]) started. +Resolve mutations for :client:rest:classes (Thread[#1324,Execution worker Thread 3,5,main]) started. +Resolve mutations for :libs:logging:classes (Thread[#1392,included builds Thread 2,5,main]) started. +:client:rest:classes (Thread[#1324,Execution worker Thread 3,5,main]) started. +Resolve mutations for :libs:native:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:logging:classes (Thread[#1392,included builds Thread 2,5,main]) started. +Resolve mutations for :libs:geo:classes (Thread[#1329,Execution worker Thread 8,5,main]) started. +:libs:native:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:plugin-api:processResources (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :libs:plugin-analysis-api:processResources NO-SOURCE +Skipping task ':libs:plugin-analysis-api:processResources' as it has no source files and no previous output files. +:libs:native:processMain22Resources (Thread[#1327,Execution worker Thread 6,5,main]) started. +Resolve mutations for :libs:plugin-analysis-api:classes (Thread[#1331,Execution worker Thread 10,5,main]) started. +:libs:plugin-analysis-api:classes (Thread[#1331,Execution worker Thread 10,5,main]) started. + +> Task :libs:grok:classes UP-TO-DATE +Skipping task ':libs:grok:classes' as it has no actions. + +> Task :libs:core:compileJava UP-TO-DATE +Caching disabled for task ':libs:core:compileJava' because: + Build cache is disabled +Skipping task ':libs:core:compileJava' as it is up-to-date. +No compile result for :libs:core:compileJava +:libs:geo:classes (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :libs:logging:classes UP-TO-DATE +Skipping task ':libs:logging:classes' as it has no actions. + +> Task :client:rest:classes UP-TO-DATE +Skipping task ':client:rest:classes' as it has no actions. + +> Task :libs:geo:classes UP-TO-DATE +Skipping task ':libs:geo:classes' as it has no actions. +Resolve mutations for :client:rest:jar (Thread[#1324,Execution worker Thread 3,5,main]) started. +Resolve mutations for :libs:logging:jar (Thread[#1392,included builds Thread 2,5,main]) started. +No compile result for :libs:core:compileJava + +> Task :libs:native:processMain22Resources NO-SOURCE +Skipping task ':libs:native:processMain22Resources' as it has no source files and no previous output files. +No compile result for :libs:core:compileJava +:client:rest:jar (Thread[#1324,Execution worker Thread 3,5,main]) started. +Resolve mutations for :libs:grok:jar (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:native:processResources NO-SOURCE +Skipping task ':libs:native:processResources' as it has no source files and no previous output files. +work action resolve main (project :client:rest) (Thread[#1330,Execution worker Thread 9,5,main]) started. +Resolve mutations for :libs:simdvec:processTestResources (Thread[#1330,Execution worker Thread 9,5,main]) started. + +> Task :libs:entitlement:bridge:classes UP-TO-DATE +Skipping task ':libs:entitlement:bridge:classes' as it has no actions. + +> Task :libs:plugin-analysis-api:classes UP-TO-DATE +Skipping task ':libs:plugin-analysis-api:classes' as it has no actions. + +> Task :libs:lz4:processResources NO-SOURCE +Skipping task ':libs:lz4:processResources' as it has no source files and no previous output files. 
+:libs:simdvec:processTestResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +Resolve mutations for :libs:entitlement:bridge:compileMain23Java (Thread[#1331,Execution worker Thread 10,5,main]) started. +:libs:grok:jar (Thread[#1328,Execution worker Thread 7,5,main]) started. +Resolve mutations for :libs:ssl-config:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +Resolve mutations for :libs:secure-sm:processResources (Thread[#1327,Execution worker Thread 6,5,main]) started. +:libs:logging:jar (Thread[#1392,included builds Thread 2,5,main]) started. + +> Task :libs:simdvec:processTestResources NO-SOURCE +Skipping task ':libs:simdvec:processTestResources' as it has no source files and no previous output files. +No compile result for :libs:core:compileJava + +> Task :libs:plugin-api:processResources NO-SOURCE +Skipping task ':libs:plugin-api:processResources' as it has no source files and no previous output files. +Resolve mutations for :libs:geo:jar (Thread[#1329,Execution worker Thread 8,5,main]) started. +work action resolve main (project :libs:core) (Thread[#1332,Execution worker Thread 11,5,main]) started. +work action resolve main (project :libs:core) (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:tdigest:processResources (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:secure-sm:processResources (Thread[#1327,Execution worker Thread 6,5,main]) started. +:libs:ssl-config:processResources (Thread[#1326,Execution worker Thread 5,5,main]) started. +:libs:entitlement:bridge:compileMain23Java (Thread[#1331,Execution worker Thread 10,5,main]) started. +Resolve mutations for :libs:plugin-analysis-api:jar (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :libs:lz4:compileJava (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :libs:x-content:compileJava (Thread[#1330,Execution worker Thread 9,5,main]) started. +Resolve mutations for :libs:cli:compileJava (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:native:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. +Resolve mutations for :libs:core:classes (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:tdigest:compileJava (Thread[#1332,Execution worker Thread 11,5,main]) started. +:libs:lz4:compileJava (Thread[#1325,Execution worker Thread 4,5,main]) started. + +> Task :libs:simdvec:cleanTest +Caching disabled for task ':libs:simdvec:cleanTest' because: + Build cache is disabled +Task ':libs:simdvec:cleanTest' is not up-to-date because: + Task has not declared any outputs despite executing actions. +:libs:cli:compileJava (Thread[#1323,Execution worker Thread 2,5,main]) started. + +> Task :libs:ssl-config:processResources NO-SOURCE +Skipping task ':libs:ssl-config:processResources' as it has no source files and no previous output files. +:libs:tdigest:compileJava (Thread[#1332,Execution worker Thread 11,5,main]) started. +:libs:native:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. +:libs:x-content:compileJava (Thread[#1330,Execution worker Thread 9,5,main]) started. + +> Task :libs:secure-sm:processResources NO-SOURCE +Skipping task ':libs:secure-sm:processResources' as it has no source files and no previous output files. +:libs:geo:jar (Thread[#1326,Execution worker Thread 5,5,main]) started. +:libs:core:classes (Thread[#1322,Execution worker,5,main]) started. 
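':libs:simdvec:cleanTest' is the one task in this run that genuinely executes, and the log states why it can never be up-to-date: "Task has not declared any outputs despite executing actions." Declared inputs and outputs are exactly what drive the UP-TO-DATE results seen everywhere else. A minimal sketch of that mechanism using the public Gradle Java API (an illustrative custom task invented for this note, not anything from the Elasticsearch build):

    import org.gradle.api.DefaultTask;
    import org.gradle.api.file.RegularFileProperty;
    import org.gradle.api.tasks.InputFile;
    import org.gradle.api.tasks.OutputFile;
    import org.gradle.api.tasks.TaskAction;

    import java.io.IOException;
    import java.nio.file.Files;

    // Because the input and output are declared, Gradle fingerprints both and
    // reports UP-TO-DATE on re-runs when neither has changed. A task that has
    // actions but declares no outputs (like cleanTest above) always re-executes.
    public abstract class ChecksumTask extends DefaultTask {

        @InputFile
        public abstract RegularFileProperty getSource();

        @OutputFile
        public abstract RegularFileProperty getChecksum();

        @TaskAction
        void generate() throws IOException {
            byte[] bytes = Files.readAllBytes(getSource().get().getAsFile().toPath());
            String sum = Integer.toHexString(java.util.Arrays.hashCode(bytes));
            Files.writeString(getChecksum().get().getAsFile().toPath(), sum);
        }
    }

Such a task would be wired up with tasks.register(...) in a build script; the point of the sketch is only that up-to-date checking is opt-in via declared inputs/outputs, which is why cleanTest re-runs on every invocation.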
+:libs:plugin-analysis-api:jar (Thread[#1327,Execution worker Thread 6,5,main]) started. + +> Task :libs:entitlement:bridge:compileMain23Java UP-TO-DATE +Caching disabled for task ':libs:entitlement:bridge:compileMain23Java' because: + Build cache is disabled +Skipping task ':libs:entitlement:bridge:compileMain23Java' as it is up-to-date. + +> Task :libs:lz4:compileJava UP-TO-DATE +Caching disabled for task ':libs:lz4:compileJava' because: + Build cache is disabled +Skipping task ':libs:lz4:compileJava' as it is up-to-date. +No compile result for :libs:lz4:compileJava +No compile result for :libs:lz4:compileJava +No compile result for :libs:lz4:compileJava +No compile result for :libs:entitlement:bridge:compileMain23Java + +> Task :libs:cli:compileJava UP-TO-DATE +Caching disabled for task ':libs:cli:compileJava' because: + Build cache is disabled +Skipping task ':libs:cli:compileJava' as it is up-to-date. +No compile result for :libs:cli:compileJava +No compile result for :libs:cli:compileJava +No compile result for :libs:cli:compileJava +No compile result for :libs:cli:compileJava +work action resolve main (project :libs:cli) (Thread[#1323,Execution worker Thread 2,5,main]) started. +No compile result for :libs:entitlement:bridge:compileMain23Java +No compile result for :libs:entitlement:bridge:compileMain23Java +No compile result for :libs:lz4:compileJava +work action resolve main (project :libs:lz4) (Thread[#1325,Execution worker Thread 4,5,main]) started. +No compile result for :libs:entitlement:bridge:compileMain23Java + +> Task :libs:native:compileJava UP-TO-DATE +Caching disabled for task ':libs:native:compileJava' because: + Build cache is disabled +Skipping task ':libs:native:compileJava' as it is up-to-date. +No compile result for :libs:native:compileJava +Resolve mutations for :libs:cli:classes (Thread[#1331,Execution worker Thread 10,5,main]) started. +work action resolve main (project :libs:cli) (Thread[#1323,Execution worker Thread 2,5,main]) started. +No compile result for :libs:native:compileJava +work action resolve main (project :libs:lz4) (Thread[#1325,Execution worker Thread 4,5,main]) started. + +> Task :libs:core:classes UP-TO-DATE +Skipping task ':libs:core:classes' as it has no actions. +No compile result for :libs:native:compileJava +:libs:cli:classes (Thread[#1331,Execution worker Thread 10,5,main]) started. + +> Task :libs:tdigest:compileJava UP-TO-DATE +Caching disabled for task ':libs:tdigest:compileJava' because: + Build cache is disabled +Skipping task ':libs:tdigest:compileJava' as it is up-to-date. +Resolve mutations for :libs:entitlement:bridge:main23Classes (Thread[#1323,Execution worker Thread 2,5,main]) started. +No compile result for :libs:tdigest:compileJava +:libs:entitlement:bridge:main23Classes (Thread[#1323,Execution worker Thread 2,5,main]) started. + +> Task :libs:x-content:compileJava UP-TO-DATE +Caching disabled for task ':libs:x-content:compileJava' because: + Build cache is disabled +Skipping task ':libs:x-content:compileJava' as it is up-to-date. +No compile result for :libs:native:compileJava + +> Task :libs:entitlement:bridge:main23Classes UP-TO-DATE +Skipping task ':libs:entitlement:bridge:main23Classes' as it has no actions. +Resolve mutations for :libs:core:jar (Thread[#1322,Execution worker,5,main]) started. +work action resolve main (project :libs:native) (Thread[#1323,Execution worker Thread 2,5,main]) started. +Resolve mutations for :libs:lz4:classes (Thread[#1325,Execution worker Thread 4,5,main]) started. 
+Resolve mutations for :libs:entitlement:bridge:java23Jar (Thread[#1323,Execution worker Thread 2,5,main]) started. +:libs:lz4:classes (Thread[#1325,Execution worker Thread 4,5,main]) started. +work action resolve main (project :libs:native) (Thread[#1329,Execution worker Thread 8,5,main]) started. +No compile result for :libs:x-content:compileJava +:libs:entitlement:bridge:java23Jar (Thread[#1323,Execution worker Thread 2,5,main]) started. +No compile result for :libs:tdigest:compileJava +No compile result for :libs:tdigest:compileJava +No compile result for :libs:tdigest:compileJava +No compile result for :libs:x-content:compileJava +Resolve mutations for :libs:simdvec:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. +:libs:core:jar (Thread[#1322,Execution worker,5,main]) started. + +> Task :libs:lz4:classes UP-TO-DATE +Skipping task ':libs:lz4:classes' as it has no actions. +No compile result for :libs:x-content:compileJava +work action resolve main (project :libs:tdigest) (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :libs:cli:classes UP-TO-DATE +Skipping task ':libs:cli:classes' as it has no actions. +Resolve mutations for :libs:lz4:jar (Thread[#1332,Execution worker Thread 11,5,main]) started. +No compile result for :libs:x-content:compileJava +work action resolve main (project :libs:x-content) (Thread[#1330,Execution worker Thread 9,5,main]) started. +work action resolve main (project :libs:tdigest) (Thread[#1325,Execution worker Thread 4,5,main]) started. +:libs:lz4:jar (Thread[#1332,Execution worker Thread 11,5,main]) started. +Resolve mutations for :libs:native:classes (Thread[#1325,Execution worker Thread 4,5,main]) started. +:libs:simdvec:compileJava (Thread[#1329,Execution worker Thread 8,5,main]) started. +:libs:native:classes (Thread[#1325,Execution worker Thread 4,5,main]) started. +work action resolve main (project :libs:x-content) (Thread[#1330,Execution worker Thread 9,5,main]) started. +Resolve mutations for :libs:cli:jar (Thread[#1331,Execution worker Thread 10,5,main]) started. +Resolve mutations for :libs:entitlement:compileJava (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:cli:jar (Thread[#1331,Execution worker Thread 10,5,main]) started. + +> Task :libs:native:classes UP-TO-DATE +Skipping task ':libs:native:classes' as it has no actions. +Resolve mutations for :libs:native:compileMain22Java (Thread[#1325,Execution worker Thread 4,5,main]) started. +:libs:entitlement:compileJava (Thread[#1330,Execution worker Thread 9,5,main]) started. + +> Task :libs:entitlement:bridge:java23Jar UP-TO-DATE +Caching disabled for task ':libs:entitlement:bridge:java23Jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:entitlement:bridge:java23Jar' as it is up-to-date. +work action resolve elasticsearch-entitlement-bridge-9.0.0-SNAPSHOT.jar (project :libs:entitlement:bridge) (Thread[#1323,Execution worker Thread 2,5,main]) started. +:libs:native:compileMain22Java (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :libs:plugin-api:classes (Thread[#1323,Execution worker Thread 2,5,main]) started. +:libs:plugin-api:classes (Thread[#1323,Execution worker Thread 2,5,main]) started. + +> Task :libs:plugin-api:classes UP-TO-DATE +Skipping task ':libs:plugin-api:classes' as it has no actions. +Resolve mutations for :libs:plugin-api:jar (Thread[#1323,Execution worker Thread 2,5,main]) started. +:libs:plugin-api:jar (Thread[#1323,Execution worker Thread 2,5,main]) started. 
+ +> Task :libs:simdvec:compileJava UP-TO-DATE +Caching disabled for task ':libs:simdvec:compileJava' because: + Build cache is disabled +Skipping task ':libs:simdvec:compileJava' as it is up-to-date. +No compile result for :libs:simdvec:compileJava +No compile result for :libs:simdvec:compileJava +No compile result for :libs:simdvec:compileJava +No compile result for :libs:simdvec:compileJava +Resolve mutations for :libs:simdvec:classes (Thread[#1329,Execution worker Thread 8,5,main]) started. +:libs:simdvec:classes (Thread[#1329,Execution worker Thread 8,5,main]) started. + +> Task :libs:entitlement:compileJava UP-TO-DATE +Caching disabled for task ':libs:entitlement:compileJava' because: + Build cache is disabled +Skipping task ':libs:entitlement:compileJava' as it is up-to-date. +No compile result for :libs:entitlement:compileJava + +> Task :libs:native:compileMain22Java UP-TO-DATE +Caching disabled for task ':libs:native:compileMain22Java' because: + Build cache is disabled +Skipping task ':libs:native:compileMain22Java' as it is up-to-date. +No compile result for :libs:entitlement:compileJava +No compile result for :libs:native:compileMain22Java +No compile result for :libs:native:compileMain22Java +No compile result for :libs:entitlement:compileJava +No compile result for :libs:native:compileMain22Java +No compile result for :libs:native:compileMain22Java +No compile result for :libs:entitlement:compileJava +work action resolve main (project :libs:simdvec) (Thread[#1325,Execution worker Thread 4,5,main]) started. +work action resolve main (project :libs:entitlement) (Thread[#1330,Execution worker Thread 9,5,main]) started. + +> Task :libs:simdvec:classes UP-TO-DATE +Skipping task ':libs:simdvec:classes' as it has no actions. +work action resolve main (project :libs:simdvec) (Thread[#1330,Execution worker Thread 9,5,main]) started. +work action resolve main (project :libs:entitlement) (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :libs:simdvec:compileMain21Java (Thread[#1329,Execution worker Thread 8,5,main]) started. +Resolve mutations for :libs:native:main22Classes (Thread[#1330,Execution worker Thread 9,5,main]) started. +Resolve mutations for :server:compileJava (Thread[#1325,Execution worker Thread 4,5,main]) started. +Resolve mutations for :libs:entitlement:asm-provider:compileJava (Thread[#1330,Execution worker Thread 9,5,main]) started. +:libs:simdvec:compileMain21Java (Thread[#1329,Execution worker Thread 8,5,main]) started. +:server:compileJava (Thread[#1325,Execution worker Thread 4,5,main]) started. +:libs:entitlement:asm-provider:compileJava (Thread[#1330,Execution worker Thread 9,5,main]) started. + +> Task :client:rest:jar UP-TO-DATE +Caching disabled for task ':client:rest:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':client:rest:jar' as it is up-to-date. +:libs:native:main22Classes (Thread[#1324,Execution worker Thread 3,5,main]) started. + +> Task :libs:native:main22Classes UP-TO-DATE +Skipping task ':libs:native:main22Classes' as it has no actions. +Resolve mutations for :libs:native:jar (Thread[#1324,Execution worker Thread 3,5,main]) started. +:libs:native:jar (Thread[#1324,Execution worker Thread 3,5,main]) started. + +> Task :libs:simdvec:compileMain21Java UP-TO-DATE +Caching disabled for task ':libs:simdvec:compileMain21Java' because: + Build cache is disabled +Skipping task ':libs:simdvec:compileMain21Java' as it is up-to-date. 
+No compile result for :libs:simdvec:compileMain21Java
+
+> Task :libs:entitlement:asm-provider:compileJava UP-TO-DATE
+Caching disabled for task ':libs:entitlement:asm-provider:compileJava' because:
+  Build cache is disabled
+Skipping task ':libs:entitlement:asm-provider:compileJava' as it is up-to-date.
+No compile result for :libs:entitlement:asm-provider:compileJava
+Resolve mutations for :libs:simdvec:main21Classes (Thread[#1329,Execution worker Thread 8,5,main]) started.
+:libs:simdvec:main21Classes (Thread[#1329,Execution worker Thread 8,5,main]) started.
+Resolve mutations for :libs:entitlement:asm-provider:classes (Thread[#1330,Execution worker Thread 9,5,main]) started.
+:libs:entitlement:asm-provider:classes (Thread[#1330,Execution worker Thread 9,5,main]) started.
+
+> Task :libs:simdvec:main21Classes UP-TO-DATE
+Skipping task ':libs:simdvec:main21Classes' as it has no actions.
+
+> Task :libs:entitlement:asm-provider:classes UP-TO-DATE
+Skipping task ':libs:entitlement:asm-provider:classes' as it has no actions.
+Resolve mutations for :libs:simdvec:compileMain22Java (Thread[#1329,Execution worker Thread 8,5,main]) started.
+Resolve mutations for :libs:entitlement:asm-provider:jar (Thread[#1330,Execution worker Thread 9,5,main]) started.
+:libs:entitlement:asm-provider:jar (Thread[#1330,Execution worker Thread 9,5,main]) started.
+:libs:simdvec:compileMain22Java (Thread[#1329,Execution worker Thread 8,5,main]) started.
+
+> Task :libs:simdvec:compileMain22Java UP-TO-DATE
+Caching disabled for task ':libs:simdvec:compileMain22Java' because:
+  Build cache is disabled
+Skipping task ':libs:simdvec:compileMain22Java' as it is up-to-date.
+No compile result for :libs:simdvec:compileMain22Java
+Resolve mutations for :libs:simdvec:main22Classes (Thread[#1329,Execution worker Thread 8,5,main]) started.
+:libs:simdvec:main22Classes (Thread[#1329,Execution worker Thread 8,5,main]) started.
+
+> Task :libs:simdvec:main22Classes UP-TO-DATE
+Skipping task ':libs:simdvec:main22Classes' as it has no actions.
+Resolve mutations for :libs:simdvec:jar (Thread[#1329,Execution worker Thread 8,5,main]) started.
+:libs:simdvec:jar (Thread[#1329,Execution worker Thread 8,5,main]) started.
+
+> Task :libs:logging:jar UP-TO-DATE
+Caching disabled for task ':libs:logging:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:logging:jar' as it is up-to-date.
+Resolve mutations for :libs:secure-sm:classes (Thread[#1392,included builds Thread 2,5,main]) started.
+:libs:secure-sm:classes (Thread[#1392,included builds Thread 2,5,main]) started.
+
+> Task :libs:secure-sm:classes UP-TO-DATE
+Skipping task ':libs:secure-sm:classes' as it has no actions.
+Resolve mutations for :libs:secure-sm:jar (Thread[#1392,included builds Thread 2,5,main]) started.
+:libs:secure-sm:jar (Thread[#1392,included builds Thread 2,5,main]) started.
+
+> Task :server:compileJava UP-TO-DATE
+Caching disabled for task ':server:compileJava' because:
+  Build cache is disabled
+Skipping task ':server:compileJava' as it is up-to-date.
+No compile result for :server:compileJava
+Resolve mutations for :server:classes (Thread[#1325,Execution worker Thread 4,5,main]) started.
+:server:classes (Thread[#1325,Execution worker Thread 4,5,main]) started.
+
+> Task :server:classes UP-TO-DATE
+Skipping task ':server:classes' as it has no actions.
+
+> Task :libs:grok:jar UP-TO-DATE
+Caching disabled for task ':libs:grok:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:grok:jar' as it is up-to-date.
+Resolve mutations for :server:jar (Thread[#1325,Execution worker Thread 4,5,main]) started.
+Resolve mutations for :libs:ssl-config:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:server:jar (Thread[#1325,Execution worker Thread 4,5,main]) started.
+:libs:ssl-config:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :libs:ssl-config:compileJava UP-TO-DATE
+Caching disabled for task ':libs:ssl-config:compileJava' because:
+  Build cache is disabled
+Skipping task ':libs:ssl-config:compileJava' as it is up-to-date.
+No compile result for :libs:ssl-config:compileJava
+work action resolve main (project :server) (Thread[#1328,Execution worker Thread 7,5,main]) started.
+work action resolve main (project :libs:ssl-config) (Thread[#1328,Execution worker Thread 7,5,main]) started.
+Resolve mutations for :modules:transport-netty4:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:modules:transport-netty4:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :libs:geo:jar UP-TO-DATE
+Caching disabled for task ':libs:geo:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:geo:jar' as it is up-to-date.
+Resolve mutations for :libs:ssl-config:classes (Thread[#1326,Execution worker Thread 5,5,main]) started.
+:libs:ssl-config:classes (Thread[#1326,Execution worker Thread 5,5,main]) started.
+
+> Task :libs:ssl-config:classes UP-TO-DATE
+Skipping task ':libs:ssl-config:classes' as it has no actions.
+
+> Task :modules:transport-netty4:compileJava UP-TO-DATE
+Caching disabled for task ':modules:transport-netty4:compileJava' because:
+  Build cache is disabled
+Skipping task ':modules:transport-netty4:compileJava' as it is up-to-date.
+No compile result for :modules:transport-netty4:compileJava
+Resolve mutations for :libs:ssl-config:jar (Thread[#1326,Execution worker Thread 5,5,main]) started.
+:libs:ssl-config:jar (Thread[#1326,Execution worker Thread 5,5,main]) started.
+work action resolve main (project :modules:transport-netty4) (Thread[#1328,Execution worker Thread 7,5,main]) started.
+Resolve mutations for :test:framework:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:test:framework:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :test:framework:compileJava UP-TO-DATE
+Caching disabled for task ':test:framework:compileJava' because:
+  Build cache is disabled
+Skipping task ':test:framework:compileJava' as it is up-to-date.
+No compile result for :test:framework:compileJava
+work action resolve main (project :test:framework) (Thread[#1328,Execution worker Thread 7,5,main]) started.
+Resolve mutations for :libs:simdvec:compileTestJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:libs:simdvec:compileTestJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :libs:plugin-analysis-api:jar UP-TO-DATE
+Caching disabled for task ':libs:plugin-analysis-api:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:plugin-analysis-api:jar' as it is up-to-date.
+:libs:tdigest:processResources (Thread[#1327,Execution worker Thread 6,5,main]) started.
+
+> Task :libs:tdigest:processResources NO-SOURCE
+Skipping task ':libs:tdigest:processResources' as it has no source files and no previous output files.
+Resolve mutations for :libs:tdigest:classes (Thread[#1327,Execution worker Thread 6,5,main]) started.
+:libs:tdigest:classes (Thread[#1327,Execution worker Thread 6,5,main]) started.
+
+> Task :libs:tdigest:classes UP-TO-DATE
+Skipping task ':libs:tdigest:classes' as it has no actions.
+Resolve mutations for :libs:tdigest:jar (Thread[#1327,Execution worker Thread 6,5,main]) started.
+:libs:tdigest:jar (Thread[#1327,Execution worker Thread 6,5,main]) started.
+
+> Task :libs:simdvec:compileTestJava UP-TO-DATE
+Caching disabled for task ':libs:simdvec:compileTestJava' because:
+  Build cache is disabled
+Skipping task ':libs:simdvec:compileTestJava' as it is up-to-date.
+No compile result for :libs:simdvec:compileTestJava
+Resolve mutations for :libs:simdvec:testClasses (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:libs:simdvec:testClasses (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :libs:simdvec:testClasses UP-TO-DATE
+Skipping task ':libs:simdvec:testClasses' as it has no actions.
+Resolve mutations for :libs:x-content:impl:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:libs:x-content:impl:compileJava (Thread[#1328,Execution worker Thread 7,5,main]) started.
+
+> Task :libs:x-content:impl:compileJava UP-TO-DATE
+Caching disabled for task ':libs:x-content:impl:compileJava' because:
+  Build cache is disabled
+Skipping task ':libs:x-content:impl:compileJava' as it is up-to-date.
+No compile result for :libs:x-content:impl:compileJava
+Resolve mutations for :libs:x-content:impl:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:libs:x-content:impl:processResources (Thread[#1328,Execution worker Thread 7,5,main]) started.
+ +> Task :libs:x-content:impl:processResources UP-TO-DATE +Caching disabled for task ':libs:x-content:impl:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:x-content:impl:processResources' as it is up-to-date. +Resolve mutations for :libs:x-content:impl:classes (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:x-content:impl:classes (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:x-content:impl:classes UP-TO-DATE +Skipping task ':libs:x-content:impl:classes' as it has no actions. +Resolve mutations for :libs:x-content:impl:jar (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:x-content:impl:jar (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:lz4:jar UP-TO-DATE +Caching disabled for task ':libs:lz4:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:lz4:jar' as it is up-to-date. +Resolve mutations for :libs:x-content:processResources (Thread[#1332,Execution worker Thread 11,5,main]) started. +:libs:x-content:processResources (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :libs:x-content:processResources NO-SOURCE +Skipping task ':libs:x-content:processResources' as it has no source files and no previous output files. +Resolve mutations for :modules:transport-netty4:processResources (Thread[#1332,Execution worker Thread 11,5,main]) started. +:modules:transport-netty4:processResources (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :modules:transport-netty4:processResources NO-SOURCE +Skipping task ':modules:transport-netty4:processResources' as it has no source files and no previous output files. +Resolve mutations for :modules:transport-netty4:classes (Thread[#1332,Execution worker Thread 11,5,main]) started. +:modules:transport-netty4:classes (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :modules:transport-netty4:classes UP-TO-DATE +Skipping task ':modules:transport-netty4:classes' as it has no actions. +Resolve mutations for :modules:transport-netty4:jar (Thread[#1332,Execution worker Thread 11,5,main]) started. +:modules:transport-netty4:jar (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :libs:core:jar UP-TO-DATE +Caching disabled for task ':libs:core:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:core:jar' as it is up-to-date. +Resolve mutations for :test:framework:processResources (Thread[#1322,Execution worker,5,main]) started. +:test:framework:processResources (Thread[#1322,Execution worker,5,main]) started. + +> Task :test:framework:processResources UP-TO-DATE +Caching disabled for task ':test:framework:processResources' because: + Build cache is disabled + Not worth caching +Skipping task ':test:framework:processResources' as it is up-to-date. +Resolve mutations for :test:framework:classes (Thread[#1322,Execution worker,5,main]) started. +:test:framework:classes (Thread[#1322,Execution worker,5,main]) started. + +> Task :test:framework:classes UP-TO-DATE +Skipping task ':test:framework:classes' as it has no actions. +Resolve mutations for :test:framework:jar (Thread[#1322,Execution worker,5,main]) started. +:test:framework:jar (Thread[#1322,Execution worker,5,main]) started. + +> Task :libs:cli:jar UP-TO-DATE +Caching disabled for task ':libs:cli:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:cli:jar' as it is up-to-date. 
+Resolve mutations for :test:immutable-collections-patch:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:test:immutable-collections-patch:compileJava (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :test:immutable-collections-patch:compileJava UP-TO-DATE
+Caching disabled for task ':test:immutable-collections-patch:compileJava' because:
+  Build cache is disabled
+Skipping task ':test:immutable-collections-patch:compileJava' as it is up-to-date.
+No compile result for :test:immutable-collections-patch:compileJava
+Resolve mutations for :test:immutable-collections-patch:processResources (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:test:immutable-collections-patch:processResources (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :test:immutable-collections-patch:processResources NO-SOURCE
+Skipping task ':test:immutable-collections-patch:processResources' as it has no source files and no previous output files.
+Resolve mutations for :test:immutable-collections-patch:classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:test:immutable-collections-patch:classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :test:immutable-collections-patch:classes UP-TO-DATE
+Skipping task ':test:immutable-collections-patch:classes' as it has no actions.
+Resolve mutations for :test:immutable-collections-patch:generatePatch (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:test:immutable-collections-patch:generatePatch (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :test:immutable-collections-patch:generatePatch UP-TO-DATE
+Caching disabled for task ':test:immutable-collections-patch:generatePatch' because:
+  Build cache is disabled
+  Gradle would require more information to cache this task
+Skipping task ':test:immutable-collections-patch:generatePatch' as it is up-to-date.
+Resolve mutations for :libs:native:native-libraries:extractLibs (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:libs:native:native-libraries:extractLibs (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :libs:plugin-api:jar UP-TO-DATE
+Caching disabled for task ':libs:plugin-api:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:plugin-api:jar' as it is up-to-date.
+work action resolve jdk-patches (project :test:immutable-collections-patch) (Thread[#1323,Execution worker Thread 2,5,main]) started.
+work action resolve elasticsearch-grok-9.0.0-SNAPSHOT.jar (project :libs:grok) (Thread[#1323,Execution worker Thread 2,5,main]) started.
+work action resolve elasticsearch-plugin-api-9.0.0-SNAPSHOT.jar (project :libs:plugin-api) (Thread[#1323,Execution worker Thread 2,5,main]) started.
+work action resolve elasticsearch-geo-9.0.0-SNAPSHOT.jar (project :libs:geo) (Thread[#1323,Execution worker Thread 2,5,main]) started.
+work action resolve elasticsearch-rest-client-9.0.0-SNAPSHOT.jar (project :client:rest) (Thread[#1323,Execution worker Thread 2,5,main]) started.
+work action resolve elasticsearch-core-9.0.0-SNAPSHOT.jar (project :libs:core) (Thread[#1323,Execution worker Thread 2,5,main]) started.
+work action resolve elasticsearch-lz4-9.0.0-SNAPSHOT.jar (project :libs:lz4) (Thread[#1323,Execution worker Thread 2,5,main]) started. +work action resolve elasticsearch-cli-9.0.0-SNAPSHOT.jar (project :libs:cli) (Thread[#1323,Execution worker Thread 2,5,main]) started. +work action resolve elasticsearch-plugin-analysis-api-9.0.0-SNAPSHOT.jar (project :libs:plugin-analysis-api) (Thread[#1323,Execution worker Thread 2,5,main]) started. +work action resolve elasticsearch-logging-9.0.0-SNAPSHOT.jar (project :libs:logging) (Thread[#1323,Execution worker Thread 2,5,main]) started. + +> Task :libs:native:native-libraries:extractLibs UP-TO-DATE +Transforming zstd-1.5.5-darwin-aarch64.jar (org.elasticsearch:zstd:1.5.5) with UnzipTransform +Transforming zstd-1.5.5-darwin-x86-64.jar (org.elasticsearch:zstd:1.5.5) with UnzipTransform +Transforming zstd-1.5.5-linux-aarch64.jar (org.elasticsearch:zstd:1.5.5) with UnzipTransform +Transforming zstd-1.5.5-linux-x86-64.jar (org.elasticsearch:zstd:1.5.5) with UnzipTransform +Transforming zstd-1.5.5-windows-x86-64.jar (org.elasticsearch:zstd:1.5.5) with UnzipTransform +Transforming vec-1.0.10.zip (org.elasticsearch:vec:1.0.10) with UnzipTransform +Caching disabled for task ':libs:native:native-libraries:extractLibs' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:native:native-libraries:extractLibs' as it is up-to-date. +work action resolve platform (project :libs:native:native-libraries) (Thread[#1331,Execution worker Thread 10,5,main]) started. + +> Task :libs:native:jar UP-TO-DATE +Caching disabled for task ':libs:native:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:native:jar' as it is up-to-date. +work action resolve elasticsearch-native-9.0.0-SNAPSHOT.jar (project :libs:native) (Thread[#1324,Execution worker Thread 3,5,main]) started. + +> Task :libs:entitlement:asm-provider:jar UP-TO-DATE +Caching disabled for task ':libs:entitlement:asm-provider:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:entitlement:asm-provider:jar' as it is up-to-date. +work action resolve elasticsearch-entitlement-asm-provider-9.0.0-SNAPSHOT.jar (project :libs:entitlement:asm-provider) (Thread[#1330,Execution worker Thread 9,5,main]) started. +UnzipTransform (Thread[#1331,Execution worker Thread 10,5,main]) started. + +> Transform elasticsearch-entitlement-asm-provider-9.0.0-SNAPSHOT.jar (project :libs:entitlement:asm-provider) with UnzipTransform +Transforming elasticsearch-entitlement-asm-provider-9.0.0-SNAPSHOT.jar (project :libs:entitlement:asm-provider) with UnzipTransform +Resolve mutations for :libs:entitlement:generateAsm-providerProviderManifest (Thread[#1331,Execution worker Thread 10,5,main]) started. +:libs:entitlement:generateAsm-providerProviderManifest (Thread[#1331,Execution worker Thread 10,5,main]) started. + +> Task :libs:entitlement:generateAsm-providerProviderManifest UP-TO-DATE +Transforming asm-9.7.1.jar (org.ow2.asm:asm:9.7.1) with UnzipTransform +Caching disabled for task ':libs:entitlement:generateAsm-providerProviderManifest' because: + Build cache is disabled + Caching has not been enabled for the task +Skipping task ':libs:entitlement:generateAsm-providerProviderManifest' as it is up-to-date. +Resolve mutations for :libs:entitlement:generateAsm-providerProviderImpl (Thread[#1331,Execution worker Thread 10,5,main]) started. +:libs:entitlement:generateAsm-providerProviderImpl (Thread[#1331,Execution worker Thread 10,5,main]) started. 
+
+> Task :libs:entitlement:generateAsm-providerProviderImpl UP-TO-DATE
+Caching disabled for task ':libs:entitlement:generateAsm-providerProviderImpl' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:entitlement:generateAsm-providerProviderImpl' as it is up-to-date.
+Resolve mutations for :libs:entitlement:classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:libs:entitlement:classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :libs:entitlement:classes UP-TO-DATE
+Skipping task ':libs:entitlement:classes' as it has no actions.
+Resolve mutations for :libs:entitlement:compileMain23Java (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:libs:entitlement:compileMain23Java (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :libs:entitlement:compileMain23Java UP-TO-DATE
+Caching disabled for task ':libs:entitlement:compileMain23Java' because:
+  Build cache is disabled
+Skipping task ':libs:entitlement:compileMain23Java' as it is up-to-date.
+No compile result for :libs:entitlement:compileMain23Java
+
+> Task :libs:simdvec:jar UP-TO-DATE
+Caching disabled for task ':libs:simdvec:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:simdvec:jar' as it is up-to-date.
+Resolve mutations for :libs:entitlement:main23Classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+work action resolve elasticsearch-simdvec-9.0.0-SNAPSHOT.jar (project :libs:simdvec) (Thread[#1329,Execution worker Thread 8,5,main]) started.
+:libs:entitlement:main23Classes (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :libs:entitlement:main23Classes UP-TO-DATE
+Skipping task ':libs:entitlement:main23Classes' as it has no actions.
+Resolve mutations for :libs:entitlement:jar (Thread[#1331,Execution worker Thread 10,5,main]) started.
+:libs:entitlement:jar (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :libs:secure-sm:jar UP-TO-DATE
+Caching disabled for task ':libs:secure-sm:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:secure-sm:jar' as it is up-to-date.
+work action resolve elasticsearch-secure-sm-9.0.0-SNAPSHOT.jar (project :libs:secure-sm) (Thread[#1392,included builds Thread 2,5,main]) started.
+
+> Task :server:jar UP-TO-DATE
+Caching disabled for task ':server:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':server:jar' as it is up-to-date.
+work action resolve elasticsearch-9.0.0-SNAPSHOT.jar (project :server) (Thread[#1325,Execution worker Thread 4,5,main]) started.
+
+> Task :libs:ssl-config:jar UP-TO-DATE
+Caching disabled for task ':libs:ssl-config:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:ssl-config:jar' as it is up-to-date.
+work action resolve elasticsearch-ssl-config-9.0.0-SNAPSHOT.jar (project :libs:ssl-config) (Thread[#1326,Execution worker Thread 5,5,main]) started.
+
+> Task :libs:tdigest:jar UP-TO-DATE
+Caching disabled for task ':libs:tdigest:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:tdigest:jar' as it is up-to-date.
+work action resolve elasticsearch-tdigest-9.0.0-SNAPSHOT.jar (project :libs:tdigest) (Thread[#1327,Execution worker Thread 6,5,main]) started.
+ +> Task :libs:x-content:impl:jar UP-TO-DATE +Caching disabled for task ':libs:x-content:impl:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:x-content:impl:jar' as it is up-to-date. +work action resolve x-content-impl-9.0.0-SNAPSHOT.jar (project :libs:x-content:impl) (Thread[#1328,Execution worker Thread 7,5,main]) started. +UnzipTransform (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Transform x-content-impl-9.0.0-SNAPSHOT.jar (project :libs:x-content:impl) with UnzipTransform +Transforming x-content-impl-9.0.0-SNAPSHOT.jar (project :libs:x-content:impl) with UnzipTransform +Resolve mutations for :libs:x-content:generateImplProviderManifest (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:x-content:generateImplProviderManifest (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:x-content:generateImplProviderManifest UP-TO-DATE +Transforming jackson-core-2.17.2.jar (com.fasterxml.jackson.core:jackson-core:2.17.2) with UnzipTransform +Transforming jackson-dataformat-smile-2.17.2.jar (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.17.2) with UnzipTransform +Transforming jackson-dataformat-yaml-2.17.2.jar (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.17.2) with UnzipTransform +Transforming jackson-dataformat-cbor-2.17.2.jar (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.17.2) with UnzipTransform +Transforming snakeyaml-2.0.jar (org.yaml:snakeyaml:2.0) with UnzipTransform +Caching disabled for task ':libs:x-content:generateImplProviderManifest' because: + Build cache is disabled + Caching has not been enabled for the task +Skipping task ':libs:x-content:generateImplProviderManifest' as it is up-to-date. +Resolve mutations for :libs:x-content:generateImplProviderImpl (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:x-content:generateImplProviderImpl (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:x-content:generateImplProviderImpl UP-TO-DATE +Caching disabled for task ':libs:x-content:generateImplProviderImpl' because: + Build cache is disabled + Not worth caching +Skipping task ':libs:x-content:generateImplProviderImpl' as it is up-to-date. +Resolve mutations for :libs:x-content:classes (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:x-content:classes (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :libs:x-content:classes UP-TO-DATE +Skipping task ':libs:x-content:classes' as it has no actions. +Resolve mutations for :libs:x-content:jar (Thread[#1328,Execution worker Thread 7,5,main]) started. +:libs:x-content:jar (Thread[#1328,Execution worker Thread 7,5,main]) started. + +> Task :modules:transport-netty4:jar UP-TO-DATE +Caching disabled for task ':modules:transport-netty4:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':modules:transport-netty4:jar' as it is up-to-date. +work action resolve transport-netty4-9.0.0-SNAPSHOT.jar (project :modules:transport-netty4) (Thread[#1332,Execution worker Thread 11,5,main]) started. + +> Task :test:framework:jar UP-TO-DATE +Caching disabled for task ':test:framework:jar' because: + Build cache is disabled + Not worth caching +Skipping task ':test:framework:jar' as it is up-to-date. +work action resolve framework-9.0.0-SNAPSHOT.jar (project :test:framework) (Thread[#1322,Execution worker,5,main]) started. 
+
+> Task :libs:entitlement:jar UP-TO-DATE
+Caching disabled for task ':libs:entitlement:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:entitlement:jar' as it is up-to-date.
+work action resolve elasticsearch-entitlement-9.0.0-SNAPSHOT.jar (project :libs:entitlement) (Thread[#1331,Execution worker Thread 10,5,main]) started.
+
+> Task :libs:x-content:jar UP-TO-DATE
+Caching disabled for task ':libs:x-content:jar' because:
+  Build cache is disabled
+  Not worth caching
+Skipping task ':libs:x-content:jar' as it is up-to-date.
+work action resolve elasticsearch-x-content-9.0.0-SNAPSHOT.jar (project :libs:x-content) (Thread[#1328,Execution worker Thread 7,5,main]) started.
+Resolve mutations for :libs:simdvec:test (Thread[#1328,Execution worker Thread 7,5,main]) started.
+:libs:simdvec:test (Thread[#1328,Execution worker Thread 7,5,main]) started.
+Gradle Test Executor 18 started executing tests.
+Gradle Test Executor 17 started executing tests.
+Gradle Test Executor 19 started executing tests.
+Gradle Test Executor 19 finished executing tests.
+WARNING: A terminally deprecated method in java.lang.System has been called
+WARNING: System::setSecurityManager has been called by org.gradle.api.internal.tasks.testing.worker.TestWorker (file:/Users/rene/.gradle/wrapper/dists/gradle-8.11.1-all/2qik7nd48slq1ooc2496ixf4i/gradle-8.11.1/lib/plugins/gradle-testing-base-infrastructure-8.11.1.jar)
+WARNING: Please consider reporting this to the maintainers of org.gradle.api.internal.tasks.testing.worker.TestWorker
+WARNING: System::setSecurityManager will be removed in a future release
+Gradle Test Executor 18 finished executing tests.
+WARNING: A terminally deprecated method in java.lang.System has been called
+WARNING: System::setSecurityManager has been called by org.gradle.api.internal.tasks.testing.worker.TestWorker (file:/Users/rene/.gradle/wrapper/dists/gradle-8.11.1-all/2qik7nd48slq1ooc2496ixf4i/gradle-8.11.1/lib/plugins/gradle-testing-base-infrastructure-8.11.1.jar)
+WARNING: Please consider reporting this to the maintainers of org.gradle.api.internal.tasks.testing.worker.TestWorker
+WARNING: System::setSecurityManager will be removed in a future release
+Gradle Test Executor 17 finished executing tests.
+
+> Task :libs:simdvec:test
+Caching disabled for task ':libs:simdvec:test' because:
+  Build cache is disabled
+Task ':libs:simdvec:test' is not up-to-date because:
+  Output property 'binaryResultsDirectory' file /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/test-results/test/binary has been removed.
+  Output property 'binaryResultsDirectory' file /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/test-results/test/binary/output.bin has been removed.
+  Output property 'binaryResultsDirectory' file /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/test-results/test/binary/output.bin.idx has been removed.
+  and more...
+Starting process 'Gradle Test Executor 17'.
Working directory: /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/testrun/test Command: /Users/rene/.gradle/jdks/oracle_corporation-23-aarch64-os_x.2/jdk-23.jdk/Contents/Home/bin/java -Des.nativelibs.path=/Users/rene/dev/elastic/elasticsearch/libs/native/libraries/build/platform/darwin-aarch64 -Des.scripting.update.ctx_in_params=false -Des.search.rewrite_sort=true -Des.transport.cname_in_publish_address=true -Dgradle.dist.lib=/Users/rene/.gradle/wrapper/dists/gradle-8.11.1-all/2qik7nd48slq1ooc2496ixf4i/gradle-8.11.1/lib -Dgradle.user.home=/Users/rene/.gradle -Dgradle.worker.jar=/Users/rene/.gradle/caches/8.11.1/workerMain/gradle-worker.jar -Dio.netty.noKeySetOptimization=true -Dio.netty.noUnsafe=true -Dio.netty.recycler.maxCapacityPerThread=0 -Djava.awt.headless=true -Djava.locale.providers=CLDR -Djava.security.manager=allow -Djna.nosys=true -Dorg.gradle.internal.worker.tmpdir=/Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/tmp/test/work -Dtests.artifact=simdvec -Dtests.gradle=true -Dtests.hackImmutableCollections=true -Dtests.logger.level=WARN -Dtests.security.manager=true -Dtests.seed=7B469FBE8B6D0C65 -Dtests.task=:libs:simdvec:test -Dtests.testfeatures.enabled=true -Dworkspace.dir=/Users/rene/dev/elastic/elasticsearch --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.security.cert=ALL-UNNAMED --add-opens=java.base/java.nio.channels=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/javax.net.ssl=ALL-UNNAMED --add-opens=java.base/java.nio.file=ALL-UNNAMED --add-opens=java.base/java.time=ALL-UNNAMED --add-opens=java.management/java.lang.management=ALL-UNNAMED -XX:+HeapDumpOnOutOfMemoryError -esa --add-modules=jdk.incubator.vector -XX:HeapDumpPath=/Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/heapdump --patch-module=java.base=/Users/rene/dev/elastic/elasticsearch/test/immutable-collections-patch/build/jdk-patches/java.base --add-opens=java.base/java.util=ALL-UNNAMED @/Users/rene/.gradle/.tmp/gradle-worker-classpath15834082604113316666txt -Xms512m -Xmx512m -Dfile.encoding=UTF-8 -Djava.io.tmpdir=/Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/testrun/test/temp -Duser.country=DE -Duser.language=en -Duser.variant -ea worker.org.gradle.process.internal.worker.GradleWorkerMain 'Gradle Test Executor 17'
+Starting process 'Gradle Test Executor 19'. Working directory: /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/testrun/test Command: [identical to Gradle Test Executor 17 except for the worker classpath file @/Users/rene/.gradle/.tmp/gradle-worker-classpath8760626708071365177txt and the executor name] 'Gradle Test Executor 19'
+Starting process 'Gradle Test Executor 18'. Working directory: /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/testrun/test Command: [identical to Gradle Test Executor 17 except for the worker classpath file @/Users/rene/.gradle/.tmp/gradle-worker-classpath6774643281130261042txt and the executor name] 'Gradle Test Executor 18'
+Successfully started process 'Gradle Test Executor 17'
+Successfully started process 'Gradle Test Executor 18'
+Successfully started process 'Gradle Test Executor 19'
+WARNING: Using incubator modules: jdk.incubator.vector
+WARNING: Using incubator modules: jdk.incubator.vector
+WARNING: Using incubator modules: jdk.incubator.vector
+
+VectorScorerFactoryTests STANDARD_ERROR
+    WARNING: A restricted method in java.lang.foreign.Linker has been called
+    WARNING: java.lang.foreign.Linker::downcallHandle has been called by org.elasticsearch.nativeaccess.jdk.LinkerHelper in an unnamed module
+    WARNING: Use --enable-native-access=ALL-UNNAMED to avoid a warning for callers in this module
+    WARNING: Restricted methods will be blocked in a future release unless native access is enabled
+
+
+ESVectorUtilTests STANDARD_ERROR
+    WARNING: A restricted method in java.lang.foreign.Linker has been called
+    WARNING: java.lang.foreign.Linker::downcallHandle has been called by org.elasticsearch.nativeaccess.jdk.LinkerHelper in an unnamed module
+    WARNING: Use --enable-native-access=ALL-UNNAMED to avoid a warning for callers in this module
+    WARNING: Restricted methods will be blocked in a
future release unless native access is enabled + + +VectorScorerFactoryTests STANDARD_OUT + [2024-12-23T09:52:40,834][INFO ][o.e.n.j.JdkVectorLibrary ] [[SUITE-VectorScorerFactoryTests-seed#[7B469FBE8B6D0C65]]] vec_caps=1 + [2024-12-23T09:52:40,837][INFO ][o.e.n.NativeAccess ] [[SUITE-VectorScorerFactoryTests-seed#[7B469FBE8B6D0C65]]] Using native vector library; to disable start with -Dorg.elasticsearch.nativeaccess.enableVectorLibrary=false + +ESVectorUtilTests STANDARD_OUT + [2024-12-23T09:52:40,837][INFO ][o.e.n.j.JdkVectorLibrary ] [[SUITE-ESVectorUtilTests-seed#[7B469FBE8B6D0C65]]] vec_caps=1 + [2024-12-23T09:52:40,840][INFO ][o.e.n.NativeAccess ] [[SUITE-ESVectorUtilTests-seed#[7B469FBE8B6D0C65]]] Using native vector library; to disable start with -Dorg.elasticsearch.nativeaccess.enableVectorLibrary=false + +VectorScorerFactoryTests STANDARD_OUT + [2024-12-23T09:52:40,842][INFO ][o.e.n.NativeAccess ] [[SUITE-VectorScorerFactoryTests-seed#[7B469FBE8B6D0C65]]] Using [jdk] native provider and native methods for [MacOS] + +ESVectorUtilTests STANDARD_OUT + [2024-12-23T09:52:40,844][INFO ][o.e.n.NativeAccess ] [[SUITE-ESVectorUtilTests-seed#[7B469FBE8B6D0C65]]] Using [jdk] native provider and native methods for [MacOS] + [2024-12-23T09:52:41,180][INFO ][o.e.s.i.v.ESVectorizationProvider] [[SUITE-ESVectorUtilTests-seed#[7B469FBE8B6D0C65]]] Java vector incubator API enabled; uses preferredBitSize=128 + +ESVectorUtilTests STANDARD_ERROR + Dec 23, 2024 9:52:41 AM org.apache.lucene.internal.vectorization.PanamaVectorizationProvider + INFO: Java vector incubator API enabled; uses preferredBitSize=128 + +VectorScorerFactoryTests STANDARD_ERROR + Dec 23, 2024 9:52:41 AM org.apache.lucene.internal.vectorization.PanamaVectorizationProvider + INFO: Java vector incubator API enabled; uses preferredBitSize=128 + +VectorScorerFactoryTests > testRandomScorerMax STANDARD_OUT + [2024-12-23T04:52:41,272][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] JDK=23, os=Mac OS X, arch=aarch64 + +ESVectorUtilTests > testBitAndCount STANDARD_OUT + [2024-12-23T20:52:41,275][INFO ][o.e.s.ESVectorUtilTests ] [testBitAndCount] before test + [2024-12-23T20:52:41,282][INFO ][o.e.s.i.v.ESVectorizationProvider] [testBitAndCount] Java vector incubator API enabled; uses preferredBitSize=128 + +VectorScorerFactoryTests > testRandomScorerMax STANDARD_OUT + [2024-12-23T04:52:41,282][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] before test + +ESVectorUtilTests > testBitAndCount STANDARD_OUT + [2024-12-23T20:52:41,293][INFO ][o.e.s.ESVectorUtilTests ] [testBitAndCount] after test + +VectorScorerFactoryTests > testRandomScorerMax STANDARD_OUT + [2024-12-23T04:52:41,295][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] Testing testRandom-COSINE-2168.vex + +ESVectorUtilTests > testIpByteBin STANDARD_OUT + [2024-12-23T20:52:41,298][INFO ][o.e.s.ESVectorUtilTests ] [testIpByteBin] before test + +VectorScorerFactoryTests > testRandomScorerMax STANDARD_OUT + [2024-12-23T04:52:41,494][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] Testing testRandom-DOT_PRODUCT-1685.vex + +ESVectorUtilTests > testIpByteBin STANDARD_OUT + [2024-12-23T20:52:41,494][INFO ][o.e.s.ESVectorUtilTests ] [testIpByteBin] after test + +ESVectorUtilTests > testIpByteBinInvariants STANDARD_OUT + [2024-12-23T20:52:41,498][INFO ][o.e.s.ESVectorUtilTests ] [testIpByteBinInvariants] before test + [2024-12-23T20:52:41,499][INFO ][o.e.s.ESVectorUtilTests ] [testIpByteBinInvariants] after test + +ESVectorUtilTests > 
testBasicIpByteBin STANDARD_OUT
+ [2024-12-23T20:52:41,502][INFO ][o.e.s.ESVectorUtilTests ] [testBasicIpByteBin] before test
+ [2024-12-23T20:52:41,502][INFO ][o.e.s.ESVectorUtilTests ] [testBasicIpByteBin] after test
+
+ESVectorUtilTests > testIpByteBit STANDARD_OUT
+ [2024-12-23T20:52:41,505][INFO ][o.e.s.ESVectorUtilTests ] [testIpByteBit] before test
+ [2024-12-23T20:52:41,505][INFO ][o.e.s.ESVectorUtilTests ] [testIpByteBit] after test
+
+ESVectorUtilTests > testIpFloatBit STANDARD_OUT
+ [2024-12-23T20:52:41,507][INFO ][o.e.s.ESVectorUtilTests ] [testIpFloatBit] before test
+ [2024-12-23T20:52:41,508][INFO ][o.e.s.ESVectorUtilTests ] [testIpFloatBit] after test
+
+VectorScorerFactoryTests > testRandomScorerMax STANDARD_OUT
+ [2024-12-23T04:52:41,517][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] Testing testRandom-EUCLIDEAN-723.vex
+ [2024-12-23T04:52:41,592][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] Testing testRandom-MAXIMUM_INNER_PRODUCT-2519.vex
+ [2024-12-23T04:52:41,628][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerMax] after test
+
+VectorScorerFactoryTests > testSimpleMaxChunkSizeSmall STANDARD_OUT
+ [2024-12-23T04:52:41,629][INFO ][o.e.s.VectorScorerFactoryTests] [testSimpleMaxChunkSizeSmall] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,632][INFO ][o.e.s.VectorScorerFactoryTests] [testSimpleMaxChunkSizeSmall] before test
+ [2024-12-23T04:52:41,632][INFO ][o.e.s.VectorScorerFactoryTests] [testSimpleMaxChunkSizeSmall] maxChunkSize=14
+ [2024-12-23T04:52:41,645][INFO ][o.e.s.VectorScorerFactoryTests] [testSimpleMaxChunkSizeSmall] after test
+
+VectorScorerFactoryTests > testRace STANDARD_OUT
+ [2024-12-23T04:52:41,645][INFO ][o.e.s.VectorScorerFactoryTests] [testRace] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,648][INFO ][o.e.s.VectorScorerFactoryTests] [testRace] before test
+ [2024-12-23T04:52:41,659][INFO ][o.e.s.VectorScorerFactoryTests] [testRace] after test
+
+VectorScorerFactoryTests > testNonNegativeDotProduct STANDARD_OUT
+ [2024-12-23T04:52:41,659][INFO ][o.e.s.VectorScorerFactoryTests] [testNonNegativeDotProduct] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,662][INFO ][o.e.s.VectorScorerFactoryTests] [testNonNegativeDotProduct] before test
+ [2024-12-23T04:52:41,663][INFO ][o.e.s.VectorScorerFactoryTests] [testNonNegativeDotProduct] after test
+
+VectorScorerFactoryTests > testRandomSlice STANDARD_OUT
+ [2024-12-23T04:52:41,664][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomSlice] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,666][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomSlice] before test
+ [2024-12-23T04:52:41,667][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomSlice] Testing testRandomSliceImpl-0-30
+ [… Testing testRandomSliceImpl-1-30 through testRandomSliceImpl-99-30: 99 near-identical log lines (04:52:41,669–04:52:41,781) elided …]
+ [2024-12-23T04:52:41,782][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomSlice] after test
+
+VectorScorerFactoryTests > testRandomMin STANDARD_OUT
+ [2024-12-23T04:52:41,783][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMin] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,785][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMin] before test
+ [2024-12-23T04:52:41,786][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMin] Testing testRandom-279
+ [2024-12-23T04:52:41,799][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMin] after test
+
+VectorScorerFactoryTests > testRandomMaxChunkSizeSmall STANDARD_OUT
+ [2024-12-23T04:52:41,799][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMaxChunkSizeSmall] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,801][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMaxChunkSizeSmall] before test
+ [2024-12-23T04:52:41,802][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMaxChunkSizeSmall] maxChunkSize=118
+ [2024-12-23T04:52:41,802][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMaxChunkSizeSmall] Testing testRandom-3935
+ [2024-12-23T04:52:41,916][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMaxChunkSizeSmall] after test
+
+VectorScorerFactoryTests > testLarge SKIPPED
+
+VectorScorerFactoryTests > testRandomMax STANDARD_OUT
+ [2024-12-23T04:52:41,917][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMax] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,919][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMax] before test
+ [2024-12-23T04:52:41,920][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMax] Testing testRandom-3531
+ [2024-12-23T04:52:41,946][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomMax] after test
+
+VectorScorerFactoryTests > testRandom STANDARD_OUT
+ [2024-12-23T04:52:41,946][INFO ][o.e.s.VectorScorerFactoryTests] [testRandom] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,949][INFO ][o.e.s.VectorScorerFactoryTests] [testRandom] before test
+ [2024-12-23T04:52:41,949][INFO ][o.e.s.VectorScorerFactoryTests] [testRandom] Testing testRandom-1840
+ [2024-12-23T04:52:41,980][INFO ][o.e.s.VectorScorerFactoryTests] [testRandom] after test
+
+VectorScorerFactoryTests > testRandomScorer STANDARD_OUT
+ [2024-12-23T04:52:41,981][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] JDK=23, os=Mac OS X, arch=aarch64
+ [2024-12-23T04:52:41,985][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] before test
+ [2024-12-23T04:52:41,986][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] Testing testRandom-COSINE-199.vex
+ [2024-12-23T04:52:41,989][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] Testing testRandom-DOT_PRODUCT-3144.vex
+ [2024-12-23T04:52:41,992][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] Testing testRandom-EUCLIDEAN-3017.vex
+ [2024-12-23T04:52:42,020][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] Testing testRandom-MAXIMUM_INNER_PRODUCT-1915.vex
+ [2024-12-23T04:52:42,025][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorer] after test
+
+VectorScorerFactoryTests > testSimple STANDARD_OUT
[2024-12-23T04:52:42,025][INFO ][o.e.s.VectorScorerFactoryTests] [testSimple] JDK=23, os=Mac OS X, arch=aarch64 + [2024-12-23T04:52:42,027][INFO ][o.e.s.VectorScorerFactoryTests] [testSimple] before test + [2024-12-23T04:52:42,032][INFO ][o.e.s.VectorScorerFactoryTests] [testSimple] after test + +VectorScorerFactoryTests > testRandomScorerChunkSizeSmall STANDARD_OUT + [2024-12-23T04:52:42,032][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] JDK=23, os=Mac OS X, arch=aarch64 + [2024-12-23T04:52:42,034][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] before test + [2024-12-23T04:52:42,034][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] maxChunkSize=76 + [2024-12-23T04:52:42,035][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] Testing testRandom-COSINE-2044.vex + [2024-12-23T04:52:42,061][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] Testing testRandom-DOT_PRODUCT-159.vex + [2024-12-23T04:52:42,063][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] Testing testRandom-EUCLIDEAN-455.vex + [2024-12-23T04:52:42,069][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] Testing testRandom-MAXIMUM_INNER_PRODUCT-1088.vex + [2024-12-23T04:52:42,071][INFO ][o.e.s.VectorScorerFactoryTests] [testRandomScorerChunkSizeSmall] after test + +VectorScorerFactoryTests > testSupport STANDARD_OUT + [2024-12-23T04:52:42,072][INFO ][o.e.s.VectorScorerFactoryTests] [testSupport] JDK=23, os=Mac OS X, arch=aarch64 + [2024-12-23T04:52:42,074][INFO ][o.e.s.VectorScorerFactoryTests] [testSupport] before test + [2024-12-23T04:52:42,074][INFO ][o.e.s.VectorScorerFactoryTests] [testSupport] after test +Finished generating test XML results (0.001 secs) into: /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/test-results/test +Generating HTML test report... +Finished generating test html results (0.002 secs) into: /Users/rene/dev/elastic/elasticsearch/libs/simdvec/build/reports/tests/test + +0 problems were found storing the configuration cache. + +See the complete report at file:///Users/rene/dev/elastic/elasticsearch/build/reports/configuration-cache/bf67ihcjtxveqczwzl7s4owb2/9o6bg5x67n4fyxqy871utzrqc/configuration-cache-report.html + +BUILD SUCCESSFUL in 4s +86 actionable tasks: 2 executed, 84 up-to-date +Watched directory hierarchies: [/Users/rene/dev/elastic/elasticsearch] + +Publishing build scan... +https://gradle-enterprise.elastic.co/s/s75qfwti22ejk + +Configuration cache entry stored. diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java index 06724b049f82..61bec8e41b16 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java @@ -111,7 +111,7 @@ public class MergingDigest extends AbstractTDigest { // based on accumulated k-index. This can be much faster since we // scale functions are more expensive than the corresponding // weight limits. 
- public static boolean useWeightLimit = true; + public static final boolean useWeightLimit = true; static MergingDigest create(TDigestArrays arrays, double compression) { arrays.adjustBreaker(SHALLOW_SIZE); diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 3b5fb6ddecde..d65b6e8fd2dd 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Adds "built in" aggregations to Elasticsearch.' - classname 'org.elasticsearch.aggregations.AggregationsPlugin' + description = 'Adds "built in" aggregations to Elasticsearch.' + classname ='org.elasticsearch.aggregations.AggregationsPlugin' extendedPlugins = ['lang-painless'] } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 9e7932bbf667..173e1eeef60a 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -13,8 +13,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Adds "built in" analyzers to Elasticsearch.' - classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin' + description = 'Adds "built in" analyzers to Elasticsearch.' + classname = 'org.elasticsearch.analysis.common.CommonAnalysisPlugin' extendedPlugins = ['lang-painless'] } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintAnalyzerProvider.java index 1fcbfb4bc2c8..68acf46f21c2 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintAnalyzerProvider.java @@ -24,11 +24,11 @@ import org.elasticsearch.xcontent.ParseField; */ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider { - public static ParseField SEPARATOR = new ParseField("separator"); - public static ParseField MAX_OUTPUT_SIZE = new ParseField("max_output_size"); + public static final ParseField SEPARATOR = new ParseField("separator"); + public static final ParseField MAX_OUTPUT_SIZE = new ParseField("max_output_size"); - public static int DEFAULT_MAX_OUTPUT_SIZE = 255; - public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET; + public static final int DEFAULT_MAX_OUTPUT_SIZE = 255; + public static final CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET; public static final char DEFAULT_SEPARATOR = ' '; private final FingerprintAnalyzer analyzer; diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle index b510e2403e93..07e6c7a04213 100644 --- a/modules/apm/build.gradle +++ b/modules/apm/build.gradle @@ -7,9 +7,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'apm' - description 'Provides APM integration for Elasticsearch' - classname 'org.elasticsearch.telemetry.apm.APM' + name = 'apm' + description = 'Provides APM integration for Elasticsearch' + classname ='org.elasticsearch.telemetry.apm.APM' } def otelVersion = '1.31.0' diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index cb74d6213781..f60179d53395 100644 --- 
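
A recurring cleanup across this changeset promotes mutable public static fields to public static final constants: useWeightLimit in MergingDigest and the ParseField and default-value constants in FingerprintAnalyzerProvider above, plus several more Setting, ParseField and TypeParser fields in the hunks that follow. A minimal sketch, not taken from the PR, of the hazard the mutable form leaves open:

    class Limits {
        public static boolean useWeightLimitMutable = true;   // anyone can reassign this
        public static final boolean USE_WEIGHT_LIMIT = true;  // a true constant
    }

    class Elsewhere {
        static void sneaky() {
            Limits.useWeightLimitMutable = false; // compiles; silently changes behaviour JVM-wide,
                                                  // including for concurrently running tests
            // Limits.USE_WEIGHT_LIMIT = false;   // rejected by the compiler once the field is final
        }
    }

Besides closing that hole, final on a primitive or String constant also lets the compiler and JIT fold the value instead of reloading the field on every access.
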
a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; +import org.elasticsearch.lucene.util.automaton.MinimizationOperations; import org.elasticsearch.tasks.Task; import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; import org.elasticsearch.telemetry.tracing.TraceContext; @@ -439,7 +440,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ? includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); + return new CharacterRunAutomaton(MinimizationOperations.minimize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index fa9756a909bb..aeb9060b21b1 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -7,8 +7,8 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'Elasticsearch Expanded Pack Plugin - Data Streams' - classname 'org.elasticsearch.datastreams.DataStreamsPlugin' + description = 'Elasticsearch Expanded Pack Plugin - Data Streams' + classname ='org.elasticsearch.datastreams.DataStreamsPlugin' } restResources { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index e3d767c92a68..574ea44d6235 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -244,7 +244,7 @@ public class DataStreamGetWriteIndexTests extends ESTestCase { new MetadataFieldMapper[] { dtfm }, Collections.emptyMap() ); - MappingLookup mappingLookup = MappingLookup.fromMappers(mapping, List.of(dtfm, dateFieldMapper), List.of()); + MappingLookup mappingLookup = MappingLookup.fromMappers(mapping, List.of(dtfm, dateFieldMapper), List.of(), null); indicesService = DataStreamTestHelper.mockIndicesServices(mappingLookup); } diff --git a/modules/dot-prefix-validation/build.gradle b/modules/dot-prefix-validation/build.gradle index 6e232570b4a2..bbbbbb5609f1 100644 --- a/modules/dot-prefix-validation/build.gradle +++ b/modules/dot-prefix-validation/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Validation for dot-prefixed indices for non-operator users' - classname 'org.elasticsearch.validation.DotPrefixValidationPlugin' + description = 'Validation for dot-prefixed indices for non-operator users' + classname = 'org.elasticsearch.validation.DotPrefixValidationPlugin' } restResources { diff --git a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java 
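
The APMTracer hunk above swaps Operations.determinize for MinimizationOperations.minimize when compiling the include/exclude name patterns into a CharacterRunAutomaton. Both calls yield a deterministic automaton, but minimize additionally merges equivalent states, so the matcher used to filter names stays as small as possible. A sketch of the same call shape, assuming the Lucene 9 automaton API (the PR itself imports an Elasticsearch-side copy of MinimizationOperations, apparently because newer Lucene no longer exposes the class):

    import java.util.List;

    import org.apache.lucene.util.automaton.Automata;
    import org.apache.lucene.util.automaton.Automaton;
    import org.apache.lucene.util.automaton.CharacterRunAutomaton;
    import org.apache.lucene.util.automaton.MinimizationOperations;
    import org.apache.lucene.util.automaton.Operations;

    class MinimizeSketch {
        public static void main(String[] args) {
            Automaton include = Operations.union(List.of(Automata.makeString("foo"), Automata.makeString("bar")));
            Automaton exclude = Automata.makeString("bar");
            Automaton difference = Operations.minus(include, exclude, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
            // minimize determinizes and then collapses equivalent states, so the run
            // automaton built from it is never larger than the determinized one.
            CharacterRunAutomaton matcher = new CharacterRunAutomaton(
                MinimizationOperations.minimize(difference, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)
            );
            System.out.println(matcher.run("foo"));  // true: included
            System.out.println(matcher.run("bar"));  // false: subtracted by the exclude set
        }
    }
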
b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java index 7f65bbb6937d..80fb8a3b2e36 100644 --- a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java +++ b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java @@ -67,7 +67,7 @@ public abstract class DotPrefixValidator implements MappedActionFil ".ml-state", ".ml-anomalies-unrelated" ); - public static Setting> IGNORED_INDEX_PATTERNS_SETTING = Setting.stringListSetting( + public static final Setting> IGNORED_INDEX_PATTERNS_SETTING = Setting.stringListSetting( "cluster.indices.validate_ignored_dot_patterns", List.of( "\\.ml-state-\\d+", diff --git a/modules/health-shards-availability/build.gradle b/modules/health-shards-availability/build.gradle index 4bf914fb5fdd..62686a44a933 100644 --- a/modules/health-shards-availability/build.gradle +++ b/modules/health-shards-availability/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Health report API extension providing the shards_availability output' - classname 'org.elasticsearch.health.plugin.ShardsAvailabilityPlugin' + description = 'Health report API extension providing the shards_availability output' + classname = 'org.elasticsearch.health.plugin.ShardsAvailabilityPlugin' } restResources { diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle index 8fe2b82fe21f..a172112948fd 100644 --- a/modules/ingest-attachment/build.gradle +++ b/modules/ingest-attachment/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'Ingest processor that uses Apache Tika to extract contents' - classname 'org.elasticsearch.ingest.attachment.IngestAttachmentPlugin' + description = 'Ingest processor that uses Apache Tika to extract contents' + classname = 'org.elasticsearch.ingest.attachment.IngestAttachmentPlugin' } // this overwrites the 'versions' map from Elasticsearch itself, but indeed we want that -- we're interested in managing our dependencies diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index d28d6a54b441..0de79a04681e 100644 --- a/modules/ingest-common/build.gradle +++ b/modules/ingest-common/build.gradle @@ -13,8 +13,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources' - classname 'org.elasticsearch.ingest.common.IngestCommonPlugin' + description = 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources' + classname ='org.elasticsearch.ingest.common.IngestCommonPlugin' extendedPlugins = ['lang-painless'] } diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 4312221b3393..988ca317b473 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -14,8 +14,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Ingest processor that uses lookup geo data based on IP addresses using the MaxMind geo database' - classname 'org.elasticsearch.ingest.geoip.IngestGeoIpPlugin' + 
description = 'Ingest processor that uses lookup geo data based on IP addresses using the MaxMind geo database' + classname ='org.elasticsearch.ingest.geoip.IngestGeoIpPlugin' bundleSpec.from("${project.buildDir}/ingest-geoip") { into '/' diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java index e627ecf6aeb9..0b6c04f14600 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java @@ -15,9 +15,10 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.ingest.geoip.direct.DatabaseConfigurationMetadata; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -90,8 +91,8 @@ public final class IngestGeoIpMetadata implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(DATABASES_FIELD.getPreferredName(), databases); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.concat(ChunkedToXContentHelper.xContentValuesMap(DATABASES_FIELD.getPreferredName(), databases)); } @Override diff --git a/modules/ingest-user-agent/build.gradle b/modules/ingest-user-agent/build.gradle index a504b5a64807..76f88544ea10 100644 --- a/modules/ingest-user-agent/build.gradle +++ b/modules/ingest-user-agent/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'Ingest processor that extracts information from a user agent' - classname 'org.elasticsearch.ingest.useragent.IngestUserAgentPlugin' + description = 'Ingest processor that extracts information from a user agent' + classname ='org.elasticsearch.ingest.useragent.IngestUserAgentPlugin' } restResources { diff --git a/modules/kibana/build.gradle b/modules/kibana/build.gradle index 6ead4cc68506..deaf2664236b 100644 --- a/modules/kibana/build.gradle +++ b/modules/kibana/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - description 'Plugin exposing APIs for Kibana system indices' - classname 'org.elasticsearch.kibana.KibanaPlugin' + description = 'Plugin exposing APIs for Kibana system indices' + classname ='org.elasticsearch.kibana.KibanaPlugin' } dependencies { diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index 134245075515..a7f30c4fbb5c 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Lucene expressions integration for Elasticsearch' - classname 'org.elasticsearch.script.expression.ExpressionPlugin' + 
description = 'Lucene expressions integration for Elasticsearch' + classname ='org.elasticsearch.script.expression.ExpressionPlugin' } dependencies { diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index 45037857db09..e8e539d2d6b5 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -12,8 +12,8 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Mustache scripting integration for Elasticsearch' - classname 'org.elasticsearch.script.mustache.MustachePlugin' + description = 'Mustache scripting integration for Elasticsearch' + classname ='org.elasticsearch.script.mustache.MustachePlugin' } dependencies { diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MustacheSettingsIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MustacheSettingsIT.java new file mode 100644 index 000000000000..7ab3f1bd3b80 --- /dev/null +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MustacheSettingsIT.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script.mustache; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +public class MustacheSettingsIT extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return List.of(MustachePlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder().put(MustacheScriptEngine.MUSTACHE_RESULT_SIZE_LIMIT.getKey(), "10b").build(); + } + + public void testResultSizeLimit() throws Exception { + createIndex("test"); + prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "value1").endObject()).get(); + indicesAdmin().prepareRefresh().get(); + + String query = """ + { "query": {"match_all": {}}, "size" : "{{my_size}}" }"""; + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices("test"); + var e = expectThrows( + ElasticsearchParseException.class, + () -> new SearchTemplateRequestBuilder(client()).setRequest(searchRequest) + .setScript(query) + .setScriptType(ScriptType.INLINE) + .setScriptParams(Collections.singletonMap("my_size", 1)) + .get() + ); + assertThat(e.getMessage(), equalTo("Mustache script result size limit exceeded")); + } +} diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index b24d60cb8d88..cfdd57db4dd5 100644 
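
The new MustacheSettingsIT above pins MUSTACHE_RESULT_SIZE_LIMIT to 10 bytes and asserts that rendering a search template past that budget fails with 'Mustache script result size limit exceeded'. The engine internals are not part of this diff, so purely as a mental model (hypothetical names, counting chars as a stand-in for bytes): a limit of this kind can be enforced by pointing the renderer at a bounded writer.

    import java.io.IOException;
    import java.io.Writer;

    // Hypothetical sketch, not the MustacheScriptEngine implementation.
    final class SizeLimitedWriter extends Writer {
        private final Writer delegate;
        private final long limit;   // e.g. the 10-byte budget from the test
        private long written;

        SizeLimitedWriter(Writer delegate, long limit) {
            this.delegate = delegate;
            this.limit = limit;
        }

        @Override
        public void write(char[] cbuf, int off, int len) throws IOException {
            written += len;
            if (written > limit) {
                // the test shows the real engine surfaces this as an ElasticsearchParseException
                throw new IOException("Mustache script result size limit exceeded");
            }
            delegate.write(cbuf, off, len);
        }

        @Override public void flush() throws IOException { delegate.flush(); }
        @Override public void close() throws IOException { delegate.close(); }
    }

The companion MustachePlugin change below registers the setting via getSettings(), which is what makes it a recognized node setting rather than an unknown-setting error at startup.
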
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.features.NodeFeature; @@ -73,4 +74,9 @@ public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin new RestRenderSearchTemplateAction() ); } + + @Override + public List> getSettings() { + return List.of(MustacheScriptEngine.MUSTACHE_RESULT_SIZE_LIMIT); + } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 9bb80d5688b5..f8a56dcfc990 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -27,7 +27,7 @@ import java.io.IOException; import java.io.InputStream; public class SearchTemplateResponse extends ActionResponse implements ToXContentObject { - public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output"); + public static final ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output"); /** Contains the source of the rendered template **/ private BytesReference source; diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index b01c9201dcff..b030d3c2d5e1 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -15,8 +15,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'An easy, safe and fast scripting language for Elasticsearch' - classname 'org.elasticsearch.painless.PainlessPlugin' + description = 'An easy, safe and fast scripting language for Elasticsearch' + classname ='org.elasticsearch.painless.PainlessPlugin' } testClusters.configureEach { diff --git a/modules/legacy-geo/build.gradle b/modules/legacy-geo/build.gradle index 55171221396a..4ed8d84ab96b 100644 --- a/modules/legacy-geo/build.gradle +++ b/modules/legacy-geo/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Placeholder plugin for geospatial features in ES' - classname 'org.elasticsearch.legacygeo.LegacyGeoPlugin' + description = 'Placeholder plugin for geospatial features in ES' + classname = 'org.elasticsearch.legacygeo.LegacyGeoPlugin' } dependencies { diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java index b0634f0f1332..6127b4beb71f 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java @@ -359,7 +359,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper< } 
@Deprecated - public static Mapper.TypeParser PARSER = (name, node, parserContext) -> { + public static final Mapper.TypeParser PARSER = (name, node, parserContext) -> { boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(parserContext.getSettings()); boolean coerceByDefault = COERCE_SETTING.get(parserContext.getSettings()); FieldMapper.Builder builder = new LegacyGeoShapeFieldMapper.Builder( diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index 992f39a22b28..4ce00ab87303 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -12,8 +12,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Adds advanced field mappers' - classname 'org.elasticsearch.index.mapper.extras.MapperExtrasPlugin' + description = 'Adds advanced field mappers' + classname ='org.elasticsearch.index.mapper.extras.MapperExtrasPlugin' } restResources { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java index fd0098851c5f..db2762a028e6 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java @@ -127,7 +127,7 @@ public class TokenCountFieldMapper extends FieldMapper { } } - public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); private final boolean index; private final boolean hasDocValues; diff --git a/modules/multi-project/build.gradle b/modules/multi-project/build.gradle index d036aa38dd19..530c99b78f62 100644 --- a/modules/multi-project/build.gradle +++ b/modules/multi-project/build.gradle @@ -7,8 +7,8 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'The Multi-Project module adds functionality for working with multiple projects in ES (only for internal use).' - classname 'org.elasticsearch.multiproject.MultiProjectPlugin' + description = 'The Multi-Project module adds functionality for working with multiple projects in ES (only for internal use).' 
+ classname = 'org.elasticsearch.multiproject.MultiProjectPlugin' } restResources { diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index 0d34b5f6e3b4..efe52225121c 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'This module adds the support parent-child queries and aggregations' - classname 'org.elasticsearch.join.ParentJoinPlugin' + description = 'This module adds the support parent-child queries and aggregations' + classname = 'org.elasticsearch.join.ParentJoinPlugin' } restResources { diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index 2d2f6767f5e6..320683452179 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Percolator module adds capability to index queries and query these queries by specifying documents' - classname 'org.elasticsearch.percolator.PercolatorPlugin' + description = 'Percolator module adds capability to index queries and query these queries by specifying documents' + classname = 'org.elasticsearch.percolator.PercolatorPlugin' } dependencies { diff --git a/modules/rank-eval/build.gradle b/modules/rank-eval/build.gradle index c9016798c18b..fe7453750647 100644 --- a/modules/rank-eval/build.gradle +++ b/modules/rank-eval/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Rank Eval module adds APIs to evaluate ranking quality.' - classname 'org.elasticsearch.index.rankeval.RankEvalPlugin' + description = 'The Rank Eval module adds APIs to evaluate ranking quality.' + classname = 'org.elasticsearch.index.rankeval.RankEvalPlugin' } restResources { diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 7281c161e2c4..47a3f51115b1 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -21,8 +21,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Reindex module adds APIs to reindex from one index to another or update documents in place.' - classname 'org.elasticsearch.reindex.ReindexPlugin' + description = 'The Reindex module adds APIs to reindex from one index to another or update documents in place.' 
+ classname = 'org.elasticsearch.reindex.ReindexPlugin' } testClusters.configureEach { @@ -135,7 +135,8 @@ if (OS.current() == OS.WINDOWS) { TaskProvider fixture = tasks.register("oldEs${version}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, oldEsDependency executable = "${buildParams.runtimeJavaHome.get()}/bin/java" - env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}" + def oldesFixtureConfiguration = project.configurations.oldesFixture + env 'CLASSPATH', "${-> oldesFixtureConfiguration.asPath}" // old versions of Elasticsearch need JAVA_HOME env 'JAVA_HOME', jdks.legacy.javaHomePath // If we are running on certain arm systems we need to explicitly set the stack size to overcome JDK page size bug diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 8c1ca3891bc1..ded138efb4af 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -15,8 +15,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Azure Repository plugin adds support for Azure storage repositories.' - classname 'org.elasticsearch.repositories.azure.AzureRepositoryPlugin' + description = 'The Azure Repository plugin adds support for Azure storage repositories.' + classname ='org.elasticsearch.repositories.azure.AzureRepositoryPlugin' } versions << [ @@ -56,7 +56,7 @@ dependencies { api "io.projectreactor.netty:reactor-netty-core:${versions.azureReactorNetty}" api "io.projectreactor.netty:reactor-netty-http:${versions.azureReactorNetty}" api "io.projectreactor:reactor-core:3.4.38" - api "org.reactivestreams:reactive-streams:1.0.4" + api "org.reactivestreams:reactive-streams:${versions.reactive_streams}" // Others api "com.fasterxml.woodstox:woodstox-core:6.7.0" diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index 7373ed948578..4c7d42e6080c 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -37,7 +37,7 @@ public class AzureStorageService { * The maximum size of a BlockBlob block. * See https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs */ - public static ByteSizeValue MAX_BLOCK_SIZE = new ByteSizeValue(100, ByteSizeUnit.MB); + public static final ByteSizeValue MAX_BLOCK_SIZE = new ByteSizeValue(100, ByteSizeUnit.MB); /** * The maximum number of blocks. diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index 811645d154c7..d23a0f4a7e44 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -17,8 +17,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The GCS repository plugin adds Google Cloud Storage support for repositories.' - classname 'org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' + description = 'The GCS repository plugin adds Google Cloud Storage support for repositories.' 
+ classname ='org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' } dependencies { diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index f0dc1ca71495..1db83b9e9bc4 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -14,8 +14,8 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - description 'The S3 repository plugin adds S3 repositories' - classname 'org.elasticsearch.repositories.s3.S3RepositoryPlugin' + description = 'The S3 repository plugin adds S3 repositories' + classname ='org.elasticsearch.repositories.s3.S3RepositoryPlugin' } dependencies { @@ -44,12 +44,14 @@ dependencies { internalClusterTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" yamlRestTestImplementation project(':modules:repository-s3') + yamlRestTestImplementation project(':test:fixtures:aws-fixture-utils') yamlRestTestImplementation project(':test:fixtures:s3-fixture') yamlRestTestImplementation project(':test:fixtures:testcontainer-utils') yamlRestTestImplementation project(':test:framework') yamlRestTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" javaRestTestImplementation project(':modules:repository-s3') + javaRestTestImplementation project(':test:fixtures:aws-fixture-utils') javaRestTestImplementation project(':test:fixtures:aws-sts-fixture') javaRestTestImplementation project(':test:fixtures:ec2-imds-fixture') javaRestTestImplementation project(':test:fixtures:minio-fixture') diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java index 45844703683b..10901424512a 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java @@ -20,6 +20,8 @@ import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import static fixture.aws.AwsCredentialsUtils.fixedAccessKey; + @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3BasicCredentialsRestIT extends AbstractRepositoryS3RestTestCase { @@ -31,7 +33,7 @@ public class RepositoryS3BasicCredentialsRestIT extends AbstractRepositoryS3Rest private static final String SECRET_KEY = PREFIX + "secret-key"; private static final String CLIENT = "basic_credentials_client"; - private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, S3HttpFixture.fixedAccessKey(ACCESS_KEY)); + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, fixedAccessKey(ACCESS_KEY)); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java index 4f0bf8300064..c525c2ea42f5 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java +++ 
b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java @@ -9,10 +9,10 @@ package org.elasticsearch.repositories.s3; +import fixture.aws.DynamicAwsCredentials; import fixture.aws.imds.Ec2ImdsHttpFixture; import fixture.aws.imds.Ec2ImdsServiceBuilder; import fixture.aws.imds.Ec2ImdsVersion; -import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; @@ -35,14 +35,14 @@ public class RepositoryS3EcsCredentialsRestIT extends AbstractRepositoryS3RestTe private static final String BASE_PATH = PREFIX + "base_path"; private static final String CLIENT = "ecs_credentials_client"; - private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); + private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( - new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials) + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicCredentials::addValidCredentials) .alternativeCredentialsEndpoints(Set.of("/ecs_credentials_endpoint")) ); - private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicCredentials::isAuthorized); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java index bc41b9fd62ca..bc689ea52ca3 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java @@ -9,10 +9,10 @@ package org.elasticsearch.repositories.s3; +import fixture.aws.DynamicAwsCredentials; import fixture.aws.imds.Ec2ImdsHttpFixture; import fixture.aws.imds.Ec2ImdsServiceBuilder; import fixture.aws.imds.Ec2ImdsVersion; -import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; @@ -33,13 +33,13 @@ public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3Res private static final String BASE_PATH = PREFIX + "base_path"; private static final String CLIENT = "imdsv1_credentials_client"; - private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); + private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( - new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials) + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicCredentials::addValidCredentials) ); - private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, 
dynamicCredentials::isAuthorized); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java index 34500ff5227f..dedf205d3def 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java @@ -9,10 +9,10 @@ package org.elasticsearch.repositories.s3; +import fixture.aws.DynamicAwsCredentials; import fixture.aws.imds.Ec2ImdsHttpFixture; import fixture.aws.imds.Ec2ImdsServiceBuilder; import fixture.aws.imds.Ec2ImdsVersion; -import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; @@ -33,13 +33,13 @@ public class RepositoryS3ImdsV2CredentialsRestIT extends AbstractRepositoryS3Res private static final String BASE_PATH = PREFIX + "base_path"; private static final String CLIENT = "imdsv2_credentials_client"; - private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); + private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( - new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials) + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).newCredentialsConsumer(dynamicCredentials::addValidCredentials) ); - private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicCredentials::isAuthorized); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java index 1f09fa6b081b..065d1c6c9ea2 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java @@ -26,6 +26,7 @@ import org.junit.rules.TestRule; import java.io.IOException; +import static fixture.aws.AwsCredentialsUtils.mutableAccessKey; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; @@ -38,12 +39,7 @@ public class RepositoryS3RestReloadCredentialsIT extends ESRestTestCase { private static volatile String repositoryAccessKey; - public static final S3HttpFixture s3Fixture = new S3HttpFixture( - true, - BUCKET, - BASE_PATH, - S3HttpFixture.mutableAccessKey(() -> repositoryAccessKey) - ); + public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, mutableAccessKey(() -> repositoryAccessKey)); private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); diff --git 
a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java index 84a327ee131a..a8009d594926 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java @@ -20,6 +20,8 @@ import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import static fixture.aws.AwsCredentialsUtils.fixedAccessKeyAndToken; + @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3SessionCredentialsRestIT extends AbstractRepositoryS3RestTestCase { @@ -36,7 +38,7 @@ public class RepositoryS3SessionCredentialsRestIT extends AbstractRepositoryS3Re true, BUCKET, BASE_PATH, - S3HttpFixture.fixedAccessKeyAndToken(ACCESS_KEY, SESSION_TOKEN) + fixedAccessKeyAndToken(ACCESS_KEY, SESSION_TOKEN) ); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java index de80e4179ef5..61f0acdb1615 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.repositories.s3; +import fixture.aws.DynamicAwsCredentials; import fixture.aws.sts.AwsStsHttpFixture; -import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; @@ -32,9 +32,9 @@ public class RepositoryS3StsCredentialsRestIT extends AbstractRepositoryS3RestTe private static final String BASE_PATH = PREFIX + "base_path"; private static final String CLIENT = "sts_credentials_client"; - private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); + private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(); - private static final S3HttpFixture s3HttpFixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); + private static final S3HttpFixture s3HttpFixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicCredentials::isAuthorized); private static final String WEB_IDENTITY_TOKEN_FILE_CONTENTS = """ Atza|IQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDans\ @@ -42,7 +42,7 @@ public class RepositoryS3StsCredentialsRestIT extends AbstractRepositoryS3RestTe zTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ"""; private static final AwsStsHttpFixture stsHttpFixture = new AwsStsHttpFixture( - dynamicS3Credentials::addValidCredentials, + dynamicCredentials::addValidCredentials, WEB_IDENTITY_TOKEN_FILE_CONTENTS ); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java 
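
Zooming out from the repository-s3 hunks above: the S3-specific DynamicS3Credentials class and the static helpers that used to live on S3HttpFixture (fixedAccessKey, fixedAccessKeyAndToken, mutableAccessKey) are replaced by shared fixture.aws utilities, DynamicAwsCredentials and AwsCredentialsUtils, so the same credential plumbing now backs the IMDS, ECS and STS test fixtures alike. The diff shows only call sites; a hedged sketch of the minimal shape those call sites imply (the real class may well differ):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Sketch inferred from usage, not the actual fixture.aws.DynamicAwsCredentials.
    final class DynamicAwsCredentialsSketch {
        private final Map<String, String> validKeys = new ConcurrentHashMap<>();

        // wired as newCredentialsConsumer(dynamicCredentials::addValidCredentials):
        // fixtures that mint credentials (IMDS, ECS, STS) register them here
        void addValidCredentials(String accessKey, String sessionToken) {
            validKeys.put(accessKey, sessionToken);
        }

        // wired as new S3HttpFixture(..., dynamicCredentials::isAuthorized):
        // the S3 fixture accepts a request only if its key was previously minted
        boolean isAuthorized(String accessKey) {
            return validKeys.containsKey(accessKey);
        }
    }
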
b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java index 03106c26c9a2..6f28eda81a9b 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoriesMetrics.java @@ -21,7 +21,7 @@ public record S3RepositoriesMetrics( LongHistogram retryDeletesHistogram ) { - public static S3RepositoriesMetrics NOOP = new S3RepositoriesMetrics(RepositoriesMetrics.NOOP); + public static final S3RepositoriesMetrics NOOP = new S3RepositoriesMetrics(RepositoriesMetrics.NOOP); public static final String METRIC_RETRY_EVENT_TOTAL = "es.repositories.s3.input_stream.retry.event.total"; public static final String METRIC_RETRY_SUCCESS_TOTAL = "es.repositories.s3.input_stream.retry.success.total"; diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java index 3d34934e5494..89919f0f3ddf 100644 --- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java +++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java @@ -23,6 +23,8 @@ import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import static fixture.aws.AwsCredentialsUtils.fixedAccessKey; + @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3ClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT { @@ -34,7 +36,7 @@ public class RepositoryS3ClientYamlTestSuiteIT extends AbstractRepositoryS3Clien true, "bucket", "base_path_integration_tests", - S3HttpFixture.fixedAccessKey(ACCESS_KEY) + fixedAccessKey(ACCESS_KEY) ); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() diff --git a/modules/repository-url/build.gradle b/modules/repository-url/build.gradle index fd1c5089cdf1..857a9b00f898 100644 --- a/modules/repository-url/build.gradle +++ b/modules/repository-url/build.gradle @@ -14,8 +14,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Module for URL repository' - classname 'org.elasticsearch.plugin.repository.url.URLRepositoryPlugin' + description = 'Module for URL repository' + classname ='org.elasticsearch.plugin.repository.url.URLRepositoryPlugin' } restResources { diff --git a/modules/rest-root/build.gradle b/modules/rest-root/build.gradle index adb8aeb02863..a387cc4c5dd5 100644 --- a/modules/rest-root/build.gradle +++ b/modules/rest-root/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'Adds HEAD and GET / endpoint to Elasticsearch' - classname 'org.elasticsearch.rest.root.MainRestPlugin' + description = 'Adds HEAD and GET / endpoint to Elasticsearch' + classname ='org.elasticsearch.rest.root.MainRestPlugin' } restResources { diff --git a/modules/runtime-fields-common/build.gradle b/modules/runtime-fields-common/build.gradle index e8e06f0a9c4c..0cbb6a4d43c9 100644 --- a/modules/runtime-fields-common/build.gradle +++ 
b/modules/runtime-fields-common/build.gradle @@ -12,8 +12,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'Module for runtime fields features and extensions that have large dependencies' - classname 'org.elasticsearch.runtimefields.RuntimeFieldsCommonPlugin' + description = 'Module for runtime fields features and extensions that have large dependencies' + classname = 'org.elasticsearch.runtimefields.RuntimeFieldsCommonPlugin' extendedPlugins = ['lang-painless'] } diff --git a/modules/systemd/build.gradle b/modules/systemd/build.gradle index 8eb48e1d5f63..915555479758 100644 --- a/modules/systemd/build.gradle +++ b/modules/systemd/build.gradle @@ -8,8 +8,8 @@ */ esplugin { - description 'Integrates Elasticsearch with systemd' - classname 'org.elasticsearch.systemd.SystemdPlugin' + description = 'Integrates Elasticsearch with systemd' + classname ='org.elasticsearch.systemd.SystemdPlugin' } dependencies { diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 13dfdf2b3c7b..4b64b9c56917 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -23,8 +23,8 @@ apply plugin: 'elasticsearch.publish' * fix the hack in the build framework that copies transport-netty4 into the integ test cluster */ esplugin { - description 'Netty 4 based transport implementation' - classname 'org.elasticsearch.transport.netty4.Netty4Plugin' + description = 'Netty 4 based transport implementation' + classname = 'org.elasticsearch.transport.netty4.Netty4Plugin' } // exclude transitively pulled in version via the esplugin plugin to always build from fresh sources and make jar-hell checks pass diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java index 449fd72669da..3feaa2874ebd 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java @@ -91,7 +91,7 @@ public class Netty4Plugin extends Plugin implements NetworkPlugin { */ private static final ByteSizeValue MTU = ByteSizeValue.ofBytes(Long.parseLong(System.getProperty("es.net.mtu", "1500"))); private static final String SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = "http.netty.max_composite_buffer_components"; - public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = new Setting<>( + public static final Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = new Setting<>( SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, (s) -> { ByteSizeValue maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(s); diff --git a/muted-tests.yml b/muted-tests.yml index 1b559baac3f7..d912f30584b8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -113,8 +113,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/20_operator_privileges_disabled/Operator only settings can be set and restored by non-operator user when operator privileges is disabled} issue: https://github.com/elastic/elasticsearch/issues/116775 -- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT - issue: https://github.com/elastic/elasticsearch/issues/116851 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: 
testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 @@ -145,11 +143,12 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/117349 +- class: org.elasticsearch.xpack.inference.InferenceRestIT + method: test {p0=inference/30_semantic_text_inference_bwc/Calculates embeddings using the default ELSER 2 endpoint} + issue: https://github.com/elastic/elasticsearch/issues/117349 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_reset/Test reset running transform} issue: https://github.com/elastic/elasticsearch/issues/117473 -- class: org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/117525 - class: org.elasticsearch.search.ccs.CrossClusterIT method: testCancel issue: https://github.com/elastic/elasticsearch/issues/108061 @@ -158,24 +157,12 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117815 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 -- class: org.elasticsearch.packaging.test.ArchiveGenerateInitialCredentialsTests - method: test20NoAutoGenerationWhenAutoConfigurationDisabled - issue: https://github.com/elastic/elasticsearch/issues/117891 -- class: org.elasticsearch.packaging.test.BootstrapCheckTests - method: test20RunWithBootstrapChecks - issue: https://github.com/elastic/elasticsearch/issues/117890 - class: org.elasticsearch.xpack.esql.plugin.ClusterRequestTests method: testFallbackIndicesOptions issue: https://github.com/elastic/elasticsearch/issues/117937 - class: org.elasticsearch.xpack.ml.integration.RegressionIT method: testTwoJobsWithSameRandomizeSeedUseSameTrainingSet issue: https://github.com/elastic/elasticsearch/issues/117805 -- class: org.elasticsearch.packaging.test.ArchiveGenerateInitialCredentialsTests - method: test30NoAutogenerationWhenDaemonized - issue: https://github.com/elastic/elasticsearch/issues/117956 -- class: org.elasticsearch.packaging.test.CertGenCliTests - method: test40RunWithCert - issue: https://github.com/elastic/elasticsearch/issues/117955 - class: org.elasticsearch.upgrades.QueryBuilderBWCIT method: testQueryBuilderBWC {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116990 @@ -185,45 +172,15 @@ tests: - class: org.elasticsearch.index.reindex.ReindexNodeShutdownIT method: testReindexWithShutdown issue: https://github.com/elastic/elasticsearch/issues/118040 -- class: org.elasticsearch.packaging.test.ConfigurationTests - method: test20HostnameSubstitution - issue: https://github.com/elastic/elasticsearch/issues/118028 -- class: org.elasticsearch.packaging.test.ArchiveTests - method: test40AutoconfigurationNotTriggeredWhenNodeIsMeantToJoinExistingCluster - issue: https://github.com/elastic/elasticsearch/issues/118029 -- class: org.elasticsearch.packaging.test.ConfigurationTests - method: test30SymlinkedDataPath - issue: https://github.com/elastic/elasticsearch/issues/118111 -- class: org.elasticsearch.packaging.test.KeystoreManagementTests - method: test30KeystorePasswordFromFile - issue: https://github.com/elastic/elasticsearch/issues/118123 -- class: org.elasticsearch.packaging.test.KeystoreManagementTests - method: test31WrongKeystorePasswordFromFile - issue: 
https://github.com/elastic/elasticsearch/issues/118123 -- class: org.elasticsearch.packaging.test.ArchiveTests - method: test41AutoconfigurationNotTriggeredWhenNodeCannotContainData - issue: https://github.com/elastic/elasticsearch/issues/118110 - class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS2UnavailableRemotesIT method: testEsqlRcs2UnavailableRemoteScenarios issue: https://github.com/elastic/elasticsearch/issues/117419 -- class: org.elasticsearch.packaging.test.DebPreservationTests - method: test40RestartOnUpgrade - issue: https://github.com/elastic/elasticsearch/issues/118170 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultRerank issue: https://github.com/elastic/elasticsearch/issues/118184 - class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT method: testCancelRequestWhenFailingFetchingPages issue: https://github.com/elastic/elasticsearch/issues/118193 -- class: org.elasticsearch.packaging.test.MemoryLockingTests - method: test20MemoryLockingEnabled - issue: https://github.com/elastic/elasticsearch/issues/118195 -- class: org.elasticsearch.packaging.test.ArchiveTests - method: test42AutoconfigurationNotTriggeredWhenNodeCannotBecomeMaster - issue: https://github.com/elastic/elasticsearch/issues/118196 -- class: org.elasticsearch.packaging.test.ArchiveTests - method: test43AutoconfigurationNotTriggeredWhenTlsAlreadyConfigured - issue: https://github.com/elastic/elasticsearch/issues/118202 - class: org.elasticsearch.packaging.test.ArchiveTests method: test44AutoConfigurationNotTriggeredOnNotWriteableConfDir issue: https://github.com/elastic/elasticsearch/issues/118208 @@ -233,9 +190,6 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/120_data_streams_stats/Multiple data stream} issue: https://github.com/elastic/elasticsearch/issues/118217 -- class: org.elasticsearch.packaging.test.ArchiveTests - method: test60StartAndStop - issue: https://github.com/elastic/elasticsearch/issues/118216 - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests method: testBottomFieldSort issue: https://github.com/elastic/elasticsearch/issues/118214 @@ -245,26 +199,11 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testSearcherId issue: https://github.com/elastic/elasticsearch/issues/118374 -- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT - method: test {p0=/10_info/Info} - issue: https://github.com/elastic/elasticsearch/issues/118394 -- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT - method: test {p0=/11_nodes/Additional disk information} - issue: https://github.com/elastic/elasticsearch/issues/118395 -- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT - method: test {p0=/11_nodes/Test cat nodes output with full_id set} - issue: https://github.com/elastic/elasticsearch/issues/118396 -- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT - method: test {p0=/11_nodes/Test cat nodes output} - issue: https://github.com/elastic/elasticsearch/issues/118397 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/118220 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/118238 -- class: org.elasticsearch.packaging.test.DockerTests - method: test011SecurityEnabledStatus - issue: 
https://github.com/elastic/elasticsearch/issues/118517 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testInvalidJSON issue: https://github.com/elastic/elasticsearch/issues/116521 @@ -292,8 +231,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115727 - class: org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationIT issue: https://github.com/elastic/elasticsearch/issues/118414 -- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlClientYamlIT - issue: https://github.com/elastic/elasticsearch/issues/119086 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/search/search-your-data/retrievers-examples/line_98} issue: https://github.com/elastic/elasticsearch/issues/119155 @@ -303,6 +240,32 @@ tests: - class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT method: test {yaml=indices.create/20_synthetic_source/create index with use_synthetic_source} issue: https://github.com/elastic/elasticsearch/issues/119191 +- class: org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapperTests + method: testCartesianBoundsBlockLoader + issue: https://github.com/elastic/elasticsearch/issues/119201 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/data_frame_analytics_cat_apis/Test cat data frame analytics all jobs with header} + issue: https://github.com/elastic/elasticsearch/issues/119332 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + method: test {lookup-join.MvJoinKeyOnTheDataNode ASYNC} + issue: https://github.com/elastic/elasticsearch/issues/119179 +- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/119191 +- class: org.elasticsearch.xpack.esql.EsqlSecurityIT + method: testLookupJoinIndexAllowed + issue: https://github.com/elastic/elasticsearch/issues/119268 +- class: org.elasticsearch.xpack.esql.EsqlSecurityIT + method: testLookupJoinIndexForbidden + issue: https://github.com/elastic/elasticsearch/issues/119269 +- class: org.elasticsearch.xpack.esql.EsqlAsyncSecurityIT + method: testLookupJoinIndexForbidden + issue: https://github.com/elastic/elasticsearch/issues/119270 +- class: org.elasticsearch.xpack.esql.EsqlAsyncSecurityIT + method: testLookupJoinIndexAllowed + issue: https://github.com/elastic/elasticsearch/issues/119271 +- class: org.elasticsearch.xpack.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT + method: testEsqlTermsAggregationByMethod + issue: https://github.com/elastic/elasticsearch/issues/119355 # Examples: # diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 05cd2cb44124..0d576d316f85 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The ICU Analysis plugin integrates the Lucene ICU module into Elasticsearch, adding ICU-related analysis components.' - classname 'org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin' + description = 'The ICU Analysis plugin integrates the Lucene ICU module into Elasticsearch, adding ICU-related analysis components.' 
+ classname = 'org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin' } tasks.named("forbiddenApisMain").configure { diff --git a/plugins/analysis-kuromoji/build.gradle b/plugins/analysis-kuromoji/build.gradle index bb59fed0db2e..82fa59e5773c 100644 --- a/plugins/analysis-kuromoji/build.gradle +++ b/plugins/analysis-kuromoji/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - description 'The Japanese (kuromoji) Analysis plugin integrates Lucene kuromoji analysis module into elasticsearch.' - classname 'org.elasticsearch.plugin.analysis.kuromoji.AnalysisKuromojiPlugin' + description = 'The Japanese (kuromoji) Analysis plugin integrates Lucene kuromoji analysis module into elasticsearch.' + classname = 'org.elasticsearch.plugin.analysis.kuromoji.AnalysisKuromojiPlugin' } dependencies { diff --git a/plugins/analysis-nori/build.gradle b/plugins/analysis-nori/build.gradle index 0e602886d29c..6254a56f0657 100644 --- a/plugins/analysis-nori/build.gradle +++ b/plugins/analysis-nori/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - description 'The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.' - classname 'org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin' + description = 'The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.' + classname = 'org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin' } dependencies { diff --git a/plugins/analysis-phonetic/build.gradle b/plugins/analysis-phonetic/build.gradle index e34a179a384f..018e2c0e52b8 100644 --- a/plugins/analysis-phonetic/build.gradle +++ b/plugins/analysis-phonetic/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - description 'The Phonetic Analysis plugin integrates phonetic token filter analysis with elasticsearch.' - classname 'org.elasticsearch.plugin.analysis.phonetic.AnalysisPhoneticPlugin' + description = 'The Phonetic Analysis plugin integrates phonetic token filter analysis with elasticsearch.' + classname = 'org.elasticsearch.plugin.analysis.phonetic.AnalysisPhoneticPlugin' } dependencies { diff --git a/plugins/analysis-smartcn/build.gradle b/plugins/analysis-smartcn/build.gradle index cbc45f32bf59..b4ac03935aab 100644 --- a/plugins/analysis-smartcn/build.gradle +++ b/plugins/analysis-smartcn/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - description 'Smart Chinese Analysis plugin integrates Lucene Smart Chinese analysis module into elasticsearch.' - classname 'org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin' + description = 'Smart Chinese Analysis plugin integrates Lucene Smart Chinese analysis module into elasticsearch.' 
+ classname = 'org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin' } dependencies { diff --git a/plugins/analysis-stempel/build.gradle b/plugins/analysis-stempel/build.gradle index 2d2c187970d8..0fb15ec7d36d 100644 --- a/plugins/analysis-stempel/build.gradle +++ b/plugins/analysis-stempel/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - description 'The Stempel (Polish) Analysis plugin integrates Lucene stempel (polish) analysis module into elasticsearch.' - classname 'org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin' + description = 'The Stempel (Polish) Analysis plugin integrates Lucene stempel (polish) analysis module into elasticsearch.' + classname = 'org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin' } dependencies { diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index 2794e5bcfe33..2be48240a887 100644 --- a/plugins/analysis-ukrainian/build.gradle +++ b/plugins/analysis-ukrainian/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - description 'The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.' - classname 'org.elasticsearch.plugin.analysis.ukrainian.AnalysisUkrainianPlugin' + description = 'The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.' + classname = 'org.elasticsearch.plugin.analysis.ukrainian.AnalysisUkrainianPlugin' } dependencies { diff --git a/plugins/build.gradle b/plugins/build.gradle index 60c65885639e..32fd646ef0be 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -22,10 +22,10 @@ configure(subprojects.findAll { it.parent.path == project.path }) { esplugin { // for local ES plugins, the name of the plugin is the same as the directory - name project.name + name = project.name - licenseFile rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + licenseFile = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') }
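The build-script hunks above and below all replace Gradle's deprecated space-assignment syntax (`description 'x'`) with plain property assignment (`description = 'x'`). With Gradle's lazy configuration, the `=` form is sugar for `Property.set(...)`. A minimal sketch of what such an extension looks like on the Java side (the class and property names here are illustrative, not the actual Elasticsearch build-tools types):

```java
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.provider.Property;

// Hypothetical esplugin-style extension. Gradle generates the Property
// implementations for the abstract getters, and in the Groovy DSL
// `description = 'x'` compiles down to getDescription().set("x").
abstract class EsPluginSketchExtension {
    public abstract Property<String> getDescription();

    public abstract Property<String> getClassname();
}

class EsPluginSketchPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // Registers the extension under the name used in the build scripts above.
        project.getExtensions().create("esplugin", EsPluginSketchExtension.class);
    }
}
```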
diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 9549236775bf..4455e798e3a2 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -13,8 +13,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism' - classname 'org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin' + description = 'The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism' + classname = 'org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin' } def localVersions = versions + [ diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 95715217fa59..a8255c1b5451 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.' - classname 'org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin' + description = 'The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.' + classname = 'org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin' } dependencies { diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java index 73213090b6f9..af3b3d67951d 100644 --- a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java @@ -9,30 +9,24 @@ package org.elasticsearch.discovery.ec2; -import com.amazonaws.util.EC2MetadataUtils; - import org.elasticsearch.client.Request; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; import java.io.IOException; -public class DiscoveryEc2AvailabilityZoneAttributeNoImdsIT extends ESRestTestCase { +public class DiscoveryEc2AvailabilityZoneAttributeNoImdsIT extends DiscoveryEc2AvailabilityZoneAttributeTestCase { @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .plugin("discovery-ec2") - .setting(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), "true") - .build(); + // use an address which definitely isn't running an IMDS, just in case we're running these tests in EC2 + public static ElasticsearchCluster cluster = DiscoveryEc2AvailabilityZoneAttributeTestCase.buildCluster(() -> "http://127.0.0.1:1"); @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); } + @Override // the base class asserts that the attribute is set, but we don't want that here public void testAvailabilityZoneAttribute() throws IOException { - assumeTrue("test only in non-AWS environment", EC2MetadataUtils.getInstanceId() == null); - final var nodesInfoResponse = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_nodes/_all/_none"))); for (final var nodeId : nodesInfoResponse.evaluateMapKeys("nodes")) { assertNull(nodesInfoResponse.evaluateExact("nodes", nodeId, "attributes", "aws_availability_zone")); diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 530070f9e007..c6beaf3f332c 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.' - classname 'org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin' + description = 'The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.' 
+ classname = 'org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin' } versions << [ diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle index 72cb429b4907..c9d87cd6beb1 100644 --- a/plugins/discovery-gce/qa/gce/build.gradle +++ b/plugins/discovery-gce/qa/gce/build.gradle @@ -28,8 +28,9 @@ restResources { /** A task to start the GCEFixture which emulates a GCE service **/ def gceFixtureProvider = tasks.register("gceFixture", AntFixture) { - dependsOn project.sourceSets.yamlRestTest.runtimeClasspath - env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}" + def runtimeClasspath = project.sourceSets.yamlRestTest.runtimeClasspath + dependsOn runtimeClasspath + env 'CLASSPATH', "${-> runtimeClasspath.asPath}" executable = "${buildParams.runtimeJavaHome.get()}/bin/java" args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, "${buildDir}/testclusters/yamlRestTest-1/config/unicast_hosts.txt" } diff --git a/plugins/examples/custom-processor/build.gradle b/plugins/examples/custom-processor/build.gradle index 1aa15e2ed514..6c0281d899a4 100644 --- a/plugins/examples/custom-processor/build.gradle +++ b/plugins/examples/custom-processor/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'custom-processor' - description 'An example plugin showing how to register a custom ingest processor' - classname 'org.elasticsearch.example.customprocessor.ExampleProcessorPlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'custom-processor' + description = 'An example plugin showing how to register a custom ingest processor' + classname = 'org.elasticsearch.example.customprocessor.ExampleProcessorPlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/custom-settings/build.gradle b/plugins/examples/custom-settings/build.gradle index 4fc1f25e0d63..2774bf6e75c7 100644 --- a/plugins/examples/custom-settings/build.gradle +++ b/plugins/examples/custom-settings/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'custom-settings' - description 'An example plugin showing how to register custom settings' - classname 'org.elasticsearch.example.customsettings.ExampleCustomSettingsPlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'custom-settings' + description = 'An example plugin showing how to register custom settings' + classname = 'org.elasticsearch.example.customsettings.ExampleCustomSettingsPlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } testClusters.configureEach { diff --git a/plugins/examples/custom-significance-heuristic/build.gradle b/plugins/examples/custom-significance-heuristic/build.gradle index 6491e9384d55..f2f0cefa6d6f 100644 --- a/plugins/examples/custom-significance-heuristic/build.gradle +++ b/plugins/examples/custom-significance-heuristic/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'custom-significance-heuristic' - description 'An example plugin showing how to write and register a custom significance 
heuristic' - classname 'org.elasticsearch.example.customsigheuristic.CustomSignificanceHeuristicPlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'custom-significance-heuristic' + description = 'An example plugin showing how to write and register a custom significance heuristic' + classname = 'org.elasticsearch.example.customsigheuristic.CustomSignificanceHeuristicPlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/custom-suggester/build.gradle b/plugins/examples/custom-suggester/build.gradle index 6c9210f74327..a1cf345f5e81 100644 --- a/plugins/examples/custom-suggester/build.gradle +++ b/plugins/examples/custom-suggester/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'custom-suggester' - description 'An example plugin showing how to write and register a custom suggester' - classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'custom-suggester' + description = 'An example plugin showing how to write and register a custom suggester' + classname = 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } testClusters.configureEach { diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 22286c90de3d..e712035eabc7 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip +distributionSha256Sum=7ebdac923867a3cec0098302416d1e3c6c0c729fc4e2e05c10637a8af33a76c5 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index f6bbe0fc2b43..f501bd466ebe 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -10,12 +10,12 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'painless-whitelist' - description 'An example whitelisting additional classes and methods in painless' - classname 'org.elasticsearch.example.painlesswhitelist.MyWhitelistPlugin' + name = 'painless-whitelist' + description = 'An example whitelisting additional classes and methods in painless' + classname = 'org.elasticsearch.example.painlesswhitelist.MyWhitelistPlugin' extendedPlugins = ['lang-painless'] - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/rescore/build.gradle 
b/plugins/examples/rescore/build.gradle index 93139b9b54d9..023033349dd8 100644 --- a/plugins/examples/rescore/build.gradle +++ b/plugins/examples/rescore/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'example-rescore' - description 'An example plugin implementing rescore and verifying that plugins *can* implement rescore' - classname 'org.elasticsearch.example.rescore.ExampleRescorePlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'example-rescore' + description = 'An example plugin implementing rescore and verifying that plugins *can* implement rescore' + classname = 'org.elasticsearch.example.rescore.ExampleRescorePlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index b53ef6be39d5..43590b166a54 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'rest-handler' - description 'An example plugin showing how to register a REST handler' - classname 'org.elasticsearch.example.resthandler.ExampleRestHandlerPlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'rest-handler' + description = 'An example plugin showing how to register a REST handler' + classname = 'org.elasticsearch.example.resthandler.ExampleRestHandlerPlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/script-expert-scoring/build.gradle b/plugins/examples/script-expert-scoring/build.gradle index 8947938e227a..0fb1baaea2f0 100644 --- a/plugins/examples/script-expert-scoring/build.gradle +++ b/plugins/examples/script-expert-scoring/build.gradle @@ -10,11 +10,11 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'script-expert-scoring' - description 'An example script engine to use low level Lucene internals for expert scoring' - classname 'org.elasticsearch.example.expertscript.ExpertScriptPlugin' - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'script-expert-scoring' + description = 'An example script engine to use low level Lucene internals for expert scoring' + classname = 'org.elasticsearch.example.expertscript.ExpertScriptPlugin' + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/security-authorization-engine/build.gradle b/plugins/examples/security-authorization-engine/build.gradle index 3b49a7eb4db3..faf32774a20a 100644 --- a/plugins/examples/security-authorization-engine/build.gradle +++ b/plugins/examples/security-authorization-engine/build.gradle @@ -2,12 +2,12 @@ apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.java-rest-test' esplugin { - name 'security-authorization-engine' - description 'An example spi extension plugin for security that implements an Authorization Engine' - classname 
'org.elasticsearch.example.AuthorizationEnginePlugin' + name = 'security-authorization-engine' + description = 'An example spi extension plugin for security that implements an Authorization Engine' + classname = 'org.elasticsearch.example.AuthorizationEnginePlugin' extendedPlugins = ['x-pack-security'] - licenseFile rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } dependencies { diff --git a/plugins/examples/stable-analysis/build.gradle b/plugins/examples/stable-analysis/build.gradle index 358e2585623e..57126853da8e 100644 --- a/plugins/examples/stable-analysis/build.gradle +++ b/plugins/examples/stable-analysis/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'elasticsearch.stable-esplugin' apply plugin: 'elasticsearch.yaml-rest-test' esplugin { - name 'stable-analysis-plugin' - description 'An example analysis plugin using stable plugin api' + name = 'stable-analysis-plugin' + description = 'An example analysis plugin using stable plugin api' } //TODO write module-info diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index 435ad83974ef..63eb75865794 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.' - classname 'org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextPlugin' + description = 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.' + classname = 'org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextPlugin' } restResources { diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index c12849d545b3..6eaa95c7f055 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -167,7 +167,7 @@ public class AnnotatedTextFieldMapper extends FieldMapper { } } - public static TypeParser PARSER = new TypeParser( + public static final TypeParser PARSER = new TypeParser( (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), SourceFieldMapper.isSynthetic(c.getIndexSettings())) ); diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index 0271296df934..54423b2b990d 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - description 'The Mapper Murmur3 plugin allows to compute hashes of a field\'s values at index-time and to store them in the index.' - classname 'org.elasticsearch.plugin.mapper.MapperMurmur3Plugin' + description = 'The Mapper Murmur3 plugin allows to compute hashes of a field\'s values at index-time and to store them in the index.' 
+ classname = 'org.elasticsearch.plugin.mapper.MapperMurmur3Plugin' extendedPlugins = ['lang-painless'] } diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 2440668ff57b..738fb7ab7c25 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -63,7 +63,7 @@ public class Murmur3FieldMapper extends FieldMapper { } } - public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); // this only exists so a check can be done to match the field type to using murmur3 hashing... public static class Murmur3FieldType extends MappedFieldType {
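Several Java hunks in this change (`Netty4Plugin.SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS`, the `TypeParser PARSER` fields above) only add a `final` modifier. A non-final `public static` field can be reassigned by any code in the JVM; `final` pins the constant at class initialization. A tiny self-contained illustration (the `TypeParser` below is a stand-in, not the Elasticsearch class):

```java
// Minimal sketch: why shared parser/setting constants are declared final.
public class TypeParserHolder {

    // Stand-in for a parser type; immutable once constructed.
    public static final class TypeParser {
        private final String name;

        public TypeParser(String name) {
            this.name = name;
        }

        public String name() {
            return name;
        }
    }

    // Were this field not final, any code could run
    // TypeParserHolder.PARSER = new TypeParser("other") at runtime and
    // silently change behaviour everywhere; final makes that reassignment
    // a compile-time error.
    public static final TypeParser PARSER = new TypeParser("murmur3");
}
```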
diff --git a/plugins/mapper-size/build.gradle b/plugins/mapper-size/build.gradle index 93fa6fc320c1..0f9bf59767d9 100644 --- a/plugins/mapper-size/build.gradle +++ b/plugins/mapper-size/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Mapper Size plugin allows document to record their uncompressed size at index time.' - classname 'org.elasticsearch.plugin.mapper.MapperSizePlugin' + description = 'The Mapper Size plugin allows document to record their uncompressed size at index time.' + classname = 'org.elasticsearch.plugin.mapper.MapperSizePlugin' } restResources { diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index dea1e1bdd273..7df46699b79e 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -14,8 +14,8 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' - classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' + description = 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' + classname = 'org.elasticsearch.repositories.hdfs.HdfsPlugin' } versions << [ diff --git a/plugins/repository-hdfs/hadoop-client-api/build.gradle b/plugins/repository-hdfs/hadoop-client-api/build.gradle index 5e87b8129250..46b0d949cdee 100644 --- a/plugins/repository-hdfs/hadoop-client-api/build.gradle +++ b/plugins/repository-hdfs/hadoop-client-api/build.gradle @@ -1,3 +1,5 @@ +import org.gradle.api.file.ArchiveOperations + apply plugin: 'elasticsearch.java' sourceSets { @@ -27,16 +29,22 @@ def patchTask = tasks.register("patchClasses", JavaExec) { outputs.dir(outputDir) classpath = sourceSets.patcher.runtimeClasspath mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher' + def thejar = configurations.thejar doFirst { - args(configurations.thejar.singleFile, outputDir.get().asFile) + args(thejar.singleFile, outputDir.get().asFile) } } +interface InjectedArchiveOps { + @Inject ArchiveOperations getArchiveOperations() +} + tasks.named('jar').configure { dependsOn(configurations.thejar) - + def injected = project.objects.newInstance(InjectedArchiveOps) + def thejar = configurations.thejar from(patchTask) - from({ project.zipTree(configurations.thejar.singleFile) }) { + from({ injected.getArchiveOperations().zipTree(thejar.singleFile) }) { eachFile { if (outputDir.get().file(it.relativePath.pathString).asFile.exists()) { it.exclude() diff --git a/plugins/store-smb/build.gradle b/plugins/store-smb/build.gradle index 1186f5359ad5..727f9ed58867 100644 --- a/plugins/store-smb/build.gradle +++ b/plugins/store-smb/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - description 'The Store SMB plugin adds support for SMB stores.' - classname 'org.elasticsearch.plugin.store.smb.SMBStorePlugin' + description = 'The Store SMB plugin adds support for SMB stores.' + classname = 'org.elasticsearch.plugin.store.smb.SMBStorePlugin' } restResources { restApi { diff --git a/qa/logging-spi/build.gradle b/qa/logging-spi/build.gradle index 04f09b9638bf..d6b707eb24dd 100644 --- a/qa/logging-spi/build.gradle +++ b/qa/logging-spi/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - name 'logging-spi-test' - description 'An test plugin to test the SPI behaviour of ES logging' - classname 'org.elasticsearch.test.logging.plugin.TestLoggingPlugin' + name = 'logging-spi-test' + description = 'A test plugin to test the SPI behaviour of ES logging' + classname = 'org.elasticsearch.test.logging.plugin.TestLoggingPlugin' } dependencies { diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index 8a28ea8fd1c5..e3d42850375d 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -39,14 +39,22 @@ elasticsearch_distributions { } } + +interface Injected { + @Inject + FileSystemOperations getFs() +} + tasks.named("preProcessFixture").configure { dependsOn "copyNodeKeyMaterial", elasticsearch_distributions.docker + def injected = project.objects.newInstance(Injected) + doLast { // tests expect to have an empty repo - project.delete( - "${testFixturesDir}/repo", - "${testFixturesDir}/oss-repo" - ) + injected.fs.delete { + it.delete("${testFixturesDir}/repo") + it.delete("${testFixturesDir}/oss-repo") + } createAndSetWritable( "${testFixturesDir}/repo", "${testFixturesDir}/oss-repo", @@ -69,7 +77,7 @@ dockerCompose { def createAndSetWritable(Object... 
locations) { locations.each { location -> - File file = file(location) + File file = new File(location) file.mkdirs() file.setWritable(true, false) } }
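The `InjectedArchiveOps` and `Injected` interfaces introduced above are the usual workaround for Gradle's configuration cache: task actions may no longer reach back to the `Project` (e.g. `project.zipTree(...)` or `project.delete(...)`) at execution time, so services such as `ArchiveOperations` and `FileSystemOperations` are obtained eagerly via `@Inject` and captured by the action. A sketch of the same pattern as a typed task class (the task name and paths are illustrative, not from this change):

```java
import javax.inject.Inject;

import org.gradle.api.DefaultTask;
import org.gradle.api.file.FileSystemOperations;
import org.gradle.api.tasks.TaskAction;

// Configuration-cache-friendly task: the FileSystemOperations service is
// injected when the task is instantiated, so the action below never has
// to touch the Project object at execution time.
public abstract class CleanFixtureReposTask extends DefaultTask {

    @Inject
    protected abstract FileSystemOperations getFs();

    @TaskAction
    public void cleanRepos() {
        // Equivalent of the injected.fs.delete { ... } call in the hunk above.
        getFs().delete(spec -> spec.delete("build/fixtures/repo", "build/fixtures/oss-repo"));
    }
}
```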
diff --git a/qa/system-indices/build.gradle b/qa/system-indices/build.gradle index d035b2f57d55..c619d4f02e52 100644 --- a/qa/system-indices/build.gradle +++ b/qa/system-indices/build.gradle @@ -11,11 +11,11 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' esplugin { - name 'system-indices-qa' - description 'Plugin for performing QA of system indices' - classname 'org.elasticsearch.system.indices.SystemIndicesQA' - licenseFile rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = 'system-indices-qa' + description = 'Plugin for performing QA of system indices' + classname = 'org.elasticsearch.system.indices.SystemIndicesQA' + licenseFile = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } testClusters.configureEach { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json index 12202a7a2a7b..1d66312f053c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json @@ -25,6 +25,15 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json index 1395a3d3275a..71f1727a8638 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json @@ -25,6 +25,15 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json index f6ce3e75cc37..4166122d5bf1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json @@ -19,6 +19,15 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json index 94d0772a405d..406fee601552 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json @@ -31,6 +31,15 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + 
} + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json index aa693ad31711..05281ff46cb8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json @@ -19,6 +19,15 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json index 92ba1b4c321e..404f92f55921 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json @@ -19,6 +19,15 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json index 7e7babb987c7..621ed870ffdb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json @@ -26,7 +26,16 @@ } ] }, - "params":{}, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + }, "body":{ "description":"The snapshot lifecycle policy definition to register" } diff --git a/server/build.gradle b/server/build.gradle index 0bd807751ecb..b186999c36ca 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -15,7 +15,7 @@ apply plugin: 'elasticsearch.internal-test-artifact' publishing { publications { elastic { - artifactId 'elasticsearch' + artifactId = 'elasticsearch' } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java index a8028e867145..31c7720ffec1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java @@ -16,19 +16,14 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import 
org.elasticsearch.test.FailingFieldPlugin; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matcher; @@ -37,7 +32,6 @@ import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Set; import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.equalTo; @@ -455,48 +449,4 @@ public class IndicesMetricsIT extends ESIntegTestCase { return parser.map(); } } - - public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { - - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { - return new ScriptEngine() { - @Override - public String getType() { - return "failing_field"; - } - - @Override - @SuppressWarnings("unchecked") - public <FactoryType> FactoryType compile( - String name, - String code, - ScriptContext<FactoryType> context, - Map<String, String> params - ) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map<String, Object> params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - throw new IllegalStateException("Accessing failing field"); - } - }; - } - }; - } - - @Override - public Set<ScriptContext<?>> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; - } - } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java index 9f40b1928dce..2530dd35946f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java @@ -19,22 +19,17 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.search.stats.SearchStats.Stats; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.FailingFieldPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -80,50 +75,6 @@ public class SearchStatsIT extends ESIntegTestCase { } } - public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { - - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { - return new ScriptEngine() { - @Override - public String getType() { - return "failing_field"; - } - - @Override - @SuppressWarnings("unchecked") - public <FactoryType> FactoryType 
compile( - String name, - String code, - ScriptContext<FactoryType> context, - Map<String, String> params - ) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map<String, Object> params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - throw new IllegalArgumentException("Accessing failing field"); - } - }; - } - }; - } - - @Override - public Set<ScriptContext<?>> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; - } - } - @Override protected int numberOfReplicas() { return 0; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java index 755ee960be73..aeac8959df61 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java @@ -753,11 +753,16 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { clusterService.submitUnbatchedStateUpdateTask("mark node for removal", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { - final var nodeId = currentState.nodes().resolveNode(nodeName).getId(); + final var node = currentState.nodes().resolveNode(nodeName); return currentState.copyAndUpdateMetadata( mdb -> mdb.putCustom( NodesShutdownMetadata.TYPE, - new NodesShutdownMetadata(Map.of(nodeId, shutdownMetadataBuilder.setNodeId(nodeId).build())) + new NodesShutdownMetadata( + Map.of( + node.getId(), + shutdownMetadataBuilder.setNodeId(node.getId()).setNodeEphemeralId(node.getEphemeralId()).build() + ) + ) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java index 15c5e3379734..ac322868989b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java @@ -1235,15 +1235,16 @@ public class SnapshotStressTestsIT extends AbstractSnapshotIntegTestCase { Strings.toString(currentState), currentState.metadata().nodeShutdowns().getAll().isEmpty() ); - final var nodeId = currentState.nodes().resolveNode(node.nodeName).getId(); + final var discoveryNode = currentState.nodes().resolveNode(node.nodeName); return currentState.copyAndUpdateMetadata( mdb -> mdb.putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( Map.of( - nodeId, + discoveryNode.getId(), SingleNodeShutdownMetadata.builder() - .setNodeId(nodeId) + .setNodeId(discoveryNode.getId()) + .setNodeEphemeralId(discoveryNode.getEphemeralId()) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(clusterService.threadPool().absoluteTimeInMillis()) .setReason("test") diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index d7ba415687af..aba8f075b858 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -481,4 +481,5 @@ module org.elasticsearch.server { exports org.elasticsearch.inference.configuration; exports org.elasticsearch.monitor.metrics; exports org.elasticsearch.plugins.internal.rewriter to org.elasticsearch.inference; + exports 
org.elasticsearch.lucene.util.automaton; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9eb4235b3feb..5e3947a3cd13 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -147,6 +147,8 @@ public class TransportVersions { public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0); public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_00_0); public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_00_0); + public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_00_0); + public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_00_0); /* * WARNING: DO NOT MERGE INTO MAIN! diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index e19410173848..4ead19fbf30e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -15,9 +15,11 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.http.HttpStats; @@ -38,10 +40,13 @@ import org.elasticsearch.transport.TransportStats; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; +import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk; + /** * Node statistics (dynamic, changes depending on when created). 
*/ @@ -342,7 +347,7 @@ public class NodeStats extends BaseNodeResponse implements ChunkedToXContent { @Override public Iterator toXContentChunked(ToXContent.Params outerParams) { - return ChunkedToXContent.builder(outerParams).append((builder, params) -> { + return Iterators.concat(singleChunk((builder, params) -> { builder.field("name", getNode().getName()); builder.field("transport_address", getNode().getAddress().toString()); builder.field("host", getNode().getHostName()); @@ -353,7 +358,6 @@ public class NodeStats extends BaseNodeResponse implements ChunkedToXContent { builder.value(role.roleName()); } builder.endArray(); - if (getNode().getAttributes().isEmpty() == false) { builder.startObject("attributes"); for (Map.Entry attrEntry : getNode().getAttributes().entrySet()) { @@ -361,30 +365,40 @@ public class NodeStats extends BaseNodeResponse implements ChunkedToXContent { } builder.endObject(); } + return builder; - }) - - .appendIfPresent(getIndices()) - .append((builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p)) - - .appendIfPresent(getThreadPool()) - .appendIfPresent(getFs()) - .appendIfPresent(getTransport()) - .appendIfPresent(getHttp()) - .appendIfPresent(getBreaker()) - .appendIfPresent(getScriptStats()) - .appendIfPresent(getDiscoveryStats()) - .appendIfPresent(getIngestStats()) - .appendIfPresent(getAdaptiveSelectionStats()) - .appendIfPresent(getScriptCacheStats()) - .append( + }), + ifPresent(getIndices()).toXContentChunked(outerParams), + singleChunk( + (builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p) + ), + ifPresent(getThreadPool()).toXContentChunked(outerParams), + singleChunkIfPresent(getFs()), + ifPresent(getTransport()).toXContentChunked(outerParams), + ifPresent(getHttp()).toXContentChunked(outerParams), + singleChunkIfPresent(getBreaker()), + ifPresent(getScriptStats()).toXContentChunked(outerParams), + singleChunkIfPresent(getDiscoveryStats()), + ifPresent(getIngestStats()).toXContentChunked(outerParams), + singleChunkIfPresent(getAdaptiveSelectionStats()), + singleChunkIfPresent(getScriptCacheStats()), + singleChunk( (builder, p) -> builder.value(ifPresent(getIndexingPressureStats()), p) .value(ifPresent(getRepositoriesStats()), p) .value(ifPresent(getNodeAllocationStats()), p) - ); + ) + ); + } + + private static ChunkedToXContent ifPresent(@Nullable ChunkedToXContent chunkedToXContent) { + return Objects.requireNonNullElse(chunkedToXContent, ChunkedToXContent.EMPTY); } private static ToXContent ifPresent(@Nullable ToXContent toXContent) { return Objects.requireNonNullElse(toXContent, ToXContent.EMPTY); } + + private static Iterator singleChunkIfPresent(ToXContent toXContent) { + return toXContent == null ? 
Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(toXContent); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java index 1f0e0daf77d7..25d34957b695 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -14,9 +14,10 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.nodes.BaseNodesXContentResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -41,12 +42,15 @@ public class NodesStatsResponse extends BaseNodesXContentResponse { @Override protected Iterator xContentChunks(ToXContent.Params outerParams) { - return ChunkedToXContent.builder(outerParams) - .object( - "nodes", - getNodes().iterator(), - (b, ns) -> b.object(ns.getNode().getId(), ob -> ob.field("timestamp", ns.getTimestamp()).append(ns)) - ); + return Iterators.concat( + ChunkedToXContentHelper.startObject("nodes"), + Iterators.flatMap(getNodes().iterator(), nodeStats -> Iterators.concat(Iterators.single((builder, params) -> { + builder.startObject(nodeStats.getNode().getId()); + builder.field("timestamp", nodeStats.getTimestamp()); + return builder; + }), nodeStats.toXContentChunked(outerParams), ChunkedToXContentHelper.endObject())), + ChunkedToXContentHelper.endObject() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 45ee00a98c2e..3f12c9e7a5bd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -13,11 +13,12 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.master.IsAcknowledgedSupplier; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV10; @@ -25,6 +26,7 @@ import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; +import java.util.Collections; import java.util.Iterator; import java.util.Objects; @@ -92,14 +94,17 @@ public class ClusterRerouteResponse extends ActionResponse 
implements IsAcknowle if (emitState(outerParams)) { deprecationLogger.critical(DeprecationCategory.API, "reroute_cluster_state", STATE_FIELD_DEPRECATION_MESSAGE); } - return ChunkedToXContent.builder(outerParams).object(b -> { - b.field(ACKNOWLEDGED_KEY, isAcknowledged()); - if (emitState(outerParams)) { - b.xContentObject("state", state); - } - if (outerParams.paramAsBoolean("explain", false)) { - b.append(explanations); - } - }); + return Iterators.concat( + Iterators.single((builder, params) -> builder.startObject().field(ACKNOWLEDGED_KEY, isAcknowledged())), + emitState(outerParams) + ? ChunkedToXContentHelper.wrapWithObject("state", state.toXContentChunked(outerParams)) + : Collections.emptyIterator(), + Iterators.single((builder, params) -> { + if (params.paramAsBoolean("explain", false)) { + explanations.toXContent(builder, params); + } + return builder.endObject(); + }) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index b6ced0662330..bfdf41e58f6d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -61,7 +61,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest repositoryFailures; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index b7a513b3cb08..03e05ca0e424 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -54,7 +54,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> ob.append((b, p) -> { - b.field(ERRORS, hasFailures()); - b.field(TOOK, tookInMillis); + return Iterators.concat(Iterators.single((builder, p) -> { + builder.startObject(); + builder.field(ERRORS, hasFailures()); + builder.field(TOOK, tookInMillis); if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { - b.field(INGEST_TOOK, ingestTookInMillis); + builder.field(INGEST_TOOK, ingestTookInMillis); } - return b; - }).array(ITEMS, Iterators.forArray(responses))); + return builder.startArray(ITEMS); + }), Iterators.forArray(responses), Iterators.single((builder, p) -> builder.endArray().endObject())); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index ca9388c3a4f6..06d1c3ae2310 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -49,9 +49,11 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MapperException; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; @@ -330,7 +332,8 @@ public class TransportShardBulkAction extends TransportWriteAction 0) { + // Retrieves the inference metadata field containing the inference results for all semantic fields defined in the mapping. + return new String[] { RoutingFieldMapper.NAME, InferenceMetadataFieldsMapper.NAME }; + } + } + return new String[] { RoutingFieldMapper.NAME }; + } + private static boolean handleMappingUpdateRequired( BulkPrimaryExecutionContext context, MappingUpdatePerformer mappingUpdater, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 041b3ae73c1e..18edc83bf3dc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -14,12 +14,13 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; @@ -382,17 +383,24 @@ public class SearchResponse extends ActionResponse implements ChunkedToXContentO @Override public Iterator toXContentChunked(ToXContent.Params params) { assert hasReferences(); - return ChunkedToXContent.builder(params).xContentObject(innerToXContentChunked(params)); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + this.innerToXContentChunked(params), + ChunkedToXContentHelper.endObject() + ); } public Iterator innerToXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .append(SearchResponse.this::headerToXContent) - .append(clusters) - .append(hits) - .appendIfPresent(aggregations) - .appendIfPresent(suggest) - .appendIfPresent(profileResults); + return Iterators.concat( + ChunkedToXContentHelper.singleChunk(SearchResponse.this::headerToXContent), + Iterators.single(clusters), + Iterators.concat( + hits.toXContentChunked(params), + aggregations == null ? Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(aggregations), + suggest == null ? Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(suggest), + profileResults == null ? 
Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(profileResults) + ) + ); } public XContentBuilder headerToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java index 4b65af23c622..18172bf4e8f3 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java @@ -9,8 +9,9 @@ package org.elasticsearch.action.support.broadcast; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.xcontent.ToXContent; @@ -35,8 +36,11 @@ public abstract class ChunkedBroadcastResponse extends BaseBroadcastResponse imp @Override public final Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .object(ob -> ob.append((b, p) -> RestActions.buildBroadcastShardsHeader(b, p, this)).append(this::customXContentChunks)); + return Iterators.concat(Iterators.single((b, p) -> { + b.startObject(); + RestActions.buildBroadcastShardsHeader(b, p, this); + return b; + }), customXContentChunks(params), ChunkedToXContentHelper.endObject()); } protected abstract Iterator customXContentChunks(ToXContent.Params params); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 50255034ef69..faac1b010ec8 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -615,7 +615,7 @@ public abstract class TransportBroadcastByNodeAction< * which there is no shard-level return value. 
*/ public static final class EmptyResult implements Writeable { - public static EmptyResult INSTANCE = new EmptyResult(); + public static final EmptyResult INSTANCE = new EmptyResult(); private EmptyResult() {} diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesXContentResponse.java b/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesXContentResponse.java index b9fb2807a05c..2a2760656906 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesXContentResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesXContentResponse.java @@ -11,7 +11,8 @@ package org.elasticsearch.action.support.nodes; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.xcontent.ToXContent; @@ -29,12 +30,11 @@ public abstract class BaseNodesXContentResponse&lt;TNodeResponse extends BaseNodeR @Override public final Iterator&lt;? extends ToXContent&gt; toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .object( - ob -> ob.append((b, p) -> RestActions.buildNodesHeader(b, p, this)) - .field("cluster_name", getClusterName().value()) - .append(xContentChunks(params)) - ); + return Iterators.concat(Iterators.single((b, p) -> { + b.startObject(); + RestActions.buildNodesHeader(b, p, this); + return b.field("cluster_name", getClusterName().value()); + }), xContentChunks(params), ChunkedToXContentHelper.endObject()); } protected abstract Iterator&lt;? extends ToXContent&gt; xContentChunks(ToXContent.Params outerParams); diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 966f2c757770..e36579119f61 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -45,6 +45,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.InferenceFieldMapper; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.shard.IndexShard; @@ -386,7 +387,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio IndexMetadata indexMetadata, MappingLookup mappingLookup ) { - if (result.getResponseResult() != DocWriteResponse.Result.UPDATED) { + if (result.getResponseResult() != DocWriteResponse.Result.UPDATED || InferenceMetadataFieldsMapper.isEnabled(mappingLookup)) { return result; } @@ -415,7 +416,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio String inferenceFieldName = entry.getKey(); Mapper mapper = mappingLookup.getMapper(inferenceFieldName); - if (mapper instanceof InferenceFieldMapper inferenceFieldMapper) { + if (mapper instanceof InferenceFieldMapper) { String[] sourceFields = entry.getValue().getSourceFields(); for (String sourceField : sourceFields) { if (sourceField.equals(inferenceFieldName) == false @@ -424,7 +425,7 @@ public class 
TransportUpdateAction extends TransportInstanceSingleOperationActio // This has two important side effects: // - The inference field value will remain parsable by its mapper // - The inference results will be removed, forcing them to be re-generated downstream - updatedSource.put(inferenceFieldName, inferenceFieldMapper.getOriginalValue(updatedSource)); + updatedSource.put(inferenceFieldName, getOriginalValueLegacy(inferenceFieldName, updatedSource)); updatedSourceModified = true; break; } @@ -447,4 +448,24 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio return returnedResult; } + + /** + * Get the field's original value (i.e. the value the user specified) from the provided source. + * + * @param fullPath The full path of the field + * @param sourceAsMap The source as a map + * @return The field's original value, or {@code null} if none was provided + */ + private static Object getOriginalValueLegacy(String fullPath, Map&lt;String, Object&gt; sourceAsMap) { + // TODO: Fix bug here when semantic text field is in an object + Object fieldValue = sourceAsMap.get(fullPath); + if (fieldValue == null) { + return null; + } else if (fieldValue instanceof Map&lt;?, ?&gt; == false) { + // Don't try to further validate the non-map value, that will be handled when the source is fully parsed + return fieldValue; + } + + Map&lt;String, Object&gt; fieldValueMap = XContentMapValues.nodeMapValue(fieldValue, "Field [" + fullPath + "]"); + return XContentMapValues.extractValue("text", fieldValueMap); + } } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index a645c156b63c..39600c7eca66 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -60,7 +60,15 @@ public class UpdateHelper { * Prepares an update request by converting it into an index or delete request or an update response (no action). */ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier nowInMillis) throws IOException { - final GetResult getResult = indexShard.getService().getForUpdate(request.id(), request.ifSeqNo(), request.ifPrimaryTerm()); + // TODO: Don't hard-code gFields + return prepare(request, indexShard, nowInMillis, new String[] { RoutingFieldMapper.NAME }); + } + + /** + * Prepares an update request by converting it into an index or delete request or an update response (no action). 
+ */ + public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier nowInMillis, String[] gFields) throws IOException { + final GetResult getResult = indexShard.getService().getForUpdate(request.id(), request.ifSeqNo(), request.ifPrimaryTerm(), gFields); return prepare(indexShard, request, getResult, nowInMillis); } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java index 5b5a6577082d..495306a334c2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java @@ -11,9 +11,10 @@ package org.elasticsearch.cluster; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ToXContent; @@ -278,12 +279,15 @@ public class ClusterFeatures implements Diffable, ChunkedToXCon @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .array(nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).iterator(), e -> (builder, p) -> { + return Iterators.concat( + ChunkedToXContentHelper.startArray(), + nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).map(e -> (builder, p) -> { String[] features = e.getValue().toArray(String[]::new); Arrays.sort(features); return builder.startObject().field("node_id", e.getKey()).array("features", features).endObject(); - }); + }).iterator(), + ChunkedToXContentHelper.endArray() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java index d60d0a1859ca..fc7aa65e80f5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -35,6 +36,9 @@ import java.util.Set; import static org.elasticsearch.cluster.routing.ShardRouting.newUnassigned; import static org.elasticsearch.cluster.routing.UnassignedInfo.Reason.REINITIALIZED; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endArray; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject; /** * ClusterInfo is an object representing a map of nodes to {@link DiskUsage} @@ -138,7 +142,7 @@ public class ClusterInfo implements ChunkedToXContent, Writeable { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object("nodes", 
leastAvailableSpaceUsage.entrySet().iterator(), c -> (builder, p) -> { + return Iterators.concat(startObject("nodes"), Iterators.map(leastAvailableSpaceUsage.entrySet().iterator(), c -> (builder, p) -> { builder.startObject(c.getKey()); { // node builder.field("node_name", c.getValue().nodeName()); @@ -157,31 +161,48 @@ public class ClusterInfo implements ChunkedToXContent, Writeable { builder.endObject(); // end "most_available" } return builder.endObject(); // end $nodename - }) - .object( - "shard_sizes", + }), + singleChunk( + (builder, p) -> builder.endObject() // end "nodes" + .startObject("shard_sizes") + ), + + Iterators.map( shardSizes.entrySet().iterator(), c -> (builder, p) -> builder.humanReadableField(c.getKey() + "_bytes", c.getKey(), ByteSizeValue.ofBytes(c.getValue())) - ) - .object( - "shard_data_set_sizes", + ), + singleChunk( + (builder, p) -> builder.endObject() // end "shard_sizes" + .startObject("shard_data_set_sizes") + ), + Iterators.map( shardDataSetSizes.entrySet().iterator(), c -> (builder, p) -> builder.humanReadableField( c.getKey() + "_bytes", c.getKey().toString(), ByteSizeValue.ofBytes(c.getValue()) ) - ) - .object("shard_paths", dataPath.entrySet().iterator(), (xb, c) -> xb.field(c.getKey().toString(), c.getValue())) - .array("reserved_sizes", reservedSpace.entrySet().iterator(), c -> (builder, p) -> { + ), + singleChunk( + (builder, p) -> builder.endObject() // end "shard_data_set_sizes" + .startObject("shard_paths") + ), + Iterators.map(dataPath.entrySet().iterator(), c -> (builder, p) -> builder.field(c.getKey().toString(), c.getValue())), + singleChunk( + (builder, p) -> builder.endObject() // end "shard_paths" + .startArray("reserved_sizes") + ), + Iterators.map(reservedSpace.entrySet().iterator(), c -> (builder, p) -> { builder.startObject(); { builder.field("node_id", c.getKey().nodeId); builder.field("path", c.getKey().path); - c.getValue().toXContent(builder, p); + c.getValue().toXContent(builder, params); } return builder.endObject(); // NodeAndPath - }); + }), + endArray() // end "reserved_sizes" + ); } /** @@ -290,7 +311,7 @@ public class ClusterInfo implements ChunkedToXContent, Writeable { // exposed for tests, computed here rather than exposing all the collections separately int getChunkCount() { - return leastAvailableSpaceUsage.size() + shardSizes.size() + shardDataSetSizes.size() + dataPath.size() + reservedSpace.size() + 10; + return leastAvailableSpaceUsage.size() + shardSizes.size() + shardDataSetSizes.size() + dataPath.size() + reservedSpace.size() + 6; } public record NodeAndShard(String nodeId, ShardId shardId) implements Writeable { diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java b/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java index ac96a2d55bc7..b859b0d13a58 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java @@ -36,7 +36,7 @@ public record ClusterSnapshotStats( List statsByRepository ) implements ToXContentObject, Writeable { - public static ClusterSnapshotStats EMPTY = new ClusterSnapshotStats(0, 0, 0, 0, List.of()); + public static final ClusterSnapshotStats EMPTY = new ClusterSnapshotStats(0, 0, 0, 0, List.of()); public static ClusterSnapshotStats of(ClusterState clusterState, long currentTimeMillis) { return of( diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java 
b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 845ebbc4a1a9..e00600c677bd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -726,7 +726,7 @@ public class ClusterState implements ChunkedToXContent, Diffable { return Iterators.concat( // header chunk - Iterators.single(((builder, params) -> { + Iterators.single((builder, params) -> { // always provide the cluster_uuid as part of the top-level response (also part of the metadata response) builder.field("cluster_uuid", metadata().clusterUUID()); @@ -742,7 +742,7 @@ public class ClusterState implements ChunkedToXContent, Diffable { } return builder; - })), + }), // blocks metrics.contains(Metric.BLOCKS) ? blocksXContent(multiProject, singleProjectId) : Collections.emptyIterator(), @@ -825,8 +825,10 @@ public class ClusterState implements ChunkedToXContent, Diffable { // customs metrics.contains(Metric.CUSTOMS) - ? ChunkedToXContent.builder(outerParams) - .forEach(customs.entrySet().iterator(), (b, e) -> b.xContentObject(e.getKey(), e.getValue())) + ? Iterators.flatMap( + customs.entrySet().iterator(), + e -> ChunkedToXContentHelper.wrapWithObject(e.getKey(), e.getValue().toXContentChunked(outerParams)) + ) : Collections.emptyIterator() ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java index 1e00a5751f10..a4b2561d130b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java @@ -17,7 +17,7 @@ import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -102,8 +102,8 @@ public class ComponentTemplateMetadata implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(COMPONENT_TEMPLATE.getPreferredName(), componentTemplates); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.xContentValuesMap(COMPONENT_TEMPLATE.getPreferredName(), componentTemplates); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 998217e93c42..45433c4cba10 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -427,7 +427,7 @@ public class ComposableIndexTemplate implements SimpleDiffable toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(INDEX_TEMPLATE.getPreferredName(), indexTemplates); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.xContentValuesMap(INDEX_TEMPLATE.getPreferredName(), indexTemplates); } 
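/*
 * Aside (not part of the PR): the two template-metadata hunks above replace the fluent
 * ChunkedToXContent.builder(params).xContentObjectFields(name, map) call with the static
 * helper ChunkedToXContentHelper.xContentValuesMap(name, map). Below is a minimal sketch of
 * the chunk stream such a call is expected to produce, built only from utilities that appear
 * elsewhere in this diff (Iterators.concat/map, startObject/endObject); the method name
 * xContentValuesMapSketch and its placement are illustrative, not from this PR. Assumes
 * imports of org.elasticsearch.common.collect.Iterators,
 * org.elasticsearch.common.xcontent.ChunkedToXContentHelper and org.elasticsearch.xcontent.ToXContent.
 */
private static <T extends ToXContent> Iterator<ToXContent> xContentValuesMapSketch(String name, Map<String, T> map) {
    return Iterators.concat(
        ChunkedToXContentHelper.startObject(name), // one chunk: "<name>": {
        Iterators.map(
            map.entrySet().iterator(), // one chunk per entry, rendering each value as an object field
            e -> (ToXContent) (builder, params) -> e.getValue().toXContent(builder.field(e.getKey()), params)
        ),
        ChunkedToXContentHelper.endObject() // one chunk: }
    );
}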
@Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 82e58df47871..92c642a7952b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -82,7 +82,7 @@ public final class DataStream implements SimpleDiffable, ToXContentO public static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd"); public static final String TIMESTAMP_FIELD_NAME = "@timestamp"; // Timeseries indices' leaf readers should be sorted by desc order of their timestamp field, as it allows search time optimizations - public static Comparator TIMESERIES_LEAF_READERS_SORTER = Comparator.comparingLong((LeafReader r) -> { + public static final Comparator TIMESERIES_LEAF_READERS_SORTER = Comparator.comparingLong((LeafReader r) -> { try { PointValues points = r.getPointValues(TIMESTAMP_FIELD_NAME); if (points != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java index 7146a8858779..85c8f12c8d85 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java @@ -17,9 +17,10 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -231,10 +232,13 @@ public class DataStreamMetadata implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .xContentObjectFields(DATA_STREAM.getPreferredName(), dataStreams) - .xContentObject(DATA_STREAM_ALIASES.getPreferredName(), dataStreamAliases.values().iterator()); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.concat( + ChunkedToXContentHelper.xContentValuesMap(DATA_STREAM.getPreferredName(), dataStreams), + ChunkedToXContentHelper.startObject(DATA_STREAM_ALIASES.getPreferredName()), + dataStreamAliases.values().iterator(), + ChunkedToXContentHelper.endObject() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 0a5d9019e0c4..ba1f236ac75b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; 
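/*
 * Aside (not part of the PR): the same mechanical rewrite recurs in every metadata class
 * touched here. Where the deleted builder DSL wrote, e.g. in the DataStreamMetadata hunk above,
 *     ChunkedToXContent.builder(params)
 *         .xContentObject(DATA_STREAM_ALIASES.getPreferredName(), dataStreamAliases.values().iterator())
 * the replacement assembles the identical chunk stream explicitly. A sketch, assuming the
 * names used in that hunk:
 */
Iterator<? extends ToXContent> aliasChunks = Iterators.concat(
    ChunkedToXContentHelper.startObject(DATA_STREAM_ALIASES.getPreferredName()), // "data_stream_aliases": {
    dataStreamAliases.values().iterator(),                                       // each alias renders itself as a field
    ChunkedToXContentHelper.endObject()                                          // }
);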
import org.elasticsearch.index.Index; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.ObjectParser; @@ -128,8 +128,8 @@ public final class IndexGraveyard implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(TOMBSTONES_FIELD.getPreferredName(), tombstones.iterator()); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.array(TOMBSTONES_FIELD.getPreferredName(), tombstones.iterator()); } public static IndexGraveyard fromXContent(final XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index a1f1a151828b..a7687c2777b4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.FixForMultiProject; @@ -105,25 +106,25 @@ public class Metadata implements Diffable, ChunkedToXContent { /** * Indicates that this custom metadata will be returned as part of an API call but will not be persisted */ - public static EnumSet API_ONLY = EnumSet.of(XContentContext.API); + public static final EnumSet API_ONLY = EnumSet.of(XContentContext.API); /** * Indicates that this custom metadata will be returned as part of an API call and will be persisted between * node restarts, but will not be a part of a snapshot global state */ - public static EnumSet API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY); + public static final EnumSet API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY); /** * Indicates that this custom metadata will be returned as part of an API call and stored as a part of * a snapshot global state, but will not be persisted between node restarts */ - public static EnumSet API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT); + public static final EnumSet API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT); /** * Indicates that this custom metadata will be returned as part of an API call, stored as a part of * a snapshot global state, and will be persisted between node restarts */ - public static EnumSet ALL_CONTEXTS = EnumSet.allOf(XContentContext.class); + public static final EnumSet ALL_CONTEXTS = EnumSet.allOf(XContentContext.class); public interface MetadataCustom extends NamedDiffable, ChunkedToXContent { @@ -626,64 +627,86 @@ public class Metadata implements Diffable, ChunkedToXContent { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - XContentContext context = XContentContext.from(params); + public Iterator toXContentChunked(ToXContent.Params p) { + XContentContext context = XContentContext.from(p); + final Iterator start = context == XContentContext.API + ? 
ChunkedToXContentHelper.startObject("metadata") + : Iterators.single((builder, params) -> builder.startObject("meta-data").field("version", version())); - var builder = ChunkedToXContent.builder(params) - .append( - context == XContentContext.API - ? (b, p) -> b.startObject("metadata") - : (b, p) -> b.startObject("meta-data").field("version", version()) - ) - .append((b, p) -> { - b.field("cluster_uuid", clusterUUID); - b.field("cluster_uuid_committed", clusterUUIDCommitted); - b.startObject("cluster_coordination"); - coordinationMetadata().toXContent(b, p); - return b.endObject(); + final Iterator persistentSettings = context != XContentContext.API && persistentSettings().isEmpty() == false + ? Iterators.single((builder, params) -> { + builder.startObject("settings"); + persistentSettings().toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true"))); + return builder.endObject(); }) - .execute(xb -> { - if (context != XContentContext.API && persistentSettings().isEmpty() == false) { - xb.append((b, p) -> { - b.startObject("settings"); - persistentSettings().toXContent(b, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true"))); - return b.endObject(); - }); - } - }); + : Collections.emptyIterator(); // use a tree map so the order is deterministic Map clusterReservedState = new TreeMap<>(reservedStateMetadata); @FixForMultiProject // Need to revisit whether this should be a param or something else. - boolean multiProject = params.paramAsBoolean("multi-project", false); + final boolean multiProject = p.paramAsBoolean("multi-project", false); if (multiProject) { - builder.array( - "projects", - projectMetadata.entrySet().iterator(), - (b, e) -> b.object(ob -> ob.field("id", e.getKey()).append(e.getValue())) + return Iterators.concat(start, Iterators.single((builder, params) -> { + builder.field("cluster_uuid", clusterUUID); + builder.field("cluster_uuid_committed", clusterUUIDCommitted); + builder.startObject("cluster_coordination"); + coordinationMetadata().toXContent(builder, params); + return builder.endObject(); + }), + persistentSettings, + ChunkedToXContentHelper.array( + "projects", + Iterators.flatMap( + projectMetadata.entrySet().iterator(), + e -> Iterators.concat( + ChunkedToXContentHelper.startObject(), + Iterators.single((builder, params) -> builder.field("id", e.getKey())), + e.getValue().toXContentChunked(p), + ChunkedToXContentHelper.endObject() + ) + ) + ), + Iterators.flatMap( + customs.entrySet().iterator(), + entry -> entry.getValue().context().contains(context) + ? ChunkedToXContentHelper.wrapWithObject(entry.getKey(), entry.getValue().toXContentChunked(p)) + : Collections.emptyIterator() + ), + ChunkedToXContentHelper.wrapWithObject("reserved_state", reservedStateMetadata().values().iterator()), + ChunkedToXContentHelper.endObject() ); } else { - if (projectMetadata.size() == 1) { - // Need to rethink what to do here. This might be right, but maybe not... 
- @FixForMultiProject - final ProjectMetadata project = projectMetadata.values().iterator().next(); - - // need to combine reserved state together into a single block so we don't get duplicate keys - // and not include it in the project xcontent output (through the lack of multi-project params) - clusterReservedState.putAll(project.reservedStateMetadata()); - builder.append(project); - } else { + if (projectMetadata.size() != 1) { throw new MultiProjectPendingException("There are multiple projects " + projectMetadata.keySet()); } - } + // Need to rethink what to do here. This might be right, but maybe not... + @FixForMultiProject + final ProjectMetadata project = projectMetadata.values().iterator().next(); - return builder.forEach(customs.entrySet().iterator(), (b, e) -> { - if (e.getValue().context().contains(context)) { - b.xContentObject(e.getKey(), e.getValue()); - } - }).xContentObject("reserved_state", clusterReservedState.values().iterator()).append((b, p) -> b.endObject()); + // need to combine reserved state together into a single block so we don't get duplicate keys + // and not include it in the project xcontent output (through the lack of multi-project params) + clusterReservedState.putAll(project.reservedStateMetadata()); + return Iterators.concat(start, Iterators.single((builder, params) -> { + builder.field("cluster_uuid", clusterUUID); + builder.field("cluster_uuid_committed", clusterUUIDCommitted); + builder.startObject("cluster_coordination"); + coordinationMetadata().toXContent(builder, params); + return builder.endObject(); + }), + persistentSettings, + project.toXContentChunked(p), + Iterators.flatMap( + customs.entrySet().iterator(), + entry -> entry.getValue().context().contains(context) + ? ChunkedToXContentHelper.wrapWithObject(entry.getKey(), entry.getValue().toXContentChunked(p)) + : Collections.emptyIterator() + ), + ChunkedToXContentHelper.wrapWithObject("reserved_state", clusterReservedState.values().iterator()), + ChunkedToXContentHelper.endObject() + ); + } } private static final DiffableUtils.KeySerializer PROJECT_ID_SERIALIZER = DiffableUtils.getWriteableKeySerializer( diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java index 95fc9e9d1b34..91f4382ceae2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java @@ -17,7 +17,7 @@ import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -191,7 +191,7 @@ public class NodesShutdownMetadata implements Metadata.ClusterCustom { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(NODES_FIELD.getPreferredName(), nodes); + return ChunkedToXContentHelper.xContentValuesMap(NODES_FIELD.getPreferredName(), nodes); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java index 61431c47013f..65143733247d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.routing.GlobalRoutingTable; import org.elasticsearch.cluster.routing.allocation.IndexMetadataUpdater; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; @@ -28,6 +29,7 @@ import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.Nullable; @@ -1990,28 +1992,32 @@ public class ProjectMetadata implements Iterable, Diffable toXContentChunked(ToXContent.Params p) { Metadata.XContentContext context = Metadata.XContentContext.from(p); - return ChunkedToXContent.builder(p) - .object( + Iterator indices = context == Metadata.XContentContext.API + ? ChunkedToXContentHelper.wrapWithObject("indices", indices().values().iterator()) + : Collections.emptyIterator(); + + Iterator customs = Iterators.flatMap( + customs().entrySet().iterator(), + entry -> entry.getValue().context().contains(context) + ? ChunkedToXContentHelper.wrapWithObject(entry.getKey(), entry.getValue().toXContentChunked(p)) + : Collections.emptyIterator() + ); + + final var multiProject = p.paramAsBoolean("multi-project", false); + return Iterators.concat( + ChunkedToXContentHelper.wrapWithObject( "templates", - templates().values().iterator(), - template -> (builder, params) -> IndexTemplateMetadata.Builder.toXContentWithTypes(template, builder, params) - ) - .execute(b -> { - if (context == Metadata.XContentContext.API) { - b.xContentObject("indices", indices().values().iterator()); - } - }) - .forEach(customs.entrySet().iterator(), (b, e) -> { - if (e.getValue().context().contains(context)) { - b.xContentObject(e.getKey(), e.getValue()); - } - }) - .execute(b -> { - // if not multi-project, all reserved state is added to the cluster metadata reserved state block - if (b.params().paramAsBoolean("multi-project", false)) { - b.xContentObject("reserved_state", reservedStateMetadata().values().iterator()); - } - }); + Iterators.map( + templates().values().iterator(), + template -> (builder, params) -> IndexTemplateMetadata.Builder.toXContentWithTypes(template, builder, params) + ) + ), + indices, + customs, + multiProject + ? 
ChunkedToXContentHelper.wrapWithObject("reserved_state", reservedStateMetadata().values().iterator()) + : Collections.emptyIterator() + ); } public static ProjectMetadata readFrom(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java index aa8b092ffcca..ebd7a7db13f8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.Locale; import java.util.Objects; +import static org.elasticsearch.TransportVersions.NODE_SHUTDOWN_EPHEMERAL_ID_ADDED; import static org.elasticsearch.core.Strings.format; /** @@ -40,6 +41,7 @@ public class SingleNodeShutdownMetadata implements SimpleDiffable new SingleNodeShutdownMetadata( (String) a[0], - Type.valueOf((String) a[1]), - (String) a[2], - (long) a[3], - (boolean) a[4], - (TimeValue) a[5], - (String) a[6], - (TimeValue) a[7] + (String) a[1], + Type.valueOf((String) a[2]), + (String) a[3], + (long) a[4], + (boolean) a[5], + (TimeValue) a[6], + (String) a[7], + (TimeValue) a[8] ) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_ID_FIELD); + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> p.textOrNull(), + NODE_EPHEMERAL_ID_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); PARSER.declareString(ConstructingObjectParser.constructorArg(), TYPE_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), REASON_FIELD); PARSER.declareLong(ConstructingObjectParser.constructorArg(), STARTED_AT_MILLIS_FIELD); @@ -91,6 +100,8 @@ public class SingleNodeShutdownMetadata implements SimpleDiffable { /** * Represents the role for a content node. */ - public static DiscoveryNodeRole DATA_CONTENT_NODE_ROLE = new DiscoveryNodeRole("data_content", "s", true) { + public static final DiscoveryNodeRole DATA_CONTENT_NODE_ROLE = new DiscoveryNodeRole("data_content", "s", true) { @Override public boolean isEnabledByDefault(final Settings settings) { @@ -164,7 +164,7 @@ public class DiscoveryNodeRole implements Comparable { /** * Represents the role for a hot node. */ - public static DiscoveryNodeRole DATA_HOT_NODE_ROLE = new DiscoveryNodeRole("data_hot", "h", true) { + public static final DiscoveryNodeRole DATA_HOT_NODE_ROLE = new DiscoveryNodeRole("data_hot", "h", true) { @Override public boolean isEnabledByDefault(final Settings settings) { @@ -175,7 +175,7 @@ public class DiscoveryNodeRole implements Comparable { /** * Represents the role for a warm node. */ - public static DiscoveryNodeRole DATA_WARM_NODE_ROLE = new DiscoveryNodeRole("data_warm", "w", true) { + public static final DiscoveryNodeRole DATA_WARM_NODE_ROLE = new DiscoveryNodeRole("data_warm", "w", true) { @Override public boolean isEnabledByDefault(final Settings settings) { @@ -186,7 +186,7 @@ public class DiscoveryNodeRole implements Comparable { /** * Represents the role for a cold node. 
*/ - public static DiscoveryNodeRole DATA_COLD_NODE_ROLE = new DiscoveryNodeRole("data_cold", "c", true) { + public static final DiscoveryNodeRole DATA_COLD_NODE_ROLE = new DiscoveryNodeRole("data_cold", "c", true) { @Override public boolean isEnabledByDefault(final Settings settings) { @@ -197,7 +197,7 @@ public class DiscoveryNodeRole implements Comparable { /** * Represents the role for a frozen node. */ - public static DiscoveryNodeRole DATA_FROZEN_NODE_ROLE = new DiscoveryNodeRole("data_frozen", "f", true) { + public static final DiscoveryNodeRole DATA_FROZEN_NODE_ROLE = new DiscoveryNodeRole("data_frozen", "f", true) { @Override public boolean isEnabledByDefault(final Settings settings) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java index 827cc378ef3a..7bb97faa6b2d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java @@ -12,7 +12,6 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -141,11 +140,7 @@ public abstract class AbstractAllocationDecision implements ChunkedToXContentObj return Collections.emptyIterator(); } - return Iterators.concat( - ChunkedToXContentHelper.startArray("node_allocation_decisions"), - nodeDecisions.iterator(), - ChunkedToXContentHelper.endArray() - ); + return ChunkedToXContentHelper.array("node_allocation_decisions", nodeDecisions.iterator()); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java index a61199e26191..c0543a93ea9d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java @@ -13,9 +13,9 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -296,7 +296,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision { @Override public Iterator toXContentChunked(ToXContent.Params params) { checkDecisionState(); - return ChunkedToXContent.builder(params).append((builder, p) -> { + return Iterators.concat(Iterators.single((builder, p) -> { builder.field("can_allocate", getAllocationDecision()); builder.field("allocate_explanation", 
getExplanation()); if (targetNode != null) { @@ -320,7 +320,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision { ); } return builder; - }).append(nodeDecisionsToXContentChunked(nodeDecisions)); + }), nodeDecisionsToXContentChunked(nodeDecisions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java index 5dfac293de49..891818b8e68f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -12,9 +12,9 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; @@ -260,7 +260,7 @@ public final class MoveDecision extends AbstractAllocationDecision { @Override public Iterator toXContentChunked(ToXContent.Params params) { checkDecisionState(); - return ChunkedToXContent.builder(params).append((builder, p) -> { + return Iterators.concat(Iterators.single((builder, p) -> { if (targetNode != null) { builder.startObject("target_node"); discoveryNodeToXContent(targetNode, true, builder); @@ -289,7 +289,7 @@ public final class MoveDecision extends AbstractAllocationDecision { builder.field("move_explanation", getExplanation()); } return builder; - }).append(nodeDecisionsToXContentChunked(nodeDecisions)); + }), nodeDecisionsToXContentChunked(nodeDecisions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java index ce67979661e0..5d78cf5f9511 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java @@ -39,7 +39,7 @@ public record ClusterBalanceStats( Map nodes ) implements Writeable, ToXContentObject { - public static ClusterBalanceStats EMPTY = new ClusterBalanceStats(0, 0, Map.of(), Map.of()); + public static final ClusterBalanceStats EMPTY = new ClusterBalanceStats(0, 0, Map.of(), Map.of()); public static ClusterBalanceStats createFrom( ClusterState clusterState, diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterStateUpdateStats.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterStateUpdateStats.java index 01ee60a29b34..75b33a7e20f5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterStateUpdateStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterStateUpdateStats.java @@ -140,7 +140,7 @@ public class ClusterStateUpdateStats implements Writeable, ToXContentFragment { out.writeVLong(failedNotificationElapsedMillis); } - public static ClusterStateUpdateStats EMPTY = new ClusterStateUpdateStats( + public static final ClusterStateUpdateStats EMPTY = new ClusterStateUpdateStats( 0L, 0L, 
0L, diff --git a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java index c85df7c6be90..ef5b318a8b42 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +++ b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -39,7 +39,7 @@ public class DeprecationLogger { * Deprecation messages are logged at this level. * More serious than WARN by 1, but less serious than ERROR */ - public static Level CRITICAL = Level.forName("CRITICAL", Level.WARN.intLevel() - 1); + public static final Level CRITICAL = Level.forName("CRITICAL", Level.WARN.intLevel() - 1); private static volatile List skipTheseDeprecations = Collections.emptyList(); private final Logger logger; diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 09561661ccd5..48099edeffda 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.similarity.SimilarityService; @@ -190,6 +191,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING, IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING, + InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT, // validate that built-in similarities don't get redefined Setting.groupSetting("index.similarity.", (s) -> { diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java index 7b4f82c1072c..376333cf7cdf 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java @@ -27,7 +27,7 @@ public final class SettingsFilter { /** * Can be used to specify a settings filter that will be used to filter out matching settings in the toXContent method */ - public static String SETTINGS_FILTER_PARAM = "settings_filter"; + public static final String SETTINGS_FILTER_PARAM = "settings_filter"; private final Set patterns; private final String patternString; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 9120576815ba..b45b348376fd 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -510,14 +510,14 @@ public class EsExecutors { public static class TaskTrackingConfig { // This is a random starting point alpha.
TODO: revisit this with actual testing and/or make it configurable - public static double DEFAULT_EWMA_ALPHA = 0.3; + public static final double DEFAULT_EWMA_ALPHA = 0.3; private final boolean trackExecutionTime; private final boolean trackOngoingTasks; private final double ewmaAlpha; - public static TaskTrackingConfig DO_NOT_TRACK = new TaskTrackingConfig(false, false, DEFAULT_EWMA_ALPHA); - public static TaskTrackingConfig DEFAULT = new TaskTrackingConfig(true, false, DEFAULT_EWMA_ALPHA); + public static final TaskTrackingConfig DO_NOT_TRACK = new TaskTrackingConfig(false, false, DEFAULT_EWMA_ALPHA); + public static final TaskTrackingConfig DEFAULT = new TaskTrackingConfig(true, false, DEFAULT_EWMA_ALPHA); public TaskTrackingConfig(boolean trackOngoingTasks, double ewmaAlpha) { this(true, trackOngoingTasks, ewmaAlpha); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java index 1970983654b8..e3a6c291c543 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContent.java @@ -26,14 +26,6 @@ import java.util.Iterator; */ public interface ChunkedToXContent { - /** - * Returns a new {@link ChunkedToXContentBuilder} to construct a series of XContent chunks - * @param params Params to use during generation for any nested {@code ChunkedToXContent} objects. - */ - static ChunkedToXContentBuilder builder(ToXContent.Params params) { - return new ChunkedToXContentBuilder(params); - } - /** * Create an iterator of {@link ToXContent} chunks for a REST response for the given {@link RestApiVersion}. Each chunk is serialized * with the same {@link XContentBuilder} and {@link ToXContent.Params}, which is also the same as the {@link ToXContent.Params} passed diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java deleted file mode 100644 index a3243ef3865a..000000000000 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java +++ /dev/null @@ -1,477 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.common.xcontent; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ToXContent; - -import java.util.Iterator; -import java.util.Map; -import java.util.Objects; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Stream; - -/** - * A fluent builder to create {@code Iterator<ToXContent>} objects - */ -public class ChunkedToXContentBuilder implements Iterator { - - private final ToXContent.Params params; - private final Stream.Builder builder = Stream.builder(); - private Iterator iterator; - - public ChunkedToXContentBuilder(ToXContent.Params params) { - this.params = params; - } - - private void addChunk(ToXContent content) { - assert iterator == null : "Builder has been read, cannot add any more chunks"; - builder.add(Objects.requireNonNull(content)); - } - - public ToXContent.Params params() { - return params; - } - - private void startObject() { - addChunk((b, p) -> b.startObject()); - } - - private void startObject(String name) { - addChunk((b, p) -> b.startObject(name)); - } - - private void endObject() { - addChunk((b, p) -> b.endObject()); - } - - /** - * Creates an object, with the specified {@code contents} - */ - public ChunkedToXContentBuilder xContentObject(ToXContent contents) { - addChunk((b, p) -> contents.toXContent(b.startObject(), p).endObject()); - return this; - } - - /** - * Creates an object named {@code name}, with the specified {@code contents} - */ - public ChunkedToXContentBuilder xContentObject(String name, ToXContent contents) { - addChunk((b, p) -> contents.toXContent(b.startObject(name), p).endObject()); - return this; - } - - /** - * Creates an object, with the specified {@code contents} - */ - public ChunkedToXContentBuilder xContentObject(ChunkedToXContent contents) { - startObject(); - append(contents); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the specified {@code contents} - */ - public ChunkedToXContentBuilder xContentObject(String name, ChunkedToXContent contents) { - startObject(name); - append(contents); - endObject(); - return this; - } - - /** - * Creates an object, with the specified {@code contents} - */ - public ChunkedToXContentBuilder xContentObject(Iterator contents) { - startObject(); - append(contents); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the specified {@code contents} - */ - public ChunkedToXContentBuilder xContentObject(String name, Iterator contents) { - startObject(name); - append(contents); - endObject(); - return this; - } - - /** - * Creates an object with the contents of each field created from each entry in {@code map} - */ - public ChunkedToXContentBuilder xContentObjectFields(Map map) { - startObject(); - map.forEach(this::field); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the contents of each field created from each entry in {@code map} - */ - public ChunkedToXContentBuilder xContentObjectFields(String name, Map map) { - startObject(name); - map.forEach(this::field); - endObject(); - return this; - } - - /** - * Creates an object with the contents of each field each another object created from each entry in {@code map} - */ - public ChunkedToXContentBuilder xContentObjectFieldObjects(Map map) { - startObject(); - map.forEach(this::xContentObject); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the 
contents of each field each another object created from each entry in {@code map} - */ - public ChunkedToXContentBuilder xContentObjectFieldObjects(String name, Map map) { - startObject(name); - map.forEach(this::xContentObject); - endObject(); - return this; - } - - /** - * Creates an object, with the contents set by {@code contents} - */ - public ChunkedToXContentBuilder object(Consumer contents) { - startObject(); - execute(contents); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the contents set by {@code contents} - */ - public ChunkedToXContentBuilder object(String name, Consumer contents) { - startObject(name); - execute(contents); - endObject(); - return this; - } - - /** - * Creates an object, with the contents set by calling {@code create} on each item returned by {@code items} - */ - public ChunkedToXContentBuilder object(Iterator items, BiConsumer create) { - startObject(); - forEach(items, create); - endObject(); - return this; - } - - /** - * Creates an object, with the contents set by appending together - * the return values of {@code create} called on each item returned by {@code items} - */ - public ChunkedToXContentBuilder object(Iterator items, Function create) { - startObject(); - forEach(items, create); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the contents set by calling {@code create} on each item returned by {@code items} - */ - public ChunkedToXContentBuilder object(String name, Iterator items, BiConsumer create) { - startObject(name); - forEach(items, create); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the contents set by appending together - * the return values of {@code create} called on each item returned by {@code items} - */ - public ChunkedToXContentBuilder object(String name, Iterator items, Function create) { - startObject(name); - forEach(items, create); - endObject(); - return this; - } - - /** - * Creates an object with the contents of each field set by {@code map} - */ - public ChunkedToXContentBuilder object(Map map) { - startObject(); - map.forEach(this::field); - endObject(); - return this; - } - - /** - * Creates an object named {@code name}, with the contents of each field set by {@code map} - */ - public ChunkedToXContentBuilder object(String name, Map map) { - startObject(name); - map.forEach(this::field); - endObject(); - return this; - } - - private void startArray() { - addChunk((b, p) -> b.startArray()); - } - - private void startArray(String name) { - addChunk((b, p) -> b.startArray(name)); - } - - private void endArray() { - addChunk((b, p) -> b.endArray()); - } - - public ChunkedToXContentBuilder array(String name, String[] values) { - addChunk((b, p) -> b.array(name, values)); - return this; - } - - /** - * Creates an array, with the contents set by calling {@code create} on each item returned by {@code items} - */ - public ChunkedToXContentBuilder array(Iterator items, BiConsumer create) { - startArray(); - forEach(items, create); - endArray(); - return this; - } - - /** - * Creates an array with the contents set by appending together the contents of {@code items} - */ - public ChunkedToXContentBuilder array(Iterator items) { - startArray(); - items.forEachRemaining(this::append); - endArray(); - return this; - } - - /** - * Creates an array, with the contents set by appending together - * the return values of {@code create} called on each item returned by {@code items} - */ - public 
ChunkedToXContentBuilder array(Iterator items, Function create) { - startArray(); - forEach(items, create); - endArray(); - return this; - } - - /** - * Creates an array named {@code name}, with the contents set by calling {@code create} on each item returned by {@code items} - */ - public ChunkedToXContentBuilder array(String name, Iterator items, BiConsumer create) { - startArray(name); - forEach(items, create); - endArray(); - return this; - } - - /** - * Creates an array named {@code name}, with the contents set by appending together the contents of {@code items} - */ - public ChunkedToXContentBuilder array(String name, Iterator items) { - startArray(name); - items.forEachRemaining(this::append); - endArray(); - return this; - } - - /** - * Creates an array named {@code name}, with the contents set by appending together - * the return values of {@code create} called on each item returned by {@code items} - */ - public ChunkedToXContentBuilder array(String name, Iterator items, Function create) { - startArray(name); - forEach(items, create); - endArray(); - return this; - } - - public ChunkedToXContentBuilder field(String name) { - addChunk((b, p) -> b.field(name)); - return this; - } - - public ChunkedToXContentBuilder field(String name, boolean value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, Boolean value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, int value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, Integer value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, long value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, Long value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, float value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, Float value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, double value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, Double value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, String value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, Enum value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - public ChunkedToXContentBuilder field(String name, ToXContent value) { - addChunk((b, p) -> b.field(name, value, p)); - return this; - } - - public ChunkedToXContentBuilder field(String name, ChunkedToXContent value) { - addChunk((b, p) -> b.field(name)); - append(value); - return this; - } - - public ChunkedToXContentBuilder field(String name, Object value) { - addChunk((b, p) -> b.field(name, value)); - return this; - } - - /** - * Passes this object to {@code consumer}. - *

- * This may seem superfluous, but it allows callers to 'break out' into more declarative code - * inside the lambda, whilst maintaining the top-level fluent method calls. - */ - public ChunkedToXContentBuilder execute(Consumer consumer) { - consumer.accept(this); - return this; - } - - /** - * Adds chunks from an iterator. Each item is passed to {@code create} to add chunks to this builder. - */ - public ChunkedToXContentBuilder forEach(Iterator items, BiConsumer create) { - items.forEachRemaining(t -> create.accept(this, t)); - return this; - } - - /** - * Adds chunks from an iterator. Each item is passed to {@code create}, and the resulting {@code ToXContent} objects - * are added to this builder in order. - */ - public ChunkedToXContentBuilder forEach(Iterator items, Function create) { - items.forEachRemaining(t -> append(create.apply(t))); - return this; - } - - public ChunkedToXContentBuilder append(ToXContent xContent) { - if (xContent != ToXContent.EMPTY) { - addChunk(xContent); - } - return this; - } - - public ChunkedToXContentBuilder append(ChunkedToXContent chunk) { - if (chunk != ChunkedToXContent.EMPTY) { - append(chunk.toXContentChunked(params)); - } - return this; - } - - public ChunkedToXContentBuilder append(Iterator xContents) { - xContents.forEachRemaining(this::append); - return this; - } - - public ChunkedToXContentBuilder appendIfPresent(@Nullable ToXContent xContent) { - if (xContent != null) { - append(xContent); - } - return this; - } - - public ChunkedToXContentBuilder appendIfPresent(@Nullable ChunkedToXContent chunk) { - if (chunk != null) { - append(chunk); - } - return this; - } - - private Iterator checkCreateIterator() { - if (iterator == null) { - iterator = builder.build().iterator(); - } - return iterator; - } - - @Override - public boolean hasNext() { - return checkCreateIterator().hasNext(); - } - - @Override - public ToXContent next() { - return checkCreateIterator().next(); - } - - @Override - public void forEachRemaining(Consumer action) { - checkCreateIterator().forEachRemaining(action); - } -} diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index 6a5aa2943de9..52c16f88e61f 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -14,6 +14,8 @@ import org.elasticsearch.xcontent.ToXContent; import java.util.Collections; import java.util.Iterator; +import java.util.Map; +import java.util.function.Function; public enum ChunkedToXContentHelper { ; @@ -42,6 +44,36 @@ public enum ChunkedToXContentHelper { return Iterators.single(((builder, params) -> builder.endArray())); } + public static Iterator map(String name, Map map) { + return map(name, map, entry -> (ToXContent) (builder, params) -> builder.field(entry.getKey(), entry.getValue())); + } + + public static Iterator xContentFragmentValuesMap(String name, Map map) { + return map( + name, + map, + entry -> (ToXContent) (builder, params) -> entry.getValue().toXContent(builder.startObject(entry.getKey()), params).endObject() + ); + } + + public static Iterator xContentValuesMap(String name, Map map) { + return map( + name, + map, + entry -> (ToXContent) (builder, params) -> entry.getValue().toXContent(builder.field(entry.getKey()), params) + ); + } + + /** + * Like xContentFragmentValuesMap, but allows the underlying XContent object to 
define its own "name" with startObject(string) + * and endObject, rather than assuming that the key in the map should be the name in the XContent output. + * @param name name to use in the XContent for the outer object wrapping the map being rendered to XContent + * @param map map being rendered to XContent + */ + public static Iterator xContentFragmentValuesMapCreateOwnName(String name, Map map) { + return map(name, map, entry -> (ToXContent) (builder, params) -> entry.getValue().toXContent(builder, params)); + } + public static Iterator field(String name, boolean value) { return Iterators.single(((builder, params) -> builder.field(name, value))); } @@ -62,6 +94,46 @@ public enum ChunkedToXContentHelper { } } + /** + * Creates an Iterator to serialize a named field where the value is represented by a {@link ChunkedToXContentObject}. + * Chunked equivalent for {@code XContentBuilder field(String name, ToXContent value)} + * @param name name of the field + * @param value value for this field + * @param params params to propagate for XContent serialization + * @return Iterator composing field name and value serialization + */ + public static Iterator field(String name, ChunkedToXContentObject value, ToXContent.Params params) { + return Iterators.concat(Iterators.single((builder, innerParam) -> builder.field(name)), value.toXContentChunked(params)); + } + + public static Iterator array(String name, Iterator contents) { + return Iterators.concat(ChunkedToXContentHelper.startArray(name), contents, ChunkedToXContentHelper.endArray()); + } + + /** + * Creates an Iterator to serialize a named field where the value is represented by an iterator of {@link ChunkedToXContentObject}. + * Chunked equivalent for {@code XContentBuilder array(String name, ToXContent value)} + * @param name name of the field + * @param contents values for this field + * @param params params to propagate for XContent serialization + * @return Iterator composing field name and value serialization + */ + public static Iterator array(String name, Iterator contents, ToXContent.Params params) { + return Iterators.concat( + ChunkedToXContentHelper.startArray(name), + Iterators.flatMap(contents, c -> c.toXContentChunked(params)), + ChunkedToXContentHelper.endArray() + ); + } + + public static Iterator wrapWithObject(String name, Iterator iterator) { + return Iterators.concat(startObject(name), iterator, endObject()); + } + + public static <T> Iterator<ToXContent> map(String name, Map<String, T> map, Function<Map.Entry<String, T>, ToXContent> toXContent) { + return wrapWithObject(name, Iterators.map(map.entrySet().iterator(), toXContent)); + } + /** * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link * Iterators#single}, but still useful because it avoids any type ambiguity.
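To make the new ChunkedToXContentHelper.array(name, contents) helper above concrete: chunked responses expose their serialization as an Iterator of small ToXContent chunks, and array(...) splices a start-array chunk, the element chunks, and an end-array chunk into one lazy iterator. That is exactly what let AbstractAllocationDecision, earlier in this diff, collapse its manual three-way Iterators.concat into a single helper call. The following self-contained sketch models that composition using only JDK types; the Chunk interface and its render method are illustrative stand-ins for ToXContent, not types from this patch.

import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

public class ChunkedArraySketch {

    // Illustrative stand-in for ToXContent: each chunk appends one fragment of output.
    interface Chunk {
        void render(StringBuilder out);
    }

    // Analogue of ChunkedToXContentHelper.array(name, contents): lazily concatenates
    // a start-array chunk, the element chunks, and an end-array chunk.
    static Iterator<Chunk> array(String name, Iterator<Chunk> contents) {
        Iterator<Chunk> start = List.<Chunk>of(out -> out.append('"').append(name).append("\":[")).iterator();
        Iterator<Chunk> end = List.<Chunk>of(out -> out.append(']')).iterator();
        return concat(start, contents, end);
    }

    // Minimal analogue of Iterators.concat: drains each iterator in turn.
    @SafeVarargs
    static <T> Iterator<T> concat(Iterator<T>... iterators) {
        return new Iterator<T>() {
            private int current = 0;

            @Override
            public boolean hasNext() {
                while (current < iterators.length) {
                    if (iterators[current].hasNext()) {
                        return true;
                    }
                    current++;
                }
                return false;
            }

            @Override
            public T next() {
                if (hasNext() == false) {
                    throw new NoSuchElementException();
                }
                return iterators[current].next();
            }
        };
    }

    public static void main(String[] args) {
        Iterator<Chunk> decisions = List.<Chunk>of(out -> out.append("\"node-1\","), out -> out.append("\"node-2\"")).iterator();
        StringBuilder out = new StringBuilder("{");
        array("node_allocation_decisions", decisions).forEachRemaining(chunk -> chunk.render(out));
        System.out.println(out.append('}')); // prints {"node_allocation_decisions":["node-1","node-2"]}
    }
}

The point of keeping everything iterator-based is that a large REST response can be streamed chunk by chunk instead of being materialized in a single XContentBuilder; removing ChunkedToXContentBuilder in favor of these static helpers makes that composition explicit at each call site.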
diff --git a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java index 70aa3f6639a4..913a352debfb 100644 --- a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java @@ -30,7 +30,7 @@ import java.util.Objects; */ final class DefaultBuildVersion extends BuildVersion { - public static BuildVersion CURRENT = new DefaultBuildVersion(Version.CURRENT.id()); + public static final BuildVersion CURRENT = new DefaultBuildVersion(Version.CURRENT.id()); final Version version; diff --git a/server/src/main/java/org/elasticsearch/health/Diagnosis.java b/server/src/main/java/org/elasticsearch/health/Diagnosis.java index b1af4a1c383d..41301e2d52a5 100644 --- a/server/src/main/java/org/elasticsearch/health/Diagnosis.java +++ b/server/src/main/java/org/elasticsearch/health/Diagnosis.java @@ -10,12 +10,14 @@ package org.elasticsearch.health; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; import java.util.Collection; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -76,20 +78,22 @@ public record Diagnosis(Definition definition, @Nullable List affected } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - var builder = ChunkedToXContent.builder(params); + public Iterator toXContentChunked(ToXContent.Params outerParams) { + final Iterator valuesIterator; if (nodes != null) { - return builder.array(type.displayValue, nodes.iterator(), node -> (b, p) -> { - b.startObject(); - b.field(ID_FIELD, node.getId()); + valuesIterator = Iterators.map(nodes.iterator(), node -> (builder, params) -> { + builder.startObject(); + builder.field(ID_FIELD, node.getId()); if (node.getName() != null) { - b.field(NAME_FIELD, node.getName()); + builder.field(NAME_FIELD, node.getName()); } - return b.endObject(); + builder.endObject(); + return builder; }); } else { - return builder.array(type.displayValue, values.toArray(String[]::new)); + valuesIterator = Iterators.map(values.iterator(), value -> (builder, params) -> builder.value(value)); } + return ChunkedToXContentHelper.array(type.displayValue, valuesIterator); } @Override @@ -140,18 +144,30 @@ public record Diagnosis(Definition definition, @Nullable List affected } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> { - ob.append((b, p) -> { - b.field("id", definition.getUniqueId()); - b.field("cause", definition.cause); - b.field("action", definition.action); - b.field("help_url", definition.helpURL); - return b; - }); - if (affectedResources != null && affectedResources.isEmpty() == false) { - ob.object("affected_resources", affectedResources.iterator(), ChunkedToXContentBuilder::append); + public Iterator toXContentChunked(ToXContent.Params outerParams) { + final Iterator resourcesIterator; + if (affectedResources == null) { + resourcesIterator = Collections.emptyIterator(); + } else { + resourcesIterator = Iterators.flatMap(affectedResources.iterator(), s -> s.toXContentChunked(outerParams)); + } + return 
Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { + builder.startObject(); + builder.field("id", definition.getUniqueId()); + builder.field("cause", definition.cause); + builder.field("action", definition.action); + builder.field("help_url", definition.helpURL); + + if (affectedResources != null && affectedResources.size() > 0) { + builder.startObject("affected_resources"); } - }); + return builder; + }), resourcesIterator, Iterators.single((builder, params) -> { + if (affectedResources != null && affectedResources.size() > 0) { + builder.endObject(); + } + builder.endObject(); + return builder; + })); } } diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index 1a84abd9f7c1..6944ac74c811 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -9,11 +9,11 @@ package org.elasticsearch.health; -import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.xcontent.ToXContent; +import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -26,22 +26,33 @@ public record HealthIndicatorResult( List diagnosisList ) implements ChunkedToXContentObject { @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> { - ob.append((b, p) -> { - b.field("status", status.xContentValue()); - b.field("symptom", symptom); - if (details != null && HealthIndicatorDetails.EMPTY.equals(details) == false) { - b.field("details", details, p); - } - if (impacts != null && impacts.isEmpty() == false) { - b.field("impacts", impacts); - } - return b; - }); - if (diagnosisList != null && diagnosisList.isEmpty() == false) { - ob.array("diagnosis", diagnosisList.iterator(), ChunkedToXContentBuilder::append); + public Iterator toXContentChunked(ToXContent.Params outerParams) { + final Iterator diagnosisIterator; + if (diagnosisList == null) { + diagnosisIterator = Collections.emptyIterator(); + } else { + diagnosisIterator = Iterators.flatMap(diagnosisList.iterator(), s -> s.toXContentChunked(outerParams)); + } + return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { + builder.startObject(); + builder.field("status", status.xContentValue()); + builder.field("symptom", symptom); + if (details != null && HealthIndicatorDetails.EMPTY.equals(details) == false) { + builder.field("details", details, params); } - }); + if (impacts != null && impacts.isEmpty() == false) { + builder.field("impacts", impacts); + } + if (diagnosisList != null && diagnosisList.isEmpty() == false) { + builder.startArray("diagnosis"); + } + return builder; + }), diagnosisIterator, Iterators.single((builder, params) -> { + if (diagnosisList != null && diagnosisList.isEmpty() == false) { + builder.endArray(); + } + builder.endObject(); + return builder; + })); } } diff --git a/server/src/main/java/org/elasticsearch/index/Index.java b/server/src/main/java/org/elasticsearch/index/Index.java index b6aa8cf19a7e..d587d8ef2fee 100644 --- a/server/src/main/java/org/elasticsearch/index/Index.java +++ b/server/src/main/java/org/elasticsearch/index/Index.java @@ -30,7 +30,7 @@ public class Index implements 
Writeable, ToXContentObject { public static final Index[] EMPTY_ARRAY = new Index[0]; - public static Comparator COMPARE_BY_NAME = Comparator.comparing(Index::getName); + public static final Comparator COMPARE_BY_NAME = Comparator.comparing(Index::getName); private static final String INDEX_UUID_KEY = "index_uuid"; private static final String INDEX_NAME_KEY = "index_name"; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 8af10524813c..7a5cd97e5a3a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -138,6 +138,8 @@ public class IndexVersions { public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index 0f772b49bf92..b1c311af88e2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -440,7 +440,7 @@ final class TranslogDirectoryReader extends DirectoryReader { SourceFieldMapper mapper = mappingLookup.getMapping().getMetadataMapperByClass(SourceFieldMapper.class); if (mapper != null) { try { - sourceBytes = mapper.applyFilters(sourceBytes, null); + sourceBytes = mapper.applyFilters(mappingLookup, sourceBytes, null); } catch (IOException e) { throw new IOException("Failed to reapply filters after reading from translog", e); } diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 43b5d2c7d3f7..9ed9fc6dabf3 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -9,7 +9,9 @@ package org.elasticsearch.index.get; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; @@ -26,6 +28,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperMetrics; @@ -39,6 +42,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.MultiEngineGet; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import 
org.elasticsearch.search.lookup.Source; +import org.elasticsearch.search.lookup.SourceFilter; import java.io.IOException; import java.util.ArrayList; @@ -190,9 +194,13 @@ public final class ShardGetService extends AbstractIndexShardComponent { } public GetResult getForUpdate(String id, long ifSeqNo, long ifPrimaryTerm) throws IOException { + return getForUpdate(id, ifSeqNo, ifPrimaryTerm, new String[] { RoutingFieldMapper.NAME }); + } + + public GetResult getForUpdate(String id, long ifSeqNo, long ifPrimaryTerm, String[] gFields) throws IOException { return get( id, - new String[] { RoutingFieldMapper.NAME }, + gFields, true, Versions.MATCH_ANY, VersionType.INTERNAL, @@ -288,11 +296,18 @@ public final class ShardGetService extends AbstractIndexShardComponent { boolean forceSyntheticSource ) throws IOException { assert get.exists() : "method should only be called if document could be retrieved"; - // check first if stored fields to be loaded don't contain an object field MappingLookup mappingLookup = mapperService.mappingLookup(); + final IndexVersion indexVersion = indexSettings.getIndexVersionCreated(); + final Set storedFieldSet = new HashSet<>(); + boolean hasInferenceMetadataFields = false; if (storedFields != null) { for (String field : storedFields) { + if (field.equals(InferenceMetadataFieldsMapper.NAME) + && InferenceMetadataFieldsMapper.isEnabled(indexShard.mapperService().mappingLookup())) { + hasInferenceMetadataFields = true; + continue; + } Mapper fieldMapper = mappingLookup.getMapper(field); if (fieldMapper == null) { if (mappingLookup.objectMappers().get(field) != null) { @@ -300,6 +315,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { throw new IllegalArgumentException("field [" + field + "] isn't a leaf field"); } } + storedFieldSet.add(field); } } @@ -313,8 +329,8 @@ public final class ShardGetService extends AbstractIndexShardComponent { () -> mappingLookup.getMapping().syntheticFieldLoader(sourceFilter), mapperMetrics.sourceFieldMetrics() ) - : mappingLookup.newSourceLoader(fetchSourceContext.filter(), mapperMetrics.sourceFieldMetrics()); - StoredFieldLoader storedFieldLoader = buildStoredFieldLoader(storedFields, fetchSourceContext, loader); + : mappingLookup.newSourceLoader(sourceFilter, mapperMetrics.sourceFieldMetrics()); + StoredFieldLoader storedFieldLoader = buildStoredFieldLoader(storedFieldSet, fetchSourceContext, loader); LeafStoredFieldLoader leafStoredFieldLoader = storedFieldLoader.getLoader(docIdAndVersion.reader.getContext(), null); try { leafStoredFieldLoader.advanceTo(docIdAndVersion.docId); @@ -323,7 +339,6 @@ public final class ShardGetService extends AbstractIndexShardComponent { } // put stored fields into result objects - final IndexVersion indexVersion = indexSettings.getIndexVersionCreated(); if (leafStoredFieldLoader.storedFields().isEmpty() == false) { Set needed = new HashSet<>(); if (storedFields != null) { @@ -372,6 +387,19 @@ public final class ShardGetService extends AbstractIndexShardComponent { if (mapperService.mappingLookup().isSourceEnabled() && fetchSourceContext.fetchSource()) { Source source = loader.leaf(docIdAndVersion.reader, new int[] { docIdAndVersion.docId }) .source(leafStoredFieldLoader, docIdAndVersion.docId); + + SourceFilter filter = fetchSourceContext.filter(); + if (filter != null) { + source = source.filter(filter); + } + + if (hasInferenceMetadataFields) { + /** + * Adds the {@link InferenceMetadataFieldsMapper#NAME} field from the document fields + * to the original _source if it has been 
requested. + */ + source = addInferenceMetadataFields(mapperService, docIdAndVersion.reader.getContext(), docIdAndVersion.docId, source); + } sourceBytes = source.internalSourceRef(); } @@ -404,18 +432,38 @@ public final class ShardGetService extends AbstractIndexShardComponent { return new DocumentField(IgnoredFieldMapper.NAME, ignoredValues); } - private static StoredFieldLoader buildStoredFieldLoader(String[] fields, FetchSourceContext fetchSourceContext, SourceLoader loader) { - Set fieldsToLoad = new HashSet<>(); - if (fields != null && fields.length > 0) { - Collections.addAll(fieldsToLoad, fields); + private static Source addInferenceMetadataFields(MapperService mapperService, LeafReaderContext readerContext, int docId, Source source) + throws IOException { + var mappingLookup = mapperService.mappingLookup(); + var inferenceMetadata = (InferenceMetadataFieldsMapper) mappingLookup.getMapping() + .getMetadataMapperByName(InferenceMetadataFieldsMapper.NAME); + if (inferenceMetadata == null || mapperService.mappingLookup().inferenceFields().isEmpty()) { + return source; } + var inferenceLoader = inferenceMetadata.fieldType() + .valueFetcher(mappingLookup, mapperService.getBitSetProducer(), new IndexSearcher(readerContext.reader())); + inferenceLoader.setNextReader(readerContext); + List values = inferenceLoader.fetchValues(source, docId, List.of()); + if (values.size() == 1) { + var newSource = source.source(); + newSource.put(InferenceMetadataFieldsMapper.NAME, values.get(0)); + return Source.fromMap(newSource, source.sourceContentType()); + } + return source; + } + + private static StoredFieldLoader buildStoredFieldLoader( + Set fields, + FetchSourceContext fetchSourceContext, + SourceLoader loader + ) { if (fetchSourceContext.fetchSource()) { - fieldsToLoad.addAll(loader.requiredStoredFields()); + fields.addAll(loader.requiredStoredFields()); } else { - if (fieldsToLoad.isEmpty()) { + if (fields.isEmpty()) { return StoredFieldLoader.empty(); } } - return StoredFieldLoader.create(fetchSourceContext.fetchSource(), fieldsToLoad); + return StoredFieldLoader.create(fetchSourceContext.fetchSource(), fields); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java index 4b0542f7f7b0..318e877c7ebb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java @@ -124,7 +124,10 @@ public abstract class AbstractShapeGeometryFieldMapper extends AbstractGeomet private void read(BinaryDocValues binaryDocValues, int doc, GeometryDocValueReader reader, BytesRefBuilder builder) throws IOException { - binaryDocValues.advanceExact(doc); + if (binaryDocValues.advanceExact(doc) == false) { + builder.appendNull(); + return; + } reader.reset(binaryDocValues.binaryValue()); var extent = reader.getExtent(); // This is rather silly: an extent is already encoded as ints, but we convert it to Rectangle to diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CustomSyntheticSourceFieldLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/CustomSyntheticSourceFieldLookup.java new file mode 100644 index 000000000000..dbbee8a9035d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/CustomSyntheticSourceFieldLookup.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexSettings; + +import java.util.HashMap; +import java.util.Map; + +/** + * Contains lookup information needed to perform custom synthetic source logic. + * For example, fields that use the fallback synthetic source implementation or fields that preserve array ordering + * in synthetic source. + */ +public class CustomSyntheticSourceFieldLookup { + private final Map fieldsWithCustomSyntheticSourceHandling; + + public CustomSyntheticSourceFieldLookup(Mapping mapping, @Nullable IndexSettings indexSettings, boolean isSourceSynthetic) { + var fields = new HashMap(); + if (isSourceSynthetic && indexSettings != null) { + populateFields(fields, mapping.getRoot(), indexSettings.sourceKeepMode()); + } + this.fieldsWithCustomSyntheticSourceHandling = Map.copyOf(fields); + } + + private void populateFields(Map fields, ObjectMapper currentLevel, Mapper.SourceKeepMode defaultSourceKeepMode) { + if (currentLevel.isEnabled() == false) { + fields.put(currentLevel.fullPath(), Reason.DISABLED_OBJECT); + return; + } + if (sourceKeepMode(currentLevel, defaultSourceKeepMode) == Mapper.SourceKeepMode.ALL) { + fields.put(currentLevel.fullPath(), Reason.SOURCE_KEEP_ALL); + return; + } + if (currentLevel.isNested() == false && sourceKeepMode(currentLevel, defaultSourceKeepMode) == Mapper.SourceKeepMode.ARRAYS) { + fields.put(currentLevel.fullPath(), Reason.SOURCE_KEEP_ARRAYS); + } + + for (Mapper child : currentLevel) { + if (child instanceof ObjectMapper objectMapper) { + populateFields(fields, objectMapper, defaultSourceKeepMode); + } else if (child instanceof FieldMapper fieldMapper) { + // The order here is important. + // If fallback logic is used, it should always be correctly marked as FALLBACK_SYNTHETIC_SOURCE. + // This allows us to apply an optimization for SOURCE_KEEP_ARRAYS and not store arrays that have one element. + // If this order is changed and a field that has both SOURCE_KEEP_ARRAYS and FALLBACK_SYNTHETIC_SOURCE + // is marked as SOURCE_KEEP_ARRAYS, we would lose data for this field by applying such an optimization.
+ if (fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK) { + fields.put(fieldMapper.fullPath(), Reason.FALLBACK_SYNTHETIC_SOURCE); + } else if (sourceKeepMode(fieldMapper, defaultSourceKeepMode) == Mapper.SourceKeepMode.ALL) { + fields.put(fieldMapper.fullPath(), Reason.SOURCE_KEEP_ALL); + } else if (sourceKeepMode(fieldMapper, defaultSourceKeepMode) == Mapper.SourceKeepMode.ARRAYS) { + fields.put(fieldMapper.fullPath(), Reason.SOURCE_KEEP_ARRAYS); + } + } + } + } + + private Mapper.SourceKeepMode sourceKeepMode(ObjectMapper mapper, Mapper.SourceKeepMode defaultSourceKeepMode) { + return mapper.sourceKeepMode().orElse(defaultSourceKeepMode); + } + + private Mapper.SourceKeepMode sourceKeepMode(FieldMapper mapper, Mapper.SourceKeepMode defaultSourceKeepMode) { + return mapper.sourceKeepMode().orElse(defaultSourceKeepMode); + } + + public Map getFieldsWithCustomSyntheticSourceHandling() { + return fieldsWithCustomSyntheticSourceHandling; + } + + /** + * Specifies why this field needs custom handling. + */ + public enum Reason { + SOURCE_KEEP_ARRAYS, + SOURCE_KEEP_ALL, + FALLBACK_SYNTHETIC_SOURCE, + DISABLED_OBJECT + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 03e6c343c7ab..5abb7b5a1b72 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -49,6 +49,7 @@ public class DocumentMapper { mapping, mapping.toCompressedXContent(), IndexVersion.current(), + mapperService.getIndexSettings(), mapperService.getMapperMetrics(), mapperService.index().getName() ); @@ -59,12 +60,13 @@ public class DocumentMapper { Mapping mapping, CompressedXContent source, IndexVersion version, + IndexSettings indexSettings, MapperMetrics mapperMetrics, String indexName ) { this.documentParser = documentParser; this.type = mapping.getRoot().fullPath(); - this.mappingLookup = MappingLookup.fromMapping(mapping); + this.mappingLookup = MappingLookup.fromMapping(mapping, indexSettings); this.mappingSource = source; this.mapperMetrics = mapperMetrics; this.indexVersion = version; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 9ddb6f0d496a..0fc14d4dbeeb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -27,6 +27,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParseException; @@ -43,6 +44,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.BiFunction; import java.util.function.Consumer; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT; @@ -60,10 +62,22 @@ public final class DocumentParser { private final XContentParserConfiguration parserConfiguration; private final MappingParserContext mappingParserContext; + private 
final BiFunction listenersFactory; DocumentParser(XContentParserConfiguration parserConfiguration, MappingParserContext mappingParserContext) { this.mappingParserContext = mappingParserContext; this.parserConfiguration = parserConfiguration; + this.listenersFactory = this::createDefaultListeners; + } + + DocumentParser( + XContentParserConfiguration parserConfiguration, + MappingParserContext mappingParserContext, + BiFunction listenersFactory + ) { + this.mappingParserContext = mappingParserContext; + this.parserConfiguration = parserConfiguration; + this.listenersFactory = listenersFactory; } /** @@ -81,13 +95,11 @@ public final class DocumentParser { final RootDocumentParserContext context; final XContentType xContentType = source.getXContentType(); + Listeners listeners = listenersFactory.apply(mappingLookup, xContentType); + XContentMeteringParserDecorator meteringParserDecorator = source.getMeteringParserDecorator(); - try ( - XContentParser parser = meteringParserDecorator.decorate( - XContentHelper.createParser(parserConfiguration, source.source(), xContentType) - ) - ) { - context = new RootDocumentParserContext(mappingLookup, mappingParserContext, source, parser); + try (XContentParser parser = meteringParserDecorator.decorate(createParser(source, xContentType, listeners))) { + context = new RootDocumentParserContext(mappingLookup, mappingParserContext, source, listeners, parser); validateStart(context.parser()); MetadataFieldMapper[] metadataFieldsMappers = mappingLookup.getMapping().getSortedMetadataMappers(); internalParseDocument(metadataFieldsMappers, context); @@ -121,6 +133,152 @@ public final class DocumentParser { }; } + private Listeners createDefaultListeners(MappingLookup mappingLookup, XContentType xContentType) { + if (mappingLookup.isSourceSynthetic() && mappingParserContext.getIndexSettings().getSkipIgnoredSourceWrite() == false) { + return new Listeners.Single(new SyntheticSourceDocumentParserListener(mappingLookup, xContentType)); + } + + return Listeners.NOOP; + } + + private XContentParser createParser(SourceToParse sourceToParse, XContentType xContentType, Listeners listeners) throws IOException { + XContentParser plainParser = XContentHelper.createParser(parserConfiguration, sourceToParse.source(), xContentType); + + if (listeners.isNoop()) { + return plainParser; + } + + return new ListenerAwareXContentParser(plainParser, listeners); + } + + static class ListenerAwareXContentParser extends FilterXContentParserWrapper { + private final Listeners listeners; + + ListenerAwareXContentParser(XContentParser parser, Listeners listeners) { + super(parser); + this.listeners = listeners; + } + + @Override + public Token nextToken() throws IOException { + var token = delegate().nextToken(); + + if (listeners.anyActive()) { + var listenerToken = DocumentParserListener.Token.current(delegate()); + listeners.publish(listenerToken); + } + + return token; + } + + @Override + public void skipChildren() throws IOException { + // We cannot use the "native" implementation because some listeners may want to see + // skipped parts.
+ Token token = currentToken(); + if (token != Token.START_OBJECT && token != Token.START_ARRAY) { + return; + } + + int depth = 0; + while (token != null) { + if (token == Token.START_OBJECT || token == Token.START_ARRAY) { + depth += 1; + } + if (token == Token.END_OBJECT || token == Token.END_ARRAY) { + depth -= 1; + if (depth == 0) { + return; + } + } + + token = nextToken(); + } + } + } + + /** + * Encapsulates listeners that are subscribed to this document parser. This makes it possible to generalize logic without knowing + * how many listeners are present (or whether any are present at all). + */ + public interface Listeners { + void publish(DocumentParserListener.Event event, DocumentParserContext context) throws IOException; + + void publish(DocumentParserListener.Token token) throws IOException; + + DocumentParserListener.Output finish(); + + boolean isNoop(); + + boolean anyActive(); + + /** + * No listeners are present. + */ + Listeners NOOP = new Listeners() { + @Override + public void publish(DocumentParserListener.Event event, DocumentParserContext context) {} + + @Override + public void publish(DocumentParserListener.Token token) { + + } + + @Override + public DocumentParserListener.Output finish() { + return DocumentParserListener.Output.empty(); + } + + @Override + public boolean isNoop() { + return true; + } + + @Override + public boolean anyActive() { + return false; + } + }; + + /** + * One or more listeners are present. + */ + class Single implements Listeners { + private final DocumentParserListener listener; + + public Single(DocumentParserListener listener) { + this.listener = listener; + } + + @Override + public void publish(DocumentParserListener.Event event, DocumentParserContext context) throws IOException { + listener.consume(event); + } + + @Override + public void publish(DocumentParserListener.Token token) throws IOException { + if (listener.isActive()) { + listener.consume(token); + } + } + + @Override + public DocumentParserListener.Output finish() { + return listener.finish(); + } + + @Override + public boolean isNoop() { + return false; + } + + @Override + public boolean anyActive() { + return listener.isActive(); + } + } + } + private void internalParseDocument(MetadataFieldMapper[] metadataFieldsMappers, DocumentParserContext context) { try { final boolean emptyDoc = isEmptyDoc(context.root(), context.parser()); @@ -129,26 +287,19 @@ public final class DocumentParser { metadataMapper.preParse(context); } + context.publishEvent(new DocumentParserListener.Event.DocumentStart(context.root(), context.doc())); + if (context.root().isEnabled() == false) { // entire type is disabled - if (context.canAddIgnoredField()) { - context.addIgnoredField( - new IgnoredSourceFieldMapper.NameValue( - MapperService.SINGLE_MAPPING_NAME, - 0, - context.encodeFlattenedToken(), - context.doc() - ) - ); - } else { - context.parser().skipChildren(); - } + context.parser().skipChildren(); } else if (emptyDoc == false) { parseObjectOrNested(context); } executeIndexTimeScripts(context); + context.finishListeners(); + for (MetadataFieldMapper metadataMapper : metadataFieldsMappers) { metadataMapper.postParse(context); } @@ -274,22 +425,12 @@ } static void parseObjectOrNested(DocumentParserContext context) throws IOException { + XContentParser parser = context.parser(); String currentFieldName = parser.currentName(); if (context.parent().isEnabled() == false) { // entire type is disabled - if (context.canAddIgnoredField()) { - context.addIgnoredField( - new
IgnoredSourceFieldMapper.NameValue( - context.parent().fullPath(), - context.parent().fullPath().lastIndexOf(context.parent().leafName()), - context.encodeFlattenedToken(), - context.doc() - ) - ); - } else { - parser.skipChildren(); - } + parser.skipChildren(); return; } XContentParser.Token token = parser.currentToken(); @@ -302,22 +443,6 @@ public final class DocumentParser { throwOnConcreteValue(context.parent(), currentFieldName, context); } - var sourceKeepMode = getSourceKeepMode(context, context.parent().sourceKeepMode()); - if (context.canAddIgnoredField() - && (sourceKeepMode == Mapper.SourceKeepMode.ALL - || (sourceKeepMode == Mapper.SourceKeepMode.ARRAYS && context.inArrayScope()))) { - context = context.addIgnoredFieldFromContext( - new IgnoredSourceFieldMapper.NameValue( - context.parent().fullPath(), - context.parent().fullPath().lastIndexOf(context.parent().leafName()), - null, - context.doc() - ) - ); - token = context.parser().currentToken(); - parser = context.parser(); - } - if (context.parent().isNested()) { // Handle a nested object that doesn't contain an array. Arrays are handled in #parseNonDynamicArray. context = context.createNestedContext((NestedObjectMapper) context.parent()); @@ -441,6 +566,9 @@ public final class DocumentParser { static void parseObjectOrField(DocumentParserContext context, Mapper mapper) throws IOException { if (mapper instanceof ObjectMapper objectMapper) { + context.publishEvent( + new DocumentParserListener.Event.ObjectStart(objectMapper, context.inArrayScope(), context.parent(), context.doc()) + ); parseObjectOrNested(context.createChildContext(objectMapper)); } else if (mapper instanceof FieldMapper fieldMapper) { if (shouldFlattenObject(context, fieldMapper)) { @@ -450,12 +578,19 @@ public final class DocumentParser { parseObjectOrNested(context.createFlattenContext(currentFieldName)); context.path().add(currentFieldName); } else { - var sourceKeepMode = getSourceKeepMode(context, fieldMapper.sourceKeepMode()); + context.publishEvent( + new DocumentParserListener.Event.LeafValue( + fieldMapper, + context.inArrayScope(), + context.parent(), + context.doc(), + context.parser() + ) + ); + if (context.canAddIgnoredField() - && (fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK - || sourceKeepMode == Mapper.SourceKeepMode.ALL - || (sourceKeepMode == Mapper.SourceKeepMode.ARRAYS && context.inArrayScope()) - || (context.isWithinCopyTo() == false && context.isCopyToDestinationField(mapper.fullPath())))) { + && context.isWithinCopyTo() == false + && context.isCopyToDestinationField(mapper.fullPath())) { context = context.addIgnoredFieldFromContext( IgnoredSourceFieldMapper.NameValue.fromContext(context, fieldMapper.fullPath(), null) ); @@ -685,40 +820,17 @@ public final class DocumentParser { ) throws IOException { String fullPath = context.path().pathAsText(arrayFieldName); + if (mapper instanceof ObjectMapper objectMapper) { + context.publishEvent(new DocumentParserListener.Event.ObjectArrayStart(objectMapper, context.parent(), context.doc())); + } else if (mapper instanceof FieldMapper fieldMapper) { + context.publishEvent(new DocumentParserListener.Event.LeafArrayStart(fieldMapper, context.parent(), context.doc())); + } + // Check if we need to record the array source. This only applies to synthetic source. 
- boolean canRemoveSingleLeafElement = false; if (context.canAddIgnoredField()) { - Mapper.SourceKeepMode mode = Mapper.SourceKeepMode.NONE; - boolean objectWithFallbackSyntheticSource = false; - if (mapper instanceof ObjectMapper objectMapper) { - mode = getSourceKeepMode(context, objectMapper.sourceKeepMode()); - objectWithFallbackSyntheticSource = mode == Mapper.SourceKeepMode.ALL - || (mode == Mapper.SourceKeepMode.ARRAYS && objectMapper instanceof NestedObjectMapper == false); - } - boolean fieldWithFallbackSyntheticSource = false; - boolean fieldWithStoredArraySource = false; - if (mapper instanceof FieldMapper fieldMapper) { - mode = getSourceKeepMode(context, fieldMapper.sourceKeepMode()); - fieldWithFallbackSyntheticSource = fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK; - fieldWithStoredArraySource = mode != Mapper.SourceKeepMode.NONE; - } boolean copyToFieldHasValuesInDocument = context.isWithinCopyTo() == false && context.isCopyToDestinationField(fullPath); - - canRemoveSingleLeafElement = mapper instanceof FieldMapper - && mode == Mapper.SourceKeepMode.ARRAYS - && fieldWithFallbackSyntheticSource == false - && copyToFieldHasValuesInDocument == false; - - if (objectWithFallbackSyntheticSource - || fieldWithFallbackSyntheticSource - || fieldWithStoredArraySource - || copyToFieldHasValuesInDocument) { + if (copyToFieldHasValuesInDocument) { context = context.addIgnoredFieldFromContext(IgnoredSourceFieldMapper.NameValue.fromContext(context, fullPath, null)); - } else if (mapper instanceof ObjectMapper objectMapper && (objectMapper.isEnabled() == false)) { - // No need to call #addIgnoredFieldFromContext as both singleton and array instances of this object - // get tracked through ignored source. - context.addIgnoredField(IgnoredSourceFieldMapper.NameValue.fromContext(context, fullPath, context.encodeFlattenedToken())); - return; } } @@ -729,28 +841,20 @@ public final class DocumentParser { XContentParser parser = context.parser(); XContentParser.Token token; - int elements = 0; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { - elements = Integer.MAX_VALUE; parseObject(context, lastFieldName); } else if (token == XContentParser.Token.START_ARRAY) { - elements = Integer.MAX_VALUE; parseArray(context, lastFieldName); } else if (token == XContentParser.Token.VALUE_NULL) { - elements++; parseNullValue(context, lastFieldName); } else if (token == null) { throwEOFOnParseArray(arrayFieldName, context); } else { assert token.isValue(); - elements++; parseValue(context, lastFieldName); } } - if (elements <= 1 && canRemoveSingleLeafElement) { - context.removeLastIgnoredField(fullPath); - } postProcessDynamicArrayMapping(context, lastFieldName); } @@ -1049,12 +1153,14 @@ public final class DocumentParser { MappingLookup mappingLookup, MappingParserContext mappingParserContext, SourceToParse source, + Listeners listeners, XContentParser parser ) throws IOException { super( mappingLookup, mappingParserContext, source, + listeners, mappingLookup.getMapping().getRoot(), ObjectMapper.Dynamic.getRootDynamic(mappingLookup) ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 51e4e9f4c1b5..625f3a3d19af 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -117,6 +117,7 @@ public abstract class DocumentParserContext { private final MappingLookup mappingLookup; private final MappingParserContext mappingParserContext; private final SourceToParse sourceToParse; + private final DocumentParser.Listeners listeners; private final Set ignoredFields; private final List ignoredFieldValues; @@ -149,6 +150,7 @@ public abstract class DocumentParserContext { MappingLookup mappingLookup, MappingParserContext mappingParserContext, SourceToParse sourceToParse, + DocumentParser.Listeners listeners, Set ignoreFields, List ignoredFieldValues, Scope currentScope, @@ -169,6 +171,7 @@ public abstract class DocumentParserContext { this.mappingLookup = mappingLookup; this.mappingParserContext = mappingParserContext; this.sourceToParse = sourceToParse; + this.listeners = listeners; this.ignoredFields = ignoreFields; this.ignoredFieldValues = ignoredFieldValues; this.currentScope = currentScope; @@ -192,6 +195,7 @@ public abstract class DocumentParserContext { in.mappingLookup, in.mappingParserContext, in.sourceToParse, + in.listeners, in.ignoredFields, in.ignoredFieldValues, in.currentScope, @@ -215,6 +219,7 @@ public abstract class DocumentParserContext { MappingLookup mappingLookup, MappingParserContext mappingParserContext, SourceToParse source, + DocumentParser.Listeners listeners, ObjectMapper parent, ObjectMapper.Dynamic dynamic ) { @@ -222,6 +227,7 @@ public abstract class DocumentParserContext { mappingLookup, mappingParserContext, source, + listeners, new HashSet<>(), new ArrayList<>(), Scope.SINGLETON, @@ -301,12 +307,6 @@ public abstract class DocumentParserContext { } } - final void removeLastIgnoredField(String name) { - if (ignoredFieldValues.isEmpty() == false && ignoredFieldValues.getLast().name().equals(name)) { - ignoredFieldValues.removeLast(); - } - } - /** * Return the collection of values for fields that have been ignored so far. */ @@ -470,6 +470,15 @@ public abstract class DocumentParserContext { return copyToFields; } + public void publishEvent(DocumentParserListener.Event event) throws IOException { + listeners.publish(event, this); + } + + public void finishListeners() { + var output = listeners.finish(); + ignoredFieldValues.addAll(output.ignoredSourceValues()); + } + /** * Add a new mapper dynamically created while parsing. * @@ -659,7 +668,7 @@ public abstract class DocumentParserContext { /** * Return a new context that will be used within a nested document. */ - public final DocumentParserContext createNestedContext(NestedObjectMapper nestedMapper) { + public final DocumentParserContext createNestedContext(NestedObjectMapper nestedMapper) throws IOException { if (isWithinCopyTo()) { // nested context will already have been set up for copy_to fields return this; @@ -688,7 +697,7 @@ public abstract class DocumentParserContext { /** * Return a new context that has the provided document as the current document. 
*/ - public final DocumentParserContext switchDoc(final LuceneDocument document) { + public final DocumentParserContext switchDoc(final LuceneDocument document) throws IOException { DocumentParserContext cloned = new Wrapper(this.parent, this) { @Override public LuceneDocument doc() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserListener.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserListener.java new file mode 100644 index 000000000000..7ea902235da1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserListener.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; + +/** + * Component that listens to events produced by {@link DocumentParser} in order to implement some parsing-related logic. + * It allows keeping such logic separate from the document parsing workflow, which is complex by itself. + */ +public interface DocumentParserListener { + /** + * Specifies if this listener is currently actively consuming tokens. + * This is used to avoid doing unnecessary work. + * @return {@code true} if this listener currently needs to receive tokens, {@code false} otherwise + */ + boolean isActive(); + + /** + * Sends a {@link Token} to this listener. + * This is only called when {@link #isActive()} returns true, since it involves the somewhat costly operation of creating a token + * instance, and tokens are low level, meaning this is called very frequently. + * @param token the token that was just read from the parser + * @throws IOException in case of failure reading the underlying parser state + */ + void consume(Token token) throws IOException; + + /** + * Sends an {@link Event} to this listener. Unlike tokens, events are always sent to a listener. + * Based on the event, the listener can decide to change the return value of {@link #isActive()}. + * @param event the event that was just encountered during parsing + * @throws IOException in case of failure reading the underlying parser state + */ + void consume(Event event) throws IOException; + + Output finish(); + + /** + * A lower level notification passed from the parser to a listener. + * This token is closely related to {@link org.elasticsearch.xcontent.XContentParser.Token} and is used for use cases like + * preserving the exact structure of the parsed document.
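+ * <p> + * As an illustration (hypothetical document, not part of the contract), parsing {@code {"foo": [1, 2]}} produces the token + * stream {@code StartObject, FieldName(foo), StartArray, ValueToken(1), ValueToken(2), EndArray, EndObject}.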
+ */ + sealed interface Token permits Token.FieldName, Token.StartObject, Token.EndObject, Token.StartArray, Token.EndArray, + Token.StringAsCharArrayValue, Token.NullValue, Token.ValueToken { + + record FieldName(String name) implements Token {} + + record StartObject() implements Token {} + + record EndObject() implements Token {} + + record StartArray() implements Token {} + + record EndArray() implements Token {} + + record NullValue() implements Token {} + + final class StringAsCharArrayValue implements Token { + private final XContentParser parser; + + public StringAsCharArrayValue(XContentParser parser) { + this.parser = parser; + } + + char[] buffer() throws IOException { + return parser.textCharacters(); + } + + int length() throws IOException { + return parser.textLength(); + } + + int offset() throws IOException { + return parser.textOffset(); + } + } + + non-sealed interface ValueToken extends Token { + T value() throws IOException; + } + + Token START_OBJECT = new StartObject(); + Token END_OBJECT = new EndObject(); + Token START_ARRAY = new StartArray(); + Token END_ARRAY = new EndArray(); + + static Token current(XContentParser parser) throws IOException { + return switch (parser.currentToken()) { + case START_OBJECT -> Token.START_OBJECT; + case END_OBJECT -> Token.END_OBJECT; + case START_ARRAY -> Token.START_ARRAY; + case END_ARRAY -> Token.END_ARRAY; + case FIELD_NAME -> new FieldName(parser.currentName()); + case VALUE_STRING -> { + if (parser.hasTextCharacters()) { + yield new StringAsCharArrayValue(parser); + } else { + yield (ValueToken) parser::text; + } + } + case VALUE_NUMBER -> switch (parser.numberType()) { + case INT -> (ValueToken) parser::intValue; + case BIG_INTEGER -> (ValueToken) () -> (BigInteger) parser.numberValue(); + case LONG -> (ValueToken) parser::longValue; + case FLOAT -> (ValueToken) parser::floatValue; + case DOUBLE -> (ValueToken) parser::doubleValue; + case BIG_DECIMAL -> { + // See @XContentGenerator#copyCurrentEvent + assert false : "missing xcontent number handling for type [" + parser.numberType() + "]"; + yield null; + } + }; + case VALUE_BOOLEAN -> (ValueToken) parser::booleanValue; + case VALUE_EMBEDDED_OBJECT -> (ValueToken) parser::binaryValue; + case VALUE_NULL -> new NullValue(); + case null -> null; + }; + } + } + + /** + * High level notification passed from the parser to a listener. + * Events represent meaningful logical operations during parsing and contain relevant context for the operation + * like a mapper being used. + * A listener can use events and/or tokens depending on the use case. For example, it can wait for a specific event and then switch + * to consuming tokens instead. 
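+ * <p> + * A minimal sketch of that pattern (illustrative only, not part of this interface): + * <pre>{@code + * class RecordingListener implements DocumentParserListener { + * private boolean active = false; + * public boolean isActive() { return active; } + * public void consume(Event event) { + * // start consuming tokens once the object of interest starts + * if (event instanceof Event.ObjectStart objectStart) { + * active = "path.to.object".equals(objectStart.objectMapper().fullPath()); + * } + * } + * public void consume(Token token) { + * // only invoked while isActive() returns true; record the token here + * } + * public Output finish() { + * return Output.empty(); + * } + * } + * }</pre>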
+ */ + sealed interface Event permits Event.DocumentStart, Event.ObjectStart, Event.ObjectArrayStart, Event.LeafArrayStart, Event.LeafValue { + record DocumentStart(RootObjectMapper rootObjectMapper, LuceneDocument document) implements Event {} + + record ObjectStart(ObjectMapper objectMapper, boolean insideObjectArray, ObjectMapper parentMapper, LuceneDocument document) + implements + Event {} + + record ObjectArrayStart(ObjectMapper objectMapper, ObjectMapper parentMapper, LuceneDocument document) implements Event {} + + final class LeafValue implements Event { + private final FieldMapper fieldMapper; + private final boolean insideObjectArray; + private final ObjectMapper parentMapper; + private final LuceneDocument document; + private final XContentParser parser; + private final boolean isObjectOrArray; + private final boolean isArray; + + public LeafValue( + FieldMapper fieldMapper, + boolean insideObjectArray, + ObjectMapper parentMapper, + LuceneDocument document, + XContentParser parser + ) { + this.fieldMapper = fieldMapper; + this.insideObjectArray = insideObjectArray; + this.parentMapper = parentMapper; + this.document = document; + this.parser = parser; + this.isObjectOrArray = parser.currentToken().isValue() == false && parser.currentToken() != XContentParser.Token.VALUE_NULL; + this.isArray = parser.currentToken() == XContentParser.Token.START_ARRAY; + } + + public FieldMapper fieldMapper() { + return fieldMapper; + } + + public boolean insideObjectArray() { + return insideObjectArray; + } + + public ObjectMapper parentMapper() { + return parentMapper; + } + + public LuceneDocument document() { + return document; + } + + /** + * @return whether a value is an object or an array vs a single value like a long. + */ + boolean isContainer() { + return isObjectOrArray; + } + + boolean isArray() { + return isArray; + } + + BytesRef encodeValue() throws IOException { + assert isContainer() == false : "Objects should not be handled with direct encoding"; + + return XContentDataHelper.encodeToken(parser); + } + } + + record LeafArrayStart(FieldMapper fieldMapper, ObjectMapper parentMapper, LuceneDocument document) implements Event {} + } + + record Output(List ignoredSourceValues) { + static Output empty() { + return new Output(new ArrayList<>()); + } + + void merge(Output part) { + this.ignoredSourceValues.addAll(part.ignoredSourceValues); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 0824e5381ff6..4ebcbbf8a068 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -228,7 +228,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper new Builder( n, c.scriptCompiler(), diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java index 249ef5004e59..f7c6eef7dfd4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java @@ -12,7 +12,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.inference.InferenceService; -import java.util.Map; import java.util.Set; /** @@ -26,12 +25,4 @@ public interface 
InferenceFieldMapper { * @param sourcePaths The source path that populates the input for the field (before inference) */ InferenceFieldMetadata getMetadata(Set sourcePaths); - - /** - * Get the field's original value (i.e. the value the user specified) from the provided source. - * - * @param sourceAsMap The source as a map - * @return The field's original value, or {@code null} if none was provided - */ - Object getOriginalValue(Map sourceAsMap); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java new file mode 100644 index 000000000000..6051aafb9f74 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.query.SearchExecutionContext; + +import java.util.Map; +import java.util.function.Function; + +/** + * An abstract {@link MetadataFieldMapper} used as a placeholder for implementation + * in the inference module. It is required by {@link SourceFieldMapper} to identify + * the field name for removal from _source. + */ +public abstract class InferenceMetadataFieldsMapper extends MetadataFieldMapper { + /** + * Internal index setting to control the format used for semantic text fields. + * Determines whether to use the legacy format (default: true). + * This setting is immutable and can only be defined at index creation + * to ensure the internal format remains consistent throughout the index's lifecycle. + */ + public static final Setting USE_LEGACY_SEMANTIC_TEXT_FORMAT = Setting.boolSetting( + "index.mapping.semantic_text.use_legacy_format", + // don't use the new format by default yet + true, + Setting.Property.Final, + Setting.Property.IndexScope, + Setting.Property.InternalIndex + ); + + public static final String NAME = "_inference_fields"; + public static final String CONTENT_TYPE = "_inference_fields"; + + protected InferenceMetadataFieldsMapper(MappedFieldType inferenceFieldType) { + super(inferenceFieldType); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + public InferenceMetadataFieldType fieldType() { + return (InferenceMetadataFieldType) super.fieldType(); + } + + public abstract static class InferenceMetadataFieldType extends MappedFieldType { + public InferenceMetadataFieldType() { + super(NAME, false, false, false, TextSearchInfo.NONE, Map.of()); + } + + /** + * Returns a {@link ValueFetcher} without requiring the construction of a full {@link SearchExecutionContext}. 
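+ * <p> + * A hypothetical call site (names illustrative): {@code fieldType().valueFetcher(mappingLookup, bitSetCache, searcher)}, where the + * arguments come from whatever fetch or search context the caller already has at hand.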
+ */ + public abstract ValueFetcher valueFetcher( + MappingLookup mappingLookup, + Function bitSetCache, + IndexSearcher searcher + ); + } + + /** + * Checks if the {@link InferenceMetadataFieldsMapper} is enabled for the given {@link Settings}. + * + * This indicates whether the new format for semantic text fields is active. + * The new format is enabled if: + * 1. The index version is on or after {@link IndexVersions#INFERENCE_METADATA_FIELDS}, and + * 2. The legacy semantic text format is disabled. + * + * @param settings the index settings to evaluate + * @return {@code true} if the new format is enabled; {@code false} otherwise + */ + public static boolean isEnabled(Settings settings) { + return IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).onOrAfter(IndexVersions.INFERENCE_METADATA_FIELDS) + && USE_LEGACY_SEMANTIC_TEXT_FORMAT.get(settings) == false; + } + + /** + * Checks if the {@link InferenceMetadataFieldsMapper} is enabled based on the provided {@link Mapping}. + * + * This indicates whether the new format for semantic text fields is active by verifying the existence + * of the {@link InferenceMetadataFieldsMapper} in the mapping's metadata. + * + * @param mappingLookup the mapping to evaluate + * @return {@code true} if the {@link InferenceMetadataFieldsMapper} is present; {@code false} otherwise + */ + public static boolean isEnabled(MappingLookup mappingLookup) { + return mappingLookup != null + && mappingLookup.getMapping() + .getMetadataMapperByName(InferenceMetadataFieldsMapper.NAME) instanceof InferenceMetadataFieldsMapper; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index e97078922505..fb9448978e3d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -104,7 +104,7 @@ public final class KeywordFieldMapper extends FieldMapper { FIELD_TYPE = freezeAndDeduplicateFieldType(ft); } - public static TextSearchInfo TEXT_SEARCH_INFO = new TextSearchInfo( + public static final TextSearchInfo TEXT_SEARCH_INFO = new TextSearchInfo( FIELD_TYPE, null, Lucene.KEYWORD_ANALYZER, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java index fa87dd449dba..d107957179d6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java @@ -14,5 +14,5 @@ package org.elasticsearch.index.mapper; * Main purpose of this class is to avoid verbosity of passing individual metric instances around. 
*/ public record MapperMetrics(SourceFieldMetrics sourceFieldMetrics) { - public static MapperMetrics NOOP = new MapperMetrics(SourceFieldMetrics.NOOP); + public static final MapperMetrics NOOP = new MapperMetrics(SourceFieldMetrics.NOOP); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 1673b1719d8b..689cbdcdbda7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -171,6 +171,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { private final IndexVersion indexVersionCreated; private final MapperRegistry mapperRegistry; private final Supplier mappingParserContextSupplier; + private final Function bitSetProducer; private final MapperMetrics mapperMetrics; private volatile DocumentMapper mapper; @@ -245,6 +246,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { this::getMetadataMappers, this::resolveDocumentType ); + this.bitSetProducer = bitSetProducer; this.mapperMetrics = mapperMetrics; } @@ -590,6 +592,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { mapping, mappingSource, indexVersionCreated, + indexSettings, mapperMetrics, index().getName() ); @@ -826,6 +829,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return mapperRegistry; } + public Function getBitSetProducer() { + return bitSetProducer; + } + public MapperMetrics getMapperMetrics() { return mapperMetrics; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index 1278ebf0a393..907f7265e98a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -106,7 +106,7 @@ public final class Mapping implements ToXContentFragment { return (T) metadataMappersMap.get(clazz); } - MetadataFieldMapper getMetadataMapperByName(String mapperName) { + public MetadataFieldMapper getMetadataMapperByName(String mapperName) { return metadataMappersByName.get(mapperName); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index ed02e5fc2961..80dfa37a0ee0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -43,7 +43,7 @@ public final class MappingLookup { * A lookup representing an empty mapping. It can be used to look up fields, although it won't hold any, but it does not * hold a valid {@link DocumentParser}, {@link IndexSettings} or {@link IndexAnalyzers}. */ - public static final MappingLookup EMPTY = fromMappers(Mapping.EMPTY, List.of(), List.of()); + public static final MappingLookup EMPTY = fromMappers(Mapping.EMPTY, List.of(), List.of(), null); private final CacheKey cacheKey = new CacheKey(); @@ -59,14 +59,16 @@ public final class MappingLookup { private final List indexTimeScriptMappers; private final Mapping mapping; private final int totalFieldsCount; + private final CustomSyntheticSourceFieldLookup customSyntheticSourceFieldLookup; /** * Creates a new {@link MappingLookup} instance by parsing the provided mapping and extracting its field definitions. 
* * @param mapping the mapping source + * @param indexSettings index settings * @return the newly created lookup instance */ - public static MappingLookup fromMapping(Mapping mapping) { + public static MappingLookup fromMapping(Mapping mapping, IndexSettings indexSettings) { List newObjectMappers = new ArrayList<>(); List newFieldMappers = new ArrayList<>(); List newFieldAliasMappers = new ArrayList<>(); @@ -79,7 +81,7 @@ public final class MappingLookup { for (Mapper child : mapping.getRoot()) { collect(child, newObjectMappers, newFieldMappers, newFieldAliasMappers, newPassThroughMappers); } - return new MappingLookup(mapping, newFieldMappers, newObjectMappers, newFieldAliasMappers, newPassThroughMappers); + return new MappingLookup(mapping, newFieldMappers, newObjectMappers, newFieldAliasMappers, newPassThroughMappers, indexSettings); } private static void collect( @@ -120,6 +122,7 @@ public final class MappingLookup { * @param objectMappers the object mappers * @param aliasMappers the field alias mappers * @param passThroughMappers the pass-through mappers + * @param indexSettings index settings * @return the newly created lookup instance */ public static MappingLookup fromMappers( @@ -127,13 +130,19 @@ public final class MappingLookup { Collection mappers, Collection objectMappers, Collection aliasMappers, - Collection passThroughMappers + Collection passThroughMappers, + @Nullable IndexSettings indexSettings ) { - return new MappingLookup(mapping, mappers, objectMappers, aliasMappers, passThroughMappers); + return new MappingLookup(mapping, mappers, objectMappers, aliasMappers, passThroughMappers, indexSettings); } - public static MappingLookup fromMappers(Mapping mapping, Collection mappers, Collection objectMappers) { - return new MappingLookup(mapping, mappers, objectMappers, List.of(), List.of()); + public static MappingLookup fromMappers( + Mapping mapping, + Collection mappers, + Collection objectMappers, + @Nullable IndexSettings indexSettings + ) { + return new MappingLookup(mapping, mappers, objectMappers, List.of(), List.of(), indexSettings); } private MappingLookup( @@ -141,7 +150,8 @@ public final class MappingLookup { Collection mappers, Collection objectMappers, Collection aliasMappers, - Collection passThroughMappers + Collection passThroughMappers, + @Nullable IndexSettings indexSettings ) { this.totalFieldsCount = mapping.getRoot().getTotalFieldsCount(); this.mapping = mapping; @@ -207,6 +217,7 @@ public final class MappingLookup { this.runtimeFieldMappersCount = runtimeFields.size(); this.indexAnalyzersMap = Map.copyOf(indexAnalyzersMap); this.indexTimeScriptMappers = List.copyOf(indexTimeScriptMappers); + this.customSyntheticSourceFieldLookup = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, isSourceSynthetic()); runtimeFields.stream().flatMap(RuntimeField::asMappedFieldTypes).map(MappedFieldType::name).forEach(this::validateDoesNotShadow); assert assertMapperNamesInterned(this.fieldMappers, this.objectMappers); @@ -543,4 +554,8 @@ public final class MappingLookup { throw new MapperParsingException("Field [" + name + "] attempted to shadow a time_series_metric"); } } + + public CustomSyntheticSourceFieldLookup getCustomSyntheticSourceFieldLookup() { + return customSyntheticSourceFieldLookup; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 03818f7b5c83..08b0fb32a10c 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -245,6 +245,10 @@ public class NestedObjectMapper extends ObjectMapper { this.indexSettings = indexSettings; } + public IndexSettings indexSettings() { + return indexSettings; + } + public Query parentTypeFilter() { return parentTypeFilter; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 46b70193ba0e..6a107dbaa9e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -855,7 +855,7 @@ public class ObjectMapper extends Mapper { } - ObjectMapper findParentMapper(String leafFieldPath) { + public ObjectMapper findParentMapper(String leafFieldPath) { var pathComponents = leafFieldPath.split("\\."); int startPathComponent = 0; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 5f1ba6f0ab2a..f2823ab06fe3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -405,7 +405,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { public void preParse(DocumentParserContext context) throws IOException { BytesReference originalSource = context.sourceToParse().source(); XContentType contentType = context.sourceToParse().getXContentType(); - final BytesReference adaptedSource = applyFilters(originalSource, contentType); + final BytesReference adaptedSource = applyFilters(context.mappingLookup(), originalSource, contentType); if (adaptedSource != null) { final BytesRef ref = adaptedSource.toBytesRef(); @@ -433,13 +433,32 @@ public class SourceFieldMapper extends MetadataFieldMapper { } @Nullable - public BytesReference applyFilters(@Nullable BytesReference originalSource, @Nullable XContentType contentType) throws IOException { - if (stored() == false) { + public BytesReference applyFilters( + @Nullable MappingLookup mappingLookup, + @Nullable BytesReference originalSource, + @Nullable XContentType contentType + ) throws IOException { + if (stored() == false || originalSource == null) { return null; } - if (originalSource != null && sourceFilter != null) { + var modSourceFilter = sourceFilter; + if (mappingLookup != null + && InferenceMetadataFieldsMapper.isEnabled(mappingLookup) + && mappingLookup.inferenceFields().isEmpty() == false) { + /** + * Removes {@link InferenceMetadataFieldsMapper} content from _source. + * This content is re-generated at query time (if requested) using stored fields and doc values. + */ + String[] modExcludes = new String[excludes != null ? 
excludes.length + 1 : 1]; + if (excludes != null) { + System.arraycopy(excludes, 0, modExcludes, 0, excludes.length); + } + modExcludes[modExcludes.length - 1] = InferenceMetadataFieldsMapper.NAME; + modSourceFilter = new SourceFilter(includes, modExcludes); + } + if (modSourceFilter != null) { // Percolate and tv APIs may not set the source and that is ok, because these APIs will not index any data - return Source.fromBytes(originalSource, contentType).filter(sourceFilter).internalSourceRef(); + return Source.fromBytes(originalSource, contentType).filter(modSourceFilter).internalSourceRef(); } else { return originalSource; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SyntheticSourceDocumentParserListener.java b/server/src/main/java/org/elasticsearch/index/mapper/SyntheticSourceDocumentParserListener.java new file mode 100644 index 000000000000..eabb11763557 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/SyntheticSourceDocumentParserListener.java @@ -0,0 +1,398 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Listens for document parsing events and stores an additional copy of source data when it is needed for synthetic _source. + *
* <p> + * Note that synthetic source logic for dynamic fields and fields involved in copy_to logic is still handled in {@link DocumentParser}. + */ +class SyntheticSourceDocumentParserListener implements DocumentParserListener { + private final CustomSyntheticSourceFieldLookup customSyntheticSourceFieldLookup; + private final XContentType xContentType; + + private final Map<LuceneDocument, Map<String, List<StoredValue>>> ignoredSourceValues; + + private State state; + + SyntheticSourceDocumentParserListener(MappingLookup mappingLookup, XContentType xContentType) { + this.customSyntheticSourceFieldLookup = mappingLookup.getCustomSyntheticSourceFieldLookup(); + this.xContentType = xContentType; + + this.ignoredSourceValues = new HashMap<>(); + this.state = new Tracking(); + } + + @Override + public boolean isActive() { + return state instanceof Storing; + } + + @Override + public void consume(Token token) throws IOException { + if (token == null) { + return; + } + + this.state = state.consume(token); + } + + @Override + public void consume(Event event) throws IOException { + if (event == null) { + return; + } + + this.state = state.consume(event); + } + + @Override + public Output finish() { + var values = new ArrayList<IgnoredSourceFieldMapper.NameValue>(); + + for (var fieldToValueMap : ignoredSourceValues.values()) { + for (var fieldValues : fieldToValueMap.values()) { + long singleElementArrays = 0; + long stashedValuesForSourceKeepArrays = 0; + + for (var fieldValue : fieldValues) { + if (fieldValue instanceof StoredValue.Array arr) { + // Arrays are stored to preserve the order of elements. + // If there is only a single element, order does not matter, and we can drop such data. + if (arr.length == 1 && arr.reason() == StoreReason.LEAF_STORED_ARRAY) { + singleElementArrays += 1; + } + } + if (fieldValue instanceof StoredValue.Singleton singleton) { + // Stashed values are values of fields that are inside object arrays and have synthetic_source_keep: "arrays". + // With the current logic, either all values of a field are stored in ignored source + // or none of them are. + // With object arrays the same field can be parsed multiple times (once for every object array entry) + // and it is possible that one of the values is an array. + // Due to the rule above, we need to proactively store all values of such fields because we may later discover + // that there is an array and we need to "switch" to ignored source usage. + // However, if we stored all values but no array ever appeared, the field will be correctly constructed + // using regular logic and therefore we can drop this data and save some space. + if (singleton.reason() == StoreReason.LEAF_VALUE_STASH_FOR_STORED_ARRAYS) { + stashedValuesForSourceKeepArrays += 1; + } + } + } + + // We skip the stored values only if all of them match one of the optimization criteria above, otherwise we add + // all of them to the resulting list. + if (singleElementArrays != fieldValues.size() && stashedValuesForSourceKeepArrays != fieldValues.size()) { + for (var storedValue : fieldValues) { + values.add(storedValue.nameValue()); + } + } + } + } + + return new Output(values); + } + + sealed interface StoredValue permits StoredValue.Array, StoredValue.Singleton { + IgnoredSourceFieldMapper.NameValue nameValue(); + + /** + * An array of values, stored e.g. due to synthetic_source_keep: "arrays". + */ + record Array(IgnoredSourceFieldMapper.NameValue nameValue, StoreReason reason, long length) implements StoredValue {} + + /** + * A single value.
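+ * This can be a single leaf value or a single object, as opposed to an array of values.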
+ */ + record Singleton(IgnoredSourceFieldMapper.NameValue nameValue, StoreReason reason) implements StoredValue {} + + } + + /** + * Reason for storing this value. + */ + enum StoreReason { + /** + * Leaf array that is stored due to "synthetic_source_keep": "arrays". + */ + LEAF_STORED_ARRAY, + + /** + * "Stashed" value needed to only in case there are mixed arrays and single values + * for this field. + * Can be dropped in some cases. + */ + LEAF_VALUE_STASH_FOR_STORED_ARRAYS, + + /** + * There is currently no need to distinguish other reasons. + */ + OTHER + } + + private void addIgnoredSourceValue(StoredValue storedValue, String fullPath, LuceneDocument luceneDocument) { + var values = ignoredSourceValues.computeIfAbsent(luceneDocument, ld -> new HashMap<>()) + .computeIfAbsent(fullPath, p -> new ArrayList<>()); + + values.add(storedValue); + } + + interface State { + State consume(Token token) throws IOException; + + State consume(Event event) throws IOException; + } + + class Storing implements State { + private final State returnState; + private final String fullPath; + private final ObjectMapper parentMapper; + private final StoreReason reason; + private final LuceneDocument document; + + private final XContentBuilder builder; + // Current object/array depth, needed to understand when the top-most object/arrays ends vs a nested one. + private int depth; + // If we are storing an array this is the length of the array. + private int length; + + Storing( + State returnState, + Token startingToken, + String fullPath, + ObjectMapper parentMapper, + StoreReason reason, + LuceneDocument document + ) throws IOException { + this.returnState = returnState; + this.fullPath = fullPath; + this.parentMapper = parentMapper; + this.reason = reason; + this.document = document; + + this.builder = XContentBuilder.builder(xContentType.xContent()); + + this.depth = 0; + this.length = 0; + + consume(startingToken); + } + + public State consume(Token token) throws IOException { + switch (token) { + case Token.StartObject startObject -> { + builder.startObject(); + if (depth == 1) { + length += 1; + } + depth += 1; + } + case Token.EndObject endObject -> { + builder.endObject(); + + if (processEndObjectOrArray(endObject)) { + return returnState; + } + } + case Token.StartArray startArray -> { + builder.startArray(); + depth += 1; + } + case Token.EndArray endArray -> { + builder.endArray(); + + if (processEndObjectOrArray(endArray)) { + return returnState; + } + } + case Token.FieldName fieldName -> builder.field(fieldName.name()); + case Token.StringAsCharArrayValue stringAsCharArrayValue -> { + if (depth == 1) { + length += 1; + } + builder.generator() + .writeString(stringAsCharArrayValue.buffer(), stringAsCharArrayValue.offset(), stringAsCharArrayValue.length()); + } + case Token.ValueToken valueToken -> { + if (depth == 1) { + length += 1; + } + builder.value(valueToken.value()); + } + case Token.NullValue nullValue -> { + if (depth == 1) { + length += 1; + } + builder.nullValue(); + } + case null -> { + } + } + + return this; + } + + public State consume(Event event) { + // We are currently storing something so events are not relevant. + return this; + } + + private boolean processEndObjectOrArray(Token token) throws IOException { + assert token instanceof Token.EndObject || token instanceof Token.EndArray + : "Unexpected token when storing ignored source value"; + + depth -= 1; + if (depth == 0) { + var parentOffset = parentMapper.isRoot() ? 
0 : parentMapper.fullPath().length() + 1; + var nameValue = new IgnoredSourceFieldMapper.NameValue( + fullPath, + parentOffset, + XContentDataHelper.encodeXContentBuilder(builder), + document + ); + var storedValue = token instanceof Token.EndObject + ? new StoredValue.Singleton(nameValue, reason) + : new StoredValue.Array(nameValue, reason, length); + + addIgnoredSourceValue(storedValue, fullPath, document); + + return true; + } + + return false; + } + } + + class Tracking implements State { + public State consume(Token token) throws IOException { + return this; + } + + public State consume(Event event) throws IOException { + switch (event) { + case Event.DocumentStart documentStart -> { + if (documentStart.rootObjectMapper().isEnabled() == false) { + return new Storing( + this, + Token.START_OBJECT, + documentStart.rootObjectMapper().fullPath(), + documentStart.rootObjectMapper(), + StoreReason.OTHER, + documentStart.document() + ); + } + } + case Event.ObjectStart objectStart -> { + var reason = customSyntheticSourceFieldLookup.getFieldsWithCustomSyntheticSourceHandling() + .get(objectStart.objectMapper().fullPath()); + if (reason == null) { + return this; + } + if (reason == CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS && objectStart.insideObjectArray() == false) { + return this; + } + + return new Storing( + this, + Token.START_OBJECT, + objectStart.objectMapper().fullPath(), + objectStart.parentMapper(), + StoreReason.OTHER, + objectStart.document() + ); + } + case Event.ObjectArrayStart objectArrayStart -> { + var reason = customSyntheticSourceFieldLookup.getFieldsWithCustomSyntheticSourceHandling() + .get(objectArrayStart.objectMapper().fullPath()); + if (reason == null) { + return this; + } + + return new Storing( + this, + Token.START_ARRAY, + objectArrayStart.objectMapper().fullPath(), + objectArrayStart.parentMapper(), + StoreReason.OTHER, + objectArrayStart.document() + ); + } + case Event.LeafValue leafValue -> { + var reason = customSyntheticSourceFieldLookup.getFieldsWithCustomSyntheticSourceHandling() + .get(leafValue.fieldMapper().fullPath()); + if (reason == null) { + return this; + } + if (reason == CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS && leafValue.insideObjectArray() == false) { + return this; + } + + var storeReason = reason == CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS + ? StoreReason.LEAF_VALUE_STASH_FOR_STORED_ARRAYS + : StoreReason.OTHER; + + if (leafValue.isContainer()) { + return new Storing( + this, + leafValue.isArray() ? Token.START_ARRAY : Token.START_OBJECT, + leafValue.fieldMapper().fullPath(), + leafValue.parentMapper(), + storeReason, + leafValue.document() + ); + } + + var parentMapper = leafValue.parentMapper(); + var parentOffset = parentMapper.isRoot() ? 0 : parentMapper.fullPath().length() + 1; + + var nameValue = new IgnoredSourceFieldMapper.NameValue( + leafValue.fieldMapper().fullPath(), + parentOffset, + leafValue.encodeValue(), + leafValue.document() + ); + addIgnoredSourceValue( + new StoredValue.Singleton(nameValue, storeReason), + leafValue.fieldMapper().fullPath(), + leafValue.document() + ); + } + case Event.LeafArrayStart leafArrayStart -> { + var reason = customSyntheticSourceFieldLookup.getFieldsWithCustomSyntheticSourceHandling() + .get(leafArrayStart.fieldMapper().fullPath()); + if (reason == null) { + return this; + } + + var storeReason = reason == CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS + ? 
StoreReason.LEAF_STORED_ARRAY + : StoreReason.OTHER; + return new Storing( + this, + Token.START_ARRAY, + leafArrayStart.fieldMapper().fullPath(), + leafArrayStart.parentMapper(), + storeReason, + leafArrayStart.document() + ); + } + } + + return this; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java index 6336ed831b78..e155249b9c38 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java @@ -49,7 +49,7 @@ public class TimeSeriesRoutingHashFieldMapper extends MetadataFieldMapper { public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesRoutingHashFieldMapper()); static final NodeFeature TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF = new NodeFeature("tsdb.ts_routing_hash_doc_value_parse_byte_ref"); - public static DocValueFormat TS_ROUTING_HASH_DOC_VALUE_FORMAT = TimeSeriesRoutingHashFieldType.DOC_VALUE_FORMAT; + public static final DocValueFormat TS_ROUTING_HASH_DOC_VALUE_FORMAT = TimeSeriesRoutingHashFieldType.DOC_VALUE_FORMAT; static final class TimeSeriesRoutingHashFieldType extends MappedFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 8c6e874ff577..3584523505ae 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -119,10 +119,11 @@ public class DenseVectorFieldMapper extends FieldMapper { public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0; public static final String CONTENT_TYPE = "dense_vector"; - public static short MAX_DIMS_COUNT = 4096; // maximum allowed number of dimensions - public static int MAX_DIMS_COUNT_BIT = 4096 * Byte.SIZE; // maximum allowed number of dimensions + public static final short MAX_DIMS_COUNT = 4096; // maximum allowed number of dimensions + public static final int MAX_DIMS_COUNT_BIT = 4096 * Byte.SIZE; // maximum allowed number of dimensions - public static short MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING = 128; // minimum number of dims for floats to be dynamically mapped to vector + public static final short MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING = 128; // minimum number of dims for floats to be dynamically mapped to + // vector public static final int MAGNITUDE_BYTES = 4; public static final int NUM_CANDS_OVERSAMPLE_LIMIT = 10_000; // Max oversample allowed for k and num_candidates diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index b4de73e3b62c..c73f4fef2336 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -78,6 +78,11 @@ public class SparseVectorFieldMapper extends FieldMapper { super(name); } + public Builder setStored(boolean value) { + stored.setValue(value); + return this; + } + @Override protected Parameter[] getParameters() { return new Parameter[] { stored, meta }; diff --git 
a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index b63cf232eca9..458ff598737f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -41,9 +41,9 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder FIELDS_SET = new HashSet<>(); + public static final Set FIELDS_SET = new HashSet<>(); static { FIELDS_SET.add(SLICE_ID_FIELD); FIELDS_SET.add(TOTAL_FIELD); @@ -774,7 +774,7 @@ public class BulkByScrollTask extends CancellableTask { private final Status status; private final Exception exception; - public static Set EXPECTED_EXCEPTION_FIELDS = new HashSet<>(); + public static final Set EXPECTED_EXCEPTION_FIELDS = new HashSet<>(); static { EXPECTED_EXCEPTION_FIELDS.add("type"); EXPECTED_EXCEPTION_FIELDS.add("reason"); diff --git a/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java index 5707abc220d3..54904323a25a 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java @@ -39,8 +39,8 @@ public class GlobalCheckpointSyncAction extends TransportReplicationAction< GlobalCheckpointSyncAction.Request, ReplicationResponse> { - public static String ACTION_NAME = "indices:admin/seq_no/global_checkpoint_sync"; - public static ActionType TYPE = new ActionType<>(ACTION_NAME); + public static final String ACTION_NAME = "indices:admin/seq_no/global_checkpoint_sync"; + public static final ActionType TYPE = new ActionType<>(ACTION_NAME); @Inject public GlobalCheckpointSyncAction( diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java index b6b6e709f4f1..76b8d814ee8f 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java @@ -118,7 +118,7 @@ public class RetentionLeases implements ToXContentFragment, Writeable { * Represents an empty an un-versioned retention lease collection. This is used when no retention lease collection is found in the * commit point */ - public static RetentionLeases EMPTY = new RetentionLeases(1, 0, Collections.emptyList()); + public static final RetentionLeases EMPTY = new RetentionLeases(1, 0, Collections.emptyList()); /** * Constructs a new retention lease collection with the specified version and underlying collection of retention leases. 
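Aside: the static final promotions in the surrounding hunks close a mutability hole; a non-final public static field is a global that any caller can reassign at runtime. A minimal sketch of the hazard, using hypothetical names:

    class Limits {
        // before: any code in the process could run "Limits.MAX_DIMS = 8192;",
        // silently changing validation behaviour for every other user of the constant
        static short MAX_DIMS = 4096;

        // after: reassignment fails at compile time and the JVM may constant-fold reads
        static final short MAX_DIMS_FIXED = 4096;
    }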
diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 75f38ed5f734..dd13f905b6cb 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -971,7 +971,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public record Location(long generation, long translogLocation, int size) implements Comparable { - public static Location EMPTY = new Location(0, 0, 0); + public static final Location EMPTY = new Location(0, 0, 0); @Override public String toString() { diff --git a/server/src/main/java/org/elasticsearch/indices/ExecutorNames.java b/server/src/main/java/org/elasticsearch/indices/ExecutorNames.java index e1bf11227592..2bfc642111d8 100644 --- a/server/src/main/java/org/elasticsearch/indices/ExecutorNames.java +++ b/server/src/main/java/org/elasticsearch/indices/ExecutorNames.java @@ -20,7 +20,7 @@ public record ExecutorNames(String threadPoolForGet, String threadPoolForSearch, /** * The thread pools for a typical system index. */ - public static ExecutorNames DEFAULT_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames( + public static final ExecutorNames DEFAULT_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames( ThreadPool.Names.SYSTEM_READ, ThreadPool.Names.SYSTEM_READ, ThreadPool.Names.SYSTEM_WRITE @@ -29,7 +29,7 @@ public record ExecutorNames(String threadPoolForGet, String threadPoolForSearch, /** * The thread pools for a typical system data stream. These are also the usual thread pools for non-system indices and data streams. */ - public static ExecutorNames DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS = new ExecutorNames( + public static final ExecutorNames DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS = new ExecutorNames( ThreadPool.Names.GET, ThreadPool.Names.SEARCH, ThreadPool.Names.WRITE @@ -38,7 +38,7 @@ public record ExecutorNames(String threadPoolForGet, String threadPoolForSearch, /** * The thread pools that should be used for critical system index operations. 
*/ - public static ExecutorNames CRITICAL_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames( + public static final ExecutorNames CRITICAL_SYSTEM_INDEX_THREAD_POOLS = new ExecutorNames( ThreadPool.Names.SYSTEM_CRITICAL_READ, ThreadPool.Names.SYSTEM_CRITICAL_READ, ThreadPool.Names.SYSTEM_CRITICAL_WRITE diff --git a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java index cba0282f7fed..5b3ee08d205d 100644 --- a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java @@ -24,7 +24,7 @@ import java.util.Map; public record EmptyTaskSettings() implements TaskSettings { public static final String NAME = "empty_task_settings"; - public static EmptyTaskSettings INSTANCE = new EmptyTaskSettings(); + public static final EmptyTaskSettings INSTANCE = new EmptyTaskSettings(); public EmptyTaskSettings(StreamInput in) { this(); diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index fcb8ea721379..f319157828df 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -31,7 +31,7 @@ public enum TaskType implements Writeable { } }; - public static String NAME = "task_type"; + public static final String NAME = "task_type"; public static TaskType fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java index 3f898cfbd9da..325a6b22b62c 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -18,7 +18,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -98,8 +98,8 @@ public final class IngestMetadata implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(PIPELINES_FIELD.getPreferredName(), pipelines.values().iterator()); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.array(PIPELINES_FIELD.getPreferredName(), pipelines.values().iterator()); } @Override diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index 39df4e53423a..c75cd3a022cb 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -143,12 +143,10 @@ public record IngestStats(Stats totalStats, List pipelineStats, Ma return builder; } ), - - Iterators.single((builder, params) -> builder.endArray().endObject()) + Iterators.single((builder, params) -> builder.endArray().endObject()) ) ), - - Iterators.single((builder, params) -> builder.endObject().endObject()) + 
Iterators.single((builder, params) -> builder.endObject().endObject()) ); } diff --git a/server/src/main/java/org/elasticsearch/lucene/util/automaton/MinimizationOperations.java b/server/src/main/java/org/elasticsearch/lucene/util/automaton/MinimizationOperations.java new file mode 100644 index 000000000000..0f1445e3afc3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/lucene/util/automaton/MinimizationOperations.java @@ -0,0 +1,333 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.lucene.util.automaton; + +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntHashSet; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.Transition; + +import java.util.BitSet; +import java.util.LinkedList; + +/** + * Operations for minimizing automata. + *

+ * Lucene 10 removed minimization, but Elasticsearch still requires it. + * Minimization is critical in the security codebase to reduce the heap + * usage of automata used for permission checks. + * <p>
+ * Copied of Lucene's AutomatonTestUtil + */ +public final class MinimizationOperations { + + private MinimizationOperations() {} + + /** + * Minimizes (and determinizes if not already deterministic) the given automaton using Hopcroft's + * algorithm. + * + * @param determinizeWorkLimit maximum effort to spend determinizing the automaton. Set higher to + * allow more complex queries and lower to prevent memory exhaustion. Use {@link + * Operations#DEFAULT_DETERMINIZE_WORK_LIMIT} as a decent default if you don't otherwise know + * what to specify. + */ + public static Automaton minimize(Automaton a, int determinizeWorkLimit) { + + if (a.getNumStates() == 0 || (a.isAccept(0) == false && a.getNumTransitions(0) == 0)) { + // Fastmatch for common case + return new Automaton(); + } + a = Operations.determinize(a, determinizeWorkLimit); + // a.writeDot("adet"); + if (a.getNumTransitions(0) == 1) { + Transition t = new Transition(); + a.getTransition(0, 0, t); + if (t.dest == 0 && t.min == Character.MIN_CODE_POINT && t.max == Character.MAX_CODE_POINT) { + // Accepts all strings + return a; + } + } + a = totalize(a); + // a.writeDot("atot"); + + // initialize data structures + final int[] sigma = a.getStartPoints(); + final int sigmaLen = sigma.length, statesLen = a.getNumStates(); + + final IntArrayList[][] reverse = new IntArrayList[statesLen][sigmaLen]; + final IntHashSet[] partition = new IntHashSet[statesLen]; + final IntArrayList[] splitblock = new IntArrayList[statesLen]; + final int[] block = new int[statesLen]; + final StateList[][] active = new StateList[statesLen][sigmaLen]; + final StateListNode[][] active2 = new StateListNode[statesLen][sigmaLen]; + final LinkedList pending = new LinkedList<>(); + final BitSet pending2 = new BitSet(sigmaLen * statesLen); + final BitSet split = new BitSet(statesLen), refine = new BitSet(statesLen), refine2 = new BitSet(statesLen); + for (int q = 0; q < statesLen; q++) { + splitblock[q] = new IntArrayList(); + partition[q] = new IntHashSet(); + for (int x = 0; x < sigmaLen; x++) { + active[q][x] = StateList.EMPTY; + } + } + // find initial partition and reverse edges + for (int q = 0; q < statesLen; q++) { + // TODO moved the following into the loop because we cannot reset transition.transitionUpto (pkg private) + Transition transition = new Transition(); + final int j = a.isAccept(q) ? 0 : 1; + partition[j].add(q); + block[q] = j; + transition.source = q; + // TODO we'd need to be able to access pkg private transition.transitionUpto if we want to optimize the following + // transition.transitionUpto = -1; + for (int x = 0; x < sigmaLen; x++) { + final IntArrayList[] r = reverse[a.next(transition, sigma[x])]; + if (r[x] == null) { + r[x] = new IntArrayList(); + } + r[x].add(q); + } + } + // initialize active sets + for (int j = 0; j <= 1; j++) { + for (int x = 0; x < sigmaLen; x++) { + for (IntCursor qCursor : partition[j]) { + int q = qCursor.value; + if (reverse[q][x] != null) { + StateList stateList = active[j][x]; + if (stateList == StateList.EMPTY) { + stateList = new StateList(); + active[j][x] = stateList; + } + active2[q][x] = stateList.add(q); + } + } + } + } + + // initialize pending + for (int x = 0; x < sigmaLen; x++) { + final int j = (active[0][x].size <= active[1][x].size) ? 
0 : 1; + pending.add(new IntPair(j, x)); + pending2.set(x * statesLen + j); + } + + // process pending until fixed point + int k = 2; + // System.out.println("start min"); + while (false == pending.isEmpty()) { + // System.out.println(" cycle pending"); + final IntPair ip = pending.removeFirst(); + final int p = ip.n1; + final int x = ip.n2; + // System.out.println(" pop n1=" + ip.n1 + " n2=" + ip.n2); + pending2.clear(x * statesLen + p); + // find states that need to be split off their blocks + for (StateListNode m = active[p][x].first; m != null; m = m.next) { + final IntArrayList r = reverse[m.q][x]; + if (r != null) { + for (IntCursor iCursor : r) { + final int i = iCursor.value; + if (false == split.get(i)) { + split.set(i); + final int j = block[i]; + splitblock[j].add(i); + if (false == refine2.get(j)) { + refine2.set(j); + refine.set(j); + } + } + } + } + } + + // refine blocks + for (int j = refine.nextSetBit(0); j >= 0; j = refine.nextSetBit(j + 1)) { + final IntArrayList sb = splitblock[j]; + if (sb.size() < partition[j].size()) { + final IntHashSet b1 = partition[j]; + final IntHashSet b2 = partition[k]; + for (IntCursor iCursor : sb) { + final int s = iCursor.value; + b1.remove(s); + b2.add(s); + block[s] = k; + for (int c = 0; c < sigmaLen; c++) { + final StateListNode sn = active2[s][c]; + if (sn != null && sn.sl == active[j][c]) { + sn.remove(); + StateList stateList = active[k][c]; + if (stateList == StateList.EMPTY) { + stateList = new StateList(); + active[k][c] = stateList; + } + active2[s][c] = stateList.add(s); + } + } + } + // update pending + for (int c = 0; c < sigmaLen; c++) { + final int aj = active[j][c].size, ak = active[k][c].size, ofs = c * statesLen; + if ((false == pending2.get(ofs + j)) && 0 < aj && aj <= ak) { + pending2.set(ofs + j); + pending.add(new IntPair(j, c)); + } else { + pending2.set(ofs + k); + pending.add(new IntPair(k, c)); + } + } + k++; + } + refine2.clear(j); + for (IntCursor iCursor : sb) { + final int s = iCursor.value; + split.clear(s); + } + sb.clear(); + } + refine.clear(); + } + + Automaton result = new Automaton(); + + Transition t = new Transition(); + + // System.out.println(" k=" + k); + + // make a new state for each equivalence class, set initial state + int[] stateMap = new int[statesLen]; + int[] stateRep = new int[k]; + + result.createState(); + + // System.out.println("min: k=" + k); + for (int n = 0; n < k; n++) { + // System.out.println(" n=" + n); + + boolean isInitial = partition[n].contains(0); + + int newState; + if (isInitial) { + // System.out.println(" isInitial!"); + newState = 0; + } else { + newState = result.createState(); + } + + // System.out.println(" newState=" + newState); + + for (IntCursor qCursor : partition[n]) { + int q = qCursor.value; + stateMap[q] = newState; + // System.out.println(" q=" + q + " isAccept?=" + a.isAccept(q)); + result.setAccept(newState, a.isAccept(q)); + stateRep[newState] = q; // select representative + } + } + + // build transitions and set acceptance + for (int n = 0; n < k; n++) { + int numTransitions = a.initTransition(stateRep[n], t); + for (int i = 0; i < numTransitions; i++) { + a.getNextTransition(t); + // System.out.println(" add trans"); + result.addTransition(n, stateMap[t.dest], t.min, t.max); + } + } + result.finishState(); + // System.out.println(result.getNumStates() + " states"); + + return Operations.removeDeadStates(result); + } + + record IntPair(int n1, int n2) {} + + static final class StateList { + + // Empty list that should never be mutated, used as a 
memory-saving optimization instead of null + // so we don't need to branch the read path in #minimize + static final StateList EMPTY = new StateList(); + + int size; + + StateListNode first, last; + + StateListNode add(int q) { + assert this != EMPTY; + return new StateListNode(q, this); + } + } + + static final class StateListNode { + + final int q; + + StateListNode next, prev; + + final StateList sl; + + StateListNode(int q, StateList sl) { + this.q = q; + this.sl = sl; + if (sl.size++ == 0) sl.first = sl.last = this; + else { + sl.last.next = this; + prev = sl.last; + sl.last = this; + } + } + + void remove() { + sl.size--; + if (sl.first == this) sl.first = next; + else prev.next = next; + if (sl.last == this) sl.last = prev; + else next.prev = prev; + } + } + + static Automaton totalize(Automaton a) { + Automaton result = new Automaton(); + int numStates = a.getNumStates(); + for (int i = 0; i < numStates; i++) { + result.createState(); + result.setAccept(i, a.isAccept(i)); + } + + int deadState = result.createState(); + result.addTransition(deadState, deadState, Character.MIN_CODE_POINT, Character.MAX_CODE_POINT); + + Transition t = new Transition(); + for (int i = 0; i < numStates; i++) { + int maxi = Character.MIN_CODE_POINT; + int count = a.initTransition(i, t); + for (int j = 0; j < count; j++) { + a.getNextTransition(t); + result.addTransition(i, t.dest, t.min, t.max); + if (t.min > maxi) { + result.addTransition(i, deadState, maxi, t.min - 1); + } + if (t.max + 1 > maxi) { + maxi = t.max + 1; + } + } + + if (maxi <= Character.MAX_CODE_POINT) { + result.addTransition(i, deadState, maxi, Character.MAX_CODE_POINT); + } + } + + result.finishState(); + return result; + } +}
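Since this class restores an operation that Lucene 10 dropped from its core API, a brief usage sketch may help. This is a minimal sketch, not code from this change: the MinimizeSketch class, the regular expression, and the printed summary are invented for illustration, while RegExp, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, and getNumStates() are the Lucene APIs already referenced in the javadoc above.

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.lucene.util.automaton.MinimizationOperations;

public class MinimizeSketch {
    public static void main(String[] args) {
        // Build an automaton, e.g. from an index-name pattern of the kind used in permission checks.
        Automaton automaton = new RegExp("logs-[0-9]*").toAutomaton();
        // Determinize (bounded by the work limit) and minimize, shrinking the automaton's heap footprint.
        Automaton minimal = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        System.out.println(minimal.getNumStates() + " states after minimization");
    }
}

diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java index 528d5e84cf05..9ef0c2b0f5c7 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java @@ -19,11 +19,12 @@ import org.elasticsearch.cluster.ProjectState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -577,8 +578,11 @@ public final class PersistentTasksCustomMetadata extends AbstractNamedDiffable toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).field("last_allocation_id", lastAllocationId).array("tasks", tasks.values().iterator()); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.concat( + Iterators.single((builder, params) -> builder.field("last_allocation_id", lastAllocationId)), + ChunkedToXContentHelper.array("tasks", tasks.values().iterator()) + ); } public static Builder builder() {

The hunk above is the simplest instance of a pattern that recurs throughout this change: a fluent ChunkedToXContent.builder(...) chain is unrolled into an explicit Iterators.concat(...) of chunk iterators. A hedged template follows; the ExampleTasks record and its fields are invented for illustration, while Iterators.single and ChunkedToXContentHelper.array are the same helpers used in the hunk.

import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.xcontent.ToXContent;

import java.util.Iterator;
import java.util.List;

// Hypothetical chunked object, not part of this change: one chunk for the scalar
// field, then start-array, per-element, and end-array chunks for the list.
record ExampleTasks(long lastAllocationId, List<? extends ToXContent> tasks) implements ChunkedToXContent {
    @Override
    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
        return Iterators.concat(
            Iterators.single((builder, params) -> builder.field("last_allocation_id", lastAllocationId)),
            ChunkedToXContentHelper.array("tasks", tasks.iterator())
        );
    }
}

diff --git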
a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java index 2cd6e2b11ef7..a4185a12d7c3 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java @@ -33,7 +33,7 @@ public record RepositoriesMetrics( LongHistogram httpRequestTimeInMillisHistogram ) { - public static RepositoriesMetrics NOOP = new RepositoriesMetrics(MeterRegistry.NOOP); + public static final RepositoriesMetrics NOOP = new RepositoriesMetrics(MeterRegistry.NOOP); /** * Is incremented for each request sent to the blob store (including retries) diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index c6d3bf334207..e97a2a3fa9b2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -955,7 +955,7 @@ public class RepositoriesService extends AbstractLifecycleComponent implements C return preRestoreChecks; } - public static String COUNT_USAGE_STATS_NAME = "count"; + public static final String COUNT_USAGE_STATS_NAME = "count"; public RepositoryUsageStats getUsageStats() { if (repositories.isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 713f7eacb2c1..2ebbf24d6567 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -1153,7 +1153,7 @@ public final class RepositoryData { */ public static class SnapshotDetails { - public static SnapshotDetails EMPTY = new SnapshotDetails(null, null, -1, -1, null); + public static final SnapshotDetails EMPTY = new SnapshotDetails(null, null, -1, -1, null); @Nullable // TODO forbid nulls here, this only applies to very old repositories private final SnapshotState snapshotState; diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java index 900a352d42f3..9083c781ae16 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java @@ -26,5 +26,5 @@ public class CreateIndexCapabilities { */ private static final String LOOKUP_INDEX_MODE_CAPABILITY = "lookup_index_mode"; - public static Set CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY); + public static final Set CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY); } diff --git a/server/src/main/java/org/elasticsearch/script/Metadata.java b/server/src/main/java/org/elasticsearch/script/Metadata.java index f6212de58df8..dc5ae51e45af 100644 --- a/server/src/main/java/org/elasticsearch/script/Metadata.java +++ b/server/src/main/java/org/elasticsearch/script/Metadata.java @@ -51,7 +51,7 @@ public class Metadata { protected static final String IF_PRIMARY_TERM = "_if_primary_term"; protected static final String DYNAMIC_TEMPLATES = "_dynamic_templates"; - public static FieldProperty ObjectField = new FieldProperty<>(Object.class); + 
public static final FieldProperty ObjectField = new FieldProperty<>(Object.class); public static FieldProperty StringField = new FieldProperty<>(String.class); public static FieldProperty LongField = new FieldProperty<>(Number.class).withValidation(FieldProperty.LONGABLE_NUMBER); @@ -335,7 +335,7 @@ public class Metadata { return new FieldProperty<>(type, nullable, writable, extendedValidation); } - public static BiConsumer LONGABLE_NUMBER = (k, v) -> { + public static final BiConsumer LONGABLE_NUMBER = (k, v) -> { long version = v.longValue(); // did we round? if (v.doubleValue() == version) { @@ -346,7 +346,7 @@ public class Metadata { ); }; - public static FieldProperty ALLOW_ALL = new FieldProperty<>(null, true, true, null); + public static final FieldProperty ALLOW_ALL = new FieldProperty<>(null, true, true, null); @SuppressWarnings("fallthrough") public void check(MapOperation op, String key, Object value) { diff --git a/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java b/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java index adc1f65b8873..e752f5f811a4 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java @@ -13,20 +13,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Collections; -import java.util.Iterator; import java.util.Map; import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.script.ScriptCacheStats.Fields.SCRIPT_CACHE_STATS; // This class is deprecated in favor of ScriptStats and ScriptContextStats -public record ScriptCacheStats(Map context, ScriptStats general) implements Writeable, ChunkedToXContent { +public record ScriptCacheStats(Map context, ScriptStats general) implements Writeable, ToXContentFragment { public ScriptCacheStats(Map context) { this(Collections.unmodifiableMap(context), null); @@ -68,26 +64,37 @@ public record ScriptCacheStats(Map context, ScriptStats gen } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - Function statsFields = s -> (b, p) -> b.field(ScriptStats.Fields.COMPILATIONS, s.getCompilations()) - .field(ScriptStats.Fields.CACHE_EVICTIONS, s.getCacheEvictions()) - .field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, s.getCompilationLimitTriggered()); + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.SCRIPT_CACHE_STATS); + builder.startObject(Fields.SUM); + if (general != null) { + builder.field(ScriptStats.Fields.COMPILATIONS, general.getCompilations()); + builder.field(ScriptStats.Fields.CACHE_EVICTIONS, general.getCacheEvictions()); + builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, general.getCompilationLimitTriggered()); + builder.endObject().endObject(); + return builder; + } - return ChunkedToXContent.builder(outerParams).object(SCRIPT_CACHE_STATS, sb -> { - if (general != null) { - sb.xContentObject(Fields.SUM, statsFields.apply(general)); - } else { - sb.xContentObject(Fields.SUM, statsFields.apply(sum())); - sb.array( - 
Fields.CONTEXTS, - context.entrySet().stream().sorted(Map.Entry.comparingByKey()).iterator(), - (eb, e) -> eb.object(ebo -> { - ebo.field(Fields.CONTEXT, e.getKey()); - ebo.append(statsFields.apply(e.getValue())); - }) - ); - } - }); + ScriptStats sum = sum(); + builder.field(ScriptStats.Fields.COMPILATIONS, sum.getCompilations()); + builder.field(ScriptStats.Fields.CACHE_EVICTIONS, sum.getCacheEvictions()); + builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, sum.getCompilationLimitTriggered()); + builder.endObject(); + + builder.startArray(Fields.CONTEXTS); + for (String name : context.keySet().stream().sorted().toList()) { + ScriptStats stats = context.get(name); + builder.startObject(); + builder.field(Fields.CONTEXT, name); + builder.field(ScriptStats.Fields.COMPILATIONS, stats.getCompilations()); + builder.field(ScriptStats.Fields.CACHE_EVICTIONS, stats.getCacheEvictions()); + builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, stats.getCompilationLimitTriggered()); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + + return builder; } /** diff --git a/server/src/main/java/org/elasticsearch/script/ScriptStats.java b/server/src/main/java/org/elasticsearch/script/ScriptStats.java index f24052ef7e3a..a25d9587bcb0 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptStats.java @@ -10,11 +10,13 @@ package org.elasticsearch.script; import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -25,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.collect.Iterators.single; import static org.elasticsearch.script.ScriptContextStats.Fields.COMPILATIONS_HISTORY; import static org.elasticsearch.script.ScriptStats.Fields.CACHE_EVICTIONS; import static org.elasticsearch.script.ScriptStats.Fields.COMPILATIONS; @@ -190,19 +193,24 @@ public record ScriptStats( } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(SCRIPT_STATS, ob -> { - ob.field(COMPILATIONS, compilations); - ob.field(CACHE_EVICTIONS, cacheEvictions); - ob.field(COMPILATION_LIMIT_TRIGGERED, compilationLimitTriggered); + public Iterator toXContentChunked(ToXContent.Params outerParams) { + return Iterators.concat(single((builder, params) -> { + builder.startObject(SCRIPT_STATS) + .field(COMPILATIONS, compilations) + .field(CACHE_EVICTIONS, cacheEvictions) + .field(COMPILATION_LIMIT_TRIGGERED, compilationLimitTriggered); if (compilationsHistory != null && compilationsHistory.areTimingsEmpty() == false) { - ob.xContentObject(COMPILATIONS_HISTORY, compilationsHistory); + builder.startObject(COMPILATIONS_HISTORY); + compilationsHistory.toXContent(builder, params); + builder.endObject(); } if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) { - ob.xContentObject(COMPILATIONS_HISTORY, cacheEvictionsHistory); + builder.startObject(COMPILATIONS_HISTORY); + cacheEvictionsHistory.toXContent(builder, params); + 
builder.endObject(); } - ob.array(CONTEXTS, contextStats.iterator()); - }); + return builder; + }), ChunkedToXContentHelper.array(CONTEXTS, contextStats.iterator()), ChunkedToXContentHelper.endObject()); } static final class Fields { diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index 2e2ae4e73c6f..79725eb9c3b8 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -170,8 +170,8 @@ public class ScriptedMetricAggContexts { CombineScript newInstance(Map params, Map state); } - public static String[] PARAMETERS = {}; - public static ScriptContext CONTEXT = new ScriptContext<>("aggs_combine", Factory.class); + public static final String[] PARAMETERS = {}; + public static final ScriptContext CONTEXT = new ScriptContext<>("aggs_combine", Factory.class); } public abstract static class ReduceScript { @@ -198,6 +198,6 @@ public class ScriptedMetricAggContexts { } public static String[] PARAMETERS = {}; - public static ScriptContext CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 98f7c92d9997..701d451ac2c1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -517,6 +517,10 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted this.metaFields.putAll(metaFields); } + public DocumentField removeDocumentField(String field) { + return documentFields.remove(field); + } + /** * @return a map of metadata fields for this hit */ diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index fe133cbac335..b2e9970631f4 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.SimpleRefCounted; @@ -284,22 +285,25 @@ public final class SearchHits implements Writeable, ChunkedToXContent, RefCounte @Override public Iterator toXContentChunked(ToXContent.Params params) { assert hasReferences(); - return ChunkedToXContent.builder(params).object(Fields.HITS, ob -> { - boolean totalHitAsInt = ob.params().paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); + return Iterators.concat(Iterators.single((b, p) -> { + b.startObject(Fields.HITS); + boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - ob.field(Fields.TOTAL, totalHits == null ? -1 : totalHits.value()); + long total = totalHits == null ? 
-1 : totalHits.value(); + b.field(Fields.TOTAL, total); } else if (totalHits != null) { - ob.append((b, p) -> { - b.startObject(Fields.TOTAL); - b.field("value", totalHits.value()); - b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); - return b.endObject(); - }); + b.startObject(Fields.TOTAL); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); + b.endObject(); } - - ob.field(Fields.MAX_SCORE, Float.isNaN(maxScore) ? null : maxScore); - ob.array(Fields.HITS, Iterators.forArray(hits)); - }); + if (Float.isNaN(maxScore)) { + b.nullField(Fields.MAX_SCORE); + } else { + b.field(Fields.MAX_SCORE, maxScore); + } + return b; + }), ChunkedToXContentHelper.array(Fields.HITS, Iterators.forArray(hits)), ChunkedToXContentHelper.endObject()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 3294e1ba03f6..d983dd1ff78d 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1049,8 +1049,9 @@ public class SearchModule { registerFetchSubPhase(new StoredFieldsPhase()); registerFetchSubPhase(new FetchDocValuesPhase()); registerFetchSubPhase(new ScriptFieldsPhase()); - registerFetchSubPhase(new FetchSourcePhase()); registerFetchSubPhase(new FetchFieldsPhase()); + // register after fetch fields to handle metadata fields that need to be copied into _source (e.g. _inference_fields). + registerFetchSubPhase(new FetchSourcePhase()); registerFetchSubPhase(new FetchVersionPhase()); registerFetchSubPhase(new SeqNoPrimaryTermPhase()); registerFetchSubPhase(new MatchedQueriesPhase());
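Nothing in the registration API enforces this ordering, so a standalone illustration may help. The sketch below is a simplified model invented for this purpose (the Phase interface and the maps are stand-ins, not the real FetchSubPhase API): a later phase can only relocate a metadata field that an earlier phase has already loaded into the per-hit document fields, which is why the fields phase must be registered before the source phase.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FetchPhaseOrderSketch {

    // Simplified stand-in for FetchSubPhase: phases mutate per-hit state in registration order.
    interface Phase {
        void process(Map<String, Object> documentFields, Map<String, Object> source);
    }

    public static void main(String[] args) {
        List<Phase> phases = new ArrayList<>();
        // Registered first: loads the metadata field into the hit's document fields.
        phases.add((documentFields, source) -> documentFields.put("_inference_fields", Map.of("chunks", List.of())));
        // Registered second: moves the metadata field into _source, as replaceInferenceMetadataFields does below.
        phases.add((documentFields, source) -> {
            Object inference = documentFields.remove("_inference_fields");
            if (inference != null) {
                source.put("_inference_fields", inference);
            }
        });

        Map<String, Object> documentFields = new HashMap<>();
        Map<String, Object> source = new HashMap<>();
        for (Phase phase : phases) {
            phase.process(documentFields, source);
        }
        // Prints {_inference_fields={chunks=[]}}; with the registration order swapped it would print {}.
        System.out.println(source);
    }
}

diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java index 0bbbff3a5d5f..2523c6201521 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.fetch; import org.apache.lucene.search.Query; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.ParsedQuery; @@ -87,6 +88,10 @@ public class FetchContext { return sfc; } + public BitsetFilterCache bitsetFilterCache() { + return searchContext.bitsetFilterCache(); + } + /** * The name of the index that documents are being fetched from */ diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 79e51036a91b..df99a718887e 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -63,6 +64,7 @@ public final class FetchSourcePhase implements FetchSubPhase { // If this is a parent document and there are no source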
filters, then add the source as-is. if (nestedHit == false && sourceFilter == null) { + source = replaceInferenceMetadataFields(hitContext.hit(), source); hitContext.hit().sourceRef(source.internalSourceRef()); fastPath++; return; @@ -77,10 +79,30 @@ public final class FetchSourcePhase implements FetchSubPhase { } if (nestedHit) { source = extractNested(source, hitContext.hit().getNestedIdentity()); + } else { + source = replaceInferenceMetadataFields(hitContext.hit(), source); } hitContext.hit().sourceRef(source.internalSourceRef()); } + /** + * Transfers the {@link InferenceMetadataFieldsMapper#NAME} field from the document fields + * to the original _source if it has been requested. + */ + private Source replaceInferenceMetadataFields(SearchHit hit, Source source) { + if (InferenceMetadataFieldsMapper.isEnabled(fetchContext.getSearchExecutionContext().getMappingLookup()) == false) { + return source; + } + + var field = hit.removeDocumentField(InferenceMetadataFieldsMapper.NAME); + if (field == null || field.getValues().isEmpty()) { + return source; + } + var newSource = source.source(); + newSource.put(InferenceMetadataFieldsMapper.NAME, field.getValues().get(0)); + return Source.fromMap(newSource, source.sourceContentType()); + } + @Override public Map getDebugInfo() { return Map.of("fast_path", fastPath); diff --git a/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java b/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java index b51149f180e4..2d3d5db0b3e4 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java +++ b/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java @@ -18,7 +18,7 @@ import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -57,8 +57,8 @@ public class FeatureMigrationResults implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(RESULTS_FIELD.getPreferredName(), featureStatuses); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.xContentValuesMap(RESULTS_FIELD.getPreferredName(), featureStatuses); } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index a7058e5d6cd8..dccdbee23c77 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -497,7 +497,7 @@ public class NodeStatsTests extends ESTestCase { + chunkIfPresent(nodeStats.getDiscoveryStats()) //
+ assertExpectedChunks(nodeStats.getIngestStats(), NodeStatsTests::expectedChunks, params) //
+ chunkIfPresent(nodeStats.getAdaptiveSelectionStats()) //
- + assertExpectedChunks(nodeStats.getScriptCacheStats(), NodeStatsTests::expectedChunks, params); + + chunkIfPresent(nodeStats.getScriptCacheStats()); } private static int chunkIfPresent(ToXContent xcontent) { @@ -511,17 +511,8 @@ public class NodeStatsTests extends ESTestCase { return chunks; } - private static int expectedChunks(ScriptCacheStats scriptCacheStats) { - var chunks = 3; // start, end, SUM - if (scriptCacheStats.general() == null) { - chunks += 2 + scriptCacheStats.context().size() * 4; - } - - return chunks; - } - private static int expectedChunks(ScriptStats scriptStats) { - return 7 + (scriptStats.compilationsHistory() != null && scriptStats.compilationsHistory().areTimingsEmpty() == false ? 1 : 0) + return 4 + (scriptStats.compilationsHistory() != null && scriptStats.compilationsHistory().areTimingsEmpty() == false ? 1 : 0) + (scriptStats.cacheEvictionsHistory() != null && scriptStats.cacheEvictionsHistory().areTimingsEmpty() == false ? 1 : 0) + scriptStats.contextStats().size(); } @@ -555,7 +546,7 @@ public class NodeStatsTests extends ESTestCase { } private static int expectedChunks(NodeIndicesStats nodeIndicesStats, NodeStatsLevel level) { - return switch (level) { + return nodeIndicesStats == null ? 0 : switch (level) { case NODE -> 2; case INDICES -> 5; // only one index case SHARDS -> 9; // only one shard diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java index b59cc13a20ff..aada157e8ca8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -312,15 +312,11 @@ public class ClusterRerouteResponseTests extends ESTestCase { fail(e); } - int[] expectedChunks = new int[] { 3 }; - if (Objects.equals(params.param("metric"), "none") == false) { - expectedChunks[0] += 2 + ClusterStateTests.expectedChunkCount(params, response.getState()); - } - if (params.paramAsBoolean("explain", false)) { - expectedChunks[0]++; - } + final var expectedChunks = Objects.equals(params.param("metric"), "none") + ? 
2 + : 4 + ClusterStateTests.expectedChunkCount(params, response.getState()); - AbstractChunkedSerializingTestCase.assertChunkCount(response, params, o -> expectedChunks[0]); + AbstractChunkedSerializingTestCase.assertChunkCount(response, params, o -> expectedChunks); assertCriticalWarnings(criticalDeprecationWarnings); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java index 144db693b8ab..e33f97e7cb58 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java @@ -68,7 +68,7 @@ public class IndicesSegmentResponseTests extends ESTestCase { 0, Collections.emptyList() ), - response -> 11 * response.getIndices().size() + 5 + response -> 11 * response.getIndices().size() + 4 ); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java index a34e36e81dd8..7b707eb4c31b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java @@ -47,7 +47,7 @@ public class FieldUsageStatsResponseTests extends ESTestCase { AbstractChunkedSerializingTestCase.assertChunkCount( new FieldUsageStatsResponse(indices, indices, 0, List.of(), perIndex), - ignored -> 3 * indices + 3 + ignored -> 3 * indices + 2 ); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java index e6b3da3106b7..987e3fbcca8e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java @@ -139,12 +139,12 @@ public class IndicesStatsResponseTests extends ESTestCase { AbstractChunkedSerializingTestCase.assertChunkCount( indicesStatsResponse, new ToXContent.MapParams(Map.of("level", "cluster")), - ignored1 -> 4 + ignored1 -> 3 ); AbstractChunkedSerializingTestCase.assertChunkCount( indicesStatsResponse, new ToXContent.MapParams(Map.of("level", "indices")), - ignored -> 5 + 2 * shards + ignored -> 4 + 2 * shards ); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index b389e33993b9..f25f47765548 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -92,8 +92,16 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { private final ShardId shardId = new ShardId("index", "_na_", 0); private final Settings idxSettings = indexSettings(IndexVersion.current(), 1, 0).build(); - private IndexMetadata indexMetadata() throws IOException { - return IndexMetadata.builder("index").putMapping(""" + private IndexMetadata indexMetadata(String mapping) { + IndexMetadata.Builder builder = 
IndexMetadata.builder("index").settings(idxSettings).primaryTerm(0, 1); + if (mapping != null) { + builder.putMapping(mapping); + } + return builder.build(); + } + + private IndexMetadata indexMetadata() { + return indexMetadata(""" { "properties": { "foo": { @@ -106,7 +114,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { } } } - }""").settings(idxSettings).primaryTerm(0, 1).build(); + }"""); } public void testExecuteBulkIndexRequest() throws Exception { @@ -502,7 +510,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { IndexShard shard = mockShard(null, null); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( noopUpdateResponse, DocWriteResponse.Result.NOOP, @@ -557,7 +565,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { ); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, @@ -618,7 +626,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { ); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, @@ -681,7 +689,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { ); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( updateResponse, created ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, @@ -738,7 +746,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( updateResponse, DocWriteResponse.Result.DELETED, @@ -784,7 +792,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { UpdateHelper updateHelper = mock(UpdateHelper.class); final ElasticsearchException err = new ElasticsearchException("oops"); - when(updateHelper.prepare(any(), eq(shard), any())).thenThrow(err); + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenThrow(err); BulkItemRequest[] items = new BulkItemRequest[] { primaryRequest }; BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -916,7 +924,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { }); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( updateResponse, randomBoolean() ? 
DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, @@ -1129,7 +1137,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { ); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"), randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, @@ -1195,7 +1203,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { ); UpdateHelper updateHelper = mock(UpdateHelper.class); - when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + when(updateHelper.prepare(any(), eq(shard), any(), any())).thenReturn( new UpdateHelper.Result( new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"), randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, @@ -1235,6 +1243,9 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { if (indexSettings != null) { when(shard.indexSettings()).thenReturn(indexSettings); + } else { + IndexSettings defaultIndexSettings = new IndexSettings(indexMetadata(null), Settings.EMPTY); + when(shard.indexSettings()).thenReturn(defaultIndexSettings); } if (mapperService != null) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadataTests.java index 3ab87a913737..267ebad6d2cd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadataTests.java @@ -78,6 +78,7 @@ public class NodesShutdownMetadataTests extends ChunkedToXContentDiffableSeriali "this_node", SingleNodeShutdownMetadata.builder() .setNodeId("this_node") + .setNodeEphemeralId("this_node") .setReason("shutdown for a unit test") .setType(type) .setStartedAtMillis(randomNonNegativeLong()) @@ -106,6 +107,7 @@ public class NodesShutdownMetadataTests extends ChunkedToXContentDiffableSeriali public void testSigtermIsRemoveInOlderVersions() throws IOException { SingleNodeShutdownMetadata metadata = SingleNodeShutdownMetadata.builder() .setNodeId("myid") + .setNodeEphemeralId("myid") .setType(SingleNodeShutdownMetadata.Type.SIGTERM) .setReason("myReason") .setStartedAtMillis(0L) @@ -127,6 +129,7 @@ public class NodesShutdownMetadataTests extends ChunkedToXContentDiffableSeriali SingleNodeShutdownMetadata.Type type; SingleNodeShutdownMetadata.Builder builder = SingleNodeShutdownMetadata.builder() .setNodeId("thenode") + .setNodeEphemeralId("thenode") .setReason("myReason") .setStartedAtMillis(0L); switch (type = randomFrom(SingleNodeShutdownMetadata.Type.values())) { @@ -182,6 +185,7 @@ public class NodesShutdownMetadataTests extends ChunkedToXContentDiffableSeriali final SingleNodeShutdownMetadata.Type type = randomFrom(SingleNodeShutdownMetadata.Type.values()); final SingleNodeShutdownMetadata.Builder builder = SingleNodeShutdownMetadata.builder() .setNodeId(randomAlphaOfLength(5)) + .setNodeEphemeralId(randomAlphaOfLength(5)) .setType(type) .setReason(randomAlphaOfLength(5)) .setStartedAtMillis(randomNonNegativeLong()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 14367155260d..53b98aea7cd7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -664,6 +664,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { shutdowns = shutdowns.putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() .setNodeId(randomValueOtherThan(lastNodeId, () -> randomAlphaOfLengthBetween(5, 10))) + .setNodeEphemeralId(randomValueOtherThan(lastNodeId, () -> randomAlphaOfLengthBetween(5, 10))) .setReason(this.getTestName()) .setStartedAtMillis(randomNonNegativeLong()) .setType(type) @@ -684,6 +685,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { NodesShutdownMetadata shutdowns = NodesShutdownMetadata.EMPTY.putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() .setNodeId(lastNodeId) + .setNodeEphemeralId(lastNodeId) .setReason(this.getTestName()) .setStartedAtMillis(randomNonNegativeLong()) .setType(type) @@ -704,6 +706,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { NodesShutdownMetadata shutdowns = NodesShutdownMetadata.EMPTY.putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() .setNodeId(lastNodeId) + .setNodeEphemeralId(lastNodeId) .setReason(this.getTestName()) .setStartedAtMillis(randomNonNegativeLong()) .setType(SingleNodeShutdownMetadata.Type.RESTART) @@ -726,6 +729,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { NodesShutdownMetadata shutdowns = NodesShutdownMetadata.EMPTY.putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() .setNodeId(lastNodeId) + .setNodeEphemeralId(lastNodeId) .setReason(this.getTestName()) .setStartedAtMillis(randomNonNegativeLong()) .setType(SingleNodeShutdownMetadata.Type.RESTART) @@ -754,6 +758,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { NodesShutdownMetadata shutdowns = NodesShutdownMetadata.EMPTY.putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() .setNodeId(lastNodeId) + .setNodeEphemeralId(lastNodeId) .setReason(this.getTestName()) .setStartedAtMillis(randomNonNegativeLong()) .setType(SingleNodeShutdownMetadata.Type.RESTART) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java index 6f86e1e7a8f0..daafd10f7a4d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorTests.java @@ -964,6 +964,7 @@ public class DiskThresholdMonitorTests extends ESAllocationTestCase { sourceNode, SingleNodeShutdownMetadata.builder() .setNodeId(sourceNode) + .setNodeEphemeralId(sourceNode) .setReason("testing") .setType(SingleNodeShutdownMetadata.Type.REPLACE) .setTargetNodeName(targetNode) @@ -1359,6 +1360,7 @@ public class DiskThresholdMonitorTests extends ESAllocationTestCase { "node1", SingleNodeShutdownMetadata.builder() .setNodeId("node1") + .setNodeEphemeralId("node1") .setReason("testing") .setType(SingleNodeShutdownMetadata.Type.REPLACE) .setTargetNodeName("node3") diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index 
faa5cd7f3113..9d4da342bae8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -941,6 +941,7 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { "node-0", SingleNodeShutdownMetadata.builder() .setNodeId("node-0") + .setNodeEphemeralId("node-0") .setType(SingleNodeShutdownMetadata.Type.REPLACE) .setTargetNodeName("node-2") .setStartedAtMillis(System.currentTimeMillis()) @@ -1302,6 +1303,7 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { var builder = SingleNodeShutdownMetadata.builder() .setType(type) .setNodeId("data-node-1") + .setNodeEphemeralId("data-node-1") .setStartedAtMillis(randomNonNegativeLong()) .setReason("test"); if (type.equals(SingleNodeShutdownMetadata.Type.SIGTERM)) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index 9caf89d4d761..21d547c1593b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -868,6 +868,7 @@ public class DesiredBalanceShardsAllocatorTests extends ESAllocationTestCase { final var shutdownType = randomFrom(Type.SIGTERM, Type.REMOVE, Type.REPLACE); final var singleShutdownMetadataBuilder = SingleNodeShutdownMetadata.builder() .setNodeId(node2.getId()) + .setNodeEphemeralId(node2.getEphemeralId()) .setReason("test") .setType(shutdownType) .setStartedAtMillis(randomNonNegativeLong()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java index e8ac6d22698d..7750a01a13a8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeReplacementAllocationDeciderTests.java @@ -420,6 +420,7 @@ public class NodeReplacementAllocationDeciderTests extends ESAllocationTestCase return new NodesShutdownMetadata(new HashMap<>()).putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() .setNodeId(sourceNodeId) + .setNodeEphemeralId(sourceNodeId) .setTargetNodeName(targetNodeName) .setType(SingleNodeShutdownMetadata.Type.REPLACE) .setReason(this.getTestName()) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java index e81e27a7343f..d795c80bf5b9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java @@ -227,6 +227,7 @@ public class NodeShutdownAllocationDeciderTests extends ESAllocationTestCase { return new NodesShutdownMetadata(new HashMap<>()).putSingleNodeMetadata( SingleNodeShutdownMetadata.builder() 
.setNodeId(nodeId) + .setNodeEphemeralId(nodeId) .setType(shutdownType) .setReason(this.getTestName()) .setStartedAtMillis(1L) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDeciderTests.java index 0ef5fcc624e1..7f42beb5181e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDeciderTests.java @@ -216,6 +216,7 @@ public class SnapshotInProgressAllocationDeciderTests extends ESTestCase { nodeId, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId) + .setNodeEphemeralId(nodeId) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(randomNonNegativeLong()) .setReason("test") diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java index 11873397f072..d1e7138e0d3d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java @@ -2258,6 +2258,7 @@ public class ShardsAvailabilityHealthIndicatorServiceTests extends ESTestCase { it -> it.nodeId, it -> SingleNodeShutdownMetadata.builder() .setNodeId(it.nodeId) + .setNodeEphemeralId(it.nodeId) .setType(it.type) .setReason("test") .setNodeSeen(true) diff --git a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 105d9db13112..75a93bde9b01 100644 --- a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -32,12 +32,12 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.index.shard.ShardLongFieldRange; @@ -406,8 +406,12 @@ public class ClusterSerializationTests extends ESAllocationTestCase { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(b -> b.field("custom_string_object", strObject)); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.concat( + Iterators.single((builder, params) -> builder.startObject()), + Iterators.single((builder, params) -> 
builder.field("custom_string_object", strObject)), + Iterators.single((builder, params) -> builder.endObject()) + ); } @Override @@ -445,8 +449,12 @@ public class ClusterSerializationTests extends ESAllocationTestCase { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(b -> b.field("custom_integer_object", intObject)); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.concat( + Iterators.single((builder, params) -> builder.startObject()), + Iterators.single((builder, params) -> builder.field("custom_integer_object", intObject)), + Iterators.single((builder, params) -> builder.endObject()) + ); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index f9d8173e15be..0fecccc3cb5e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -46,6 +46,8 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.StoppableExecutorServiceWrapper; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -259,9 +261,42 @@ public class MasterServiceTests extends ESTestCase { assertThat(registeredActions.toString(), registeredActions, contains(MasterService.STATE_UPDATE_ACTION_NAME)); } - public void testThreadContext() throws InterruptedException { + public void testThreadContext() { try (var master = createMasterService(true)) { - final CountDownLatch latch = new CountDownLatch(1); + + master.setClusterStatePublisher((clusterStatePublicationEvent, publishListener, ackListener) -> { + ClusterServiceUtils.setAllElapsedMillis(clusterStatePublicationEvent); + try (var ignored = threadPool.getThreadContext().newEmptyContext()) { + if (randomBoolean()) { + randomExecutor(threadPool).execute(() -> publishListener.onResponse(null)); + randomExecutor(threadPool).execute(() -> ackListener.onCommit(TimeValue.timeValueMillis(randomInt(10000)))); + randomExecutor(threadPool).execute( + () -> ackListener.onNodeAck( + clusterStatePublicationEvent.getNewState().nodes().getMasterNode(), + randomBoolean() ? 
null : new RuntimeException("simulated ack failure") + ) + ); + } else { + randomExecutor(threadPool).execute( + () -> publishListener.onFailure(new FailedToCommitClusterStateException("simulated publish failure")) + ); + } + } + }); + + final Releasable onPublishComplete; + final Releasable onAckingComplete; + final Runnable awaitComplete; + { + final var publishLatch = new CountDownLatch(1); + final var ackingLatch = new CountDownLatch(1); + onPublishComplete = Releasables.assertOnce(publishLatch::countDown); + onAckingComplete = Releasables.assertOnce(ackingLatch::countDown); + awaitComplete = () -> { + safeAwait(publishLatch); + safeAwait(ackingLatch); + }; + } try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().stashContext()) { @@ -272,15 +307,12 @@ public class MasterServiceTests extends ESTestCase { expectedHeaders.put(copiedHeader, randomIdentifier()); } } - - final Map> expectedResponseHeaders = Collections.singletonMap( - "testResponse", - Collections.singletonList("testResponse") - ); threadPool.getThreadContext().putHeader(expectedHeaders); - final TimeValue ackTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); - final TimeValue masterTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); + final Map> expectedResponseHeaders = Map.of("testResponse", List.of(randomIdentifier())); + + final TimeValue ackTimeout = randomBoolean() ? TimeValue.MINUS_ONE : TimeValue.timeValueMillis(randomInt(10000)); + final TimeValue masterTimeout = randomBoolean() ? TimeValue.MINUS_ONE : TimeValue.timeValueMillis(randomInt(10000)); master.submitUnbatchedStateUpdateTask( "test", @@ -289,8 +321,9 @@ public class MasterServiceTests extends ESTestCase { public ClusterState execute(ClusterState currentState) { assertTrue(threadPool.getThreadContext().isSystemContext()); assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getHeaders()); - threadPool.getThreadContext().addResponseHeader("testResponse", "testResponse"); - assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); + expectedResponseHeaders.forEach( + (name, values) -> values.forEach(v -> threadPool.getThreadContext().addResponseHeader(name, v)) + ); if (randomBoolean()) { return ClusterState.builder(currentState).build(); @@ -303,44 +336,44 @@ public class MasterServiceTests extends ESTestCase { @Override public void onFailure(Exception e) { - assertFalse(threadPool.getThreadContext().isSystemContext()); - assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); - assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); - latch.countDown(); + assertExpectedThreadContext( + e instanceof ProcessClusterEventTimeoutException ? 
Map.of() : expectedResponseHeaders + ); + onPublishComplete.close(); + onAckingComplete.close(); // no acking takes place if publication failed } @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - assertFalse(threadPool.getThreadContext().isSystemContext()); - assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); - assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); - latch.countDown(); + assertExpectedThreadContext(expectedResponseHeaders); + onPublishComplete.close(); } @Override public void onAllNodesAcked() { - assertFalse(threadPool.getThreadContext().isSystemContext()); - assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); - assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); - latch.countDown(); + onAckCompletion(); } @Override public void onAckFailure(Exception e) { - assertFalse(threadPool.getThreadContext().isSystemContext()); - assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); - assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); - latch.countDown(); + onAckCompletion(); } @Override public void onAckTimeout() { + onAckCompletion(); + } + + private void onAckCompletion() { + assertExpectedThreadContext(expectedResponseHeaders); + onAckingComplete.close(); + } + + private void assertExpectedThreadContext(Map> expectedResponseHeaders) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); - latch.countDown(); } - } ); @@ -349,7 +382,7 @@ public class MasterServiceTests extends ESTestCase { assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getResponseHeaders()); } - assertTrue(latch.await(10, TimeUnit.SECONDS)); + awaitComplete.run(); } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java deleted file mode 100644 index ff811f5d6d73..000000000000 --- a/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.common.xcontent; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.ToXContent; - -import java.util.function.IntFunction; -import java.util.stream.IntStream; - -import static org.hamcrest.Matchers.equalTo; - -public class ChunkedToXContentBuilderTests extends ESTestCase { - - public void testFieldWithInnerChunkedObject() { - - ToXContent innerXContent = (b, p) -> { - b.startObject(); - b.field("field1", 10); - b.field("field2", "aaa"); - b.endObject(); - return b; - }; - - ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent); - - String expectedContent = Strings.toString(outerXContent); - - ChunkedToXContentObject innerChunkedContent = params -> new ChunkedToXContentBuilder(params).object( - o -> o.field("field1", 10).field("field2", "aaa") - ); - - ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10) - .field("field4", innerChunkedContent); - - assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); - } - - public void testFieldWithInnerChunkedArray() { - - ToXContent innerXContent = (b, p) -> { - b.startArray(); - b.value(10); - b.value(20); - b.endArray(); - return b; - }; - - ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent); - - String expectedContent = Strings.toString(outerXContent); - - IntFunction value = v -> (b, p) -> b.value(v); - - ChunkedToXContentObject innerChunkedContent = params -> new ChunkedToXContentBuilder(params).array( - IntStream.of(10, 20).mapToObj(value).iterator() - ); - - ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10) - .field("field4", innerChunkedContent); - - assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); - } - - public void testFieldWithInnerChunkedField() { - - ToXContent innerXContent = (b, p) -> b.value(10); - ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent); - - String expectedContent = Strings.toString(outerXContent); - - ChunkedToXContentObject innerChunkedContent = params -> Iterators.single((b, p) -> b.value(10)); - - ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10) - .field("field4", innerChunkedContent); - - assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); - } -} diff --git a/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java b/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java index cba0dacccd8b..91936ea8b909 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java @@ -198,8 +198,8 @@ public class HealthIndicatorResultTests extends ESTestCase { diagnosisList.add(diagnosis2); HealthIndicatorResult result = new HealthIndicatorResult(name, status, symptom, details, impacts, diagnosisList); - // -> each Diagnosis yields 6 chunks => 12 chunks from both diagnosis - // -> HealthIndicatorResult surrounds the diagnosis list by 5 chunks - AbstractChunkedSerializingTestCase.assertChunkCount(result, ignored -> (6 * 2) + 5); + // -> each Diagnosis yields 5 chunks => 10 chunks from both diagnosis + // -> HealthIndicatorResult surrounds the diagnosis list by 2 chunks + 
AbstractChunkedSerializingTestCase.assertChunkCount(result, ignored -> 12); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java index 92bfabf6f197..51dadd815454 100644 --- a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java @@ -196,6 +196,7 @@ public class HealthNodeTaskExecutorTests extends ESTestCase { localNodeId, SingleNodeShutdownMetadata.builder() .setNodeId(localNodeId) + .setNodeEphemeralId(localNodeId) .setReason("shutdown for a unit test") .setType(type) .setStartedAtMillis(randomNonNegativeLong()) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java index bd58f4d443d3..9d344a319055 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java @@ -11,7 +11,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; @@ -30,6 +30,7 @@ import org.elasticsearch.test.hamcrest.RectangleMatcher; import org.elasticsearch.test.hamcrest.WellKnownBinaryBytesRefMatcher; import java.io.IOException; +import java.util.ArrayList; import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; @@ -61,7 +62,7 @@ public class AbstractShapeGeometryFieldMapperTests extends ESTestCase { Function indexerFactory, Function> visitor ) throws IOException { - var geometries = IntStream.range(0, 20).mapToObj(i -> generator.get()).toList(); + var geometries = IntStream.range(0, 50).mapToObj(i -> generator.get()).toList(); var loader = new AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.BoundsBlockLoader("field", encoder); try (Directory directory = newDirectory()) { try (var iw = new RandomIndexWriter(random(), directory)) { @@ -73,23 +74,39 @@ public class AbstractShapeGeometryFieldMapperTests extends ESTestCase { iw.addDocument(doc); } } - var indices = IntStream.range(0, geometries.size() / 2).map(x -> x * 2).toArray(); + // We specifically check just the even indices, to verify the loader can skip documents correctly. 
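+ // e.g. a leaf holding N docs is only asked for docs 0, 2, 4, ..., so the reader must advance past every odd doc in between.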
+ var evenIndices = evenArray(geometries.size());
try (DirectoryReader reader = DirectoryReader.open(directory)) {
- LeafReaderContext ctx = reader.leaves().get(0);
- TestBlock block = (TestBlock) loader.reader(ctx).read(TestBlock.factory(ctx.reader().numDocs()), TestBlock.docs(indices));
- for (int i = 0; i < indices.length; i++) {
- var idx = indices[i];
+ var byteRefResults = new ArrayList<BytesRef>();
+ for (var leaf : reader.leaves()) {
+ LeafReader leafReader = leaf.reader();
+ int numDocs = leafReader.numDocs();
+ try (
+ TestBlock block = (TestBlock) loader.reader(leaf)
+ .read(TestBlock.factory(leafReader.numDocs()), TestBlock.docs(evenArray(numDocs)))
+ ) {
+ for (int i = 0; i < block.size(); i++) {
+ byteRefResults.add((BytesRef) block.get(i));
+ }
+ }
+ }
+ for (int i = 0; i < evenIndices.length; i++) {
+ var idx = evenIndices[i];
var geometry = geometries.get(idx);
var geoString = geometry.toString();
var geometryString = geoString.length() > 200 ? geoString.substring(0, 200) + "..." : geoString;
Rectangle r = visitor.apply(geometry).get();
assertThat(
Strings.format("geometries[%d] ('%s') wasn't extracted correctly", idx, geometryString),
- (BytesRef) block.get(i),
+ byteRefResults.get(i),
WellKnownBinaryBytesRefMatcher.encodes(RectangleMatcher.closeToFloat(r, 1e-3, encoder))
);
}
}
}
}
+
+ private static int[] evenArray(int maxIndex) {
+ return IntStream.range(0, maxIndex / 2).map(x -> x * 2).toArray();
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CustomSyntheticSourceFieldLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CustomSyntheticSourceFieldLookupTests.java
new file mode 100644
index 000000000000..c3d6bbf285d7
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/index/mapper/CustomSyntheticSourceFieldLookupTests.java
@@ -0,0 +1,263 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; + +import java.io.IOException; +import java.util.Map; + +public class CustomSyntheticSourceFieldLookupTests extends MapperServiceTestCase { + private static String MAPPING = """ + { + "_doc": { + "properties": { + "keep_all": { + "type": "keyword", + "synthetic_source_keep": "all" + }, + "keep_arrays": { + "type": "keyword", + "synthetic_source_keep": "arrays" + }, + "fallback_impl": { + "type": "long", + "doc_values": "false" + }, + "object_keep_all": { + "properties": {}, + "synthetic_source_keep": "all" + }, + "object_keep_arrays": { + "properties": {}, + "synthetic_source_keep": "arrays" + }, + "object_disabled": { + "properties": {}, + "enabled": "false" + }, + "nested_keep_all": { + "type": "nested", + "properties": {}, + "synthetic_source_keep": "all" + }, + "nested_disabled": { + "type": "nested", + "properties": {}, + "enabled": "false" + }, + "just_field": { + "type": "boolean" + }, + "just_object": { + "properties": {} + }, + "nested_obj": { + "properties": { + "keep_all": { + "type": "keyword", + "synthetic_source_keep": "all" + }, + "keep_arrays": { + "type": "keyword", + "synthetic_source_keep": "arrays" + }, + "fallback_impl": { + "type": "long", + "doc_values": "false" + }, + "object_keep_all": { + "properties": {}, + "synthetic_source_keep": "all" + }, + "object_keep_arrays": { + "properties": {}, + "synthetic_source_keep": "arrays" + }, + "object_disabled": { + "properties": {}, + "enabled": "false" + }, + "nested_keep_all": { + "type": "nested", + "properties": {}, + "synthetic_source_keep": "all" + }, + "nested_disabled": { + "type": "nested", + "properties": {}, + "enabled": "false" + }, + "just_field": { + "type": "boolean" + }, + "just_object": { + "properties": {} + } + } + }, + "nested_nested": { + "properties": { + "keep_all": { + "type": "keyword", + "synthetic_source_keep": "all" + }, + "keep_arrays": { + "type": "keyword", + "synthetic_source_keep": "arrays" + }, + "fallback_impl": { + "type": "long", + "doc_values": "false" + }, + "object_keep_all": { + "properties": {}, + "synthetic_source_keep": "all" + }, + "object_keep_arrays": { + "properties": {}, + "synthetic_source_keep": "arrays" + }, + "object_disabled": { + "properties": {}, + "enabled": "false" + }, + "nested_keep_all": { + "type": "nested", + "properties": {}, + "synthetic_source_keep": "all" + }, + "nested_disabled": { + "type": "nested", + "properties": {}, + "enabled": "false" + }, + "just_field": { + "type": "boolean" + }, + "just_object": { + "properties": {} + } + } + } + } + } + } + """; + + public void testIsNoopWhenSourceIsNotSynthetic() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, false); + + assertEquals(sut.getFieldsWithCustomSyntheticSourceHandling(), Map.of()); + } + + public void testDetectsLeafWithKeepAll() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("keep_all")); 
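+ // (the map is keyed by full dotted path, so the same leaf is also reported under "nested_obj" and "nested_nested")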
+ assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_obj.keep_all")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_nested.keep_all")); + } + + public void testDetectsLeafWithKeepArrays() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("keep_arrays")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_obj.keep_arrays")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_nested.keep_arrays")); + } + + public void testDetectsLeafWithFallback() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.FALLBACK_SYNTHETIC_SOURCE, fields.get("fallback_impl")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.FALLBACK_SYNTHETIC_SOURCE, fields.get("nested_obj.fallback_impl")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.FALLBACK_SYNTHETIC_SOURCE, fields.get("nested_nested.fallback_impl")); + } + + public void testDetectsObjectWithKeepAll() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("object_keep_all")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_obj.object_keep_all")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_nested.object_keep_all")); + + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_keep_all")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_obj.nested_keep_all")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ALL, fields.get("nested_nested.nested_keep_all")); + } + + public void testDetectsObjectWithKeepArrays() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("object_keep_arrays")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_obj.object_keep_arrays")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_nested.object_keep_arrays")); + } + + public void testDetectsDisabledObject() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = 
indexSettings(Mapper.SourceKeepMode.NONE); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + + assertEquals(CustomSyntheticSourceFieldLookup.Reason.DISABLED_OBJECT, fields.get("object_disabled")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.DISABLED_OBJECT, fields.get("nested_obj.object_disabled")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.DISABLED_OBJECT, fields.get("nested_nested.object_disabled")); + + assertEquals(CustomSyntheticSourceFieldLookup.Reason.DISABLED_OBJECT, fields.get("nested_disabled")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.DISABLED_OBJECT, fields.get("nested_obj.nested_disabled")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.DISABLED_OBJECT, fields.get("nested_nested.nested_disabled")); + } + + public void testAppliesIndexLevelSourceKeepMode() throws IOException { + var mapping = createMapperService(MAPPING).mappingLookup().getMapping(); + var indexSettings = indexSettings(Mapper.SourceKeepMode.ARRAYS); + var sut = new CustomSyntheticSourceFieldLookup(mapping, indexSettings, true); + + var fields = sut.getFieldsWithCustomSyntheticSourceHandling(); + + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("just_field")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_obj.just_field")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_nested.just_field")); + + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("just_object")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_obj.just_object")); + assertEquals(CustomSyntheticSourceFieldLookup.Reason.SOURCE_KEEP_ARRAYS, fields.get("nested_nested.just_object")); + } + + private static IndexSettings indexSettings(Mapper.SourceKeepMode sourceKeepMode) { + return createIndexSettings( + IndexVersion.current(), + Settings.builder().put(Mapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING.getKey(), sourceKeepMode).build() + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index b2ba3d60d217..9e617f638e75 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -81,6 +81,7 @@ public class DocumentMapperTests extends MapperServiceTestCase { merged, merged.toCompressedXContent(), IndexVersion.current(), + null, MapperMetrics.NOOP, "myIndex" ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserListenerTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserListenerTests.java new file mode 100644 index 000000000000..a1dafacfb7b2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserListenerTests.java @@ -0,0 +1,253 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+import org.elasticsearch.xcontent.XContentType;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class DocumentParserListenerTests extends MapperServiceTestCase {
+ private static class MemorizingDocumentParserListener implements DocumentParserListener {
+ private final List<Event> events = new ArrayList<>();
+ private final List<Token> tokens = new ArrayList<>();
+
+ @Override
+ public boolean isActive() {
+ return true;
+ }
+
+ @Override
+ public void consume(Token token) throws IOException {
+ // Tokens contain information tied to the current parser state, so we need to "materialize" them.
+ if (token instanceof Token.StringAsCharArrayValue charArray) {
+ var string = String.copyValueOf(charArray.buffer(), charArray.offset(), charArray.length());
+ tokens.add((Token.ValueToken) () -> string);
+ } else if (token instanceof Token.ValueToken v) {
+ var value = v.value();
+ tokens.add((Token.ValueToken) () -> value);
+ } else {
+ tokens.add(token);
+ }
+ }
+
+ @Override
+ public void consume(Event event) throws IOException {
+ events.add(event);
+ }
+
+ @Override
+ public Output finish() {
+ return new Output(List.of());
+ }
+
+ public List<Token> getTokens() {
+ return tokens;
+ }
+
+ public List<Event> getEvents() {
+ return events;
+ }
+ }
+
+ public void testEventFlow() throws IOException {
+ var mapping = XContentBuilder.builder(XContentType.JSON.xContent()).startObject().startObject("_doc").startObject("properties");
+ {
+ mapping.startObject("leaf").field("type", "keyword").endObject();
+ mapping.startObject("leaf_array").field("type", "keyword").endObject();
+
+ mapping.startObject("object").startObject("properties");
+ {
+ mapping.startObject("leaf").field("type", "keyword").endObject();
+ mapping.startObject("leaf_array").field("type", "keyword").endObject();
+ }
+ mapping.endObject().endObject();
+
+ mapping.startObject("object_array").startObject("properties");
+ {
+ mapping.startObject("leaf").field("type", "keyword").endObject();
+ mapping.startObject("leaf_array").field("type", "keyword").endObject();
+ }
+ mapping.endObject().endObject();
+ }
+ mapping.endObject().endObject().endObject();
+ var mappingService = createSytheticSourceMapperService(mapping);
+
+ XContentType xContentType = randomFrom(XContentType.values());
+
+ var listener = new MemorizingDocumentParserListener();
+ var documentParser = new DocumentParser(
+ XContentParserConfiguration.EMPTY,
+ mappingService.parserContext(),
+ (ml, xct) -> new DocumentParser.Listeners.Single(listener)
+ );
+
+ var source = XContentBuilder.builder(xContentType.xContent());
+ source.startObject();
+ {
+ source.field("leaf", "leaf");
+ source.array("leaf_array", "one", "two");
+ source.startObject("object");
+ {
+ source.field("leaf", "leaf");
+ source.array("leaf_array", "one", "two");
+ }
+ source.endObject();
+ source.startArray("object_array");
+ {
+ source.startObject();
+ {
+ source.field("leaf", "leaf");
+ source.array("leaf_array", "one", "two");
+ }
+
source.endObject(); + } + source.endArray(); + } + source.endObject(); + + documentParser.parseDocument(new SourceToParse("id1", BytesReference.bytes(source), xContentType), mappingService.mappingLookup()); + var events = listener.getEvents(); + + assertEquals("_doc", ((DocumentParserListener.Event.DocumentStart) events.get(0)).rootObjectMapper().fullPath()); + + assertLeafEvents(events, 1, "", "_doc", false); + + var objectStart = (DocumentParserListener.Event.ObjectStart) events.get(5); + assertEquals("object", objectStart.objectMapper().fullPath()); + assertEquals("_doc", objectStart.parentMapper().fullPath()); + assertFalse(objectStart.insideObjectArray()); + assertLeafEvents(events, 6, "object.", "object", false); + + var objectArrayStart = (DocumentParserListener.Event.ObjectArrayStart) events.get(10); + assertEquals("object_array", objectArrayStart.objectMapper().fullPath()); + assertEquals("_doc", objectArrayStart.parentMapper().fullPath()); + + var objectInArrayStart = (DocumentParserListener.Event.ObjectStart) events.get(11); + assertEquals("object_array", objectInArrayStart.objectMapper().fullPath()); + assertEquals("_doc", objectInArrayStart.parentMapper().fullPath()); + assertTrue(objectInArrayStart.insideObjectArray()); + assertLeafEvents(events, 12, "object_array.", "object_array", true); + } + + public void testTokenFlow() throws IOException { + var mapping = XContentBuilder.builder(XContentType.JSON.xContent()) + .startObject() + .startObject("_doc") + .field("enabled", false) + .endObject() + .endObject(); + var mappingService = createSytheticSourceMapperService(mapping); + + XContentType xContentType = randomFrom(XContentType.values()); + + var listener = new MemorizingDocumentParserListener(); + var documentParser = new DocumentParser( + XContentParserConfiguration.EMPTY, + mappingService.parserContext(), + (ml, xct) -> new DocumentParser.Listeners.Single(listener) + ); + + var source = XContentBuilder.builder(xContentType.xContent()); + source.startObject(); + { + source.field("leaf", "leaf"); + source.array("leaf_array", "one", "two"); + source.startObject("object"); + { + source.field("leaf", "leaf"); + source.array("leaf_array", "one", "two"); + } + source.endObject(); + source.startArray("object_array"); + { + source.startObject(); + { + source.field("leaf", "leaf"); + source.array("leaf_array", "one", "two"); + } + source.endObject(); + } + source.endArray(); + } + source.endObject(); + + documentParser.parseDocument(new SourceToParse("id1", BytesReference.bytes(source), xContentType), mappingService.mappingLookup()); + var tokens = listener.getTokens(); + assertTrue(tokens.get(0) instanceof DocumentParserListener.Token.StartObject); + { + assertLeafTokens(tokens, 1); + assertEquals("object", ((DocumentParserListener.Token.FieldName) tokens.get(8)).name()); + assertTrue(tokens.get(9) instanceof DocumentParserListener.Token.StartObject); + { + assertLeafTokens(tokens, 10); + } + assertTrue(tokens.get(17) instanceof DocumentParserListener.Token.EndObject); + assertEquals("object_array", ((DocumentParserListener.Token.FieldName) tokens.get(18)).name()); + assertTrue(tokens.get(19) instanceof DocumentParserListener.Token.StartArray); + { + assertTrue(tokens.get(20) instanceof DocumentParserListener.Token.StartObject); + { + assertLeafTokens(tokens, 21); + } + assertTrue(tokens.get(28) instanceof DocumentParserListener.Token.EndObject); + } + assertTrue(tokens.get(29) instanceof DocumentParserListener.Token.EndArray); + } + assertTrue(tokens.get(30) instanceof 
DocumentParserListener.Token.EndObject);
+ }
+
+ private void assertLeafEvents(
+ List<DocumentParserListener.Event> events,
+ int start,
+ String prefix,
+ String parent,
+ boolean inObjectArray
+ ) {
+ var leafValue = (DocumentParserListener.Event.LeafValue) events.get(start);
+ assertEquals(prefix + "leaf", leafValue.fieldMapper().fullPath());
+ assertEquals(parent, leafValue.parentMapper().fullPath());
+ assertFalse(leafValue.isArray());
+ assertFalse(leafValue.isContainer());
+ assertEquals(inObjectArray, leafValue.insideObjectArray());
+
+ var leafArray = (DocumentParserListener.Event.LeafArrayStart) events.get(start + 1);
+ assertEquals(prefix + "leaf_array", leafArray.fieldMapper().fullPath());
+ assertEquals(parent, leafArray.parentMapper().fullPath());
+
+ var arrayValue1 = (DocumentParserListener.Event.LeafValue) events.get(start + 2);
+ assertEquals(prefix + "leaf_array", arrayValue1.fieldMapper().fullPath());
+ assertEquals(parent, arrayValue1.parentMapper().fullPath());
+ assertFalse(arrayValue1.isArray());
+ assertFalse(arrayValue1.isContainer());
+ assertEquals(inObjectArray, arrayValue1.insideObjectArray());
+
+ var arrayValue2 = (DocumentParserListener.Event.LeafValue) events.get(start + 3);
+ assertEquals(prefix + "leaf_array", arrayValue2.fieldMapper().fullPath());
+ assertEquals(parent, arrayValue2.parentMapper().fullPath());
+ assertFalse(arrayValue2.isArray());
+ assertFalse(arrayValue2.isContainer());
+ assertEquals(inObjectArray, arrayValue2.insideObjectArray());
+ }
+
+ private void assertLeafTokens(List<DocumentParserListener.Token> tokens, int start) throws IOException {
+ assertEquals("leaf", ((DocumentParserListener.Token.FieldName) tokens.get(start)).name());
+ assertEquals("leaf", ((DocumentParserListener.Token.ValueToken) tokens.get(start + 1)).value());
+ assertEquals("leaf_array", ((DocumentParserListener.Token.FieldName) tokens.get(start + 2)).name());
+ assertTrue(tokens.get(start + 3) instanceof DocumentParserListener.Token.StartArray);
+ assertEquals("one", ((DocumentParserListener.Token.ValueToken) tokens.get(start + 4)).value());
+ assertEquals("two", ((DocumentParserListener.Token.ValueToken) tokens.get(start + 5)).value());
+ assertTrue(tokens.get(start + 6) instanceof DocumentParserListener.Token.EndArray);
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
index d128b25038a5..3699f97e243a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
@@ -2688,6 +2688,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
newMapping,
newMapping.toCompressedXContent(),
IndexVersion.current(),
+ mapperService.getIndexSettings(),
MapperMetrics.NOOP,
"myIndex"
);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java
index d4d0e67ff414..1f020520e7e3 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java
@@ -9,8 +9,11 @@ package org.elasticsearch.index.mapper;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.IndexVersion;
import
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -75,7 +78,14 @@ public class DynamicFieldsBuilderTests extends ESTestCase { ).build(MapperBuilderContext.root(false, false)); Mapping mapping = new Mapping(root, new MetadataFieldMapper[] { sourceMapper }, Map.of()); - DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.fromMapping(mapping), sourceToParse) { + IndexMetadata indexMetadata = IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); + + DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.fromMapping(mapping, indexSettings), sourceToParse) { @Override public XContentParser parser() { return parser; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index e385177b8714..dbfc1f114fff 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -204,6 +204,6 @@ public class FieldAliasMapperValidationTests extends ESTestCase { new MetadataFieldMapper[0], Collections.emptyMap() ); - return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers, fieldAliasMappers, emptyList()); + return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers, fieldAliasMappers, emptyList(), null); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index 4a45824342c7..67b62530f344 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -22,8 +22,6 @@ import org.elasticsearch.test.ESTestCase; import java.util.List; import java.util.stream.Stream; -import static java.util.Collections.emptyList; - public class FieldNamesFieldTypeTests extends ESTestCase { public void testTermQuery() { @@ -36,7 +34,6 @@ public class FieldNamesFieldTypeTests extends ESTestCase { settings ); List mappers = Stream.of(fieldNamesFieldType, fieldType).map(MockFieldMapper::new).toList(); - MappingLookup mappingLookup = MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList()); SearchExecutionContext searchExecutionContext = SearchExecutionContextHelper.createSimple(indexSettings, null, null); Query termQuery = fieldNamesFieldType.termQuery("field_name", searchExecutionContext); assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 14902aa419b9..5c9765eefb98 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -848,7 +848,7 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase { b.field("bool_value", true); }); assertEquals(""" - 
{"bool_value":true,"path":{"int_value":[20,10]}}""", syntheticSource); + {"bool_value":true,"path":{"int_value":[10,20]}}""", syntheticSource); } public void testIndexStoredArraySourceNestedValueArray() throws IOException { @@ -912,7 +912,7 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase { b.endObject(); }); assertEquals(""" - {"path":{"bool_value":true,"int_value":[10,20,30],"obj":{"foo":[2,1]}}}""", syntheticSource); + {"path":{"bool_value":true,"int_value":[10,20,30],"obj":{"foo":[1,2]}}}""", syntheticSource); } public void testFieldStoredArraySourceNestedValueArray() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java index b1470c1ee5b3..755b83e8eb7a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java @@ -105,11 +105,6 @@ public class MappingLookupInferenceFieldMapperTests extends MapperServiceTestCas return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, SEARCH_INFERENCE_ID, sourcePaths.toArray(new String[0])); } - @Override - public Object getOriginalValue(Map sourceAsMap) { - return null; - } - @Override protected void parseCreateField(DocumentParserContext context) {} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index fd44e68df19a..71edce3d1549 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -13,8 +13,12 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.TimeSeriesParams.MetricType; @@ -49,7 +53,13 @@ public class MappingLookupTests extends ESTestCase { new MetadataFieldMapper[0], Collections.emptyMap() ); - return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers); + IndexMetadata indexMetadata = IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); + return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers, indexSettings); } public void testOnlyRuntimeField() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java index b87ab09c530d..289ec24f7d3b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java @@ -107,7 +107,7 @@ public class 
MappingParserTests extends MapperServiceTestCase { b.endObject(); }); Mapping mapping = createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder))); - MappingLookup mappingLookup = MappingLookup.fromMapping(mapping); + MappingLookup mappingLookup = MappingLookup.fromMapping(mapping, null); assertNotNull(mappingLookup.getMapper("foo.bar")); assertNotNull(mappingLookup.getMapper("foo.baz.deep.field")); assertNotNull(mappingLookup.objectMappers().get("foo")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SyntheticSourceDocumentParserListenerTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SyntheticSourceDocumentParserListenerTests.java new file mode 100644 index 000000000000..b5496e1001bf --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/SyntheticSourceDocumentParserListenerTests.java @@ -0,0 +1,385 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.math.BigInteger; + +public class SyntheticSourceDocumentParserListenerTests extends MapperServiceTestCase { + public void testStoreLeafValue() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "long").field("synthetic_source_keep", "all")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var doc = new LuceneDocument(); + + var value = XContentBuilder.builder(xContentType.xContent()).value(1234L); + var parser = createParser(value); + parser.nextToken(); + + sut.consume( + new DocumentParserListener.Event.LeafValue( + (FieldMapper) mappingLookup.getMapper("field"), + false, + mappingLookup.getMapping().getRoot(), + doc, + parser + ) + ); + + var output = sut.finish(); + + assertEquals(1, output.ignoredSourceValues().size()); + var valueToStore = output.ignoredSourceValues().get(0); + assertEquals("field", valueToStore.name()); + var decoded = XContentBuilder.builder(xContentType.xContent()); + XContentDataHelper.decodeAndWrite(decoded, valueToStore.value()); + assertEquals(BytesReference.bytes(value), BytesReference.bytes(decoded)); + } + + public void testStoreLeafArray() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "long").field("synthetic_source_keep", "all")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var values = randomList(0, 10, ESTestCase::randomLong); + + var doc = new LuceneDocument(); + + sut.consume( + new DocumentParserListener.Event.LeafArrayStart( + 
(FieldMapper) mappingLookup.getMapper("field"), + mappingLookup.getMapping().getRoot(), + doc + ) + ); + for (long l : values) { + sut.consume((DocumentParserListener.Token.ValueToken) () -> l); + } + sut.consume(DocumentParserListener.Token.END_ARRAY); + + var output = sut.finish(); + + assertEquals(1, output.ignoredSourceValues().size()); + var valueToStore = output.ignoredSourceValues().get(0); + assertEquals("field", valueToStore.name()); + + var decoded = XContentBuilder.builder(xContentType.xContent()); + XContentDataHelper.decodeAndWrite(decoded, valueToStore.value()); + + var parser = createParser(decoded); + assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); + for (long l : values) { + parser.nextToken(); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.currentToken()); + assertEquals(l, parser.longValue()); + } + assertEquals(XContentParser.Token.END_ARRAY, parser.nextToken()); + } + + public void testStoreObject() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "object").field("synthetic_source_keep", "all")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var names = randomList(0, 10, () -> randomAlphaOfLength(10)); + var values = randomList(names.size(), names.size(), () -> randomAlphaOfLength(10)); + + var doc = new LuceneDocument(); + + sut.consume( + new DocumentParserListener.Event.ObjectStart( + mappingLookup.objectMappers().get("field"), + false, + mappingLookup.getMapping().getRoot(), + doc + ) + ); + for (int i = 0; i < names.size(); i++) { + sut.consume(new DocumentParserListener.Token.FieldName(names.get(i))); + var value = values.get(i); + sut.consume((DocumentParserListener.Token.ValueToken) () -> value); + } + sut.consume(DocumentParserListener.Token.END_OBJECT); + + var output = sut.finish(); + + assertEquals(1, output.ignoredSourceValues().size()); + var valueToStore = output.ignoredSourceValues().get(0); + assertEquals("field", valueToStore.name()); + + var decoded = XContentBuilder.builder(xContentType.xContent()); + XContentDataHelper.decodeAndWrite(decoded, valueToStore.value()); + + var parser = createParser(decoded); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + for (int i = 0; i < names.size(); i++) { + parser.nextToken(); + assertEquals(XContentParser.Token.FIELD_NAME, parser.currentToken()); + assertEquals(names.get(i), parser.currentName()); + + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(values.get(i), parser.text()); + } + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + } + + public void testStoreObjectArray() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "object").field("synthetic_source_keep", "all")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var names = randomList(0, 10, () -> randomAlphaOfLength(10)); + var values = randomList(names.size(), names.size(), () -> randomAlphaOfLength(10)); + + var doc = new LuceneDocument(); + + sut.consume( + new DocumentParserListener.Event.ObjectArrayStart( + mappingLookup.objectMappers().get("field"), + mappingLookup.getMapping().getRoot(), + doc + ) + ); + for (int i = 0; i < 
names.size(); i++) { + sut.consume(DocumentParserListener.Token.START_OBJECT); + + sut.consume(new DocumentParserListener.Token.FieldName(names.get(i))); + var value = values.get(i); + sut.consume((DocumentParserListener.Token.ValueToken) () -> value); + + sut.consume(DocumentParserListener.Token.END_OBJECT); + } + sut.consume(DocumentParserListener.Token.END_ARRAY); + + var output = sut.finish(); + + assertEquals(1, output.ignoredSourceValues().size()); + var valueToStore = output.ignoredSourceValues().get(0); + assertEquals("field", valueToStore.name()); + + var decoded = XContentBuilder.builder(xContentType.xContent()); + XContentDataHelper.decodeAndWrite(decoded, valueToStore.value()); + + var parser = createParser(decoded); + assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); + for (int i = 0; i < names.size(); i++) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(names.get(i), parser.currentName()); + + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(values.get(i), parser.text()); + + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + } + assertEquals(XContentParser.Token.END_ARRAY, parser.nextToken()); + } + + public void testStashedLeafValue() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "boolean").field("synthetic_source_keep", "arrays")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var doc = new LuceneDocument(); + + var value = XContentBuilder.builder(xContentType.xContent()).value(false); + var parser = createParser(value); + parser.nextToken(); + + sut.consume( + new DocumentParserListener.Event.LeafValue( + (FieldMapper) mappingLookup.getMapper("field"), + true, + mappingLookup.getMapping().getRoot(), + doc, + parser + ) + ); + + sut.consume( + new DocumentParserListener.Event.LeafValue( + (FieldMapper) mappingLookup.getMapper("field"), + true, + mappingLookup.getMapping().getRoot(), + doc, + parser + ) + ); + + var output = sut.finish(); + + // Single values are optimized away because there are no arrays mixed in and regular synthetic source logic is sufficient + assertEquals(0, output.ignoredSourceValues().size()); + } + + public void testStashedMixedValues() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "boolean").field("synthetic_source_keep", "arrays")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var doc = new LuceneDocument(); + + var value = XContentBuilder.builder(xContentType.xContent()).value(false); + var parser = createParser(value); + parser.nextToken(); + + sut.consume( + new DocumentParserListener.Event.LeafValue( + (FieldMapper) mappingLookup.getMapper("field"), + true, + mappingLookup.getMapping().getRoot(), + doc, + parser + ) + ); + + sut.consume( + new DocumentParserListener.Event.LeafValue( + (FieldMapper) mappingLookup.getMapper("field"), + true, + mappingLookup.getMapping().getRoot(), + doc, + parser + ) + ); + + sut.consume( + new DocumentParserListener.Event.LeafArrayStart( + (FieldMapper) mappingLookup.getMapper("field"), + 
mappingLookup.getMapping().getRoot(), + doc + ) + ); + sut.consume((DocumentParserListener.Token.ValueToken) () -> true); + sut.consume((DocumentParserListener.Token.ValueToken) () -> true); + sut.consume(DocumentParserListener.Token.END_ARRAY); + + var output = sut.finish(); + + // Both arrays and individual values are stored. + assertEquals(3, output.ignoredSourceValues().size()); + } + + public void testStashedObjectValue() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "object").field("synthetic_source_keep", "arrays")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var doc = new LuceneDocument(); + + var value = XContentBuilder.builder(xContentType.xContent()).value(1234L); + var parser = createParser(value); + parser.nextToken(); + + sut.consume( + new DocumentParserListener.Event.ObjectStart( + mappingLookup.objectMappers().get("field"), + true, + mappingLookup.getMapping().getRoot(), + doc + ) + ); + sut.consume(new DocumentParserListener.Token.FieldName("hello")); + sut.consume((DocumentParserListener.Token.ValueToken) () -> BigInteger.valueOf(13)); + sut.consume(DocumentParserListener.Token.END_OBJECT); + + var output = sut.finish(); + + // Single value optimization does not work for objects because it is possible that one of the fields + // of this object needs to be stored in ignored source. + // Because we stored the entire object we didn't store individual fields separately. + // Optimizing this away would lead to missing data from synthetic source in some cases. + // We could do both, but we don't do it now. + assertEquals(1, output.ignoredSourceValues().size()); + } + + public void testSingleElementArray() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "boolean").field("synthetic_source_keep", "arrays")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var doc = new LuceneDocument(); + + sut.consume( + new DocumentParserListener.Event.LeafArrayStart( + (FieldMapper) mappingLookup.getMapper("field"), + mappingLookup.getMapping().getRoot(), + doc + ) + ); + sut.consume((DocumentParserListener.Token.ValueToken) () -> true); + sut.consume(DocumentParserListener.Token.END_ARRAY); + + var output = sut.finish(); + + // Since there is only one value in the array, order does not matter, + // and we can drop ignored source value and use standard synthetic source logic. 
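+ // (standard synthetic source can reconstruct the lone element on its own, so nothing needs to be recorded in ignored source)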
+ assertEquals(0, output.ignoredSourceValues().size()); + } + + public void testMultipleSingleElementArrays() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + var mapping = fieldMapping(b -> b.field("type", "boolean").field("synthetic_source_keep", "arrays")); + var mappingLookup = createSytheticSourceMapperService(mapping).mappingLookup(); + var sut = new SyntheticSourceDocumentParserListener(mappingLookup, xContentType); + + var doc = new LuceneDocument(); + + sut.consume( + new DocumentParserListener.Event.LeafArrayStart( + (FieldMapper) mappingLookup.getMapper("field"), + mappingLookup.getMapping().getRoot(), + doc + ) + ); + sut.consume((DocumentParserListener.Token.ValueToken) () -> true); + sut.consume(DocumentParserListener.Token.END_ARRAY); + + sut.consume( + new DocumentParserListener.Event.LeafArrayStart( + (FieldMapper) mappingLookup.getMapper("field"), + mappingLookup.getMapping().getRoot(), + doc + ) + ); + sut.consume((DocumentParserListener.Token.ValueToken) () -> false); + sut.consume(DocumentParserListener.Token.END_ARRAY); + + var output = sut.finish(); + + // Since there is only one value in the array, order does not matter, + // and we can drop ignored source value. + assertEquals(0, output.ignoredSourceValues().size()); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index dc70c44a8912..df2c9466f3b7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -297,7 +297,7 @@ public class SearchExecutionContextTests extends ESTestCase { new MetadataFieldMapper[0], Collections.emptyMap() ); - return MappingLookup.fromMappers(mapping, mappers, Collections.emptyList()); + return MappingLookup.fromMappers(mapping, mappers, Collections.emptyList(), null); } public void testSearchRequestRuntimeFields() { @@ -389,7 +389,13 @@ public class SearchExecutionContextTests extends ESTestCase { new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); Mapping mapping = new Mapping(root, new MetadataFieldMapper[] { sourceMapper }, Map.of()); - MappingLookup lookup = MappingLookup.fromMapping(mapping); + IndexMetadata indexMetadata = IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); + MappingLookup lookup = MappingLookup.fromMapping(mapping, indexSettings); SearchExecutionContext sec = createSearchExecutionContext("index", "", lookup, Map.of()); assertTrue(sec.isSourceSynthetic()); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index 773c660caa1c..8afedbd63f14 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -204,7 +204,7 @@ public class IndicesRequestCacheTests extends ESTestCase { public void testCacheDifferentMapping() throws Exception { IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); MappingLookup.CacheKey mappingKey1 = 
MappingLookup.EMPTY.cacheKey(); - MappingLookup.CacheKey mappingKey2 = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList()).cacheKey(); + MappingLookup.CacheKey mappingKey2 = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList(), null).cacheKey(); AtomicBoolean indexShard = new AtomicBoolean(true); ShardRequestCache requestCacheStats = new ShardRequestCache(); Directory dir = newDirectory(); @@ -364,13 +364,13 @@ public class IndicesRequestCacheTests extends ESTestCase { writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - MappingLookup.CacheKey secondMappingKey = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList()).cacheKey(); + MappingLookup.CacheKey secondMappingKey = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList(), null).cacheKey(); TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); Loader secondLoader = new Loader(secondReader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - MappingLookup.CacheKey thirdMappingKey = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList()).cacheKey(); + MappingLookup.CacheKey thirdMappingKey = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList(), null).cacheKey(); AtomicBoolean differentIdentity = new AtomicBoolean(true); TestEntity thirdEntity = new TestEntity(requestCacheStats, differentIdentity); Loader thirdLoader = new Loader(thirdReader, 0); @@ -506,7 +506,7 @@ public class IndicesRequestCacheTests extends ESTestCase { AtomicBoolean trueBoolean = new AtomicBoolean(true); AtomicBoolean falseBoolean = new AtomicBoolean(false); MappingLookup.CacheKey mKey1 = MappingLookup.EMPTY.cacheKey(); - MappingLookup.CacheKey mKey2 = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList()).cacheKey(); + MappingLookup.CacheKey mKey2 = MappingLookup.fromMappers(Mapping.EMPTY, emptyList(), emptyList(), null).cacheKey(); Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(); IndexWriter writer = new IndexWriter(dir, config); diff --git a/server/src/test/java/org/elasticsearch/lucene/util/automaton/MinimizationOperationsTests.java b/server/src/test/java/org/elasticsearch/lucene/util/automaton/MinimizationOperationsTests.java new file mode 100644 index 000000000000..bc9cafc792a8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/lucene/util/automaton/MinimizationOperationsTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.lucene.util.automaton; + +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.test.ESTestCase; + +import static org.apache.lucene.tests.util.automaton.AutomatonTestUtil.subsetOf; + +public class MinimizationOperationsTests extends ESTestCase { + + /** the minimal and non-minimal are compared to ensure they are the same. */ + public void testBasic() { + int num = atLeast(200); + for (int i = 0; i < num; i++) { + Automaton a = AutomatonTestUtil.randomAutomaton(random()); + Automaton la = Operations.determinize(Operations.removeDeadStates(a), Integer.MAX_VALUE); + Automaton lb = MinimizationOperations.minimize(a, Integer.MAX_VALUE); + assertTrue(sameLanguage(la, lb)); + } + } + + /** + * compare minimized against minimized with a slower, simple impl. we check not only that they are + * the same, but that #states/#transitions are the same. + */ + public void testAgainstBrzozowski() { + int num = atLeast(200); + for (int i = 0; i < num; i++) { + Automaton a = AutomatonTestUtil.randomAutomaton(random()); + a = AutomatonTestUtil.minimizeSimple(a); + Automaton b = MinimizationOperations.minimize(a, Integer.MAX_VALUE); + assertTrue(sameLanguage(a, b)); + assertEquals(a.getNumStates(), b.getNumStates()); + int numStates = a.getNumStates(); + + int sum1 = 0; + for (int s = 0; s < numStates; s++) { + sum1 += a.getNumTransitions(s); + } + int sum2 = 0; + for (int s = 0; s < numStates; s++) { + sum2 += b.getNumTransitions(s); + } + + assertEquals(sum1, sum2); + } + } + + private static boolean sameLanguage(Automaton a1, Automaton a2) { + if (a1 == a2) { + return true; + } + return subsetOf(a2, a1) && subsetOf(a1, a2); + } +} diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java index 3eab25562516..5f094f60203c 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java @@ -628,6 +628,7 @@ public class PersistentTasksClusterServiceTests extends ESTestCase { DiscoveryNode::getId, node -> SingleNodeShutdownMetadata.builder() .setNodeId(node.getId()) + .setNodeEphemeralId(node.getEphemeralId()) .setReason("shutdown for a unit test") .setType(type) .setStartedAtMillis(randomNonNegativeLong()) diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index 8644dfc033c9..15febaa8db2a 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -246,6 +246,7 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient httpTransport.node.getId(), SingleNodeShutdownMetadata.builder() .setNodeId(httpTransport.node.getId()) + .setNodeEphemeralId(httpTransport.node.getEphemeralId()) .setReason("testing") .setType(SingleNodeShutdownMetadata.Type.RESTART) .setStartedAtMillis(randomNonNegativeLong()) diff --git a/server/src/test/java/org/elasticsearch/script/ScriptCacheStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptCacheStatsTests.java index 991967c2db88..9a4d6fdef467 100644 --- 
a/server/src/test/java/org/elasticsearch/script/ScriptCacheStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptCacheStatsTests.java @@ -31,9 +31,7 @@ public class ScriptCacheStatsTests extends ESTestCase { var scriptCacheStats = new ScriptCacheStats(stats); builder.startObject(); - for (var it = scriptCacheStats.toXContentChunked(ToXContent.EMPTY_PARAMS); it.hasNext();) { - it.next().toXContent(builder, ToXContent.EMPTY_PARAMS); - } + scriptCacheStats.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); String expected = """ @@ -63,9 +61,7 @@ public class ScriptCacheStatsTests extends ESTestCase { ); builder.startObject(); - for (var it = scriptCacheStats.toXContentChunked(ToXContent.EMPTY_PARAMS); it.hasNext();) { - it.next().toXContent(builder, ToXContent.EMPTY_PARAMS); - } + scriptCacheStats.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); String expected = """ diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index d041121b8a96..926ac534164f 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -227,7 +227,8 @@ public class SearchServiceTests extends IndexShardTestCase { MappingLookup mappingLookup = MappingLookup.fromMappers( mapping, Collections.singletonList(keywordFieldMapper), - Collections.emptyList() + Collections.emptyList(), + indexSettings ); return new SearchExecutionContext( 0, diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index deada75279e3..971f3bab7b6a 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -11,8 +11,12 @@ package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.memory.MemoryIndex; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.FetchContext; @@ -190,6 +194,12 @@ public class FetchSourcePhaseTests extends ESTestCase { when(fetchContext.getIndexName()).thenReturn("index"); SearchExecutionContext sec = mock(SearchExecutionContext.class); when(sec.isSourceEnabled()).thenReturn(sourceBuilder != null); + IndexSettings indexSettings = new IndexSettings( + IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), + Settings.EMPTY + ); + when(sec.indexVersionCreated()).thenReturn(indexSettings.getIndexVersionCreated()); + when(sec.getIndexSettings()).thenReturn(indexSettings); when(fetchContext.getSearchExecutionContext()).thenReturn(sec); final SearchHit searchHit = SearchHit.unpooled(1, null, nestedIdentity); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 
3e4ed0ebac1b..d78c697e19c8 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -169,7 +169,7 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBuilder<SB>> extends ESTestCase { when(scriptService.compile(any(Script.class), any())).thenAnswer( invocation -> new TestTemplateService.MockTemplateScript.Factory(((Script) invocation.getArguments()[0]).getIdOrCode()) ); List mappers = Collections.singletonList(new MockFieldMapper(fieldType)); - MappingLookup lookup = MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList()); + MappingLookup lookup = MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList(), idxSettings); SearchExecutionContext mockContext = new SearchExecutionContext( 0, 0, diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java index 91ab253b2e1f..741dfd246643 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java @@ -88,6 +88,7 @@ public class SnapshotsInProgressSerializationTests extends SimpleDiffableWireSer nodeId -> SingleNodeShutdownMetadata.builder() .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setNodeId(nodeId) + .setNodeEphemeralId(nodeId) .setStartedAtMillis(randomNonNegativeLong()) .setReason(getTestName()) .build() @@ -476,6 +477,7 @@ public class SnapshotsInProgressSerializationTests extends SimpleDiffableWireSer "node-id", SingleNodeShutdownMetadata.builder() .setNodeId("node-id") + .setNodeEphemeralId("node-id") .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(randomNonNegativeLong()) .setReason("test") diff --git a/settings.gradle b/settings.gradle index 747fbb3e439f..18be6ba79275 100644 --- a/settings.gradle +++ b/settings.gradle @@ -87,6 +87,7 @@ List projects = [ 'distribution:tools:ansi-console', 'server', 'test:framework', + 'test:fixtures:aws-fixture-utils', 'test:fixtures:aws-sts-fixture', 'test:fixtures:azure-fixture', 'test:fixtures:ec2-imds-fixture', diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index 7f64b33b8142..4a5d6d9d6d91 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -13,8 +13,8 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - description 'Apm integration plugin' - classname 'org.elasticsearch.test.apmintegration.ApmIntegrationPlugin' + description = 'Apm integration plugin' + classname = 'org.elasticsearch.test.apmintegration.ApmIntegrationPlugin' } // let the javaRestTest see the classpath of main diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index 3ba6b309071f..47b909dbd708 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -11,8 +11,8 @@ subprojects { apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name it.name - licenseFile rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile rootProject.file('NOTICE.txt') + name = it.name + licenseFile = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') + noticeFile = rootProject.file('NOTICE.txt') } } diff --git a/test/external-modules/delayed-aggs/build.gradle
b/test/external-modules/delayed-aggs/build.gradle index fae5e93b37fc..5d1291c8ee50 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -14,8 +14,8 @@ tasks.named('yamlRestTest').configure { } esplugin { - description 'A test module that allows to delay aggregations on shards with a configurable time' - classname 'org.elasticsearch.test.delayedshard.DelayedShardAggregationPlugin' + description = 'A test module that allows to delay aggregations on shards with a configurable time' + classname = 'org.elasticsearch.test.delayedshard.DelayedShardAggregationPlugin' } restResources { diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index 6c1da40406a5..a6622997bf5d 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -13,8 +13,8 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - description 'Die with dignity plugin' - classname 'org.elasticsearch.test.diewithdignity.DieWithDignityPlugin' + description = 'Die with dignity plugin' + classname = 'org.elasticsearch.test.diewithdignity.DieWithDignityPlugin' } // let the javaRestTest see the classpath of main diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index 3c72145c11f8..8ac47c339f21 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -14,8 +14,8 @@ tasks.named('yamlRestTest').configure { } esplugin { - description 'A test module that exposes a way to simulate search shard failures and warnings' - classname 'org.elasticsearch.test.errorquery.ErrorQueryPlugin' + description = 'A test module that exposes a way to simulate search shard failures and warnings' + classname = 'org.elasticsearch.test.errorquery.ErrorQueryPlugin' } restResources { diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle index ba85e0dbd869..fa8c43048a6d 100644 --- a/test/external-modules/esql-heap-attack/build.gradle +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -14,8 +14,8 @@ apply plugin: 'elasticsearch.internal-test-artifact' group = 'org.elasticsearch.plugin' esplugin { - description 'A test module that can trigger out of memory' - classname 'org.elasticsearch.test.esql.heap_attack.HeapAttackPlugin' + description = 'A test module that can trigger out of memory' + classname = 'org.elasticsearch.test.esql.heap_attack.HeapAttackPlugin' } tasks.named('javaRestTest') { diff --git a/test/external-modules/jvm-crash/build.gradle b/test/external-modules/jvm-crash/build.gradle index 0b06142e8193..bc1a96836889 100644 --- a/test/external-modules/jvm-crash/build.gradle +++ b/test/external-modules/jvm-crash/build.gradle @@ -14,8 +14,8 @@ apply plugin: 'elasticsearch.internal-test-artifact' group = 'org.elasticsearch.plugin' esplugin { - description 'A test module that can trigger A JVM crash' - classname 'org.elasticsearch.test.jvm_crash.JvmCrashPlugin' + description = 'A test module that can trigger A JVM crash' + classname = 'org.elasticsearch.test.jvm_crash.JvmCrashPlugin' } tasks.named('javaRestTest') { diff --git a/test/external-modules/latency-simulating-directory/build.gradle b/test/external-modules/latency-simulating-directory/build.gradle index 36437aaf6c4e..70c23c32c8ad 100644 ---
a/test/external-modules/latency-simulating-directory/build.gradle +++ b/test/external-modules/latency-simulating-directory/build.gradle @@ -8,8 +8,8 @@ */ esplugin { - description 'A test module that simulates read latencies from an object-store based repository' - classname 'org.elasticsearch.test.simulatedlatencyrepo.LatencySimulatingRepositoryPlugin' + description = 'A test module that simulates read latencies from an object-store based repository' + classname = 'org.elasticsearch.test.simulatedlatencyrepo.LatencySimulatingRepositoryPlugin' } apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/test/fixtures/aws-fixture-utils/build.gradle b/test/fixtures/aws-fixture-utils/build.gradle new file mode 100644 index 000000000000..5e2981b00d6e --- /dev/null +++ b/test/fixtures/aws-fixture-utils/build.gradle @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +apply plugin: 'elasticsearch.java' + +description = 'Utils for AWS-related fixtures' + +dependencies { + implementation project(':server') + implementation project(':test:framework') +} diff --git a/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/AwsCredentialsUtils.java b/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/AwsCredentialsUtils.java new file mode 100644 index 000000000000..9000e06ca135 --- /dev/null +++ b/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/AwsCredentialsUtils.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.aws; + +import com.sun.net.httpserver.HttpExchange; + +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.BiPredicate; +import java.util.function.Supplier; + +import static fixture.aws.AwsFixtureUtils.sendError; + +public enum AwsCredentialsUtils { + ; + + /** + * @return an authorization predicate that ensures the access key matches the given value. + */ + public static BiPredicate<String, String> fixedAccessKey(String accessKey) { + return mutableAccessKey(() -> accessKey); + } + + /** + * @return an authorization predicate that ensures the access key matches one supplied by the given supplier. + */ + public static BiPredicate<String, String> mutableAccessKey(Supplier<String> accessKeySupplier) { + return (authorizationHeader, sessionTokenHeader) -> authorizationHeader != null + && authorizationHeader.contains(accessKeySupplier.get()); + } + + /** + * @return an authorization predicate that ensures the access key and session token both match the given values.
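+ * <p>A minimal usage sketch (the bucket, base path, and credential values here are illustrative, and the
+ * {@code S3HttpFixture} constructor shape is assumed from its use elsewhere in this change):
+ * <pre>{@code
+ * BiPredicate<String, String> auth = fixedAccessKeyAndToken("test-access-key", "test-session-token");
+ * S3HttpFixture fixture = new S3HttpFixture(true, "test-bucket", "test-base-path", auth);
+ * }</pre>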
+ */ + public static BiPredicate<String, String> fixedAccessKeyAndToken(String accessKey, String sessionToken) { + Objects.requireNonNull(sessionToken); + final var accessKeyPredicate = fixedAccessKey(accessKey); + return (authorizationHeader, sessionTokenHeader) -> accessKeyPredicate.test(authorizationHeader, sessionTokenHeader) + && sessionToken.equals(sessionTokenHeader); + } + + /** + * Check the authorization headers of the given {@code exchange} against the given {@code authorizationPredicate}. If they match, + * returns {@code true}. If they do not match, sends a {@code 403 Forbidden} response and returns {@code false}. + */ + public static boolean checkAuthorization(BiPredicate<String, String> authorizationPredicate, HttpExchange exchange) throws IOException { + if (authorizationPredicate.test( + exchange.getRequestHeaders().getFirst("Authorization"), + exchange.getRequestHeaders().getFirst("x-amz-security-token") + )) { + return true; + } + + sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Access denied by " + authorizationPredicate); + return false; + } +} diff --git a/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/AwsFixtureUtils.java b/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/AwsFixtureUtils.java new file mode 100644 index 000000000000..d46a7e10c4fe --- /dev/null +++ b/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/AwsFixtureUtils.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.aws; + +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpExchange; + +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; + +public enum AwsFixtureUtils { + ; + + /** + * @return an {@link InetSocketAddress} for a test fixture running on {@code localhost} which binds to any available port. + */ + public static InetSocketAddress getLocalFixtureAddress() { + try { + return new InetSocketAddress(InetAddress.getByName("localhost"), 0); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + } + + /** + * Send an XML-formatted error response typical of an AWS service.
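+ * <p>For example, a handler rejecting a request for an unknown bucket might call (error code and message are
+ * illustrative S3-style values, not part of this change):
+ * <pre>{@code
+ * sendError(exchange, RestStatus.NOT_FOUND, "NoSuchBucket", "The specified bucket does not exist");
+ * }</pre>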
+ */ + public static void sendError(final HttpExchange exchange, final RestStatus status, final String errorCode, final String message) + throws IOException { + final Headers headers = exchange.getResponseHeaders(); + headers.add("Content-Type", "application/xml"); + + final String requestId = exchange.getRequestHeaders().getFirst("x-amz-request-id"); + if (requestId != null) { + headers.add("x-amz-request-id", requestId); + } + + if (errorCode == null || "HEAD".equals(exchange.getRequestMethod())) { + exchange.sendResponseHeaders(status.getStatus(), -1L); + exchange.close(); + } else { + final byte[] response = ("<?xml version=\"1.0\" encoding=\"UTF-8\"?><Error>" + "<Code>" + errorCode + "</Code>" + "<Message>" + message + "</Message>" + "<RequestId>" + requestId + "</RequestId>" + "</Error>").getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(status.getStatus(), response.length); + exchange.getResponseBody().write(response); + exchange.close(); + } + } +} diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java b/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/DynamicAwsCredentials.java similarity index 88% rename from test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java rename to test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/DynamicAwsCredentials.java index 4e8f267ad354..24a5b4ea018e 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java +++ b/test/fixtures/aws-fixture-utils/src/main/java/fixture/aws/DynamicAwsCredentials.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package fixture.s3; +package fixture.aws; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -18,10 +18,9 @@ import java.util.Set; /** * Allows dynamic creation of access-key/session-token credentials for accessing AWS services such as S3. Typically there's one service * (e.g. IMDS or STS) which creates credentials dynamically and registers them here using {@link #addValidCredentials}, and then the - * {@link S3HttpFixture} uses {@link #isAuthorized} to validate the credentials it receives corresponds with some previously-generated - * credentials. + * fixture uses {@link #isAuthorized} to validate that the credentials it receives correspond to some previously-generated credentials.
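+ * <p>A sketch of the intended flow (header values illustrative; a real {@code Authorization} header carries a full
+ * SigV4 string, of which only the embedded access key is checked here):
+ * <pre>{@code
+ * DynamicAwsCredentials credentials = new DynamicAwsCredentials();
+ * credentials.addValidCredentials("AKIAIOSFODNN7EXAMPLE", "session-token-1");
+ * // authorized when the Authorization header contains a registered access key for the given session token
+ * boolean ok = credentials.isAuthorized("AWS4-HMAC-SHA256 Credential=AKIAIOSFODNN7EXAMPLE/...", "session-token-1");
+ * }</pre>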
*/ -public class DynamicS3Credentials { +public class DynamicAwsCredentials { private final Map> validCredentialsMap = ConcurrentCollections.newConcurrentMap(); public boolean isAuthorized(String authorizationHeader, String sessionTokenHeader) { diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 8296bc14fd66..857f497b2908 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -22,7 +22,7 @@ configurations { dependencies { compileOnly("org.apache.hadoop:hadoop-minicluster:2.8.5") api("com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}") { - transitive false + transitive = false } compileOnly "junit:junit:${versions.junit}" diff --git a/test/fixtures/s3-fixture/build.gradle b/test/fixtures/s3-fixture/build.gradle index e4c35464608a..c7792ce963a8 100644 --- a/test/fixtures/s3-fixture/build.gradle +++ b/test/fixtures/s3-fixture/build.gradle @@ -16,4 +16,5 @@ dependencies { transitive = false } implementation project(':test:framework') + implementation project(':test:fixtures:aws-fixture-utils') } diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java index ab70f043043c..538c57bc2bde 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java @@ -13,16 +13,15 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.rest.RestStatus; import org.junit.rules.ExternalResource; import java.io.IOException; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; import java.util.Objects; import java.util.function.BiPredicate; -import java.util.function.Supplier; + +import static fixture.aws.AwsCredentialsUtils.checkAuthorization; +import static fixture.aws.AwsCredentialsUtils.fixedAccessKey; +import static fixture.aws.AwsFixtureUtils.getLocalFixtureAddress; public class S3HttpFixture extends ExternalResource { @@ -49,14 +48,9 @@ public class S3HttpFixture extends ExternalResource { @Override public void handle(final HttpExchange exchange) throws IOException { try { - if (authorizationPredicate.test( - exchange.getRequestHeaders().getFirst("Authorization"), - exchange.getRequestHeaders().getFirst("x-amz-security-token") - ) == false) { - sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Access denied by " + authorizationPredicate); - return; + if (checkAuthorization(authorizationPredicate, exchange)) { + super.handle(exchange); } - super.handle(exchange); } catch (Error e) { // HttpServer catches Throwable, so we must throw errors on another thread ExceptionsHelper.maybeDieOnAnotherThread(e); @@ -76,10 +70,8 @@ public class S3HttpFixture extends ExternalResource { protected void before() throws Throwable { if (enabled) { - InetSocketAddress inetSocketAddress = resolveAddress(); - this.server = HttpServer.create(inetSocketAddress, 0); - HttpHandler handler = createHandler(); - this.server.createContext("/", Objects.requireNonNull(handler)); + this.server = HttpServer.create(getLocalFixtureAddress(), 0); + this.server.createContext("/", Objects.requireNonNull(createHandler())); server.start(); } } @@ -90,28 +82,4 @@ public class S3HttpFixture extends ExternalResource { stop(0); } } - - private static InetSocketAddress 
resolveAddress() { - try { - return new InetSocketAddress(InetAddress.getByName("localhost"), 0); - } catch (UnknownHostException e) { - throw new RuntimeException(e); - } - } - - public static BiPredicate<String, String> fixedAccessKey(String accessKey) { - return mutableAccessKey(() -> accessKey); - } - - public static BiPredicate<String, String> mutableAccessKey(Supplier<String> accessKeySupplier) { - return (authorizationHeader, sessionTokenHeader) -> authorizationHeader != null - && authorizationHeader.contains(accessKeySupplier.get()); - } - - public static BiPredicate<String, String> fixedAccessKeyAndToken(String accessKey, String sessionToken) { - Objects.requireNonNull(sessionToken); - final var accessKeyPredicate = fixedAccessKey(accessKey); - return (authorizationHeader, sessionTokenHeader) -> accessKeyPredicate.test(authorizationHeader, sessionTokenHeader) - && sessionToken.equals(sessionTokenHeader); - } } diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java index bfc0428731c5..847711730541 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java @@ -8,7 +8,6 @@ */ package fixture.s3; -import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; @@ -520,37 +519,6 @@ public class S3HttpHandler implements HttpHandler { } } - public static void sendError(final HttpExchange exchange, final RestStatus status, final String errorCode, final String message) - throws IOException { - final Headers headers = exchange.getResponseHeaders(); - headers.add("Content-Type", "application/xml"); - - final String requestId = exchange.getRequestHeaders().getFirst("x-amz-request-id"); - if (requestId != null) { - headers.add("x-amz-request-id", requestId); - } - - if (errorCode == null || "HEAD".equals(exchange.getRequestMethod())) { - exchange.sendResponseHeaders(status.getStatus(), -1L); - exchange.close(); - } else { - final byte[] response = ("<?xml version=\"1.0\" encoding=\"UTF-8\"?><Error>" - + "<Code>" - + errorCode - + "</Code>" - + "<Message>" - + message - + "</Message>" - + "<RequestId>" - + requestId - + "</RequestId>" - + "</Error>").getBytes(StandardCharsets.UTF_8); - exchange.sendResponseHeaders(status.getStatus(), response.length); - exchange.getResponseBody().write(response); - exchange.close(); - } - } - MultipartUpload getUpload(String uploadId) { return uploads.get(uploadId); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index f6ed328d14dd..6294230ad1dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -702,7 +702,7 @@ public final class DataStreamTestHelper { new MetadataFieldMapper[] { dtfm }, Collections.emptyMap() ); - mappingLookup = MappingLookup.fromMappers(mapping, List.of(dtfm, dateFieldMapper), List.of()); + mappingLookup = MappingLookup.fromMappers(mapping, List.of(dtfm, dateFieldMapper), List.of(), null); } IndicesService indicesService = mockIndicesServices(mappingLookup); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java index 49fe9d30239a..02ae0853909f 100644 ---
a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java @@ -70,6 +70,7 @@ public class TestDocumentParserContext extends DocumentParserContext { } ), source, + DocumentParser.Listeners.NOOP, mappingLookup.getMapping().getRoot(), ObjectMapper.Dynamic.getRootDynamic(mappingLookup) ); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index f057d35d6e7a..4fe41692e150 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -357,7 +357,8 @@ public abstract class AggregatorTestCase extends ESTestCase { Arrays.stream(fieldTypes) .map(ft -> new FieldAliasMapper(ft.name() + "-alias", ft.name() + "-alias", ft.name())) .collect(toList()), - List.of() + List.of(), + indexSettings ); BiFunction> fieldDataBuilder = (fieldType, context) -> fieldType .fielddataBuilder( @@ -465,7 +466,7 @@ public abstract class AggregatorTestCase extends ESTestCase { * of stuff. */ SearchExecutionContext subContext = spy(searchExecutionContext); - MappingLookup disableNestedLookup = MappingLookup.fromMappers(Mapping.EMPTY, Set.of(), Set.of()); + MappingLookup disableNestedLookup = MappingLookup.fromMappers(Mapping.EMPTY, Set.of(), Set.of(), indexSettings); doReturn(new NestedDocuments(disableNestedLookup, bitsetFilterCache::getBitSetProducer, indexSettings.getIndexVersionCreated())) .when(subContext) .getNestedDocuments(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java new file mode 100644 index 000000000000..64c90826fda8 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.test; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; + +public class FailingFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "failing_field"; + } + + @Override + @SuppressWarnings("unchecked") + public FactoryType compile( + String name, + String code, + ScriptContext context, + Map params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + throw new IllegalStateException("Accessing failing field"); + } + }; + } + }; + } + + @Override + public Set> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestClusterCustomMetadata.java b/test/framework/src/main/java/org/elasticsearch/test/TestClusterCustomMetadata.java index 03c05c1a8372..315c34c828e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestClusterCustomMetadata.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestClusterCustomMetadata.java @@ -13,9 +13,9 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; @@ -95,8 +95,8 @@ public abstract class TestClusterCustomMetadata extends AbstractNamedDiffable toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).field("data", getData()); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.single((builder, params) -> builder.field("data", getData())); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java index 2558edea9c16..5e503d0fd8b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java @@ -451,8 +451,8 @@ public class NetworkDisruption implements ServiceDisruptionScheme { */ public static class NetworkDelay extends NetworkLinkDisruptionType { - public static TimeValue DEFAULT_DELAY_MIN = TimeValue.timeValueSeconds(10); - public static TimeValue DEFAULT_DELAY_MAX = TimeValue.timeValueSeconds(90); + public static final TimeValue 
DEFAULT_DELAY_MIN = TimeValue.timeValueSeconds(10); + public static final TimeValue DEFAULT_DELAY_MAX = TimeValue.timeValueSeconds(90); private final TimeValue delay; diff --git a/x-pack/build.gradle b/x-pack/build.gradle index 86d56ef569ad..da21ffc829d0 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -10,8 +10,8 @@ subprojects { // see: https://wiki.shibboleth.net/confluence/display/DEV/Use+of+Maven+Central repositories { maven { - name "opensaml" - url "https://artifactory.elstc.co/artifactory/shibboleth-releases/" + name = "opensaml" + url = "https://artifactory.elstc.co/artifactory/shibboleth-releases/" content { includeGroup "org.opensaml" includeGroup "net.shibboleth.utilities" diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index 00f28b4badc3..9a21f40a4c4a 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-analytics' - description 'Elasticsearch Expanded Pack Plugin - Analytics' - classname 'org.elasticsearch.xpack.analytics.AnalyticsPlugin' + name = 'x-pack-analytics' + description = 'Elasticsearch Expanded Pack Plugin - Analytics' + classname = 'org.elasticsearch.xpack.analytics.AnalyticsPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/apm-data/build.gradle b/x-pack/plugin/apm-data/build.gradle index fbd732e18aae..3c79f3742dc8 100644 --- a/x-pack/plugin/apm-data/build.gradle +++ b/x-pack/plugin/apm-data/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-apm-data' - description 'The APM plugin defines APM data streams and ingest pipelines.' - classname 'org.elasticsearch.xpack.apmdata.APMPlugin' + name = 'x-pack-apm-data' + description = 'The APM plugin defines APM data streams and ingest pipelines.' + classname = 'org.elasticsearch.xpack.apmdata.APMPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/async-search/build.gradle b/x-pack/plugin/async-search/build.gradle index f22ff7fe3d63..b013d1df46b9 100644 --- a/x-pack/plugin/async-search/build.gradle +++ b/x-pack/plugin/async-search/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-async-search' - description 'A module which allows to track the progress of a search asynchronously.' - classname 'org.elasticsearch.xpack.search.AsyncSearch' + name = 'x-pack-async-search' + description = 'A module which allows to track the progress of a search asynchronously.'
+ classname = 'org.elasticsearch.xpack.search.AsyncSearch' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle index eb758c2c0ef5..7363aed336aa 100644 --- a/x-pack/plugin/async-search/qa/rest/build.gradle +++ b/x-pack/plugin/async-search/qa/rest/build.gradle @@ -11,9 +11,9 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' esplugin { - name 'x-pack-test-deprecated-query' - description 'Deprecated query plugin' - classname 'org.elasticsearch.query.DeprecatedQueryPlugin' + name = 'x-pack-test-deprecated-query' + description = 'Deprecated query plugin' + classname = 'org.elasticsearch.query.DeprecatedQueryPlugin' } dependencies { diff --git a/x-pack/plugin/async/build.gradle b/x-pack/plugin/async/build.gradle index 36f1c6dea9b9..5ce5d07439cb 100644 --- a/x-pack/plugin/async/build.gradle +++ b/x-pack/plugin/async/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'x-pack-async' - description 'A module which handles common async operations' - classname 'org.elasticsearch.xpack.async.AsyncResultsIndexPlugin' + name = 'x-pack-async' + description = 'A module which handles common async operations' + classname = 'org.elasticsearch.xpack.async.AsyncResultsIndexPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/autoscaling/build.gradle b/x-pack/plugin/autoscaling/build.gradle index edca88ff6bf2..24400a0fc418 100644 --- a/x-pack/plugin/autoscaling/build.gradle +++ b/x-pack/plugin/autoscaling/build.gradle @@ -2,12 +2,12 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-autoscaling' - description 'Elasticsearch Expanded Pack Plugin - Autoscaling' - classname 'org.elasticsearch.xpack.autoscaling.Autoscaling' + name = 'x-pack-autoscaling' + description = 'Elasticsearch Expanded Pack Plugin - Autoscaling' + classname = 'org.elasticsearch.xpack.autoscaling.Autoscaling' extendedPlugins = ['x-pack-core'] - hasNativeController false - requiresKeystore true + hasNativeController = false + requiresKeystore = true } base { archivesName = 'x-pack-autoscaling' diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java index 08435cb8bb06..e78511d9d99e 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java @@ -16,7 +16,7 @@ import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -118,8 +118,8 @@ public class AutoscalingMetadata implements Metadata.ClusterCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(POLICIES_FIELD.getPreferredName(), policies); + public Iterator
toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.xContentValuesMap(POLICIES_FIELD.getPreferredName(), policies); } @Override diff --git a/x-pack/plugin/blob-cache/build.gradle b/x-pack/plugin/blob-cache/build.gradle index c5c91a5ef87e..6fa4d2c6da72 100644 --- a/x-pack/plugin/blob-cache/build.gradle +++ b/x-pack/plugin/blob-cache/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'blob-cache' - description 'Provides a blob store cache' - classname 'org.elasticsearch.blobcache.BlobCachePlugin' + name = 'blob-cache' + description = 'Provides a blob store cache' + classname = 'org.elasticsearch.blobcache.BlobCachePlugin' } dependencies { diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java index 0fb4267745cb..218569c82525 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java @@ -96,7 +96,7 @@ public class BlobCacheMetrics { this.cachePopulationTime = cachePopulationTime; } - public static BlobCacheMetrics NOOP = new BlobCacheMetrics(TelemetryProvider.NOOP.getMeterRegistry()); + public static final BlobCacheMetrics NOOP = new BlobCacheMetrics(TelemetryProvider.NOOP.getMeterRegistry()); public LongCounter getCacheMissCounter() { return cacheMissCounter; diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java index ad0d99104e8a..7e1d732befe3 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java @@ -44,7 +44,7 @@ public class SharedBytes extends AbstractRefCounted { ); private static final Logger logger = LogManager.getLogger(SharedBytes.class); - public static int PAGE_SIZE = 4096; + public static final int PAGE_SIZE = 4096; private static final String CACHE_FILE_NAME = "shared_snapshot_cache"; diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index aa6e8de4ec27..5ab0112d822c 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -95,5 +95,6 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/61_enrich_ip/Invalid IP strings", "We switched from exceptions to null+warnings for ENRICH runtime errors") task.skipTest("esql/180_match_operator/match with non text field", "Match operator can now be used on non-text fields") task.skipTest("esql/180_match_operator/match with functions", "Error message changed") + task.skipTest("esql/40_unsupported_types/semantic_text declared in mapping", "The semantic text field format changed") }) diff --git a/x-pack/plugin/ccr/build.gradle b/x-pack/plugin/ccr/build.gradle index b5e96ac2a8b3..765c70abee37 100644 --- a/x-pack/plugin/ccr/build.gradle +++ b/x-pack/plugin/ccr/build.gradle @@ -2,11 +2,11 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - name 'x-pack-ccr' - description 'Elasticsearch Expanded Pack Plugin - CCR' - classname 'org.elasticsearch.xpack.ccr.Ccr' - hasNativeController 
false - requiresKeystore true + name = 'x-pack-ccr' + description = 'Elasticsearch Expanded Pack Plugin - CCR' + classname = 'org.elasticsearch.xpack.ccr.Ccr' + hasNativeController = false + requiresKeystore = true extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 51d770936e64..04458c5ecaa5 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -22,11 +22,11 @@ base { } esplugin { - name 'x-pack-core' - description 'Elasticsearch Expanded Pack Plugin - Core' - classname 'org.elasticsearch.xpack.core.XPackPlugin' - hasNativeController false - requiresKeystore false + name = 'x-pack-core' + description = 'Elasticsearch Expanded Pack Plugin - Core' + classname = 'org.elasticsearch.xpack.core.XPackPlugin' + hasNativeController = false + requiresKeystore = false } tasks.named("dependencyLicenses").configure { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index f8d8ec7d2e8c..da235b793627 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -407,8 +407,6 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, SearchPlu ); } - // TODO: The WeightedTokensBuilder is slated for removal after the SparseVectorQueryBuilder is available. - // The logic to create a Boolean query based on weighted tokens will remain and/or be moved to server. @Override public List> getQueries() { return List.of( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/util/ExpandedIdsMatcher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/util/ExpandedIdsMatcher.java index 2e0a799ea99a..ac071fab1d69 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/util/ExpandedIdsMatcher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/util/ExpandedIdsMatcher.java @@ -34,8 +34,6 @@ import java.util.stream.Collectors; */ public final class ExpandedIdsMatcher { - public static String ALL = "_all"; - /** * Split {@code expression} into tokens separated by a ',' * diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index 1ea17f9b796c..42cf242e5d8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -13,12 +13,13 @@ import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.TimeValue; import
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -149,10 +150,11 @@ public class AutoFollowMetadata extends AbstractNamedDiffable toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .xContentObjectFieldObjects(PATTERNS_FIELD.getPreferredName(), patterns) - .object(FOLLOWED_LEADER_INDICES_FIELD.getPreferredName(), followedLeaderIndexUUIDs) - .object(HEADERS.getPreferredName(), headers); + return Iterators.concat( + ChunkedToXContentHelper.xContentFragmentValuesMap(PATTERNS_FIELD.getPreferredName(), patterns), + ChunkedToXContentHelper.map(FOLLOWED_LEADER_INDICES_FIELD.getPreferredName(), followedLeaderIndexUUIDs), + ChunkedToXContentHelper.map(HEADERS.getPreferredName(), headers) + ); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java index a5e50781dbb6..e8e88659efe5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java @@ -13,7 +13,7 @@ import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -98,8 +98,8 @@ public final class EnrichMetadata extends AbstractNamedDiffable toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFieldObjects(POLICIES.getPreferredName(), policies); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return ChunkedToXContentHelper.xContentFragmentValuesMap(POLICIES.getPreferredName(), policies); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java index c5eff92fb130..554debcdc33d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java @@ -14,9 +14,10 @@ import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -114,10 +115,11 @@ public class IndexLifecycleMetadata implements Metadata.ProjectCustom { } @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .xContentObjectFields(POLICIES_FIELD.getPreferredName(), policyMetadatas) - 
.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode); + public Iterator toXContentChunked(ToXContent.Params ignored) { + return Iterators.concat( + ChunkedToXContentHelper.xContentValuesMap(POLICIES_FIELD.getPreferredName(), policyMetadatas), + Iterators.single((builder, params) -> builder.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode)) + ); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java index 5c63a6010313..902c69cef558 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -50,7 +50,7 @@ public record ChatCompletionResults(List results) implements InferenceSe @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(COMPLETION, results.iterator()); + return ChunkedToXContentHelper.array(COMPLETION, results.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java index c1be1ce265f6..16dca7b04d52 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContent; @@ -62,7 +62,7 @@ public record InferenceTextEmbeddingByteResults(List emb @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(TEXT_EMBEDDING_BYTES, embeddings.iterator()); + return ChunkedToXContentHelper.array(TEXT_EMBEDDING_BYTES, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java index 855125034892..9f9bdfec7cfa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -103,7 +103,7 @@ public record InferenceTextEmbeddingFloatResults(List e @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(TEXT_EMBEDDING, embeddings.iterator()); + return ChunkedToXContentHelper.array(TEXT_EMBEDDING, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java index a5f72bd51c6c..b71722d0b967 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -11,7 +11,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; @@ -178,7 +178,7 @@ public class RankedDocsResults implements InferenceServiceResults { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(RERANK, rankedDocs.iterator()); + return ChunkedToXContentHelper.array(RERANK, rankedDocs.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index 318a292b4773..dd8229c604ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -72,7 +72,7 @@ public record SparseEmbeddingResults(List embeddings) implements Infe @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).array(SPARSE_EMBEDDING, embeddings.iterator()); + return 
ChunkedToXContentHelper.array(SPARSE_EMBEDDING, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java index ec49603c89cb..a6df290be67b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java @@ -46,7 +46,6 @@ public class GetJobModelSnapshotsUpgradeStatsAction extends ActionType, ToXConten // Used for QueryPage public static final ParseField RESULTS_FIELD = new ParseField("datafeeds"); - public static String TYPE = "datafeed"; + public static final String TYPE = "datafeed"; /** * The field name used to specify document counts in Elasticsearch diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java index 9dce623521e8..6ed5c9b2c620 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java @@ -17,7 +17,7 @@ import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -93,7 +93,7 @@ public class ModelAliasMetadata implements Metadata.ProjectCustom { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).xContentObjectFields(MODEL_ALIASES.getPreferredName(), modelAliases); + return ChunkedToXContentHelper.xContentValuesMap(MODEL_ALIASES.getPreferredName(), modelAliases); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AllocationStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AllocationStatus.java index 9216fc89a810..d27e64752d7d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AllocationStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AllocationStatus.java @@ -42,9 +42,9 @@ public class AllocationStatus implements Writeable, ToXContentObject { } } - public static ParseField ALLOCATION_COUNT = new ParseField("allocation_count"); - public static ParseField TARGET_ALLOCATION_COUNT = new ParseField("target_allocation_count"); - public static ParseField STATE = new ParseField("state"); + public static final ParseField ALLOCATION_COUNT = new ParseField("allocation_count"); + public static final ParseField TARGET_ALLOCATION_COUNT = new ParseField("target_allocation_count"); + public static final ParseField STATE = new ParseField("state"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "allocation_health", diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedNlpInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedNlpInferenceResults.java index 9c1316d4d094..177b9f61c0fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedNlpInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedNlpInferenceResults.java @@ -13,8 +13,8 @@ import java.io.IOException; public abstract class ChunkedNlpInferenceResults extends NlpInferenceResults { - public static String TEXT = "text"; - public static String INFERENCE = "inference"; + public static final String TEXT = "text"; + public static final String INFERENCE = "inference"; ChunkedNlpInferenceResults(boolean isTruncated) { super(isTruncated); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ClassificationInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ClassificationInferenceResults.java index 12921166b148..3881625c05ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ClassificationInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ClassificationInferenceResults.java @@ -22,7 +22,7 @@ import java.util.Objects; import java.util.stream.Collectors; public class ClassificationInferenceResults extends SingleValueInferenceResults { - public static String PREDICTION_PROBABILITY = "prediction_probability"; + public static final String PREDICTION_PROBABILITY = "prediction_probability"; public static final String NAME = "classification"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdate.java index a833048c76a5..7f4f63dee718 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdate.java @@ -21,7 +21,7 @@ public class BertJapaneseTokenizationUpdate extends AbstractTokenizationUpdate { public static final ParseField NAME = BertJapaneseTokenization.NAME; - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "bert_japanese_tokenization_update", a -> new BertJapaneseTokenizationUpdate(a[0] == null ? 
null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1]) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationUpdate.java index 3bda6f0070f0..065acdfd5030 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationUpdate.java @@ -21,7 +21,7 @@ public class BertTokenizationUpdate extends AbstractTokenizationUpdate { public static final ParseField NAME = BertTokenization.NAME; - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "bert_tokenization_update", a -> new BertTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1]) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java index 6985769f08cb..b04b49e645bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java @@ -29,7 +29,7 @@ public class ClassificationConfig implements LenientlyParsedInferenceConfig, Str public static final ParseField PREDICTION_FIELD_TYPE = new ParseField("prediction_field_type"); private static final MlConfigVersion MIN_SUPPORTED_VERSION = MlConfigVersion.V_7_6_0; - public static ClassificationConfig EMPTY_PARAMS = new ClassificationConfig( + public static final ClassificationConfig EMPTY_PARAMS = new ClassificationConfig( 0, DEFAULT_RESULTS_FIELD, DEFAULT_TOP_CLASSES_RESULTS_FIELD, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java index 1cf1c0ddeb57..9ee97029541f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java @@ -32,7 +32,7 @@ public class ClassificationConfigUpdate implements InferenceConfigUpdate, NamedX public static final ParseField NAME = ClassificationConfig.NAME; - public static ClassificationConfigUpdate EMPTY_PARAMS = new ClassificationConfigUpdate(null, null, null, null, null); + public static final ClassificationConfigUpdate EMPTY_PARAMS = new ClassificationConfigUpdate(null, null, null, null, null); private final Integer numTopClasses; private final String topClassesResultsField; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/DebertaV2TokenizationUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/DebertaV2TokenizationUpdate.java index 683b27793402..8f9743231ba6 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/DebertaV2TokenizationUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/DebertaV2TokenizationUpdate.java @@ -20,7 +20,7 @@ import java.util.Optional; public class DebertaV2TokenizationUpdate extends AbstractTokenizationUpdate { public static final ParseField NAME = new ParseField(DebertaV2Tokenization.NAME); - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "deberta_v2_tokenization_update", a -> new DebertaV2TokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1]) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java index a4d7c9c7fa08..31ebcdf2d1e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java @@ -45,7 +45,7 @@ public class LearningToRankConfig extends RegressionConfig implements Rewriteabl public static final ParseField FEATURE_EXTRACTORS = new ParseField("feature_extractors"); public static final ParseField DEFAULT_PARAMS = new ParseField("default_params"); - public static LearningToRankConfig EMPTY_PARAMS = new LearningToRankConfig(null, null, null); + public static final LearningToRankConfig EMPTY_PARAMS = new LearningToRankConfig(null, null, null); private static final ObjectParser LENIENT_PARSER = createParser(true); private static final ObjectParser STRICT_PARSER = createParser(false); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java index 403a747d6801..13c63b9cc7ba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java @@ -25,7 +25,7 @@ public class RegressionConfig implements LenientlyParsedInferenceConfig, Strictl private static final MlConfigVersion MIN_SUPPORTED_VERSION = MlConfigVersion.V_7_6_0; public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - public static RegressionConfig EMPTY_PARAMS = new RegressionConfig(DEFAULT_RESULTS_FIELD, null); + public static final RegressionConfig EMPTY_PARAMS = new RegressionConfig(DEFAULT_RESULTS_FIELD, null); private static final ObjectParser LENIENT_PARSER = createParser(true); private static final ObjectParser STRICT_PARSER = createParser(false); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java index ceebe3105eb6..c7b66ef95943 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java @@ -29,7 +29,7 @@ public class RegressionConfigUpdate implements InferenceConfigUpdate, NamedXCont public static final ParseField NAME = RegressionConfig.NAME; - public static RegressionConfigUpdate EMPTY_PARAMS = new RegressionConfigUpdate(null, null); + public static final RegressionConfigUpdate EMPTY_PARAMS = new RegressionConfigUpdate(null, null); public static RegressionConfigUpdate fromMap(Map map) { Map options = new HashMap<>(map); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationUpdate.java index 3763a2350dad..90e55e8b8ba3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationUpdate.java @@ -20,7 +20,7 @@ import java.util.Optional; public class RobertaTokenizationUpdate extends AbstractTokenizationUpdate { public static final ParseField NAME = new ParseField(RobertaTokenization.NAME); - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "roberta_tokenization_update", a -> new RobertaTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1]) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java index 57493c7d34d7..6be122c0ccfa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java @@ -29,7 +29,7 @@ public class TextEmbeddingConfig implements NlpConfig { public static final String NAME = "text_embedding"; - public static ParseField EMBEDDING_SIZE = new ParseField("embedding_size"); + public static final ParseField EMBEDDING_SIZE = new ParseField("embedding_size"); public static TextEmbeddingConfig fromXContentStrict(XContentParser parser) { return STRICT_PARSER.apply(parser, null); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java index e89281a59f7d..67b54b364286 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java @@ -29,7 +29,7 @@ public class TextEmbeddingConfigUpdate extends NlpConfigUpdate implements NamedX public static final String NAME = TextEmbeddingConfig.NAME; - public static TextEmbeddingConfigUpdate EMPTY_INSTANCE = new TextEmbeddingConfigUpdate(null, null); + public static final TextEmbeddingConfigUpdate EMPTY_INSTANCE = new TextEmbeddingConfigUpdate(null, null); public static TextEmbeddingConfigUpdate fromMap(Map map) { Map options = new 
HashMap<>(map); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenizationUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenizationUpdate.java index 5ddfdff4a5a4..58df6a8454a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenizationUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenizationUpdate.java @@ -20,7 +20,7 @@ import java.util.Optional; public class XLMRobertaTokenizationUpdate extends AbstractTokenizationUpdate { public static final ParseField NAME = new ParseField(XLMRobertaTokenization.NAME); - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "xlm_roberta_tokenization_update", a -> new XLMRobertaTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1]) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java index d9e90b92382e..4fb79c5b2919 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java @@ -34,7 +34,7 @@ public record QueryExtractorBuilder(String featureName, QueryProvider query, flo public static final ParseField QUERY = new ParseField("query"); public static final ParseField DEFAULT_SCORE = new ParseField("default_score"); - public static float DEFAULT_SCORE_DEFAULT = 0f; + public static final float DEFAULT_SCORE_DEFAULT = 0f; private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME.getPreferredName(), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 3114c03879eb..66ce4a135e50 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -94,7 +94,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable { return parser; } - public static String EMPTY_SNAPSHOT_ID = "empty"; + public static final String EMPTY_SNAPSHOT_ID = "empty"; private final String jobId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java index 32b401ebfb32..1c3e7bd08526 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java @@ -10,9 +10,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionResponse; import 
org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; @@ -24,6 +25,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.async.AsyncResponse; import java.io.IOException; +import java.util.Collections; import java.util.Iterator; import static org.elasticsearch.rest.RestStatus.OK; @@ -232,7 +234,8 @@ public class AsyncSearchResponse extends ActionResponse implements ChunkedToXCon @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> ob.append((builder, p) -> { + return Iterators.concat(ChunkedToXContentHelper.singleChunk((builder, p) -> { + builder.startObject(); if (id != null) { builder.field("id", id); } @@ -252,14 +255,18 @@ public class AsyncSearchResponse extends ActionResponse implements ChunkedToXCon builder.field("response"); } return builder; - }).appendIfPresent(searchResponse).append((builder, p) -> { - if (error != null) { - builder.startObject("error"); - ElasticsearchException.generateThrowableXContent(builder, p, error); + }), + searchResponse == null ? Collections.emptyIterator() : searchResponse.toXContentChunked(params), + ChunkedToXContentHelper.singleChunk((builder, p) -> { + if (error != null) { + builder.startObject("error"); + ElasticsearchException.generateThrowableXContent(builder, params, error); + builder.endObject(); + } builder.endObject(); - } - return builder; - })); + return builder; + }) + ); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchRequest.java index 4b9700a37ad7..e68a1d73a14d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchRequest.java @@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * @see AsyncSearchResponse */ public class SubmitAsyncSearchRequest extends ActionRequest { - public static long MIN_KEEP_ALIVE = TimeValue.timeValueSeconds(1).millis(); + public static final long MIN_KEEP_ALIVE = TimeValue.timeValueSeconds(1).millis(); private TimeValue waitForCompletionTimeout = TimeValue.timeValueSeconds(1); private boolean keepOnCompletion = false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java index c2f40a3e393b..13c65c457d96 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java @@ -1198,7 +1198,7 @@ public final class Authentication implements ToXContentObject { return FileRealmSettings.TYPE.equals(realmType) || NativeRealmSettings.TYPE.equals(realmType); } - public static ConstructingObjectParser 
REALM_REF_PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser REALM_REF_PARSER = new ConstructingObjectParser<>( "realm_ref", false, (args, v) -> new RealmRef((String) args[0], (String) args[1], (String) args[2], (RealmDomain) args[3]) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java index 5d144931a906..30d76da35370 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java @@ -26,7 +26,7 @@ import java.util.Objects; public final class AuthenticationResult { private static final AuthenticationResult NOT_HANDLED = new AuthenticationResult<>(Status.CONTINUE, null, null, null, null); - public static String THREAD_CONTEXT_KEY = "_xpack_security_auth_result"; + public static final String THREAD_CONTEXT_KEY = "_xpack_security_auth_result"; public enum Status { /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java index c2c64b45941c..0e1080ba9fe6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java @@ -261,7 +261,7 @@ public class RealmConfig { } } - public static ConstructingObjectParser REALM_IDENTIFIER_PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser REALM_IDENTIFIER_PARSER = new ConstructingObjectParser<>( "realm_identifier", false, (args, v) -> new RealmIdentifier((String) args[0], (String) args[1]) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java index 573023739db1..c458a3ba3981 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java @@ -59,7 +59,7 @@ public record RealmDomain(String name, Set realms) } @SuppressWarnings("unchecked") - public static ConstructingObjectParser REALM_DOMAIN_PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser REALM_DOMAIN_PARSER = new ConstructingObjectParser<>( "realm_domain", false, (args, v) -> new RealmDomain((String) args[0], Set.copyOf((List) args[1])) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java index 38aa1bc106e9..4d98767444f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java @@ -24,7 +24,7 @@ import java.util.Set; public record RoleDescriptorsIntersection(Collection> roleDescriptorsList) implements ToXContentObject, Writeable { - public static RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList()); + 
public static final RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList()); private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() .allowRestriction(true) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index 23c93226d549..5ba5c1fd1218 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -53,7 +53,7 @@ public final class ApplicationPermission { return new PermissionEntry( appPriv, Sets.union(existing.resourceNames, resourceNames), - Automatons.unionAndDeterminize(Arrays.asList(existing.resourceAutomaton, patterns)) + Automatons.unionAndMinimize(Arrays.asList(existing.resourceAutomaton, patterns)) ); } })); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 5f3da8f73a70..4e608281a785 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -137,7 +137,7 @@ public class ClusterPermission { } List checks = this.permissionChecks; if (false == actionAutomatons.isEmpty()) { - final Automaton mergedAutomaton = Automatons.unionAndDeterminize(this.actionAutomatons); + final Automaton mergedAutomaton = Automatons.unionAndMinimize(this.actionAutomatons); checks = new ArrayList<>(this.permissionChecks.size() + 1); checks.add(new AutomatonPermissionCheck(mergedAutomaton)); checks.addAll(this.permissionChecks); @@ -156,7 +156,7 @@ public class ClusterPermission { } else { final Automaton allowedAutomaton = Automatons.patterns(allowedActionPatterns); final Automaton excludedAutomaton = Automatons.patterns(excludeActionPatterns); - return Automatons.minusAndDeterminize(allowedAutomaton, excludedAutomaton); + return Automatons.minusAndMinimize(allowedAutomaton, excludedAutomaton); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index ed7bbf915827..235d7419d2bf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -147,7 +147,7 @@ public final class FieldPermissions implements Accountable, CacheKey { List automatonList = groups.stream() .map(g -> FieldPermissions.buildPermittedFieldsAutomaton(g.getGrantedFields(), g.getExcludedFields())) .collect(Collectors.toList()); - return Automatons.unionAndDeterminize(automatonList); + return Automatons.unionAndMinimize(automatonList); } /** @@ -189,7 +189,7 @@ public final class FieldPermissions implements Accountable, CacheKey { ); } - grantedFieldsAutomaton = Automatons.minusAndDeterminize(grantedFieldsAutomaton, deniedFieldsAutomaton); + 
grantedFieldsAutomaton = Automatons.minusAndMinimize(grantedFieldsAutomaton, deniedFieldsAutomaton); return grantedFieldsAutomaton; } @@ -206,10 +206,7 @@ public final class FieldPermissions implements Accountable, CacheKey { public FieldPermissions limitFieldPermissions(FieldPermissions limitedBy) { if (hasFieldLevelSecurity() && limitedBy != null && limitedBy.hasFieldLevelSecurity()) { // TODO: cache the automaton computation with FieldPermissionsCache - Automaton _permittedFieldsAutomaton = Automatons.intersectAndDeterminize( - getIncludeAutomaton(), - limitedBy.getIncludeAutomaton() - ); + Automaton _permittedFieldsAutomaton = Automatons.intersectAndMinimize(getIncludeAutomaton(), limitedBy.getIncludeAutomaton()); return new FieldPermissions( CollectionUtils.concatLists(fieldPermissionsDefinitions, limitedBy.fieldPermissionsDefinitions), _permittedFieldsAutomaton diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java index a1e14bfde8aa..46261937a022 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java @@ -107,7 +107,7 @@ public final class FieldPermissionsCache { List automatonList = fieldPermissionsCollection.stream() .map(FieldPermissions::getIncludeAutomaton) .collect(Collectors.toList()); - return new FieldPermissions(key, Automatons.unionAndDeterminize(automatonList)); + return new FieldPermissions(key, Automatons.unionAndMinimize(automatonList)); }); } catch (ExecutionException e) { throw new ElasticsearchException("unable to compute field permissions", e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index cdd5a6f6ff72..558f8e6f22ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -283,14 +283,14 @@ public final class IndicesPermission { for (String forIndexPattern : checkForIndexPatterns) { Automaton checkIndexAutomaton = Automatons.patterns(forIndexPattern); if (false == allowRestrictedIndices && false == isConcreteRestrictedIndex(forIndexPattern)) { - checkIndexAutomaton = Automatons.minusAndDeterminize(checkIndexAutomaton, restrictedIndices.getAutomaton()); + checkIndexAutomaton = Automatons.minusAndMinimize(checkIndexAutomaton, restrictedIndices.getAutomaton()); } if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { - allowedIndexPrivilegesAutomaton = Automatons.unionAndDeterminize( + allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) ); } else { @@ -342,7 +342,7 @@ public final class IndicesPermission { automatonList.add(group.privilege.getAutomaton()); } } - return 
automatonList.isEmpty() ? Automatons.EMPTY : Automatons.unionAndDeterminize(automatonList); + return automatonList.isEmpty() ? Automatons.EMPTY : Automatons.unionAndMinimize(automatonList); } /** @@ -704,7 +704,7 @@ public final class IndicesPermission { Automaton indexAutomaton = group.getIndexMatcherAutomaton(); allAutomatons.compute( group.privilege().getAutomaton(), - (key, value) -> value == null ? indexAutomaton : Automatons.unionAndDeterminize(List.of(value, indexAutomaton)) + (key, value) -> value == null ? indexAutomaton : Automatons.unionAndMinimize(List.of(value, indexAutomaton)) ); if (combine) { List> combinedAutomatons = new ArrayList<>(); @@ -714,7 +714,7 @@ public final class IndicesPermission { group.privilege().getAutomaton() ); if (Operations.isEmpty(intersectingPrivileges) == false) { - Automaton indexPatternAutomaton = Automatons.unionAndDeterminize( + Automaton indexPatternAutomaton = Automatons.unionAndMinimize( List.of(indexAndPrivilegeAutomatons.getValue(), indexAutomaton) ); combinedAutomatons.add(new Tuple<>(intersectingPrivileges, indexPatternAutomaton)); @@ -723,7 +723,7 @@ public final class IndicesPermission { combinedAutomatons.forEach( automatons -> allAutomatons.compute( automatons.v1(), - (key, value) -> value == null ? automatons.v2() : Automatons.unionAndDeterminize(List.of(value, automatons.v2())) + (key, value) -> value == null ? automatons.v2() : Automatons.unionAndMinimize(List.of(value, automatons.v2())) ) ); } @@ -768,7 +768,7 @@ public final class IndicesPermission { this.indexNameMatcher = StringMatcher.of(indices).and(name -> restrictedIndices.isRestricted(name) == false); this.indexNameAutomaton = () -> indexNameAutomatonMemo.computeIfAbsent( indices, - k -> Automatons.minusAndDeterminize(Automatons.patterns(indices), restrictedIndices.getAutomaton()) + k -> Automatons.minusAndMinimize(Automatons.patterns(indices), restrictedIndices.getAutomaton()) ); } this.fieldPermissions = Objects.requireNonNull(fieldPermissions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java index e4d283aba75a..ea32ba13ae57 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java @@ -212,7 +212,7 @@ public final class LimitedRole implements Role { public Automaton allowedActionsMatcher(String index) { final Automaton allowedMatcher = baseRole.allowedActionsMatcher(index); final Automaton limitedByMatcher = limitedByRole.allowedActionsMatcher(index); - return Automatons.intersectAndDeterminize(allowedMatcher, limitedByMatcher); + return Automatons.intersectAndMinimize(allowedMatcher, limitedByMatcher); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index f4df99dcefea..7174b2f616c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -57,7 +57,7 @@ import java.util.stream.Stream; import static java.util.Map.entry; import static 
org.elasticsearch.xpack.core.security.support.Automatons.patterns; -import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndDeterminize; +import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndMinimize; /** * The name of an index related action always being with `indices:` followed by a sequence of slash-separated terms @@ -110,7 +110,7 @@ public final class IndexPrivilege extends Privilege { private static final Automaton DELETE_AUTOMATON = patterns("indices:data/write/delete*", "indices:data/write/bulk*"); private static final Automaton WRITE_AUTOMATON = patterns("indices:data/write/*", TransportAutoPutMappingAction.TYPE.name()); private static final Automaton MONITOR_AUTOMATON = patterns("indices:monitor/*"); - private static final Automaton MANAGE_AUTOMATON = unionAndDeterminize( + private static final Automaton MANAGE_AUTOMATON = unionAndMinimize( Arrays.asList( MONITOR_AUTOMATON, patterns("indices:admin/*", TransportFieldCapabilitiesAction.NAME + "*", GetRollupIndexCapsAction.NAME + "*") @@ -303,7 +303,7 @@ public final class IndexPrivilege extends Privilege { if (actions.isEmpty() == false) { automata.add(patterns(actions)); } - return new IndexPrivilege(name, unionAndDeterminize(automata)); + return new IndexPrivilege(name, unionAndMinimize(automata)); } static Map values() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index 923384189146..68baab4469e4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -20,7 +20,7 @@ import java.util.function.Predicate; public final class SystemPrivilege extends Privilege { - public static SystemPrivilege INSTANCE = new SystemPrivilege(); + public static final SystemPrivilege INSTANCE = new SystemPrivilege(); private static final Predicate ALLOWED_ACTIONS = StringMatcher.of( "internal:*", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index d3790ea64ba4..52a3650159a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.lucene.util.automaton.MinimizationOperations; import java.util.ArrayDeque; import java.util.ArrayList; @@ -76,7 +77,7 @@ public final class Automatons { static final char WILDCARD_ESCAPE = '\\'; // Escape character // for testing only -Dtests.jvm.argline="-Dtests.automaton.record.patterns=true" - public static boolean recordPatterns = System.getProperty("tests.automaton.record.patterns", "false").equals("true"); + public static final boolean recordPatterns = System.getProperty("tests.automaton.record.patterns", "false").equals("true"); private static final Map> patternsMap = new HashMap<>(); private Automatons() {} @@ -112,7 +113,7 @@ public final class Automatons { private static Automaton 
buildAutomaton(Collection patterns) { if (patterns.size() == 1) { - return determinize(pattern(patterns.iterator().next())); + return minimize(pattern(patterns.iterator().next())); } final Function, Automaton> build = strings -> { @@ -121,7 +122,7 @@ public final class Automatons { final Automaton patternAutomaton = pattern(pattern); automata.add(patternAutomaton); } - return unionAndDeterminize(automata); + return unionAndMinimize(automata); }; // We originally just compiled each automaton separately and then unioned them all. @@ -188,7 +189,7 @@ public final class Automatons { if (misc.isEmpty() == false) { automata.add(build.apply(misc)); } - return unionAndDeterminize(automata); + return unionAndMinimize(automata); } /** @@ -277,23 +278,23 @@ public final class Automatons { return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - public static Automaton unionAndDeterminize(Collection automata) { + public static Automaton unionAndMinimize(Collection automata) { Automaton res = automata.size() == 1 ? automata.iterator().next() : union(automata); - return determinize(res); + return minimize(res); } - public static Automaton minusAndDeterminize(Automaton a1, Automaton a2) { + public static Automaton minusAndMinimize(Automaton a1, Automaton a2) { Automaton res = minus(a1, a2, maxDeterminizedStates); - return determinize(res); + return minimize(res); } - public static Automaton intersectAndDeterminize(Automaton a1, Automaton a2) { + public static Automaton intersectAndMinimize(Automaton a1, Automaton a2) { Automaton res = intersection(a1, a2); - return determinize(res); + return minimize(res); } - private static Automaton determinize(Automaton automaton) { - return Operations.determinize(automaton, maxDeterminizedStates); + private static Automaton minimize(Automaton automaton) { + return MinimizationOperations.minimize(automaton, maxDeterminizedStates); } public static Predicate predicate(String... 
patterns) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/NoOpLogger.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/NoOpLogger.java index af1a2726b4f5..8844fec09cf5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/NoOpLogger.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/NoOpLogger.java @@ -20,7 +20,7 @@ import org.apache.logging.log4j.util.Supplier; */ public class NoOpLogger implements Logger { - public static NoOpLogger INSTANCE = new NoOpLogger(); + public static final NoOpLogger INSTANCE = new NoOpLogger(); private NoOpLogger() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java index fd92b0f6c0d5..14d80f82a75a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java @@ -15,9 +15,10 @@ import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -143,10 +144,13 @@ public class SnapshotLifecycleMetadata implements Metadata.ProjectCustom { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .xContentObjectFields(POLICIES_FIELD.getPreferredName(), snapshotConfigurations) - .field(OPERATION_MODE_FIELD.getPreferredName(), operationMode) - .field(STATS_FIELD.getPreferredName(), slmStats); + return Iterators.concat( + ChunkedToXContentHelper.xContentValuesMap(POLICIES_FIELD.getPreferredName(), this.snapshotConfigurations), + Iterators.single( + (builder, p) -> builder.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode) + .field(STATS_FIELD.getPreferredName(), this.slmStats) + ) + ); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java index 95edd3fc3bb9..1c787389602a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java @@ -33,8 +33,8 @@ import static org.apache.lucene.index.IndexWriter.MAX_TERM_LENGTH; public final class TermsEnumRequest extends BroadcastRequest implements ToXContentObject { public static final IndicesOptions DEFAULT_INDICES_OPTIONS = SearchRequest.DEFAULT_INDICES_OPTIONS; - public static int DEFAULT_SIZE = 10; - public static TimeValue DEFAULT_TIMEOUT = new TimeValue(1000); + public static final int DEFAULT_SIZE = 10; + public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1000); private String field; private String 
string = null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformField.java index 204b81914816..b0ba81d8eeb3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformField.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.transform; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.xcontent.ParseField; /* @@ -99,7 +100,16 @@ public final class TransformField { public static final String EXCLUDE_GENERATED = "exclude_generated"; // internal document id - public static String DOCUMENT_ID_FIELD = "_id"; + public static final String DOCUMENT_ID_FIELD = "_id"; + + public static final PersistentTasksCustomMetadata.Assignment AWAITING_UPGRADE = new PersistentTasksCustomMetadata.Assignment( + null, + "Transform task will not be assigned while upgrade mode is enabled." + ); + public static final PersistentTasksCustomMetadata.Assignment RESET_IN_PROGRESS = new PersistentTasksCustomMetadata.Assignment( + null, + "Transform task will not be assigned as a feature reset is in progress." + ); private TransformField() {} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java index 285b1e5de932..489210b79d39 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java @@ -14,9 +14,9 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -30,8 +30,9 @@ import java.util.Objects; public class TransformMetadata implements Metadata.ProjectCustom { public static final String TYPE = "transform"; public static final ParseField RESET_MODE = new ParseField("reset_mode"); + public static final ParseField UPGRADE_MODE = new ParseField("upgrade_mode"); - public static final TransformMetadata EMPTY_METADATA = new TransformMetadata(false); + public static final TransformMetadata EMPTY_METADATA = new TransformMetadata(false, false); // This parser follows the pattern that metadata is parsed leniently (to allow for enhancements) public static final ObjectParser LENIENT_PARSER = new ObjectParser<>( "" + "transform_metadata", @@ -40,19 +41,26 @@ public class TransformMetadata implements Metadata.ProjectCustom { ); static { - LENIENT_PARSER.declareBoolean(TransformMetadata.Builder::isResetMode, RESET_MODE); + LENIENT_PARSER.declareBoolean(TransformMetadata.Builder::resetMode, RESET_MODE); + LENIENT_PARSER.declareBoolean(TransformMetadata.Builder::upgradeMode, UPGRADE_MODE); } private final boolean resetMode; + private final boolean upgradeMode; - private TransformMetadata(boolean resetMode) { + private TransformMetadata(boolean 
resetMode, boolean upgradeMode) { this.resetMode = resetMode; + this.upgradeMode = upgradeMode; } - public boolean isResetMode() { + public boolean resetMode() { return resetMode; } + public boolean upgradeMode() { + return upgradeMode; + } + @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.MINIMUM_COMPATIBLE; @@ -75,28 +83,46 @@ public class TransformMetadata implements Metadata.ProjectCustom { public TransformMetadata(StreamInput in) throws IOException { this.resetMode = in.readBoolean(); + if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORMS_UPGRADE_MODE)) { + this.upgradeMode = in.readBoolean(); + } else { + this.upgradeMode = false; + } } @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(resetMode); + if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORMS_UPGRADE_MODE)) { + out.writeBoolean(upgradeMode); + } } @Override public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.field(RESET_MODE.getPreferredName(), resetMode); + return Iterators.single( + ((builder, params) -> builder.field(UPGRADE_MODE.getPreferredName(), upgradeMode) + .field(RESET_MODE.getPreferredName(), resetMode)) + ); } public static class TransformMetadataDiff implements NamedDiff { final boolean resetMode; + final boolean upgradeMode; TransformMetadataDiff(TransformMetadata before, TransformMetadata after) { this.resetMode = after.resetMode; + this.upgradeMode = after.upgradeMode; } public TransformMetadataDiff(StreamInput in) throws IOException { resetMode = in.readBoolean(); + if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORMS_UPGRADE_MODE)) { + this.upgradeMode = in.readBoolean(); + } else { + this.upgradeMode = false; + } } /** @@ -106,12 +132,15 @@ public class TransformMetadata implements Metadata.ProjectCustom { */ @Override public Metadata.ProjectCustom apply(Metadata.ProjectCustom part) { - return new TransformMetadata(resetMode); + return new TransformMetadata(resetMode, upgradeMode); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(resetMode); + if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORMS_UPGRADE_MODE)) { + out.writeBoolean(upgradeMode); + } } @Override @@ -130,7 +159,7 @@ public class TransformMetadata implements Metadata.ProjectCustom { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TransformMetadata that = (TransformMetadata) o; - return resetMode == that.resetMode; + return resetMode == that.resetMode && upgradeMode == that.upgradeMode; } @Override @@ -140,12 +169,17 @@ public class TransformMetadata implements Metadata.ProjectCustom { @Override public int hashCode() { - return Objects.hash(resetMode); + return Objects.hash(resetMode, upgradeMode); + } + + public Builder builder() { + return new TransformMetadata.Builder(this); } public static class Builder { private boolean resetMode; + private boolean upgradeMode; public static TransformMetadata.Builder from(@Nullable TransformMetadata previous) { return new TransformMetadata.Builder(previous); @@ -156,16 +190,22 @@ public class TransformMetadata implements Metadata.ProjectCustom { public Builder(@Nullable TransformMetadata previous) { if (previous != null) { resetMode = previous.resetMode; + upgradeMode = previous.upgradeMode; } } - public TransformMetadata.Builder isResetMode(boolean isResetMode) { + public TransformMetadata.Builder resetMode(boolean isResetMode) { this.resetMode = 
isResetMode; return this; } + public TransformMetadata.Builder upgradeMode(boolean upgradeMode) { + this.upgradeMode = upgradeMode; + return this; + } + public TransformMetadata build() { - return new TransformMetadata(resetMode); + return new TransformMetadata(resetMode, upgradeMode); } } @@ -176,4 +216,8 @@ public class TransformMetadata implements Metadata.ProjectCustom { } return TransformMetadata; } + + public static boolean upgradeMode(ClusterState state) { + return getTransformMetadata(state).upgradeMode(); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetTransformUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetTransformUpgradeModeAction.java new file mode 100644 index 000000000000..b93df8125708 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/SetTransformUpgradeModeAction.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; + +public class SetTransformUpgradeModeAction extends ActionType { + public static final SetTransformUpgradeModeAction INSTANCE = new SetTransformUpgradeModeAction(); + public static final String NAME = "cluster:admin/transform/upgrade_mode"; + + private SetTransformUpgradeModeAction() { + super(NAME); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java index fea6299c73e6..e4d8f8790c42 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java @@ -46,8 +46,8 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr */ public class TransformCheckpoint implements Writeable, ToXContentObject { - public static String EMPTY_NAME = "_empty"; - public static TransformCheckpoint EMPTY = createEmpty(0); + public static final String EMPTY_NAME = "_empty"; + public static final TransformCheckpoint EMPTY = createEmpty(0); public static TransformCheckpoint createEmpty(long timestampMillis) { return new TransformCheckpoint(EMPTY_NAME, timestampMillis, -1L, Collections.emptyMap(), timestampMillis); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java index 7cf84ac3ecea..f638f7b92cd5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformIndexerStats.java @@ -24,23 +24,23 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr public class TransformIndexerStats extends IndexerJobStats { public static final String NAME = "data_frame_indexer_transform_stats"; - public static 
ParseField NUM_PAGES = new ParseField("pages_processed"); - public static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); - public static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed"); - public static ParseField NUM_DELETED_DOCUMENTS = new ParseField("documents_deleted"); - public static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); - public static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms"); - public static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms"); - public static ParseField PROCESSING_TIME_IN_MS = new ParseField("processing_time_in_ms"); - public static ParseField DELETE_TIME_IN_MS = new ParseField("delete_time_in_ms"); - public static ParseField INDEX_TOTAL = new ParseField("index_total"); - public static ParseField SEARCH_TOTAL = new ParseField("search_total"); - public static ParseField PROCESSING_TOTAL = new ParseField("processing_total"); - public static ParseField SEARCH_FAILURES = new ParseField("search_failures"); - public static ParseField INDEX_FAILURES = new ParseField("index_failures"); - public static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms"); - public static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed"); - public static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed"); + public static final ParseField NUM_PAGES = new ParseField("pages_processed"); + public static final ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); + public static final ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed"); + public static final ParseField NUM_DELETED_DOCUMENTS = new ParseField("documents_deleted"); + public static final ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); + public static final ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms"); + public static final ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms"); + public static final ParseField PROCESSING_TIME_IN_MS = new ParseField("processing_time_in_ms"); + public static final ParseField DELETE_TIME_IN_MS = new ParseField("delete_time_in_ms"); + public static final ParseField INDEX_TOTAL = new ParseField("index_total"); + public static final ParseField SEARCH_TOTAL = new ParseField("search_total"); + public static final ParseField PROCESSING_TOTAL = new ParseField("processing_total"); + public static final ParseField SEARCH_FAILURES = new ParseField("search_failures"); + public static final ParseField INDEX_FAILURES = new ParseField("index_failures"); + public static final ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms"); + public static final ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed"); + public static final ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed"); // This changes how much "weight" past calculations have. // The shorter the window, the less "smoothing" will occur. 
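The TransformMetadata changes above add an `upgrade_mode` flag whose wire serialization is gated on `TransportVersions.TRANSFORMS_UPGRADE_MODE`: the boolean is only read or written when the remote node's transport version is new enough, and defaults to `false` otherwise, so mixed-version clusters keep working during a rolling upgrade. A minimal sketch of that pattern, with a hypothetical `ExampleMetadata` record and a placeholder transport version (both illustrative, not from this PR):

```java
import org.elasticsearch.TransportVersion;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

/**
 * Sketch of the version-gated serialization pattern used by
 * TransformMetadata above: a field added in a newer transport version
 * is only (de)serialized when the peer is new enough, and falls back
 * to a safe default otherwise.
 */
record ExampleMetadata(boolean resetMode, boolean upgradeMode) {

    // Hypothetical stand-in for TransportVersions.TRANSFORMS_UPGRADE_MODE.
    static final TransportVersion EXAMPLE_FLAG_VERSION = TransportVersion.current();

    static ExampleMetadata readFrom(StreamInput in) throws IOException {
        boolean resetMode = in.readBoolean();
        boolean upgradeMode = false; // default when the sender predates the flag
        if (in.getTransportVersion().onOrAfter(EXAMPLE_FLAG_VERSION)) {
            upgradeMode = in.readBoolean();
        }
        return new ExampleMetadata(resetMode, upgradeMode);
    }

    void writeTo(StreamOutput out) throws IOException {
        out.writeBoolean(resetMode);
        // Only write the new field when the receiving node can read it.
        if (out.getTransportVersion().onOrAfter(EXAMPLE_FLAG_VERSION)) {
            out.writeBoolean(upgradeMode);
        }
    }
}
```

Defaulting to `false` on old wires is safe here because an older node that has never heard of upgrade mode behaves exactly as if it were disabled.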
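The same files also show the recurring migration away from the `ChunkedToXContent.builder(params)` fluent API toward plain iterator composition with `ChunkedToXContentHelper` plus `Iterators.concat`/`Iterators.single` (see the `InferenceTextEmbeddingFloatResults`, `AsyncSearchResponse`, `SnapshotLifecycleMetadata`, and `TransformMetadata` hunks). A sketch of the target shape, assuming a hypothetical `ChunkedResponseSketch` holder class and `"items"` field name:

```java
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.xcontent.ToXContent;

import java.util.Iterator;
import java.util.List;

/**
 * Sketch of the pattern this diff migrates to: build the chunked
 * response by concatenating small iterators rather than going through
 * the ChunkedToXContent.builder(params) fluent API.
 */
final class ChunkedResponseSketch {

    // Hypothetical payload; stands in for embeddings/rankedDocs above.
    private final List<? extends ToXContent> items;

    ChunkedResponseSketch(List<? extends ToXContent> items) {
        this.items = items;
    }

    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
        return Iterators.concat(
            // One chunk opening the surrounding object and array.
            ChunkedToXContentHelper.singleChunk((builder, p) -> builder.startObject().startArray("items")),
            // One chunk per element, so large lists stream without
            // materializing a single giant XContent object.
            items.iterator(),
            ChunkedToXContentHelper.singleChunk((builder, p) -> builder.endArray().endObject())
        );
    }
}
```

Emitting one chunk per element is the point of the chunked API: peak memory stays flat no matter how many results the response carries.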
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java index b7376f36c766..787690d6f55e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderTests.java @@ -762,27 +762,15 @@ public class DataTierAllocationDeciderTests extends ESAllocationTestCase { SingleNodeShutdownMetadata.Type.REPLACE, SingleNodeShutdownMetadata.Type.REMOVE ); + var builder = SingleNodeShutdownMetadata.builder() + .setNodeId(nodeId) + .setNodeEphemeralId(nodeId) + .setType(type) + .setReason(this.getTestName()); return switch (type) { - case REMOVE -> SingleNodeShutdownMetadata.builder() - .setNodeId(nodeId) - .setType(type) - .setReason(this.getTestName()) - .setStartedAtMillis(randomNonNegativeLong()) - .build(); - case REPLACE -> SingleNodeShutdownMetadata.builder() - .setNodeId(nodeId) - .setType(type) - .setTargetNodeName(randomAlphaOfLength(10)) - .setReason(this.getTestName()) - .setStartedAtMillis(randomNonNegativeLong()) - .build(); - case SIGTERM -> SingleNodeShutdownMetadata.builder() - .setNodeId(nodeId) - .setType(type) - .setGracePeriod(randomTimeValue()) - .setReason(this.getTestName()) - .setStartedAtMillis(randomNonNegativeLong()) - .build(); + case REMOVE -> builder.setStartedAtMillis(randomNonNegativeLong()).build(); + case REPLACE -> builder.setTargetNodeName(randomAlphaOfLength(10)).setStartedAtMillis(randomNonNegativeLong()).build(); + case SIGTERM -> builder.setGracePeriod(randomTimeValue()).setStartedAtMillis(randomNonNegativeLong()).build(); case RESTART -> throw new AssertionError("bad randomization, this method only generates removal type shutdowns"); }; } @@ -1030,6 +1018,7 @@ public class DataTierAllocationDeciderTests extends ESAllocationTestCase { nodeId, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId) + .setNodeEphemeralId(nodeId) .setType(SingleNodeShutdownMetadata.Type.RESTART) .setReason(this.getTestName()) .setStartedAtMillis(randomNonNegativeLong()) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java index 72bf7cedb2fb..a427f8c9154c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java @@ -465,6 +465,7 @@ public class CheckShrinkReadyStepTests extends AbstractStepTestCase concreteFields) { List mappers = concreteFields.stream().map(MockFieldMapper::new).collect(Collectors.toList()); - return MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList()); + return MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList(), null); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index fc707c7a5ee2..0cf4e2afdfdc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -197,6 +197,7 @@ import org.elasticsearch.xpack.core.transform.action.GetTransformAction; import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; +import org.elasticsearch.xpack.core.transform.action.SetTransformUpgradeModeAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction; @@ -3451,6 +3452,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(SetTransformUpgradeModeAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); @@ -3540,6 +3542,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(SetTransformUpgradeModeAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(ActivateProfileAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(SuggestProfilesAction.NAME, request, authentication), is(false)); diff --git a/x-pack/plugin/deprecation/build.gradle b/x-pack/plugin/deprecation/build.gradle index e62a3441af65..57124cda9a79 100644 --- a/x-pack/plugin/deprecation/build.gradle +++ b/x-pack/plugin/deprecation/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'x-pack-deprecation' - description 'Elasticsearch Expanded Pack Plugin - Deprecation' - classname 'org.elasticsearch.xpack.deprecation.Deprecation' + name = 'x-pack-deprecation' + description = 'Elasticsearch Expanded Pack Plugin - Deprecation' + classname ='org.elasticsearch.xpack.deprecation.Deprecation' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle index 7e61533c818e..2fe46efca8a1 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' esplugin { - description 'Deprecated query plugin' - classname 'org.elasticsearch.xpack.deprecation.EarlyDeprecationTestPlugin' + description = 'Deprecated query plugin' + classname ='org.elasticsearch.xpack.deprecation.EarlyDeprecationTestPlugin' } dependencies 
{ diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle index 45b543d910a7..e3ad5799aaf7 100644 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/rest/build.gradle @@ -11,8 +11,8 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' esplugin { - description 'Deprecated query plugin' - classname 'org.elasticsearch.xpack.deprecation.TestDeprecationPlugin' + description = 'Deprecated query plugin' + classname ='org.elasticsearch.xpack.deprecation.TestDeprecationPlugin' } dependencies { diff --git a/x-pack/plugin/downsample/build.gradle b/x-pack/plugin/downsample/build.gradle index 74999a7620e5..2de12b89b5d3 100644 --- a/x-pack/plugin/downsample/build.gradle +++ b/x-pack/plugin/downsample/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-downsample' - description 'Elasticsearch Expanded Pack Plugin - Downsample' - classname 'org.elasticsearch.xpack.downsample.Downsample' + name = 'x-pack-downsample' + description = 'Elasticsearch Expanded Pack Plugin - Downsample' + classname ='org.elasticsearch.xpack.downsample.Downsample' extendedPlugins = ['x-pack-aggregate-metric'] } diff --git a/x-pack/plugin/enrich/build.gradle b/x-pack/plugin/enrich/build.gradle index 134008c53fe1..352b7a3e6417 100644 --- a/x-pack/plugin/enrich/build.gradle +++ b/x-pack/plugin/enrich/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-enrich' - description 'Elasticsearch Expanded Pack Plugin - Enrich' - classname 'org.elasticsearch.xpack.enrich.EnrichPlugin' + name = 'x-pack-enrich' + description = 'Elasticsearch Expanded Pack Plugin - Enrich' + classname ='org.elasticsearch.xpack.enrich.EnrichPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/ent-search/build.gradle b/x-pack/plugin/ent-search/build.gradle index 66be40152d8b..52634ad788d9 100644 --- a/x-pack/plugin/ent-search/build.gradle +++ b/x-pack/plugin/ent-search/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.legacy-java-rest-test' esplugin { - name 'x-pack-ent-search' - description 'Elasticsearch Expanded Pack Plugin - Enterprise Search' - classname 'org.elasticsearch.xpack.application.EnterpriseSearch' + name = 'x-pack-ent-search' + description = 'Elasticsearch Expanded Pack Plugin - Enterprise Search' + classname = 'org.elasticsearch.xpack.application.EnterpriseSearch' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index 0001e59aedd9..f6fd75888293 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -34,7 +34,7 @@ public class DeleteAnalyticsCollectionAction { public static class Request extends MasterNodeRequest implements ToXContentObject { private final String collectionName; - public static ParseField COLLECTION_NAME_FIELD = new 
ParseField("collection_name"); + public static final ParseField COLLECTION_NAME_FIELD = new ParseField("collection_name"); public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index dd1e341a8d90..833fc64f03f8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -35,7 +35,7 @@ public class GetAnalyticsCollectionAction { public static class Request extends MasterNodeReadRequest implements ToXContentObject { private final String[] names; - public static ParseField NAMES_FIELD = new ParseField("names"); + public static final ParseField NAMES_FIELD = new ParseField("names"); public Request(TimeValue masterNodeTimeout, String[] names) { super(masterNodeTimeout); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java index 7e5d05e9e222..7d93c33208e5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java @@ -311,7 +311,7 @@ public class PostAnalyticsEventAction { } public static class Response extends ActionResponse implements ToXContentObject { - public static Response ACCEPTED = new Response(true); + public static final Response ACCEPTED = new Response(true); public static Response readFromStreamInput(StreamInput in) throws IOException { boolean accepted = in.readBoolean(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java index bf73be9a5ddd..1cab0d191197 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java @@ -20,11 +20,11 @@ import static org.elasticsearch.common.Strings.requireNonBlank; public class DocumentAnalyticsEventField { - public static ParseField DOCUMENT_FIELD = new ParseField("document"); + public static final ParseField DOCUMENT_FIELD = new ParseField("document"); - public static ParseField DOCUMENT_ID_FIELD = new ParseField("id"); + public static final ParseField DOCUMENT_ID_FIELD = new ParseField("id"); - public static ParseField DOCUMENT_INDEX_FIELD = new ParseField("index"); + public static final ParseField DOCUMENT_INDEX_FIELD = new ParseField("index"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( DOCUMENT_FIELD.getPreferredName(), diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java index e450d89eead3..b2436be31919 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java @@ -17,13 +17,13 @@ import java.util.HashMap; import java.util.Map; public class PageAnalyticsEventField { - public static ParseField PAGE_FIELD = new ParseField("page"); + public static final ParseField PAGE_FIELD = new ParseField("page"); - public static ParseField PAGE_URL_FIELD = new ParseField("url"); + public static final ParseField PAGE_URL_FIELD = new ParseField("url"); - public static ParseField PAGE_TITLE_FIELD = new ParseField("title"); + public static final ParseField PAGE_TITLE_FIELD = new ParseField("title"); - public static ParseField PAGE_REFERRER_FIELD = new ParseField("referrer"); + public static final ParseField PAGE_REFERRER_FIELD = new ParseField("referrer"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( PAGE_FIELD.getPreferredName(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java index 78dc75befa92..3dca22d94ad7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java @@ -19,11 +19,11 @@ import java.util.Map; public class PaginationAnalyticsEventField { - public static ParseField PAGINATION_FIELD = new ParseField("page"); + public static final ParseField PAGINATION_FIELD = new ParseField("page"); - public static ParseField CURRENT_PAGE_FIELD = new ParseField("current"); + public static final ParseField CURRENT_PAGE_FIELD = new ParseField("current"); - public static ParseField PAGE_SIZE_FIELD = new ParseField("size"); + public static final ParseField PAGE_SIZE_FIELD = new ParseField("size"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( PAGINATION_FIELD.getPreferredName(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java index 27d63f4dd0a6..438fb20dcf0b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java @@ -21,13 +21,13 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.S import static org.elasticsearch.xpack.application.analytics.event.parser.field.SortOrderAnalyticsEventField.SORT_FIELD; public class SearchAnalyticsEventField { - public 
static ParseField SEARCH_FIELD = new ParseField("search"); + public static final ParseField SEARCH_FIELD = new ParseField("search"); - public static ParseField SEARCH_QUERY_FIELD = new ParseField("query"); + public static final ParseField SEARCH_QUERY_FIELD = new ParseField("query"); - public static ParseField SEARCH_APPLICATION_FIELD = new ParseField("search_application"); + public static final ParseField SEARCH_APPLICATION_FIELD = new ParseField("search_application"); - public static ParseField SEARCH_RESULTS_FIELD = new ParseField("results"); + public static final ParseField SEARCH_RESULTS_FIELD = new ParseField("results"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( SEARCH_FIELD.getPreferredName(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java index dc4ef6f816b9..261a6c623a7a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java @@ -19,7 +19,7 @@ import java.util.List; import java.util.Map; public class SearchFiltersAnalyticsEventField { - public static ParseField SEARCH_FILTERS_FIELD = new ParseField("filters"); + public static final ParseField SEARCH_FILTERS_FIELD = new ParseField("filters"); private static final ObjectParser>, AnalyticsEvent.Context> PARSER = new ObjectParser<>( SEARCH_FILTERS_FIELD.getPreferredName(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java index a680a151142d..e95e777bb88e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java @@ -20,9 +20,9 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.D import static org.elasticsearch.xpack.application.analytics.event.parser.field.PageAnalyticsEventField.PAGE_FIELD; public class SearchResultAnalyticsEventField { - public static ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results"); + public static final ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results"); - public static ParseField SEARCH_RESULT_ITEMS_FIELD = new ParseField("items"); + public static final ParseField SEARCH_RESULT_ITEMS_FIELD = new ParseField("items"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( "search_results", diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java index 406d59a7d081..866984c62b75 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java @@ -21,9 +21,9 @@ import java.util.Map; import static org.elasticsearch.common.Strings.requireNonBlank; public class SessionAnalyticsEventField { - public static ParseField SESSION_FIELD = new ParseField("session"); + public static final ParseField SESSION_FIELD = new ParseField("session"); - public static ParseField SESSION_ID_FIELD = new ParseField("id"); + public static final ParseField SESSION_ID_FIELD = new ParseField("id"); public static final ParseField CLIENT_ADDRESS_FIELD = new ParseField("ip"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java index 5b70bd8c91c2..df591572a767 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java @@ -20,11 +20,11 @@ import static org.elasticsearch.common.Strings.requireNonBlank; public class SortOrderAnalyticsEventField { - public static ParseField SORT_FIELD = new ParseField("sort"); + public static final ParseField SORT_FIELD = new ParseField("sort"); - public static ParseField SORT_ORDER_NAME_FIELD = new ParseField("name"); + public static final ParseField SORT_ORDER_NAME_FIELD = new ParseField("name"); - public static ParseField SORT_ORDER_DIRECTION_FIELD = new ParseField("direction"); + public static final ParseField SORT_ORDER_DIRECTION_FIELD = new ParseField("direction"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( SORT_FIELD.getPreferredName(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java index dbcedde82572..186f581d4b6f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java @@ -20,9 +20,9 @@ import java.util.Map; import static org.elasticsearch.common.Strings.requireNonBlank; public class UserAnalyticsEventField { - public static ParseField USER_FIELD = new ParseField("user"); + public static final ParseField USER_FIELD = new ParseField("user"); - public static ParseField USER_ID_FIELD = new ParseField("id"); + public static final ParseField USER_ID_FIELD = new ParseField("id"); private static final ObjectParser, AnalyticsEvent.Context> PARSER = new ObjectParser<>( USER_FIELD.getPreferredName(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java index 93ad2f46b558..98ae7cdef770 
100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java @@ -35,7 +35,7 @@ public class DeleteSearchApplicationAction { public static class Request extends ActionRequest implements ToXContentObject { private final String name; - public static ParseField NAME_FIELD = new ParseField("name"); + public static final ParseField NAME_FIELD = new ParseField("name"); public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java index 75bec0d61649..ff829b6f5301 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java @@ -101,8 +101,8 @@ public class PutSearchApplicationAction { return Objects.hash(searchApp, create); } - public static ParseField SEARCH_APPLICATION = new ParseField("searchApp"); - public static ParseField CREATE = new ParseField("create"); + public static final ParseField SEARCH_APPLICATION = new ParseField("searchApp"); + public static final ParseField CREATE = new ParseField("create"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index 9ae67f0e27c2..72ab7dabf0fc 100644 --- a/x-pack/plugin/eql/build.gradle +++ b/x-pack/plugin/eql/build.gradle @@ -8,9 +8,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-eql' - description 'The Elasticsearch plugin that powers EQL for Elasticsearch' - classname 'org.elasticsearch.xpack.eql.plugin.EqlPlugin' + name = 'x-pack-eql' + description = 'The Elasticsearch plugin that powers EQL for Elasticsearch' + classname ='org.elasticsearch.xpack.eql.plugin.EqlPlugin' extendedPlugins = ['x-pack-ql', 'lang-painless'] } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchRequest.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchRequest.java index 5804e11b72ff..0b1d5005c5f8 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchRequest.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchRequest.java @@ -44,8 +44,8 @@ import static org.elasticsearch.xpack.eql.action.RequestDefaults.FIELD_TIMESTAMP public class EqlSearchRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContent { - public static long MIN_KEEP_ALIVE = TimeValue.timeValueMinutes(1).millis(); - public static TimeValue DEFAULT_KEEP_ALIVE = TimeValue.timeValueDays(5); + public static final long MIN_KEEP_ALIVE = TimeValue.timeValueMinutes(1).millis(); + public static final TimeValue DEFAULT_KEEP_ALIVE = TimeValue.timeValueDays(5); public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.fromOptions(true, true, true, false); private String[] indices; diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index a4d93b765997..e068f652cd28 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -238,7 +238,7 @@ public class EqlSearchResponse extends ActionResponse implements ToXContentObjec // Event public static class Event implements Writeable, ToXContentObject { - public static Event MISSING_EVENT = new Event("", "", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), null, true); + public static final Event MISSING_EVENT = new Event("", "", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), null, true); private static final class Fields { static final String INDEX = GetResult._INDEX; diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/RequestDefaults.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/RequestDefaults.java index 58bfb9513add..e920067def8c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/RequestDefaults.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/RequestDefaults.java @@ -14,8 +14,8 @@ public final class RequestDefaults { public static final String FIELD_TIMESTAMP = "@timestamp"; public static final String FIELD_EVENT_CATEGORY = "event.category"; - public static int SIZE = 10; - public static int FETCH_SIZE = 1000; - public static boolean CCS_MINIMIZE_ROUNDTRIPS = true; - public static int MAX_SAMPLES_PER_KEY = 1; + public static final int SIZE = 10; + public static final int FETCH_SIZE = 1000; + public static final boolean CCS_MINIMIZE_ROUNDTRIPS = true; + public static final int MAX_SAMPLES_PER_KEY = 1; } diff --git a/x-pack/plugin/esql-core/build.gradle b/x-pack/plugin/esql-core/build.gradle index 796ec1d13715..c18c10840d22 100644 --- a/x-pack/plugin/esql-core/build.gradle +++ b/x-pack/plugin/esql-core/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-test-artifact' apply plugin: 'elasticsearch.publish' esplugin { - name 'x-pack-esql-core' - description 'Elasticsearch infrastructure plugin for ESQL' - classname 'org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin' + name = 'x-pack-esql-core' + description = 'Elasticsearch infrastructure plugin for ESQL' + classname = 'org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 02f9752d21e0..9cbcd8d49b58 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -19,9 +19,9 @@ apply plugin: 'elasticsearch.string-templates' apply plugin: 'elasticsearch.publish' esplugin { - name 'x-pack-esql' - description 'The plugin that powers ESQL for Elasticsearch' - classname 'org.elasticsearch.xpack.esql.plugin.EsqlPlugin' + name = 'x-pack-esql' + description = 'The plugin that powers ESQL for Elasticsearch' + classname ='org.elasticsearch.xpack.esql.plugin.EsqlPlugin' extendedPlugins = ['x-pack-esql-core', 'lang-painless', 'x-pack-ml'] } @@ -276,7 +276,7 @@ tasks.register("regen") { } } -tasks.named("spotlessJava") { dependsOn stringTemplates } +tasks.named("spotlessJava") { dependsOn "stringTemplates" } tasks.named('checkstyleMain').configure { excludes = [ "**/*.java.st" ] exclude { 
normalize(it.file.toString()).contains("src/main/generated-src/generated") } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 3d600bec1bd6..896662dddf1e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -122,7 +122,7 @@ final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.appendBoolean(getBoolean(getFirstValueIndex(pos))); + builder.appendBoolean(getBoolean(first)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index f50135aa51dd..5bcb1b0ec509 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -110,7 +110,7 @@ final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlo int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.appendBytesRef(getBytesRef(getFirstValueIndex(pos), scratch)); + builder.appendBytesRef(getBytesRef(first, scratch)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index eceec3034874..20bd42da98c7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -101,7 +101,7 @@ final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.appendDouble(getDouble(getFirstValueIndex(pos))); + builder.appendDouble(getDouble(first)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java index 56f0cedb5f15..c0941557dc4f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java @@ -101,7 +101,7 @@ final class FloatArrayBlock extends AbstractArrayBlock implements FloatBlock { int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.appendFloat(getFloat(getFirstValueIndex(pos))); + builder.appendFloat(getFloat(first)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 2e10d09e1a41..8ced678bc90b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -101,7 +101,7 @@ final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.appendInt(getInt(getFirstValueIndex(pos))); + builder.appendInt(getInt(first)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 776fa363f608..fb631ab326ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -101,7 +101,7 @@ final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.appendLong(getLong(getFirstValueIndex(pos))); + builder.appendLong(getLong(first)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 1e06cf1ea445..edf54a829deb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -212,10 +212,46 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R /** * Expand multivalued fields into one row per value. Returns the same block if there aren't any multivalued * fields to expand. The returned block needs to be closed by the caller to release the block's resources. - * TODO: pass BlockFactory */ Block expand(); + /** + * Build a {@link Block} with a {@code null} inserted {@code before} each + * listed position. + *
<p>
+ * Note: {@code before} must be non-decreasing.
+ * </p>
+ */ + default Block insertNulls(IntVector before) { + // TODO remove default and scatter to implementation where it can be a lot more efficient + int myCount = getPositionCount(); + int beforeCount = before.getPositionCount(); + try (Builder builder = elementType().newBlockBuilder(myCount + beforeCount, blockFactory())) { + int beforeP = 0; + int nextNull = before.getInt(beforeP); + for (int mainP = 0; mainP < myCount; mainP++) { + while (mainP == nextNull) { + builder.appendNull(); + beforeP++; + if (beforeP >= beforeCount) { + builder.copyFrom(this, mainP, myCount); + return builder.build(); + } + nextNull = before.getInt(beforeP); + } + // This line right below this is the super inefficient one. + builder.copyFrom(this, mainP, mainP + 1); + } + assert nextNull == myCount; + while (beforeP < beforeCount) { + nextNull = before.getInt(beforeP++); + assert nextNull == myCount; + builder.appendNull(); + } + return builder.build(); + } + } + /** * Builds {@link Block}s. Typically, you use one of it's direct supinterfaces like {@link IntBlock.Builder}. * This is {@link Releasable} and should be released after building the block or if building the block fails. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index e855e6d6296d..16e2a62b9d03 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -149,7 +149,7 @@ $endif$ int valueCount = getValueCount(pos); int first = getFirstValueIndex(pos); if (valueCount == 1) { - builder.append$Type$(get$Type$(getFirstValueIndex(pos)$if(BytesRef)$, scratch$endif$)); + builder.append$Type$(get$Type$(first$if(BytesRef)$, scratch$endif$)); } else { builder.beginPositionEntry(); for (int c = 0; c < valueCount; c++) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index c071b5055df7..ad7a4f169d92 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -9,10 +9,11 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -167,24 +168,24 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> { - ob.append((b, p) -> { - b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); - b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); - b.field("took_nanos", tookNanos); - if (b.humanReadable()) { - 
b.field("took_time", TimeValue.timeValueNanos(tookNanos)); - } - b.field("cpu_nanos", cpuNanos); - if (b.humanReadable()) { - b.field("cpu_time", TimeValue.timeValueNanos(cpuNanos)); - } - b.field("iterations", iterations); - return b; - }); - ob.array("operators", operators.iterator()); - ob.field("sleeps", sleeps); - }); + return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { + b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); + b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); + b.field("took_nanos", tookNanos); + if (b.humanReadable()) { + b.field("took_time", TimeValue.timeValueNanos(tookNanos)); + } + b.field("cpu_nanos", cpuNanos); + if (b.humanReadable()) { + b.field("cpu_time", TimeValue.timeValueNanos(cpuNanos)); + } + b.field("iterations", iterations); + return b; + }), + ChunkedToXContentHelper.array("operators", operators.iterator()), + Iterators.single((b, p) -> b.field("sleeps", sleeps)), + ChunkedToXContentHelper.endObject() + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java index d42655446ca1..578e1c046954 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java @@ -20,21 +20,24 @@ import java.util.Arrays; import java.util.Objects; /** - * Combines values at the given blocks with the same positions into a single position for the blocks at the given channels + * Combines values at the given blocks with the same positions into a single position + * for the blocks at the given channels. + *
<p>
 * Example, input pages consisting of three blocks:
- * positions | field-1 | field-2 |
- * -----------------------------------
+ * <p>
+ * <pre>{@code
+ * | positions    | field-1 | field-2 |
+ * ------------------------------------
  * Page 1:
- * 1           |  a,b    |   2020  |
- * 1           |  c      |   2021  |
- * ---------------------------------
+ * | 1            |  a,b    |   2020  |
+ * | 1            |  c      |   2021  |
  * Page 2:
- * 2           |  a,e    |   2021  |
- * ---------------------------------
+ * | 2            |  a,e    |   2021  |
  * Page 3:
- * 4           |  d      |   null  |
- * ---------------------------------
+ * | 4            |  d      |   null  |
+ * }</pre>
 * Output:
+ * <pre>{@code
  * |  field-1   | field-2    |
  * ---------------------------
  * |  null      | null       |
@@ -42,6 +45,7 @@ import java.util.Objects;
  * |  a,e       | 2021       |
  * |  null      | null       |
  * |  d         | 2023       |
+ * }</pre>
*/ public final class MergePositionsOperator implements Operator { private boolean finished = false; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java new file mode 100644 index 000000000000..f9895ff346b5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoin.java @@ -0,0 +1,253 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.lookup; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +import java.util.Optional; +import java.util.stream.IntStream; + +/** + * Performs a {@code LEFT JOIN} where many "right hand" pages are joined + * against a "left hand" {@link Page}. Each row on the "left hand" page + * is output at least once whether it appears in the "right hand" or not. + * And more than once if it appears in the "right hand" pages more than once. + *
<p>
+ * The "right hand" page contains a non-decreasing {@code positions}
+ * column that controls which position in the "left hand" page each row
+ * in the "right hand" page joins against. This'll make more sense with a picture:
+ * </p>
+ * <pre>{@code
+ * "left hand"                 "right hand"
+ * | lhdata |             | positions | r1 | r2 |
+ * ----------             -----------------------
+ * |    l00 |             |         0 |  1 |  2 |
+ * |    l01 |             |         1 |  2 |  3 |
+ * |    l02 |             |         1 |  3 |  3 |
+ * |    ... |             |         3 |  9 |  9 |
+ * |    l99 |
+ * }</pre>
+ * <p>
+ * Joins to:
+ * </p>
+ * <pre>{@code
+ * | lhdata  |  r1  |  r2  |
+ * -------------------------
+ * |     l00 |    1 |    2 |
+ * |     l01 |    2 |    3 |
+ * |     l01 |    3 |    3 |   <1>
+ * |     l02 | null | null |   <2>
+ * |     l03 |    9 |    9 |
+ * }</pre>
+ * <ol>
+ *     <li>{@code l01} is duplicated because its position appears twice in
+ *         the right hand page.</li>
+ *     <li>{@code l02}'s row is filled with {@code null}s because its position
+ *         does not appear in the right hand page.</li>
+ * </ol>
+ * <p>
+ * This supports joining many "right hand" pages against the same
+ * "left hand" so long as the first value of the next {@code positions}
+ * column is the same or greater than the last value of the previous
+ * {@code positions} column. Large gaps are fine. Starting with the
+ * same number as you ended on is fine. This looks like:
+ * </p>
+ * <pre>{@code
+ * "left hand"                 "right hand"
+ * | lhdata |             | positions | r1 | r2 |
+ * ----------             -----------------------
+ * |    l00 |                    page 1
+ * |    l01 |             |         0 |  1 |  2 |
+ * |    l02 |             |         1 |  3 |  3 |
+ * |    l03 |                    page 2
+ * |    l04 |             |         1 |  9 |  9 |
+ * |    l05 |             |         2 |  9 |  9 |
+ * |    l06 |                    page 3
+ * |    ... |             |         5 | 10 | 10 |
+ * |    l99 |             |         7 | 11 | 11 |
+ * }</pre>
+ * <p>
+ * Which makes:
+ * </p>
+ * <pre>{@code
+ * | lhdata  |  r1  |  r2  |
+ * -------------------------
+ *         page 1
+ * |     l00 |    1 |    2 |
+ * |     l01 |    3 |    3 |
+ *         page 2
+ * |     l01 |    9 |    9 |
+ * |     l02 |    9 |    9 |
+ *         page 3
+ * |     l03 | null | null |
+ * |     l04 | null | null |
+ * |     l05 |   10 |   10 |
+ * |     l06 | null | null |
+ * |     l07 |   11 |   11 |
+ * }</pre>
+ * <p>
+ * Note that the output pages are sized by the "right hand" pages with
+ * {@code null}s inserted.
+ * </p>
+ * <p>
+ * Finally, after all "right hand" pages have been joined this will produce
+ * all remaining "left hand" rows joined against {@code null}.
+ * Another picture:
+ * </p>
+ * <pre>{@code
+ * "left hand"                 "right hand"
+ * | lhdata |             | positions | r1 | r2 |
+ * ----------             -----------------------
+ * |    l00 |                    last page
+ * |    l01 |             |        96 |  1 |  2 |
+ * |    ... |             |        97 |  1 |  2 |
+ * |    l99 |
+ * }</pre>
+ * <p>
+ * Which makes:
+ * </p>
+ * <pre>{@code
+ * | lhdata  |  r1  |  r2  |
+ * -------------------------
+ *     last matching page
+ * |     l96 |    1 |    2 |
+ * |     l97 |    2 |    3 |
+ *    trailing nulls page
+ * |     l98 | null | null |
+ * |     l99 | null | null |
+ * }</pre>
+ */ +class RightChunkedLeftJoin implements Releasable { + private final Page leftHand; + private final int mergedElementCount; + /** + * The next position that we'll emit or one more than the + * next position we'll emit. This is used to cover gaps between "right hand" + * pages and to detect if "right hand" pages "go backwards". + */ + private int next = 0; + + RightChunkedLeftJoin(Page leftHand, int mergedElementCounts) { + this.leftHand = leftHand; + this.mergedElementCount = mergedElementCounts; + } + + Page join(Page rightHand) { + IntVector positions = rightHand.getBlock(0).asVector(); + if (positions.getInt(0) < next - 1) { + throw new IllegalArgumentException("maximum overlap is one position"); + } + Block[] blocks = new Block[leftHand.getBlockCount() + mergedElementCount]; + if (rightHand.getBlockCount() != mergedElementCount + 1) { + throw new IllegalArgumentException( + "expected right hand side with [" + (mergedElementCount + 1) + "] but got [" + rightHand.getBlockCount() + "]" + ); + } + IntVector.Builder leftFilterBuilder = null; + IntVector leftFilter = null; + IntVector.Builder insertNullsBuilder = null; + IntVector insertNulls = null; + try { + leftFilterBuilder = positions.blockFactory().newIntVectorBuilder(positions.getPositionCount()); + for (int p = 0; p < positions.getPositionCount(); p++) { + int pos = positions.getInt(p); + if (pos > next) { + if (insertNullsBuilder == null) { + insertNullsBuilder = positions.blockFactory().newIntVectorBuilder(pos - next); + } + for (int missing = next; missing < pos; missing++) { + leftFilterBuilder.appendInt(missing); + insertNullsBuilder.appendInt(p); + } + } + leftFilterBuilder.appendInt(pos); + next = pos + 1; + } + leftFilter = leftFilterBuilder.build(); + int[] leftFilterArray = toArray(leftFilter); + insertNulls = insertNullsBuilder == null ? null : insertNullsBuilder.build(); + + int b = 0; + while (b < leftHand.getBlockCount()) { + blocks[b] = leftHand.getBlock(b).filter(leftFilterArray); + b++; + } + int rb = 1; // Skip the positions column + while (b < blocks.length) { + Block block = rightHand.getBlock(rb); + if (insertNulls == null) { + block.mustIncRef(); + } else { + block = block.insertNulls(insertNulls); + } + blocks[b] = block; + b++; + rb++; + } + Page result = new Page(blocks); + blocks = null; + return result; + } finally { + Releasables.close( + blocks == null ? null : Releasables.wrap(blocks), + leftFilter, + leftFilterBuilder, + insertNullsBuilder, + insertNulls + ); + } + } + + Optional noMoreRightHandPages() { + if (next == leftHand.getPositionCount()) { + return Optional.empty(); + } + BlockFactory factory = leftHand.getBlock(0).blockFactory(); + Block[] blocks = new Block[leftHand.getBlockCount() + mergedElementCount]; + // TODO make a filter that takes a min and max? 
+ int[] filter = IntStream.range(next, leftHand.getPositionCount()).toArray(); + try { + int b = 0; + while (b < leftHand.getBlockCount()) { + blocks[b] = leftHand.getBlock(b).filter(filter); + b++; + } + while (b < blocks.length) { + blocks[b] = factory.newConstantNullBlock(leftHand.getPositionCount() - next); + b++; + } + Page result = new Page(blocks); + blocks = null; + return Optional.of(result); + } finally { + if (blocks != null) { + Releasables.close(blocks); + } + } + } + + @Override + public void close() { + Releasables.close(leftHand::releaseBlocks); + } + + private int[] toArray(IntVector vector) { + // TODO replace parameter to filter with vector and remove this + int[] array = new int[vector.getPositionCount()]; + for (int p = 0; p < vector.getPositionCount(); p++) { + array[p] = vector.getInt(p); + } + return array; + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 439ebe34c7d4..33a294131c99 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -224,6 +224,7 @@ public class BasicBlockTests extends ESTestCase { try (IntBlock.Builder blockBuilder = blockFactory.newIntBlockBuilder(1)) { IntBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + assertInsertNulls(block); releaseAndAssertBreaker(block, copy); } @@ -250,6 +251,7 @@ public class BasicBlockTests extends ESTestCase { assertSingleValueDenseBlock(vector.asBlock()); assertThat(vector.min(), equalTo(0)); assertThat(vector.max(), equalTo(positionCount - 1)); + assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -272,12 +274,14 @@ public class BasicBlockTests extends ESTestCase { assertEmptyLookup(blockFactory, block); assertThat(block.asVector().min(), equalTo(Integer.MAX_VALUE)); assertThat(block.asVector().max(), equalTo(Integer.MIN_VALUE)); + assertInsertNulls(block); releaseAndAssertBreaker(block); try (IntVector.Builder vectorBuilder = blockFactory.newIntVectorBuilder(0)) { IntVector vector = vectorBuilder.build(); assertThat(vector.min(), equalTo(Integer.MAX_VALUE)); assertThat(vector.max(), equalTo(Integer.MIN_VALUE)); + assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -317,6 +321,7 @@ public class BasicBlockTests extends ESTestCase { assertEmptyLookup(blockFactory, block); assertThat(block.asVector().min(), equalTo(value)); assertThat(block.asVector().max(), equalTo(value)); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -350,6 +355,7 @@ public class BasicBlockTests extends ESTestCase { try (LongBlock.Builder blockBuilder = blockFactory.newLongBlockBuilder(1)) { LongBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + assertInsertNulls(block); releaseAndAssertBreaker(block, copy); } @@ -372,6 +378,7 @@ public class BasicBlockTests extends ESTestCase { LongStream.range(0, positionCount).forEach(vectorBuilder::appendLong); LongVector vector = vectorBuilder.build(); assertSingleValueDenseBlock(vector.asBlock()); + assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -408,6 +415,7 @@ public class BasicBlockTests extends ESTestCase { b -> assertThat(b, 
instanceOf(ConstantNullBlock.class)) ); assertEmptyLookup(blockFactory, block); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -442,6 +450,7 @@ public class BasicBlockTests extends ESTestCase { try (DoubleBlock.Builder blockBuilder = blockFactory.newDoubleBlockBuilder(1)) { DoubleBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + assertInsertNulls(block); releaseAndAssertBreaker(block, copy); } @@ -466,6 +475,7 @@ public class BasicBlockTests extends ESTestCase { IntStream.range(0, positionCount).mapToDouble(ii -> 1.0 / ii).forEach(vectorBuilder::appendDouble); DoubleVector vector = vectorBuilder.build(); assertSingleValueDenseBlock(vector.asBlock()); + assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -501,6 +511,7 @@ public class BasicBlockTests extends ESTestCase { b -> assertThat(b, instanceOf(ConstantNullBlock.class)) ); assertEmptyLookup(blockFactory, block); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -536,6 +547,7 @@ public class BasicBlockTests extends ESTestCase { try (FloatBlock.Builder blockBuilder = blockFactory.newFloatBlockBuilder(1)) { FloatBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + assertInsertNulls(block); releaseAndAssertBreaker(block, copy); } @@ -560,6 +572,7 @@ public class BasicBlockTests extends ESTestCase { IntStream.range(0, positionCount).mapToDouble(ii -> 1.0 / ii).forEach(vectorBuilder::appendDouble); DoubleVector vector = vectorBuilder.build(); assertSingleValueDenseBlock(vector.asBlock()); + assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -595,6 +608,7 @@ public class BasicBlockTests extends ESTestCase { b -> assertThat(b, instanceOf(ConstantNullBlock.class)) ); assertEmptyLookup(blockFactory, block); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -646,6 +660,7 @@ public class BasicBlockTests extends ESTestCase { try (BytesRefBlock.Builder blockBuilder = blockFactory.newBytesRefBlockBuilder(1)) { BytesRefBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + assertInsertNulls(block); releaseAndAssertBreaker(block, copy); } @@ -671,6 +686,7 @@ public class BasicBlockTests extends ESTestCase { IntStream.range(0, positionCount).mapToObj(ii -> new BytesRef(randomAlphaOfLength(5))).forEach(vectorBuilder::appendBytesRef); BytesRefVector vector = vectorBuilder.build(); assertSingleValueDenseBlock(vector.asBlock()); + assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -726,6 +742,7 @@ public class BasicBlockTests extends ESTestCase { } } assertKeepMask(block); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -765,6 +782,7 @@ public class BasicBlockTests extends ESTestCase { b -> assertThat(b, instanceOf(ConstantNullBlock.class)) ); assertEmptyLookup(blockFactory, block); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -810,6 +828,7 @@ public class BasicBlockTests extends ESTestCase { try (BooleanBlock.Builder blockBuilder = blockFactory.newBooleanBlockBuilder(1)) { BooleanBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + assertInsertNulls(block); releaseAndAssertBreaker(block, copy); } @@ -852,6 +871,7 @@ public class BasicBlockTests extends ESTestCase { assertTrue(vector.allFalse()); } } + 
assertInsertNulls(vector.asBlock()); releaseAndAssertBreaker(vector.asBlock()); } } @@ -893,6 +913,7 @@ public class BasicBlockTests extends ESTestCase { assertFalse(block.asVector().allTrue()); assertTrue(block.asVector().allFalse()); } + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -935,6 +956,7 @@ public class BasicBlockTests extends ESTestCase { singletonList(null), b -> assertThat(b, instanceOf(ConstantNullBlock.class)) ); + assertInsertNulls(block); releaseAndAssertBreaker(block); } } @@ -1390,6 +1412,7 @@ public class BasicBlockTests extends ESTestCase { asserter.accept(randomNonNullPosition, block); assertTrue(block.isNull(randomNullPosition)); assertFalse(block.isNull(randomNonNullPosition)); + assertInsertNulls(block); releaseAndAssertBreaker(block); if (block instanceof BooleanBlock bb) { try (ToMask mask = bb.toMask()) { @@ -1409,6 +1432,7 @@ public class BasicBlockTests extends ESTestCase { void assertZeroPositionsAndRelease(Block block) { assertThat(block.getPositionCount(), is(0)); assertKeepMaskEmpty(block); + assertInsertNulls(block); releaseAndAssertBreaker(block); } @@ -1451,6 +1475,36 @@ public class BasicBlockTests extends ESTestCase { } } + static void assertInsertNulls(Block block) { + int maxNulls = Math.min(1000, block.getPositionCount() * 5); + List orig = new ArrayList<>(block.getPositionCount()); + BlockTestUtils.readInto(orig, block); + + int nullCount = 0; + try (IntVector.Builder beforeBuilder = block.blockFactory().newIntVectorBuilder(block.getPositionCount())) { + List expected = new ArrayList<>(block.getPositionCount()); + for (int p = 0; p < block.getPositionCount(); p++) { + while (nullCount < maxNulls && randomBoolean()) { + expected.add(null); + beforeBuilder.appendInt(p); + nullCount++; + } + expected.add(orig.get(p)); + } + while (nullCount == 0 || (nullCount < maxNulls && randomBoolean())) { + expected.add(null); + beforeBuilder.appendInt(block.getPositionCount()); + nullCount++; + } + + try (IntVector before = beforeBuilder.build(); Block withNulls = block.insertNulls(before)) { + List actual = new ArrayList<>(block.getPositionCount()); + BlockTestUtils.readInto(actual, withNulls); + assertThat(actual, equalTo(expected)); + } + } + } + void releaseAndAssertBreaker(Block... blocks) { assertThat(breaker.getUsed(), greaterThan(0L)); Page[] pages = Arrays.stream(blocks).map(Page::new).toArray(Page[]::new); @@ -1909,4 +1963,14 @@ public class BasicBlockTests extends ESTestCase { return builder.build(); } } + + /** + * A random {@link ElementType} for which we can build a {@link RandomBlock}. 
+ */ + public static ElementType randomElementType() { + return randomValueOtherThanMany( + e -> e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE, + () -> randomFrom(ElementType.values()) + ); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java index e37b2638b56f..da7b8cd87db7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.function.IntUnaryOperator; import java.util.stream.IntStream; +import static org.elasticsearch.compute.data.BasicBlockTests.assertInsertNulls; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.nullValue; @@ -68,6 +69,7 @@ public class BlockMultiValuedTests extends ESTestCase { assertThat(b.block().mayHaveMultivaluedFields(), equalTo(b.values().stream().anyMatch(l -> l != null && l.size() > 1))); assertThat(b.block().doesHaveMultivaluedFields(), equalTo(b.values().stream().anyMatch(l -> l != null && l.size() > 1))); + assertInsertNulls(b.block()); } finally { b.block().close(); } @@ -171,6 +173,16 @@ public class BlockMultiValuedTests extends ESTestCase { } } + public void testInsertNull() { + int positionCount = randomIntBetween(1, 16 * 1024); + var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 2, 10, 0, 0); + try { + assertInsertNulls(b.block()); + } finally { + b.block().close(); + } + } + private void assertFiltered(boolean all, boolean shuffled) { int positionCount = randomIntBetween(1, 16 * 1024); var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java index ce62fb9896eb..cf99c59bb4c7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java @@ -22,6 +22,7 @@ import org.junit.After; import java.util.ArrayList; import java.util.List; +import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; @@ -76,6 +77,16 @@ public abstract class ComputeTestCase extends ESTestCase { return blockFactory; } + protected final void testWithCrankyBlockFactory(Consumer run) { + try { + run.accept(crankyBlockFactory()); + logger.info("cranky let us finish!"); + } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + @After public final void allBreakersEmpty() throws Exception { // first check that all big arrays are released, which can affect breakers diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 8200529e1829..bbe4a07cc44b 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -10,14 +10,13 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.hamcrest.Matcher; -import java.util.ArrayList; import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.data.BasicBlockTests.randomElementType; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; @@ -129,17 +128,6 @@ public class LimitOperatorTests extends OperatorTestCase { if (randomBoolean()) { return blockFactory.newConstantNullBlock(size); } - return BasicBlockTests.randomBlock(blockFactory, randomElement(), size, false, 1, 1, 0, 0).block(); - } - - static ElementType randomElement() { - List l = new ArrayList<>(); - for (ElementType e : ElementType.values()) { - if (e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE) { - continue; - } - l.add(e); - } - return randomFrom(l); + return BasicBlockTests.randomBlock(blockFactory, randomElementType(), size, false, 1, 1, 0, 0).block(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java new file mode 100644 index 000000000000..1312b772dbfa --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java @@ -0,0 +1,434 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.lookup; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BasicBlockTests; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockTestUtils; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.test.ListMatcher; + +import java.text.NumberFormat; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Optional; +import java.util.Set; +import java.util.stream.IntStream; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.hamcrest.Matchers.equalTo; + +public class RightChunkedLeftJoinTests extends ComputeTestCase { + public void testNoGaps() { + testNoGaps(blockFactory()); + } + + public void testNoGapsCranky() { + testWithCrankyBlockFactory(this::testNoGaps); + } + + private void testNoGaps(BlockFactory factory) { + int size = 100; + try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(buildExampleLeftHand(factory, size), 2)) { + assertJoined( + factory, + join, + new int[][] { + { 0, 1, 2 }, // formatter + { 1, 2, 3 }, // formatter + { 2, 3, 3 }, // formatter + { 3, 9, 9 }, // formatter + }, + new Object[][] { + { "l00", 1, 2 }, // formatter + { "l01", 2, 3 }, // formatter + { "l02", 3, 3 }, // formatter + { "l03", 9, 9 }, // formatter + } + ); + assertTrailing(join, size, 4); + } + } + + /** + * Test the first example in the main javadoc of {@link RightChunkedLeftJoin}. + */ + public void testFirstExample() { + testFirstExample(blockFactory()); + } + + public void testFirstExampleCranky() { + testWithCrankyBlockFactory(this::testFirstExample); + } + + private void testFirstExample(BlockFactory factory) { + try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(buildExampleLeftHand(factory, 100), 2)) { + assertJoined( + factory, + join, + new int[][] { + { 0, 1, 2 }, // formatter + { 1, 2, 3 }, // formatter + { 1, 3, 3 }, // formatter + { 3, 9, 9 }, // formatter + }, + new Object[][] { + { "l00", 1, 2 }, // formatter + { "l01", 2, 3 }, // formatter + { "l01", 3, 3 }, // formatter + { "l02", null, null }, // formatter + { "l03", 9, 9 }, // formatter + } + ); + } + } + + public void testLeadingNulls() { + testLeadingNulls(blockFactory()); + } + + public void testLeadingNullsCranky() { + testWithCrankyBlockFactory(this::testLeadingNulls); + } + + private void testLeadingNulls(BlockFactory factory) { + int size = 3; + try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(buildExampleLeftHand(factory, size), 2)) { + assertJoined( + factory, + join, + new int[][] { { 2, 1, 2 } }, + new Object[][] { + { "l0", null, null }, // formatter + { "l1", null, null }, // formatter + { "l2", 1, 2 }, // formatter + } + ); + assertTrailing(join, size, 3); + } + } + + public void testSecondExample() { + testSecondExample(blockFactory()); + } + + public void testSecondExampleCranky() { + testWithCrankyBlockFactory(this::testSecondExample); + } + + /** + * Test the second example in the main javadoc of {@link RightChunkedLeftJoin}. 
+ */ + private void testSecondExample(BlockFactory factory) { + int size = 100; + try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(buildExampleLeftHand(factory, size), 2)) { + assertJoined( + factory, + join, + new int[][] { + { 0, 1, 2 }, // formatter + { 1, 3, 3 }, // formatter + }, + new Object[][] { + { "l00", 1, 2 }, // formatter + { "l01", 3, 3 }, // formatter + } + ); + assertJoined( + factory, + join, + new int[][] { + { 1, 9, 9 }, // formatter + { 2, 9, 9 }, // formatter + }, + new Object[][] { + { "l01", 9, 9 }, // formatter + { "l02", 9, 9 }, // formatter + } + ); + assertJoined( + factory, + join, + new int[][] { + { 5, 10, 10 }, // formatter + { 7, 11, 11 }, // formatter + }, + new Object[][] { + { "l03", null, null }, // formatter + { "l04", null, null }, // formatter + { "l05", 10, 10 }, // formatter + { "l06", null, null }, // formatter + { "l07", 11, 11 }, // formatter + } + ); + assertTrailing(join, size, 8); + } + } + + public void testThirdExample() { + testThirdExample(blockFactory()); + } + + public void testThirdExampleCranky() { + testWithCrankyBlockFactory(this::testThirdExample); + } + + /** + * Test the third example in the main javadoc of {@link RightChunkedLeftJoin}. + */ + private void testThirdExample(BlockFactory factory) { + int size = 100; + try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(buildExampleLeftHand(factory, size), 2)) { + Page pre = buildPage(factory, IntStream.range(0, 96).mapToObj(p -> new int[] { p, p, p }).toArray(int[][]::new)); + try { + join.join(pre).releaseBlocks(); + } finally { + pre.releaseBlocks(); + } + assertJoined( + factory, + join, + new int[][] { + { 96, 1, 2 }, // formatter + { 97, 3, 3 }, // formatter + }, + new Object[][] { + { "l96", 1, 2 }, // formatter + { "l97", 3, 3 }, // formatter + } + ); + assertTrailing(join, size, 98); + } + } + + public void testRandom() { + testRandom(blockFactory()); + } + + public void testRandomCranky() { + testWithCrankyBlockFactory(this::testRandom); + } + + private void testRandom(BlockFactory factory) { + int leftSize = between(100, 10000); + ElementType[] leftColumns = randomArray(1, 10, ElementType[]::new, BasicBlockTests::randomElementType); + ElementType[] rightColumns = randomArray(1, 10, ElementType[]::new, BasicBlockTests::randomElementType); + + RandomPage left = randomPage(factory, leftColumns, leftSize); + try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(left.page, rightColumns.length)) { + int rightSize = 5; + IntVector selected = randomPositions(factory, leftSize, rightSize); + RandomPage right = randomPage(factory, rightColumns, rightSize, selected.asBlock()); + + try { + Page joined = join.join(right.page); + try { + assertThat(joined.getPositionCount(), equalTo(selected.max() + 1)); + + List> actualColumns = new ArrayList<>(); + BlockTestUtils.readInto(actualColumns, joined); + int rightRow = 0; + for (int leftRow = 0; leftRow < joined.getPositionCount(); leftRow++) { + List actualRow = new ArrayList<>(); + for (List actualColumn : actualColumns) { + actualRow.add(actualColumn.get(leftRow)); + } + ListMatcher matcher = ListMatcher.matchesList(); + for (int c = 0; c < leftColumns.length; c++) { + matcher = matcher.item(unwrapSingletonLists(left.blocks[c].values().get(leftRow))); + } + if (selected.getInt(rightRow) == leftRow) { + for (int c = 0; c < rightColumns.length; c++) { + matcher = matcher.item(unwrapSingletonLists(right.blocks[c].values().get(rightRow))); + } + rightRow++; + } else { + for (int c = 0; c < rightColumns.length; c++) { + 
matcher = matcher.item(null); + } + } + assertMap(actualRow, matcher); + } + } finally { + joined.releaseBlocks(); + } + + int start = selected.max() + 1; + if (start >= left.page.getPositionCount()) { + assertThat(join.noMoreRightHandPages().isPresent(), equalTo(false)); + return; + } + Page remaining = join.noMoreRightHandPages().get(); + try { + assertThat(remaining.getPositionCount(), equalTo(left.page.getPositionCount() - start)); + List> actualColumns = new ArrayList<>(); + BlockTestUtils.readInto(actualColumns, remaining); + for (int leftRow = start; leftRow < left.page.getPositionCount(); leftRow++) { + List actualRow = new ArrayList<>(); + for (List actualColumn : actualColumns) { + actualRow.add(actualColumn.get(leftRow - start)); + } + ListMatcher matcher = ListMatcher.matchesList(); + for (int c = 0; c < leftColumns.length; c++) { + matcher = matcher.item(unwrapSingletonLists(left.blocks[c].values().get(leftRow))); + } + for (int c = 0; c < rightColumns.length; c++) { + matcher = matcher.item(null); + } + assertMap(actualRow, matcher); + } + + } finally { + remaining.releaseBlocks(); + } + } finally { + right.page.releaseBlocks(); + } + } finally { + left.page.releaseBlocks(); + } + } + + NumberFormat exampleNumberFormat(int size) { + NumberFormat nf = NumberFormat.getIntegerInstance(Locale.ROOT); + nf.setMinimumIntegerDigits((int) Math.ceil(Math.log10(size))); + return nf; + } + + Page buildExampleLeftHand(BlockFactory factory, int size) { + NumberFormat nf = exampleNumberFormat(size); + try (BytesRefVector.Builder builder = factory.newBytesRefVectorBuilder(size)) { + for (int i = 0; i < size; i++) { + builder.appendBytesRef(new BytesRef("l" + nf.format(i))); + } + return new Page(builder.build().asBlock()); + } + } + + Page buildPage(BlockFactory factory, int[][] rows) { + try ( + IntVector.Builder positions = factory.newIntVectorFixedBuilder(rows.length); + IntVector.Builder r1 = factory.newIntVectorFixedBuilder(rows.length); + IntVector.Builder r2 = factory.newIntVectorFixedBuilder(rows.length); + ) { + for (int[] row : rows) { + positions.appendInt(row[0]); + r1.appendInt(row[1]); + r2.appendInt(row[2]); + } + return new Page(positions.build().asBlock(), r1.build().asBlock(), r2.build().asBlock()); + } + } + + private void assertJoined(Page joined, Object[][] expected) { + try { + List> actualColumns = new ArrayList<>(); + BlockTestUtils.readInto(actualColumns, joined); + + for (int r = 0; r < expected.length; r++) { + List actualRow = new ArrayList<>(); + for (int c = 0; c < actualColumns.size(); c++) { + Object v = actualColumns.get(c).get(r); + if (v instanceof BytesRef b) { + v = b.utf8ToString(); + } + actualRow.add(v); + } + + ListMatcher rowMatcher = matchesList(); + for (Object v : expected[r]) { + rowMatcher = rowMatcher.item(v); + } + assertMap("row " + r, actualRow, rowMatcher); + } + } finally { + joined.releaseBlocks(); + } + } + + private void assertJoined(BlockFactory factory, RightChunkedLeftJoin join, int[][] rightRows, Object[][] expectRows) { + Page rightHand = buildPage(factory, rightRows); + try { + assertJoined(join.join(rightHand), expectRows); + } finally { + rightHand.releaseBlocks(); + } + } + + private void assertTrailing(RightChunkedLeftJoin join, int size, int next) { + NumberFormat nf = exampleNumberFormat(size); + if (size == next) { + assertThat(join.noMoreRightHandPages(), equalTo(Optional.empty())); + } else { + assertJoined( + join.noMoreRightHandPages().get(), + IntStream.range(next, size).mapToObj(p -> new Object[] { "l" + nf.format(p), 
null, null }).toArray(Object[][]::new) + ); + } + } + + Object unwrapSingletonLists(Object o) { + if (o instanceof List l && l.size() == 1) { + return l.getFirst(); + } + return o; + } + + record RandomPage(Page page, BasicBlockTests.RandomBlock[] blocks) {}; + + RandomPage randomPage(BlockFactory factory, ElementType[] types, int positions, Block... prepend) { + BasicBlockTests.RandomBlock[] randomBlocks = new BasicBlockTests.RandomBlock[types.length]; + Block[] blocks = new Block[prepend.length + types.length]; + try { + for (int c = 0; c < prepend.length; c++) { + blocks[c] = prepend[c]; + } + for (int c = 0; c < types.length; c++) { + + int min = between(0, 3); + randomBlocks[c] = BasicBlockTests.randomBlock( + factory, + types[c], + positions, + randomBoolean(), + min, + between(min, min + 3), + 0, + 0 + ); + blocks[prepend.length + c] = randomBlocks[c].block(); + } + Page p = new Page(blocks); + blocks = null; + return new RandomPage(p, randomBlocks); + } finally { + if (blocks != null) { + Releasables.close(blocks); + } + } + } + + IntVector randomPositions(BlockFactory factory, int leftSize, int positionCount) { + Set positions = new HashSet<>(); + while (positions.size() < positionCount) { + positions.add(between(0, leftSize - 1)); + } + int[] positionsArray = positions.stream().mapToInt(i -> i).sorted().toArray(); + return factory.newIntArrayVector(positionsArray, positionsArray.length); + } +} diff --git a/x-pack/plugin/esql/qa/security/build.gradle b/x-pack/plugin/esql/qa/security/build.gradle index 068a4fd8f498..c04595ef2283 100644 --- a/x-pack/plugin/esql/qa/security/build.gradle +++ b/x-pack/plugin/esql/qa/security/build.gradle @@ -5,3 +5,9 @@ apply plugin: 'elasticsearch.internal-test-artifact' tasks.named('javaRestTest') { usesDefaultDistribution() } + +dependencies { + javaRestTestImplementation project(xpackModule('esql-core')) + javaRestTestImplementation project(xpackModule('esql')) + javaRestTestImplementation project(xpackModule('esql:qa:server')) +} diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index d9f211405e23..00cf4d63af33 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -26,6 +26,8 @@ import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.junit.Before; import org.junit.ClassRule; @@ -544,6 +546,11 @@ public class EsqlSecurityIT extends ESRestTestCase { } public void testLookupJoinIndexAllowed() throws Exception { + assumeTrue( + "Requires LOOKUP JOIN capability", + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V9.capabilityName())) + ); + Response resp = runESQLCommand( "metadata1_read2", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN `lookup-user2` ON value | KEEP x, org" @@ -577,7 +584,12 @@ public class EsqlSecurityIT extends ESRestTestCase { assertThat(respMap.get("values"), equalTo(List.of(Arrays.asList(123.0, null)))); } - public void testLookupJoinIndexForbidden() { + public 
void testLookupJoinIndexForbidden() throws Exception { + assumeTrue( + "Requires LOOKUP JOIN capability", + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V9.capabilityName())) + ); + var resp = expectThrows( ResponseException.class, () -> runESQLCommand("metadata1_read2", "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN `lookup-user1` ON value | KEEP x") diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index e7bce73e21cd..edc3df9db030 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -199,7 +199,7 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { return hasCapabilities(adminClient(), requiredCapabilities); } - protected static boolean hasCapabilities(RestClient client, List requiredCapabilities) throws IOException { + public static boolean hasCapabilities(RestClient client, List requiredCapabilities) throws IOException { if (requiredCapabilities.isEmpty()) { return true; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 0ecff0e229ef..d65a0975e22b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1317,7 +1317,7 @@ emp_no:integer | first_name:keyword equalsNullToUpperFolded from employees -| where to_upper(first_name) == null::keyword +| where to_upper(first_name) == to_string(null) | keep emp_no, first_name ; @@ -1335,7 +1335,7 @@ emp_no:integer | first_name:keyword notEqualsNullToUpperFolded from employees -| where to_upper(first_name) != null::keyword +| where to_upper(first_name) != to_string(null) | keep emp_no, first_name ; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java index 3a69983a0d86..1e34421097aa 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -7,18 +7,11 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.FailingFieldPlugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -26,8 +19,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.Map; 
-import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -51,7 +42,7 @@ public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { mapping.startObject("fail_me"); { mapping.field("type", "long"); - mapping.startObject("script").field("source", "").field("lang", "fail").endObject(); + mapping.startObject("script").field("source", "").field("lang", "failing_field").endObject(); } mapping.endObject(); } @@ -66,51 +57,7 @@ public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { docs.add(client().prepareIndex("fail").setSource("foo", 0)); indexRandom(true, docs); - ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> run("FROM fail,ok | LIMIT 100").close()); - assertThat(e.getMessage(), equalTo("test failure")); - } - - public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { - - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new ScriptEngine() { - @Override - public String getType() { - return "fail"; - } - - @Override - @SuppressWarnings("unchecked") - public FactoryType compile( - String name, - String code, - ScriptContext context, - Map params - ) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - throw new ElasticsearchException("test failure"); - } - }; - } - }; - } - - @Override - public Set> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; - } + IllegalStateException e = expectThrows(IllegalStateException.class, () -> run("FROM fail,ok | LIMIT 100").close()); + assertThat(e.getMessage(), equalTo("Accessing failing field")); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 3b9359fe66d4..f31eabea9d61 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -69,13 +69,56 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.hamcrest.Matchers.empty; public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { + // TODO should we remove this now that this is integrated into ESQL proper? /** * Quick and dirty test for looking up data from a lookup index. */ public void testLookupIndex() throws IOException { - // TODO this should *fail* if the target index isn't a lookup type index - it doesn't now. - int docCount = between(10, 1000); - List expected = new ArrayList<>(docCount); + runLookup(new UsingSingleLookupTable(new String[] { "aa", "bb", "cc", "dd" })); + } + + /** + * Tests when multiple results match. 
+ */ + @AwaitsFix(bugUrl = "fixing real soon now") + public void testLookupIndexMultiResults() throws IOException { + runLookup(new UsingSingleLookupTable(new Object[] { "aa", new String[] { "bb", "ff" }, "cc", "dd" })); + } + + interface PopulateIndices { + void populate(int docCount, List expected) throws IOException; + } + + class UsingSingleLookupTable implements PopulateIndices { + private final Object[] lookupData; + + UsingSingleLookupTable(Object[] lookupData) { + this.lookupData = lookupData; + } + + @Override + public void populate(int docCount, List expected) throws IOException { + List docs = new ArrayList<>(); + for (int i = 0; i < docCount; i++) { + docs.add(client().prepareIndex("source").setSource(Map.of("data", lookupData[i % lookupData.length]))); + Object d = lookupData[i % lookupData.length]; + if (d instanceof String s) { + expected.add(s + ":" + (i % lookupData.length)); + } else if (d instanceof String[] ss) { + for (String s : ss) { + expected.add(s + ":" + (i % lookupData.length)); + } + } + } + for (int i = 0; i < lookupData.length; i++) { + docs.add(client().prepareIndex("lookup").setSource(Map.of("data", lookupData[i], "l", i))); + } + Collections.sort(expected); + indexRandom(true, true, docs); + } + } + + private void runLookup(PopulateIndices populateIndices) throws IOException { client().admin() .indices() .prepareCreate("source") @@ -95,17 +138,9 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { .get(); client().admin().cluster().prepareHealth(TEST_REQUEST_TIMEOUT).setWaitForGreenStatus().get(); - String[] data = new String[] { "aa", "bb", "cc", "dd" }; - List docs = new ArrayList<>(); - for (int i = 0; i < docCount; i++) { - docs.add(client().prepareIndex("source").setSource(Map.of("data", data[i % data.length]))); - expected.add(data[i % data.length] + ":" + (i % data.length)); - } - for (int i = 0; i < data.length; i++) { - docs.add(client().prepareIndex("lookup").setSource(Map.of("data", data[i], "l", i))); - } - Collections.sort(expected); - indexRandom(true, true, docs); + int docCount = between(10, 1000); + List expected = new ArrayList<>(docCount); + populateIndices.populate(docCount, expected); /* * Find the data node hosting the only shard of the source index. 
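For orientation before the response-rendering changes below, the join contract exercised by RightChunkedLeftJoinTests above can be condensed into a small worked example. This is a sketch reusing the test fixtures (the "l00"-style labels come from buildExampleLeftHand, and the right-hand page layout is the one buildPage constructs):

    // Left-hand page: one BytesRef column, rows l00..l04 (5 positions).
    // Right-hand page: a leading IntVector of matched left positions followed
    // by the right-hand columns, e.g. positions [0, 2] carrying values [10, 20].
    //
    // join(rightHand) emits left rows up to the last matched position and
    // null-pads the gaps:
    //     l00 | 10
    //     l01 | null
    //     l02 | 20
    // noMoreRightHandPages() then yields the trailing unmatched rows
    // (l03 | null, l04 | null), which the implementation builds with
    // Block#filter over the remaining positions plus constant-null blocks.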
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index 52170dfb0525..9b21efc069e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -9,11 +9,12 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; @@ -233,16 +234,18 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable { if (isCrossClusterSearch() == false || clusterInfo.isEmpty()) { return Collections.emptyIterator(); } - return ChunkedToXContent.builder(params).object(b -> { - b.field(TOTAL_FIELD.getPreferredName(), clusterInfo.size()); - b.field(SUCCESSFUL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SUCCESSFUL)); - b.field(RUNNING_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.RUNNING)); - b.field(SKIPPED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SKIPPED)); - b.field(PARTIAL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.PARTIAL)); - b.field(FAILED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.FAILED)); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + ChunkedToXContentHelper.field(TOTAL_FIELD.getPreferredName(), clusterInfo.size()), + ChunkedToXContentHelper.field(SUCCESSFUL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SUCCESSFUL)), + ChunkedToXContentHelper.field(RUNNING_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.RUNNING)), + ChunkedToXContentHelper.field(SKIPPED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SKIPPED)), + ChunkedToXContentHelper.field(PARTIAL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.PARTIAL)), + ChunkedToXContentHelper.field(FAILED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.FAILED)), // each Cluster object defines its own field object name - b.xContentObject("details", clusterInfo.values().iterator()); - }); + ChunkedToXContentHelper.xContentFragmentValuesMapCreateOwnName("details", clusterInfo), + ChunkedToXContentHelper.endObject() + ); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 4163a222b1a2..01b1821c01b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; 
import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.core.esql.action.EsqlResponse; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -184,38 +185,60 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action. return executionInfo; } + private Iterator asyncPropertiesOrEmpty() { + if (isAsync) { + return ChunkedToXContentHelper.singleChunk((builder, params) -> { + if (asyncExecutionId != null) { + builder.field("id", asyncExecutionId); + } + builder.field("is_running", isRunning); + return builder; + }); + } else { + return Collections.emptyIterator(); + } + } + @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(b -> { - boolean dropNullColumns = b.params().paramAsBoolean(DROP_NULL_COLUMNS_OPTION, false); - boolean[] nullColumns = dropNullColumns ? nullColumns() : null; + boolean dropNullColumns = params.paramAsBoolean(DROP_NULL_COLUMNS_OPTION, false); + boolean[] nullColumns = dropNullColumns ? nullColumns() : null; - if (isAsync) { - if (asyncExecutionId != null) { - b.field("id", asyncExecutionId); - } - b.field("is_running", isRunning); - } - if (executionInfo != null) { - long tookInMillis = executionInfo.overallTook() == null - ? executionInfo.tookSoFar().millis() - : executionInfo.overallTook().millis(); - b.field("took", tookInMillis); - } - if (dropNullColumns) { - b.append(ResponseXContentUtils.allColumns(columns, "all_columns")) - .append(ResponseXContentUtils.nonNullColumns(columns, nullColumns, "columns")); - } else { - b.append(ResponseXContentUtils.allColumns(columns, "columns")); - } - b.array("values", ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns)); - if (executionInfo != null && executionInfo.isCrossClusterSearch() && executionInfo.includeCCSMetadata()) { - b.field("_clusters", executionInfo); - } - if (profile != null) { - b.field("profile", profile); - } - }); + Iterator tookTime; + if (executionInfo != null && executionInfo.overallTook() != null) { + tookTime = ChunkedToXContentHelper.singleChunk((builder, p) -> { + builder.field("took", executionInfo.overallTook().millis()); + return builder; + }); + } else { + tookTime = Collections.emptyIterator(); + } + + Iterator columnHeadings = dropNullColumns + ? Iterators.concat( + ResponseXContentUtils.allColumns(columns, "all_columns"), + ResponseXContentUtils.nonNullColumns(columns, nullColumns, "columns") + ) + : ResponseXContentUtils.allColumns(columns, "columns"); + Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns); + Iterator profileRender = profile == null + ? Collections.emptyIterator() + : ChunkedToXContentHelper.field("profile", profile, params); + Iterator executionInfoRender = executionInfo != null + && executionInfo.isCrossClusterSearch() + && executionInfo.includeCCSMetadata() + ? 
ChunkedToXContentHelper.field("_clusters", executionInfo, params) + : Collections.emptyIterator(); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + asyncPropertiesOrEmpty(), + tookTime, + columnHeadings, + ChunkedToXContentHelper.array("values", valuesIt), + executionInfoRender, + profileRender, + ChunkedToXContentHelper.endObject() + ); } public boolean[] nullColumns() { @@ -354,8 +377,11 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action. @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params) - .object(ob -> ob.array("drivers", drivers.iterator(), ChunkedToXContentBuilder::append)); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + ChunkedToXContentHelper.array("drivers", drivers.iterator(), params), + ChunkedToXContentHelper.endObject() + ); } List drivers() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 67948fe717f2..b79dda900f39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -61,6 +61,7 @@ import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.plan.PlanWritables; @@ -192,6 +193,7 @@ public class EsqlPlugin extends Plugin implements ActionPlugin { entries.add(SingleValueQuery.ENTRY); entries.add(AsyncOperator.Status.ENTRY); entries.add(EnrichLookupOperator.Status.ENTRY); + entries.add(LookupFromIndexOperator.Status.ENTRY); entries.addAll(BlockWritables.getNamedWriteables()); entries.addAll(ExpressionWritables.getNamedWriteables()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 2deedb927331..4b9ae0d1692e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -529,7 +529,7 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; - assertChunkCount(resp, r -> 7 + sizeClusterDetails + bodySize); // is_running + assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize); // is_running } } @@ -541,7 +541,7 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase 7 + sizeClusterDetails + bodySize); + assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize); } } diff --git a/x-pack/plugin/fleet/build.gradle b/x-pack/plugin/fleet/build.gradle index 9fef7a928bcf..c00a2af430f4 100644 --- a/x-pack/plugin/fleet/build.gradle +++ b/x-pack/plugin/fleet/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.legacy-java-rest-test' 
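Stepping back to the ChunkedToXContentHelper migration above (EsqlExecutionInfo, EsqlQueryResponse, and similar spots later in this diff): the pattern is always the same, replacing the ChunkedToXContent.builder(params) lambda style with an explicit Iterators.concat of per-part chunk iterators, where optional parts contribute Collections.emptyIterator(). A minimal sketch of the shape; the "total" field and the optionalChunks() helper are illustrative, not part of the PR:

    @Override
    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
        return Iterators.concat(
            ChunkedToXContentHelper.startObject(),
            ChunkedToXContentHelper.field("total", total), // one chunk per field
            optionalChunks(),                              // Collections.emptyIterator() when absent
            ChunkedToXContentHelper.endObject()
        );
    }

This also appears to be why the expected chunk counts in EsqlQueryResponseTests drop from 7 to 6: the async id/is_running properties now render as a single chunk via ChunkedToXContentHelper.singleChunk.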
esplugin { - name 'x-pack-fleet' - description 'Elasticsearch Expanded Pack Plugin - Plugin exposing APIs for Fleet system indices' - classname 'org.elasticsearch.xpack.fleet.Fleet' + name = 'x-pack-fleet' + description = 'Elasticsearch Expanded Pack Plugin - Plugin exposing APIs for Fleet system indices' + classname = 'org.elasticsearch.xpack.fleet.Fleet' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/frozen-indices/build.gradle b/x-pack/plugin/frozen-indices/build.gradle index d4423f9df274..8b1aa7f0ba8e 100644 --- a/x-pack/plugin/frozen-indices/build.gradle +++ b/x-pack/plugin/frozen-indices/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'frozen-indices' - description 'A plugin for the frozen indices functionality' - classname 'org.elasticsearch.xpack.frozen.FrozenIndices' + name = 'frozen-indices' + description = 'A plugin for the frozen indices functionality' + classname = 'org.elasticsearch.xpack.frozen.FrozenIndices' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/geoip-enterprise-downloader/build.gradle b/x-pack/plugin/geoip-enterprise-downloader/build.gradle index ab16609ac7aa..7ab9159eed4c 100644 --- a/x-pack/plugin/geoip-enterprise-downloader/build.gradle +++ b/x-pack/plugin/geoip-enterprise-downloader/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-geoip-enterprise-downloader' - description 'Elasticsearch Expanded Pack Plugin - Geoip Enterprise Downloader' - classname 'org.elasticsearch.xpack.geoip.EnterpriseDownloaderPlugin' + name = 'x-pack-geoip-enterprise-downloader' + description = 'Elasticsearch Expanded Pack Plugin - Geoip Enterprise Downloader' + classname = 'org.elasticsearch.xpack.geoip.EnterpriseDownloaderPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/graph/build.gradle b/x-pack/plugin/graph/build.gradle index 7f622b228343..b944a3b5b8a7 100644 --- a/x-pack/plugin/graph/build.gradle +++ b/x-pack/plugin/graph/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-graph' - description 'Elasticsearch Expanded Pack Plugin - Graph' - classname 'org.elasticsearch.xpack.graph.Graph' + name = 'x-pack-graph' + description = 'Elasticsearch Expanded Pack Plugin - Graph' + classname = 'org.elasticsearch.xpack.graph.Graph' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/identity-provider/build.gradle b/x-pack/plugin/identity-provider/build.gradle index f9c121da0f55..2d0b3e09db2c 100644 --- a/x-pack/plugin/identity-provider/build.gradle +++ b/x-pack/plugin/identity-provider/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-identity-provider' - description 'Elasticsearch Expanded Pack Plugin - Identity Provider' - classname 'org.elasticsearch.xpack.idp.IdentityProviderPlugin' + name = 'x-pack-identity-provider' + description = 'Elasticsearch Expanded Pack Plugin - Identity Provider' + classname = 'org.elasticsearch.xpack.idp.IdentityProviderPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/ilm/build.gradle b/x-pack/plugin/ilm/build.gradle index e0dbd0b8db00..9ef2ad5b0592 100644
--- a/x-pack/plugin/ilm/build.gradle +++ b/x-pack/plugin/ilm/build.gradle @@ -2,12 +2,12 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-ilm' - description 'Elasticsearch Expanded Pack Plugin - Index Lifecycle Management' - classname 'org.elasticsearch.xpack.ilm.IndexLifecycle' + name = 'x-pack-ilm' + description = 'Elasticsearch Expanded Pack Plugin - Index Lifecycle Management' + classname = 'org.elasticsearch.xpack.ilm.IndexLifecycle' extendedPlugins = ['x-pack-core'] - hasNativeController false - requiresKeystore true + hasNativeController = false + requiresKeystore = true } base { archivesName = 'x-pack-ilm' diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index b77e643bc285..e70b1be1d108 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -601,6 +601,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { "shutdown_node", SingleNodeShutdownMetadata.builder() .setNodeId("shutdown_node") + .setNodeEphemeralId("shutdown_node") .setReason("shut down for test") .setStartedAtMillis(randomNonNegativeLong()) .setType(SingleNodeShutdownMetadata.Type.RESTART) @@ -632,6 +633,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { "shutdown_node", SingleNodeShutdownMetadata.builder() .setNodeId("shutdown_node") + .setNodeEphemeralId("shutdown_node") .setReason("shut down for test") .setStartedAtMillis(randomNonNegativeLong()) .setType(type) diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 1d0236a5834e..cb3ccbd17130 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -16,9 +16,9 @@ restResources { } esplugin { - name 'x-pack-inference' - description 'Configuration and evaluation of inference models' - classname 'org.elasticsearch.xpack.inference.InferencePlugin' + name = 'x-pack-inference' + description = 'Configuration and evaluation of inference models' + classname = 'org.elasticsearch.xpack.inference.InferencePlugin' extendedPlugins = ['x-pack-core'] } @@ -34,6 +34,7 @@ dependencies { testImplementation(testArtifact(project(':server'))) testImplementation(project(':x-pack:plugin:inference:qa:test-service-plugin')) testImplementation project(':modules:reindex') + testImplementation project(':modules:mapper-extras') clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') api "com.ibm.icu:icu4j:${versions.icu4j}" @@ -66,8 +67,8 @@ dependencies { api "software.amazon.awssdk:checksums-spi:${versions.awsv2sdk}" api "software.amazon.awssdk:checksums:${versions.awsv2sdk}" api "software.amazon.awssdk:sdk-core:${versions.awsv2sdk}" - api "org.reactivestreams:reactive-streams:1.0.4" - api "org.reactivestreams:reactive-streams-tck:1.0.4" + api "org.reactivestreams:reactive-streams:${versions.reactive_streams}" + api "org.reactivestreams:reactive-streams-tck:${versions.reactive_streams}" api "software.amazon.awssdk:profiles:${versions.awsv2sdk}" api "software.amazon.awssdk:retries:${versions.awsv2sdk}" api "software.amazon.awssdk:auth:${versions.awsv2sdk}" diff --git a/x-pack/plugin/inference/qa/test-service-plugin/build.gradle b/x-pack/plugin/inference/qa/test-service-plugin/build.gradle index
031c7519154b..145869bf6c34 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/build.gradle +++ b/x-pack/plugin/inference/qa/test-service-plugin/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - name 'inference-service-test' - description 'A mock inference service' - classname 'org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin' + name = 'inference-service-test' + description = 'A mock inference service' + classname = 'org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin' extendedPlugins = ['x-pack-inference'] } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 3b0fc869c812..90a9dd3355b3 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.action.filter; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -17,6 +19,9 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; @@ -29,9 +34,9 @@ import org.junit.Before; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; @@ -40,9 +45,19 @@ import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.ra import static org.hamcrest.Matchers.equalTo; public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { - public static final String INDEX_NAME = "test-index"; + private final boolean useLegacyFormat; + + public ShardBulkInferenceActionFilterIT(boolean useLegacyFormat) { + this.useLegacyFormat = useLegacyFormat; + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return List.of(new Object[] { true }, new Object[] { false }); + } + @Before public void setup() throws Exception { Utils.storeSparseModel(client()); @@ -61,8 +76,16 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { return Arrays.asList(Utils.TestInferencePlugin.class); } + @Override + public Settings indexSettings() { + return Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)) + 
.put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) + .build(); + } + public void testBulkOperations() throws Exception { - Map shardsSettings = Collections.singletonMap(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)); indicesAdmin().prepareCreate(INDEX_NAME) .setMapping( String.format( @@ -85,7 +108,6 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { TestDenseInferenceServiceExtension.TestInferenceService.NAME ) ) - .setSettings(shardsSettings) .get(); int totalBulkReqs = randomIntBetween(2, 100); @@ -151,5 +173,4 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { searchResponse.decRef(); } } - } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index b993cf36cb87..5fc4448c8094 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.ResponseListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; @@ -34,6 +35,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; @@ -242,7 +244,11 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { @Override public Iterator toXContentChunked(ToXContent.Params params) { var randomString = randomUnicodeOfLengthBetween(2, 20); - return ChunkedToXContent.builder(params).object(b -> b.field("delta", randomString)); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + ChunkedToXContentHelper.field("delta", randomString), + ChunkedToXContentHelper.endObject() + ); } } @@ -275,7 +281,7 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).field("result", randomUnicodeOfLengthBetween(2, 20)); + return ChunkedToXContentHelper.field("result", randomUnicodeOfLengthBetween(2, 20)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index ac225800ad1b..0ce64a5dccb4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -25,6 +25,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -75,6 +76,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorSer import org.elasticsearch.xpack.inference.highlight.SemanticTextHighlighter; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.mapper.OffsetSourceFieldMapper; +import org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsMapper; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; import org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor; import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; @@ -288,7 +290,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP } inferenceServiceRegistry.set(registry); - var actionFilter = new ShardBulkInferenceActionFilter(registry, modelRegistry); + var actionFilter = new ShardBulkInferenceActionFilter(services.clusterService(), registry, modelRegistry); shardBulkInferenceActionFilter.set(actionFilter); var meterRegistry = services.telemetryProvider().getMeterRegistry(); @@ -417,6 +419,11 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP IOUtils.closeWhileHandlingException(inferenceServiceRegistry.get(), throttlerToClose); } + @Override + public Map getMetadataMappers() { + return Map.of(SemanticInferenceMetadataFieldsMapper.NAME, SemanticInferenceMetadataFieldsMapper.PARSER); + } + @Override public Map getMappers() { return Map.of( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index a9195ea24af3..44643bd24566 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -24,11 +24,14 @@ import org.elasticsearch.action.support.MappedActionFilter; import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.cluster.metadata.ProjectMetadata; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -40,11 +43,11 @@ import org.elasticsearch.tasks.Task; import 
org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.inference.mapper.SemanticTextField; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.mapper.SemanticTextUtils; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -53,8 +56,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSemanticTextFieldChunks; - /** * A {@link MappedActionFilter} that intercepts {@link BulkShardRequest} to apply inference on fields specified * as {@link SemanticTextFieldMapper} in the index mapping. For each semantic text field referencing fields in @@ -68,15 +69,26 @@ import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSeman public class ShardBulkInferenceActionFilter implements MappedActionFilter { protected static final int DEFAULT_BATCH_SIZE = 512; + private final ClusterService clusterService; private final InferenceServiceRegistry inferenceServiceRegistry; private final ModelRegistry modelRegistry; private final int batchSize; - public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry) { - this(inferenceServiceRegistry, modelRegistry, DEFAULT_BATCH_SIZE); + public ShardBulkInferenceActionFilter( + ClusterService clusterService, + InferenceServiceRegistry inferenceServiceRegistry, + ModelRegistry modelRegistry + ) { + this(clusterService, inferenceServiceRegistry, modelRegistry, DEFAULT_BATCH_SIZE); } - public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry, int batchSize) { + public ShardBulkInferenceActionFilter( + ClusterService clusterService, + InferenceServiceRegistry inferenceServiceRegistry, + ModelRegistry modelRegistry, + int batchSize + ) { + this.clusterService = clusterService; this.inferenceServiceRegistry = inferenceServiceRegistry; this.modelRegistry = modelRegistry; this.batchSize = batchSize; @@ -112,7 +124,10 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { BulkShardRequest bulkShardRequest, Runnable onCompletion ) { - new AsyncBulkShardInferenceAction(fieldInferenceMap, bulkShardRequest, onCompletion).run(); + final ProjectMetadata project = clusterService.state().getMetadata().getProject(); + var index = project.index(bulkShardRequest.index()); + boolean useLegacyFormat = InferenceMetadataFieldsMapper.isEnabled(index.getSettings()) == false; + new AsyncBulkShardInferenceAction(useLegacyFormat, fieldInferenceMap, bulkShardRequest, onCompletion).run(); } private record InferenceProvider(InferenceService service, Model model) {} @@ -121,26 +136,29 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { * A field inference request on a single input. * @param index The index of the request in the original bulk request. * @param field The target field. + * @param sourceField The source field. * @param input The input to run inference on. * @param inputOrder The original order of the input. - * @param isOriginalFieldInput Whether the input is part of the original values of the field. + * @param offsetAdjustment The adjustment to apply to the chunk text offsets. 
     */
-    private record FieldInferenceRequest(int index, String field, String input, int inputOrder, boolean isOriginalFieldInput) {}
+    private record FieldInferenceRequest(int index, String field, String sourceField, String input, int inputOrder, int offsetAdjustment) {}

     /**
      * The field inference response.
      * @param field The target field.
+     * @param sourceField The source field the input was extracted from.
      * @param input The input that was used to run inference.
      * @param inputOrder The original order of the input.
-     * @param isOriginalFieldInput Whether the input is part of the original values of the field.
+     * @param offsetAdjustment The adjustment to apply to the chunk text offsets.
      * @param model The model used to run inference.
      * @param chunkedResults The actual results.
      */
     private record FieldInferenceResponse(
         String field,
+        String sourceField,
         String input,
         int inputOrder,
-        boolean isOriginalFieldInput,
+        int offsetAdjustment,
         Model model,
         ChunkedInference chunkedResults
     ) {}
@@ -165,16 +183,19 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter {
     }

     private class AsyncBulkShardInferenceAction implements Runnable {
+        private final boolean useLegacyFormat;
         private final Map fieldInferenceMap;
         private final BulkShardRequest bulkShardRequest;
         private final Runnable onCompletion;
         private final AtomicArray inferenceResults;

         private AsyncBulkShardInferenceAction(
+            boolean useLegacyFormat,
             Map fieldInferenceMap,
             BulkShardRequest bulkShardRequest,
             Runnable onCompletion
         ) {
+            this.useLegacyFormat = useLegacyFormat;
             this.fieldInferenceMap = fieldInferenceMap;
             this.bulkShardRequest = bulkShardRequest;
             this.inferenceResults = new AtomicArray<>(bulkShardRequest.items().length);
@@ -295,9 +316,10 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter {
                         acc.addOrUpdateResponse(
                             new FieldInferenceResponse(
                                 request.field(),
+                                request.sourceField(),
                                 request.input(),
                                 request.inputOrder(),
-                                request.isOriginalFieldInput(),
+                                request.offsetAdjustment(),
                                 inferenceProvider.model,
                                 result
                             )
@@ -357,8 +379,7 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter {
         /**
          * Applies the {@link FieldInferenceResponseAccumulator} to the provided {@link BulkItemRequest}.
          * If the response contains failures, the bulk item request is marked as failed for the downstream action.
-         * Otherwise, the source of the request is augmented with the field inference results under the
-         * {@link SemanticTextField#INFERENCE_FIELD} field.
+         * Otherwise, the source of the request is augmented with the field inference results.
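
Note: the augmentation described above differs between the two source formats; a condensed sketch of the two write paths (the field name "title" and the result value are illustrative, the calls mirror the code in this change):

    // Sketch of applyInferenceResponses' final step, assuming one semantic_text field
    // named "title" and an already-built SemanticTextField result.
    Map<String, Object> newDocMap = indexRequest.sourceAsMap();
    if (useLegacyFormat) {
        // Legacy: the inference representation replaces the field value in place.
        SemanticTextUtils.insertValue("title", newDocMap, result);
    } else {
        // New format: original values stay untouched; all results are grouped under
        // the inference metadata field (InferenceMetadataFieldsMapper.NAME).
        newDocMap.put(InferenceMetadataFieldsMapper.NAME, Map.of("title", result));
    }
    indexRequest.source(newDocMap, indexRequest.getContentType());
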
*/ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceResponseAccumulator response) throws IOException { if (response.failures().isEmpty() == false) { @@ -370,25 +391,49 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { final IndexRequest indexRequest = getIndexRequestOrNull(item.request()); var newDocMap = indexRequest.sourceAsMap(); + Map inferenceFieldsMap = new HashMap<>(); for (var entry : response.responses.entrySet()) { var fieldName = entry.getKey(); var responses = entry.getValue(); var model = responses.get(0).model(); // ensure that the order in the original field is consistent in case of multiple inputs Collections.sort(responses, Comparator.comparingInt(FieldInferenceResponse::inputOrder)); - List inputs = responses.stream().filter(r -> r.isOriginalFieldInput).map(r -> r.input).collect(Collectors.toList()); - List results = responses.stream().map(r -> r.chunkedResults).collect(Collectors.toList()); + Map> chunkMap = new LinkedHashMap<>(); + for (var resp : responses) { + var lst = chunkMap.computeIfAbsent(resp.sourceField, k -> new ArrayList<>()); + lst.addAll( + SemanticTextField.toSemanticTextFieldChunks( + resp.input, + resp.offsetAdjustment, + resp.chunkedResults, + indexRequest.getContentType(), + useLegacyFormat + ) + ); + } + List inputs = responses.stream() + .filter(r -> r.sourceField().equals(fieldName)) + .map(r -> r.input) + .collect(Collectors.toList()); var result = new SemanticTextField( + useLegacyFormat, fieldName, - inputs, + useLegacyFormat ? inputs : null, new SemanticTextField.InferenceResult( model.getInferenceEntityId(), new SemanticTextField.ModelSettings(model), - toSemanticTextFieldChunks(results, indexRequest.getContentType()) + chunkMap ), indexRequest.getContentType() ); - SemanticTextFieldMapper.insertValue(fieldName, newDocMap, result); + if (useLegacyFormat) { + SemanticTextUtils.insertValue(fieldName, newDocMap, result); + } else { + inferenceFieldsMap.put(fieldName, result); + } + } + if (useLegacyFormat == false) { + newDocMap.put(InferenceMetadataFieldsMapper.NAME, inferenceFieldsMap); } indexRequest.source(newDocMap, indexRequest.getContentType()); } @@ -437,17 +482,30 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { for (var entry : fieldInferenceMap.values()) { String field = entry.getName(); String inferenceId = entry.getInferenceId(); - var originalFieldValue = XContentMapValues.extractValue(field, docMap); - if (originalFieldValue instanceof Map || (originalFieldValue == null && entry.getSourceFields().length == 1)) { - // Inference has already been computed, or there is no inference required. - continue; + + if (useLegacyFormat) { + var originalFieldValue = XContentMapValues.extractValue(field, docMap); + if (originalFieldValue instanceof Map || (originalFieldValue == null && entry.getSourceFields().length == 1)) { + // Inference has already been computed, or there is no inference required. + continue; + } + } else { + var inferenceMetadataFieldsValue = XContentMapValues.extractValue( + InferenceMetadataFieldsMapper.NAME + "." 
+ field, + docMap + ); + if (inferenceMetadataFieldsValue != null) { + // Inference has already been computed + continue; + } } + int order = 0; for (var sourceField : entry.getSourceFields()) { - boolean isOriginalFieldInput = sourceField.equals(field); + // TODO: Detect when the field is provided with an explicit null value var valueObj = XContentMapValues.extractValue(sourceField, docMap); if (valueObj == null) { - if (isUpdateRequest) { + if (isUpdateRequest && useLegacyFormat) { addInferenceResponseFailure( item.id(), new ElasticsearchStatusException( @@ -464,14 +522,21 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { ensureResponseAccumulatorSlot(itemIndex); final List values; try { - values = nodeStringValues(field, valueObj); + values = SemanticTextUtils.nodeStringValues(field, valueObj); } catch (Exception exc) { addInferenceResponseFailure(item.id(), exc); break; } + List fieldRequests = fieldRequestsMap.computeIfAbsent(inferenceId, k -> new ArrayList<>()); - for (var v : values) { - fieldRequests.add(new FieldInferenceRequest(itemIndex, field, v, order++, isOriginalFieldInput)); + int offsetAdjustment = 0; + for (String v : values) { + fieldRequests.add(new FieldInferenceRequest(itemIndex, field, sourceField, v, order++, offsetAdjustment)); + + // When using the inference metadata fields format, all the input values are concatenated so that the + // chunk text offsets are expressed in the context of a single string. Calculate the offset adjustment + // to apply to account for this. + offsetAdjustment += v.length() + 1; // Add one for separator char length } } } @@ -480,41 +545,6 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { } } - /** - * This method converts the given {@code valueObj} into a list of strings. - * If {@code valueObj} is not a string or a collection of strings, it throws an ElasticsearchStatusException. 
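
Note: because the new format concatenates all input values of a field before chunking, each value's chunk offsets must be shifted by the combined length of the preceding values plus one separator character per boundary, exactly as the offsetAdjustment bookkeeping above does. A self-contained sketch (class name and values are made up):

    import java.util.List;

    class OffsetAdjustmentSketch {
        public static void main(String[] args) {
            List<String> values = List.of("first value", "second value");
            int offsetAdjustment = 0;
            for (String v : values) {
                // A chunk spanning [s, e) within v lands at
                // [s + offsetAdjustment, e + offsetAdjustment) in the concatenated string.
                System.out.printf("'%s' -> shifted by %d%n", v, offsetAdjustment);
                offsetAdjustment += v.length() + 1; // one separator char per value boundary
            }
        }
    }
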
- */ - private static List nodeStringValues(String field, Object valueObj) { - if (valueObj instanceof Number || valueObj instanceof Boolean) { - return List.of(valueObj.toString()); - } else if (valueObj instanceof String value) { - return List.of(value); - } else if (valueObj instanceof Collection values) { - List valuesString = new ArrayList<>(); - for (var v : values) { - if (v instanceof Number || v instanceof Boolean) { - valuesString.add(v.toString()); - } else if (v instanceof String value) { - valuesString.add(value); - } else { - throw new ElasticsearchStatusException( - "Invalid format for field [{}], expected [String] got [{}]", - RestStatus.BAD_REQUEST, - field, - valueObj.getClass().getSimpleName() - ); - } - } - return valuesString; - } - throw new ElasticsearchStatusException( - "Invalid format for field [{}], expected [String] got [{}]", - RestStatus.BAD_REQUEST, - field, - valueObj.getClass().getSimpleName() - ); - } - static IndexRequest getIndexRequestOrNull(DocWriteRequest docWriteRequest) { if (docWriteRequest instanceof IndexRequest indexRequest) { return indexRequest; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java index f2bfa72ec617..cd3cde65e12e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java @@ -53,8 +53,9 @@ public class SemanticTextHighlighter implements Highlighter { @Override public boolean canHighlight(MappedFieldType fieldType) { - if (fieldType instanceof SemanticTextFieldMapper.SemanticTextFieldType) { - return true; + if (fieldType instanceof SemanticTextFieldMapper.SemanticTextFieldType semanticTextFieldType) { + // TODO: Implement highlighting when using inference metadata fields + return semanticTextFieldType.useLegacyFormat(); } return false; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java new file mode 100644 index 000000000000..7a1a9b056d0a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +/** + * An {@link InferenceMetadataFieldsMapper} that delegates parsing of underlying fields + * to the corresponding {@link SemanticTextFieldMapper}. + */ +public class SemanticInferenceMetadataFieldsMapper extends InferenceMetadataFieldsMapper { + private static final SemanticInferenceMetadataFieldsMapper INSTANCE = new SemanticInferenceMetadataFieldsMapper(); + + public static final TypeParser PARSER = new FixedTypeParser( + c -> InferenceMetadataFieldsMapper.isEnabled(c.getSettings()) ? INSTANCE : null + ); + + static class FieldType extends InferenceMetadataFieldType { + private static final FieldType INSTANCE = new FieldType(); + + FieldType() { + super(); + } + + @Override + public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + return valueFetcher(context.getMappingLookup(), context::bitsetFilter, context.searcher()); + } + + @Override + public ValueFetcher valueFetcher(MappingLookup mappingLookup, Function bitSetCache, IndexSearcher searcher) { + Map fieldFetchers = new HashMap<>(); + for (var inferenceField : mappingLookup.inferenceFields().keySet()) { + MappedFieldType ft = mappingLookup.getFieldType(inferenceField); + if (ft instanceof SemanticTextFieldMapper.SemanticTextFieldType semanticTextFieldType) { + fieldFetchers.put(inferenceField, semanticTextFieldType.valueFetcherWithInferenceResults(bitSetCache, searcher)); + } else { + throw new IllegalArgumentException( + "Invalid inference field [" + ft.name() + "]. Expected field type [semantic_text] but got [" + ft.typeName() + "]" + ); + } + } + if (fieldFetchers.isEmpty()) { + return ValueFetcher.EMPTY; + } + return new ValueFetcher() { + @Override + public void setNextReader(LeafReaderContext context) { + fieldFetchers.values().forEach(f -> f.setNextReader(context)); + } + + @Override + public List fetchValues(Source source, int doc, List ignoredValues) throws IOException { + Map result = new HashMap<>(); + for (var entry : fieldFetchers.entrySet()) { + var values = entry.getValue().fetchValues(source, doc, ignoredValues); + if (values.size() > 0) { + assert values.size() == 1; + result.put(entry.getKey(), values.get(0)); + } + } + return result.isEmpty() ? 
List.of() : List.of(result); + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + }; + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public Query termQuery(Object value, SearchExecutionContext context) { + throw new QueryShardException( + context, + "[" + name() + "] field which is of type [" + typeName() + "], does not support term queries" + ); + } + } + + private SemanticInferenceMetadataFieldsMapper() { + super(FieldType.INSTANCE); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected boolean supportsParsingObject() { + return true; + } + + @Override + protected void parseCreateField(DocumentParserContext context) throws IOException { + final boolean isWithinLeaf = context.path().isWithinLeafObject(); + try { + // make sure that we don't expand dots in field names while parsing + context.path().setWithinLeafObject(true); + XContentParser parser = context.parser(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String fieldName = parser.currentName(); + + // Set the path to that of semantic text field so the parser acts as if we are parsing the semantic text field value + // directly. We can safely split on all "." chars because semantic text fields cannot be used when subobjects == false. + String[] fieldNameParts = fieldName.split("\\."); + setPath(context.path(), fieldNameParts); + + var parent = context.parent().findParentMapper(fieldName); + if (parent == null) { + throw new IllegalArgumentException("Field [" + fieldName + "] does not have a parent mapper"); + } + String suffix = parent != context.parent() ? 
fieldName.substring(parent.fullPath().length() + 1) : fieldName; + var mapper = parent.getMapper(suffix); + if (mapper instanceof SemanticTextFieldMapper fieldMapper) { + XContentLocation xContentLocation = context.parser().getTokenLocation(); + var input = fieldMapper.parseSemanticTextField(context); + if (input != null) { + fieldMapper.parseCreateFieldFromContext(context, input, xContentLocation); + } + } else { + throw new IllegalArgumentException( + "Field [" + fieldName + "] is not a [" + SemanticTextFieldMapper.CONTENT_TYPE + "] field" + ); + } + } + } finally { + context.path().setWithinLeafObject(isWithinLeaf); + setPath(context.path(), new String[] { InferenceMetadataFieldsMapper.NAME }); + } + } + + private static void setPath(ContentPath contentPath, String[] newPath) { + while (contentPath.length() > 0) { + contentPath.remove(); + } + + for (String pathPart : newPath) { + contentPath.add(pathPart); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index d651729dee25..cfd05cb29ca0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ -10,8 +10,10 @@ package org.elasticsearch.xpack.inference.mapper; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.Model; @@ -31,6 +33,9 @@ import org.elasticsearch.xcontent.support.MapXContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -46,13 +51,18 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr * the inference results under the {@link SemanticTextField#INFERENCE_FIELD}. * * @param fieldName The original field name. - * @param originalValues The original values associated with the field name. + * @param originalValues The original values associated with the field name for indices created before + * {@link IndexVersions#INFERENCE_METADATA_FIELDS}, null otherwise. * @param inference The inference result. * @param contentType The {@link XContentType} used to store the embeddings chunks. 
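
Note: the toXContent changes further down produce two distinct serialized shapes. The legacy format writes "chunks" as one array of objects carrying the matched text; the new format groups chunks per source field and stores offsets instead. A sketch of the new layout with hypothetical values (model_settings omitted for brevity; this fragment belongs in a method declared to throw IOException):

    try (XContentBuilder b = XContentFactory.jsonBuilder()) {
        b.startObject();
        b.field("inference_id", "my-endpoint");      // hypothetical endpoint id
        b.startObject("chunks");
        b.startArray("my_source_field");             // one array per source field
        b.startObject();
        b.field("start_offset", 0);                  // offsets into the concatenated input
        b.field("end_offset", 42);
        b.field("embeddings", List.of(0.1f, 0.2f));  // raw embeddings; shape depends on task type
        b.endObject();
        b.endArray();
        b.endObject();                               // end "chunks"
        b.endObject();
    }
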
*/ -public record SemanticTextField(String fieldName, List originalValues, InferenceResult inference, XContentType contentType) - implements - ToXContentObject { +public record SemanticTextField( + boolean useLegacyFormat, + String fieldName, + @Nullable List originalValues, + InferenceResult inference, + XContentType contentType +) implements ToXContentObject { static final String TEXT_FIELD = "text"; static final String INFERENCE_FIELD = "inference"; @@ -61,15 +71,20 @@ public record SemanticTextField(String fieldName, List originalValues, I static final String CHUNKS_FIELD = "chunks"; static final String CHUNKED_EMBEDDINGS_FIELD = "embeddings"; public static final String CHUNKED_TEXT_FIELD = "text"; + static final String CHUNKED_OFFSET_FIELD = "offset"; + static final String CHUNKED_START_OFFSET_FIELD = "start_offset"; + static final String CHUNKED_END_OFFSET_FIELD = "end_offset"; static final String MODEL_SETTINGS_FIELD = "model_settings"; static final String TASK_TYPE_FIELD = "task_type"; static final String DIMENSIONS_FIELD = "dimensions"; static final String SIMILARITY_FIELD = "similarity"; static final String ELEMENT_TYPE_FIELD = "element_type"; - public record InferenceResult(String inferenceId, ModelSettings modelSettings, List chunks) {} + public record InferenceResult(String inferenceId, ModelSettings modelSettings, Map> chunks) {} - record Chunk(String text, BytesReference rawEmbeddings) {} + public record Chunk(@Nullable String text, int startOffset, int endOffset, BytesReference rawEmbeddings) {} + + public record Offset(String sourceFieldName, int startOffset, int endOffset) {} public record ModelSettings( TaskType taskType, @@ -187,12 +202,14 @@ public record SemanticTextField(String fieldName, List originalValues, I return getChunksFieldName(fieldName) + "." + CHUNKED_EMBEDDINGS_FIELD; } - static SemanticTextField parse(XContentParser parser, Tuple context) throws IOException { - return SEMANTIC_TEXT_FIELD_PARSER.parse(parser, context); + public static String getOffsetsFieldName(String fieldName) { + return getChunksFieldName(fieldName) + "." + CHUNKED_OFFSET_FIELD; } - static ModelSettings parseModelSettings(XContentParser parser) throws IOException { - return MODEL_SETTINGS_PARSER.parse(parser, null); + record ParserContext(boolean useLegacyFormat, String fieldName, XContentType xContentType) {} + + static SemanticTextField parse(XContentParser parser, ParserContext context) throws IOException { + return SEMANTIC_TEXT_FIELD_PARSER.parse(parser, context); } static ModelSettings parseModelSettingsFromMap(Object node) { @@ -207,63 +224,102 @@ public record SemanticTextField(String fieldName, List originalValues, I map, XContentType.JSON ); - return parseModelSettings(parser); + return MODEL_SETTINGS_PARSER.parse(parser, null); } catch (Exception exc) { throw new ElasticsearchException(exc); } } + @Override + public List originalValues() { + return originalValues != null ? originalValues : Collections.emptyList(); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - if (originalValues.isEmpty() == false) { + List originalValues = originalValues(); + if (useLegacyFormat && originalValues.isEmpty() == false) { builder.field(TEXT_FIELD, originalValues.size() == 1 ? 
originalValues.get(0) : originalValues); } builder.startObject(INFERENCE_FIELD); builder.field(INFERENCE_ID_FIELD, inference.inferenceId); builder.field(MODEL_SETTINGS_FIELD, inference.modelSettings); - builder.startArray(CHUNKS_FIELD); - for (var chunk : inference.chunks) { - builder.startObject(); - builder.field(CHUNKED_TEXT_FIELD, chunk.text); - XContentParser parser = XContentHelper.createParserNotCompressed( - XContentParserConfiguration.EMPTY, - chunk.rawEmbeddings, - contentType - ); - builder.field(CHUNKED_EMBEDDINGS_FIELD).copyCurrentStructure(parser); + if (useLegacyFormat) { + builder.startArray(CHUNKS_FIELD); + } else { + builder.startObject(CHUNKS_FIELD); + } + for (var entry : inference.chunks.entrySet()) { + if (useLegacyFormat == false) { + builder.startArray(entry.getKey()); + } + for (var chunk : entry.getValue()) { + builder.startObject(); + if (useLegacyFormat) { + builder.field(TEXT_FIELD, chunk.text); + } else { + builder.field(CHUNKED_START_OFFSET_FIELD, chunk.startOffset); + builder.field(CHUNKED_END_OFFSET_FIELD, chunk.endOffset); + } + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY, + chunk.rawEmbeddings, + contentType + ); + builder.field(CHUNKED_EMBEDDINGS_FIELD).copyCurrentStructure(parser); + builder.endObject(); + } + if (useLegacyFormat == false) { + builder.endArray(); + } + } + if (useLegacyFormat) { + builder.endArray(); + } else { builder.endObject(); } - builder.endArray(); builder.endObject(); builder.endObject(); return builder; } @SuppressWarnings("unchecked") - private static final ConstructingObjectParser> SEMANTIC_TEXT_FIELD_PARSER = - new ConstructingObjectParser<>( - SemanticTextFieldMapper.CONTENT_TYPE, - true, - (args, context) -> new SemanticTextField( - context.v1(), - (List) (args[0] == null ? List.of() : args[0]), + private static final ConstructingObjectParser SEMANTIC_TEXT_FIELD_PARSER = + new ConstructingObjectParser<>(SemanticTextFieldMapper.CONTENT_TYPE, true, (args, context) -> { + List originalValues = (List) args[0]; + if (context.useLegacyFormat() == false) { + if (originalValues != null && originalValues.isEmpty() == false) { + throw new IllegalArgumentException("Unknown field [" + TEXT_FIELD + "]"); + } + originalValues = null; + } + return new SemanticTextField( + context.useLegacyFormat(), + context.fieldName(), + originalValues, (InferenceResult) args[1], - context.v2() - ) - ); + context.xContentType() + ); + }); @SuppressWarnings("unchecked") - private static final ConstructingObjectParser INFERENCE_RESULT_PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser INFERENCE_RESULT_PARSER = new ConstructingObjectParser<>( INFERENCE_FIELD, true, - args -> new InferenceResult((String) args[0], (ModelSettings) args[1], (List) args[2]) + args -> new InferenceResult((String) args[0], (ModelSettings) args[1], (Map>) args[2]) ); - private static final ConstructingObjectParser CHUNKS_PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser CHUNKS_PARSER = new ConstructingObjectParser<>( CHUNKS_FIELD, true, - args -> new Chunk((String) args[0], (BytesReference) args[1]) + (args, context) -> { + String text = (String) args[0]; + if (context.useLegacyFormat() && text == null) { + throw new IllegalArgumentException("Missing chunk text"); + } + return new Chunk(text, args[1] != null ? (int) args[1] : -1, args[2] != null ? 
(int) args[2] : -1, (BytesReference) args[3]); + } ); private static final ConstructingObjectParser MODEL_SETTINGS_PARSER = new ConstructingObjectParser<>( @@ -284,15 +340,26 @@ public record SemanticTextField(String fieldName, List originalValues, I SEMANTIC_TEXT_FIELD_PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TEXT_FIELD)); SEMANTIC_TEXT_FIELD_PARSER.declareObject( constructorArg(), - (p, c) -> INFERENCE_RESULT_PARSER.parse(p, null), + (p, c) -> INFERENCE_RESULT_PARSER.parse(p, c), new ParseField(INFERENCE_FIELD) ); INFERENCE_RESULT_PARSER.declareString(constructorArg(), new ParseField(INFERENCE_ID_FIELD)); - INFERENCE_RESULT_PARSER.declareObject(constructorArg(), MODEL_SETTINGS_PARSER, new ParseField(MODEL_SETTINGS_FIELD)); - INFERENCE_RESULT_PARSER.declareObjectArray(constructorArg(), CHUNKS_PARSER, new ParseField(CHUNKS_FIELD)); + INFERENCE_RESULT_PARSER.declareObject( + constructorArg(), + (p, c) -> MODEL_SETTINGS_PARSER.parse(p, null), + new ParseField(MODEL_SETTINGS_FIELD) + ); + INFERENCE_RESULT_PARSER.declareField(constructorArg(), (p, c) -> { + if (c.useLegacyFormat()) { + return Map.of(c.fieldName, parseChunksArrayLegacy(p, c)); + } + return parseChunksMap(p, c); + }, new ParseField(CHUNKS_FIELD), ObjectParser.ValueType.OBJECT_ARRAY); - CHUNKS_PARSER.declareString(constructorArg(), new ParseField(CHUNKED_TEXT_FIELD)); + CHUNKS_PARSER.declareString(optionalConstructorArg(), new ParseField(TEXT_FIELD)); + CHUNKS_PARSER.declareInt(optionalConstructorArg(), new ParseField(CHUNKED_START_OFFSET_FIELD)); + CHUNKS_PARSER.declareInt(optionalConstructorArg(), new ParseField(CHUNKED_END_OFFSET_FIELD)); CHUNKS_PARSER.declareField(constructorArg(), (p, c) -> { XContentBuilder b = XContentBuilder.builder(p.contentType().xContent()); b.copyCurrentStructure(p); @@ -305,18 +372,64 @@ public record SemanticTextField(String fieldName, List originalValues, I MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(ELEMENT_TYPE_FIELD)); } + private static Map> parseChunksMap(XContentParser parser, ParserContext context) throws IOException { + Map> resultMap = new LinkedHashMap<>(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String fieldName = parser.currentName(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); + var chunks = resultMap.computeIfAbsent(fieldName, k -> new ArrayList<>()); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + chunks.add(CHUNKS_PARSER.parse(parser, context)); + } + } + return resultMap; + } + + private static List parseChunksArrayLegacy(XContentParser parser, ParserContext context) throws IOException { + List results = new ArrayList<>(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + results.add(CHUNKS_PARSER.parse(parser, context)); + } + return results; + } + /** * Converts the provided {@link ChunkedInference} into a list of {@link Chunk}. 
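
Note: a worked example of the conversion implemented below, with made-up numbers. Take input = "hello world", offsetAdjustment = 12 (the combined length of the values that precede this input, plus separators), and an inference chunk covering [0, 5) ("hello"). In the legacy format the chunk keeps the matched substring: text = input.substring(0, 5) = "hello" and both offsets stay -1. In the new format the chunk keeps no text: text = null, startOffset = 0 + 12 = 12 and endOffset = 5 + 12 = 17.
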
*/ - public static List toSemanticTextFieldChunks(List results, XContentType contentType) throws IOException { + public static List toSemanticTextFieldChunks( + String input, + int offsetAdjustment, + ChunkedInference results, + XContentType contentType, + boolean useLegacyFormat + ) throws IOException { List chunks = new ArrayList<>(); - for (var result : results) { - for (var it = result.chunksAsMatchedTextAndByteReference(contentType.xContent()); it.hasNext();) { - var chunkAsByteReference = it.next(); - chunks.add(new Chunk(chunkAsByteReference.matchedText(), chunkAsByteReference.bytesReference())); - } + Iterator it = results.chunksAsMatchedTextAndByteReference(contentType.xContent()); + while (it.hasNext()) { + chunks.add(toSemanticTextFieldChunk(input, offsetAdjustment, it.next(), useLegacyFormat)); } return chunks; } + public static Chunk toSemanticTextFieldChunk( + String input, + int offsetAdjustment, + ChunkedInference.Chunk chunk, + boolean useLegacyFormat + ) { + String text = null; + int startOffset = -1; + int endOffset = -1; + if (useLegacyFormat) { + text = input.substring(chunk.textOffset().start(), chunk.textOffset().end()); + } else { + startOffset = chunk.textOffset().start() + offsetAdjustment; + endOffset = chunk.textOffset().end() + offsetAdjustment; + } + + return new Chunk(text, startOffset, endOffset, chunk.bytesReference()); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index cda77233bdfd..b47c55c30227 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -8,16 +8,23 @@ package org.elasticsearch.xpack.inference.mapper; import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; +import org.apache.lucene.util.BitSet; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -29,6 +36,7 @@ import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.InferenceFieldMapper; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; @@ 
-38,6 +46,7 @@ import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.SimpleMappedFieldType; +import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.ValueFetcher; @@ -49,19 +58,25 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilder; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -71,7 +86,7 @@ import java.util.function.Function; import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; -import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_TEXT_FIELD; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_OFFSET_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_ID_FIELD; @@ -80,6 +95,7 @@ import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.SEARCH_ import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getChunksFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getEmbeddingsFieldName; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getOffsetsFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getOriginalTextFieldName; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.DEFAULT_ELSER_ID; @@ -100,16 +116,14 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; - private final IndexSettings indexSettings; - public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c::bitSetProducer, c.getIndexSettings()), + (n, c) -> new Builder(n, c::bitSetProducer, c.getIndexSettings()), 
List.of(notInMultiFields(CONTENT_TYPE), notFromDynamicTemplates(CONTENT_TYPE)) ); public static class Builder extends FieldMapper.Builder { + private final boolean useLegacyFormat; private final IndexVersion indexVersionCreated; - private final IndexSettings indexSettings; private final Parameter inferenceId = Parameter.stringParam( INFERENCE_ID_FIELD, @@ -154,26 +168,21 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static Builder from(SemanticTextFieldMapper mapper) { Builder builder = new Builder( mapper.leafName(), - mapper.fieldType().indexVersionCreated, mapper.fieldType().getChunksField().bitsetProducer(), - mapper.indexSettings + mapper.fieldType().getChunksField().indexSettings() ); builder.init(mapper); return builder; } - public Builder( - String name, - IndexVersion indexVersionCreated, - Function bitSetProducer, - IndexSettings indexSettings - ) { + public Builder(String name, Function bitSetProducer, IndexSettings indexSettings) { super(name); - this.indexVersionCreated = indexVersionCreated; - this.indexSettings = indexSettings; + this.indexVersionCreated = indexSettings.getIndexVersionCreated(); + this.useLegacyFormat = InferenceMetadataFieldsMapper.isEnabled(indexSettings.getSettings()) == false; this.inferenceFieldBuilder = c -> createInferenceField( c, - indexVersionCreated, + indexSettings.getIndexVersionCreated(), + useLegacyFormat, modelSettings.get(), bitSetProducer, indexSettings @@ -238,10 +247,10 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie modelSettings.getValue(), inferenceField, indexVersionCreated, + useLegacyFormat, meta.getValue() ), - builderParams(this, context), - indexSettings + builderParams(this, context) ); } @@ -265,14 +274,8 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie } } - private SemanticTextFieldMapper( - String simpleName, - MappedFieldType mappedFieldType, - BuilderParams builderParams, - IndexSettings indexSettings - ) { + private SemanticTextFieldMapper(String simpleName, MappedFieldType mappedFieldType, BuilderParams builderParams) { super(simpleName, mappedFieldType, builderParams); - this.indexSettings = indexSettings; } @Override @@ -289,21 +292,48 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie @Override protected void parseCreateField(DocumentParserContext context) throws IOException { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + final XContentParser parser = context.parser(); + final XContentLocation xContentLocation = parser.getTokenLocation(); + + if (fieldType().useLegacyFormat == false) { + // Detect if field value is an object, which we don't support parsing + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + throw new DocumentParsingException( + xContentLocation, + "[" + CONTENT_TYPE + "] field [" + fullPath() + "] does not support object values" + ); + } + + // ignore the rest of the field value + parser.skipChildren(); return; } - XContentLocation xContentLocation = parser.getTokenLocation(); - final SemanticTextField field; + final SemanticTextField field = parseSemanticTextField(context); + if (field != null) { + parseCreateFieldFromContext(context, field, xContentLocation); + } + } + + SemanticTextField parseSemanticTextField(DocumentParserContext context) throws IOException { + XContentParser parser = context.parser(); + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { 
+ return null; + } boolean isWithinLeaf = context.path().isWithinLeafObject(); try { context.path().setWithinLeafObject(true); - field = SemanticTextField.parse(parser, new Tuple<>(fullPath(), context.parser().contentType())); + return SemanticTextField.parse( + context.parser(), + new SemanticTextField.ParserContext(fieldType().useLegacyFormat, fullPath(), context.parser().contentType()) + ); } finally { context.path().setWithinLeafObject(isWithinLeaf); } + } + void parseCreateFieldFromContext(DocumentParserContext context, SemanticTextField field, XContentLocation xContentLocation) + throws IOException { final String fullFieldName = fieldType().name(); if (field.inference().inferenceId().equals(fieldType().getInferenceId()) == false) { throw new DocumentParsingException( @@ -324,9 +354,8 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie context.path().remove(); Builder builder = (Builder) new Builder( leafName(), - fieldType().indexVersionCreated, fieldType().getChunksField().bitsetProducer(), - indexSettings + fieldType().getChunksField().indexSettings() ).init(this); try { mapper = builder.setModelSettings(field.inference().modelSettings()) @@ -357,17 +386,44 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie var chunksField = mapper.fieldType().getChunksField(); var embeddingsField = mapper.fieldType().getEmbeddingsField(); - for (var chunk : field.inference().chunks()) { - try ( - XContentParser subParser = XContentHelper.createParserNotCompressed( - XContentParserConfiguration.EMPTY, - chunk.rawEmbeddings(), - context.parser().contentType() - ) - ) { - DocumentParserContext subContext = context.createNestedContext(chunksField).switchParser(subParser); - subParser.nextToken(); - embeddingsField.parse(subContext); + var offsetsField = mapper.fieldType().getOffsetsField(); + for (var entry : field.inference().chunks().entrySet()) { + for (var chunk : entry.getValue()) { + var nestedContext = context.createNestedContext(chunksField); + try ( + XContentParser subParser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY, + chunk.rawEmbeddings(), + context.parser().contentType() + ) + ) { + DocumentParserContext subContext = nestedContext.switchParser(subParser); + subParser.nextToken(); + embeddingsField.parse(subContext); + } + + if (fieldType().useLegacyFormat) { + continue; + } + + try (XContentBuilder builder = XContentFactory.contentBuilder(context.parser().contentType())) { + builder.startObject(); + builder.field("field", entry.getKey()); + builder.field("start", chunk.startOffset()); + builder.field("end", chunk.endOffset()); + builder.endObject(); + try ( + XContentParser subParser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY, + BytesReference.bytes(builder), + context.parser().contentType() + ) + ) { + DocumentParserContext subContext = nestedContext.switchParser(subParser); + subParser.nextToken(); + offsetsField.parse(subContext); + } + } } } } @@ -390,20 +446,6 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie return new InferenceFieldMetadata(fullPath(), fieldType().getInferenceId(), fieldType().getSearchInferenceId(), copyFields); } - @Override - public Object getOriginalValue(Map sourceAsMap) { - Object fieldValue = sourceAsMap.get(fullPath()); - if (fieldValue == null) { - return null; - } else if (fieldValue instanceof Map == false) { - // Don't try to further validate the non-map value, that will be handled 
when the source is fully parsed - return fieldValue; - } - - Map fieldValueMap = XContentMapValues.nodeMapValue(fieldValue, "Field [" + fullPath() + "]"); - return XContentMapValues.extractValue(TEXT_FIELD, fieldValueMap); - } - @Override protected void doValidate(MappingLookup mappers) { int parentPathIndex = fullPath().lastIndexOf(leafName()); @@ -429,6 +471,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie private final SemanticTextField.ModelSettings modelSettings; private final ObjectMapper inferenceField; private final IndexVersion indexVersionCreated; + private final boolean useLegacyFormat; public SemanticTextFieldType( String name, @@ -437,6 +480,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie SemanticTextField.ModelSettings modelSettings, ObjectMapper inferenceField, IndexVersion indexVersionCreated, + boolean useLegacyFormat, Map meta ) { super(name, true, false, false, TextSearchInfo.NONE, meta); @@ -445,6 +489,11 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie this.modelSettings = modelSettings; this.inferenceField = inferenceField; this.indexVersionCreated = indexVersionCreated; + this.useLegacyFormat = useLegacyFormat; + } + + public boolean useLegacyFormat() { + return useLegacyFormat; } @Override @@ -476,6 +525,10 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie return (FieldMapper) getChunksField().getMapper(CHUNKED_EMBEDDINGS_FIELD); } + public FieldMapper getOffsetsField() { + return (FieldMapper) getChunksField().getMapper(CHUNKED_OFFSET_FIELD); + } + @Override public Query termQuery(Object value, SearchExecutionContext context) { throw new IllegalArgumentException(CONTENT_TYPE + " fields do not support term query"); @@ -498,8 +551,30 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - // Redirect the fetcher to load the original values of the field - return SourceValueFetcher.toString(getOriginalTextFieldName(name()), context, format); + if (useLegacyFormat) { + // Redirect the fetcher to load the original values of the field + return SourceValueFetcher.toString(getOriginalTextFieldName(name()), context, format); + } + return SourceValueFetcher.toString(name(), context, null); + } + + ValueFetcher valueFetcherWithInferenceResults(Function bitSetCache, IndexSearcher searcher) { + var embeddingsField = getEmbeddingsField(); + if (embeddingsField == null) { + return ValueFetcher.EMPTY; + } + try { + var embeddingsLoader = embeddingsField.syntheticFieldLoader(); + var bitSetFilter = bitSetCache.apply(getChunksField().parentTypeFilter()); + var childWeight = searcher.createWeight( + getChunksField().nestedTypeFilter(), + org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, + 1 + ); + return new SemanticTextFieldValueFetcher(bitSetFilter, childWeight, embeddingsLoader); + } catch (IOException exc) { + throw new UncheckedIOException(exc); + } } @Override @@ -622,158 +697,170 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie @Override public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) { - SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name().concat(".text"))); + String name = useLegacyFormat ? 
name().concat(".text") : name(); + SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name)); return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); } - } - /** - *
-     * Insert or replace the path's value in the map with the provided new value. The map will be modified in-place.
-     * If the complete path does not exist in the map, it will be added to the deepest (sub-)map possible.
-     * <p>
-     * For example, given the map:
-     * <pre>
-     * {
-     *   "path1": {
-     *     "path2": {
-     *       "key1": "value1"
-     *     }
-     *   }
-     * }
-     * </pre>
-     * <p>
-     * And the caller wanted to insert {@code "path1.path2.path3.key2": "value2"}, the method would emit the modified map:
-     * <pre>
-     * {
-     *   "path1": {
-     *     "path2": {
-     *       "key1": "value1",
-     *       "path3.key2": "value2"
-     *     }
-     *   }
-     * }
-     * </pre>
- * - * @param path the value's path in the map. - * @param map the map to search and modify in-place. - * @param newValue the new value to assign to the path. - * - * @throws IllegalArgumentException If either the path cannot be fully traversed or there is ambiguity about where to insert the new - * value. - */ - public static void insertValue(String path, Map map, Object newValue) { - String[] pathElements = path.split("\\."); - if (pathElements.length == 0) { - return; - } + private class SemanticTextFieldValueFetcher implements ValueFetcher { + private final BitSetProducer parentBitSetProducer; + private final Weight childWeight; + private final SourceLoader.SyntheticFieldLoader fieldLoader; - List suffixMaps = extractSuffixMaps(pathElements, 0, map); - if (suffixMaps.isEmpty()) { - // This should never happen. Throw in case it does for some reason. - throw new IllegalStateException("extractSuffixMaps returned an empty suffix map list"); - } else if (suffixMaps.size() == 1) { - SuffixMap suffixMap = suffixMaps.getFirst(); - suffixMap.map().put(suffixMap.suffix(), newValue); - } else { - throw new IllegalArgumentException( - "Path [" + path + "] could be inserted in " + suffixMaps.size() + " distinct ways, it is ambiguous which one to use" - ); - } - } + private BitSet bitSet; + private Scorer childScorer; + private SourceLoader.SyntheticFieldLoader.DocValuesLoader dvLoader; + private OffsetSourceField.OffsetSourceLoader offsetsLoader; - private record SuffixMap(String suffix, Map map) {} - - private static List extractSuffixMaps(String[] pathElements, int index, Object currentValue) { - if (currentValue instanceof List valueList) { - List suffixMaps = new ArrayList<>(valueList.size()); - for (Object o : valueList) { - suffixMaps.addAll(extractSuffixMaps(pathElements, index, o)); + private SemanticTextFieldValueFetcher( + BitSetProducer bitSetProducer, + Weight childWeight, + SourceLoader.SyntheticFieldLoader fieldLoader + ) { + this.parentBitSetProducer = bitSetProducer; + this.childWeight = childWeight; + this.fieldLoader = fieldLoader; } - return suffixMaps; - } else if (currentValue instanceof Map) { - @SuppressWarnings("unchecked") - Map map = (Map) currentValue; - List suffixMaps = new ArrayList<>(map.size()); + @Override + public void setNextReader(LeafReaderContext context) { + try { + bitSet = parentBitSetProducer.getBitSet(context); + childScorer = childWeight.scorer(context); + if (childScorer != null) { + childScorer.iterator().nextDoc(); + } + dvLoader = fieldLoader.docValuesLoader(context.reader(), null); + var terms = context.reader().terms(getOffsetsFieldName(name())); + offsetsLoader = terms != null ? 
OffsetSourceField.loader(terms) : null; + } catch (IOException exc) { + throw new UncheckedIOException(exc); + } + } - String key = pathElements[index]; - while (index < pathElements.length) { - if (map.containsKey(key)) { - if (index + 1 == pathElements.length) { - // We found the complete path - suffixMaps.add(new SuffixMap(key, map)); - } else { - // We've matched that path partially, keep traversing to try to match it fully - suffixMaps.addAll(extractSuffixMaps(pathElements, index + 1, map.get(key))); + @Override + public List fetchValues(Source source, int doc, List ignoredValues) throws IOException { + if (childScorer == null || offsetsLoader == null || doc == 0) { + return List.of(); + } + int previousParent = bitSet.prevSetBit(doc - 1); + var it = childScorer.iterator(); + if (it.docID() < previousParent) { + it.advance(previousParent); + } + Map> chunkMap = new LinkedHashMap<>(); + while (it.docID() < doc) { + if (dvLoader == null || dvLoader.advanceToDoc(it.docID()) == false) { + throw new IllegalStateException( + "Cannot fetch values for field [" + name() + "], missing embeddings for doc [" + doc + "]" + ); + } + var offset = offsetsLoader.advanceTo(it.docID()); + if (offset == null) { + throw new IllegalStateException( + "Cannot fetch values for field [" + name() + "], missing offsets for doc [" + doc + "]" + ); + } + var chunks = chunkMap.computeIfAbsent(offset.field(), k -> new ArrayList<>()); + chunks.add( + new SemanticTextField.Chunk( + null, + offset.start(), + offset.end(), + rawEmbeddings(fieldLoader::write, source.sourceContentType()) + ) + ); + if (it.nextDoc() == DocIdSetIterator.NO_MORE_DOCS) { + break; } } + if (chunkMap.isEmpty()) { + return List.of(); + } + return List.of( + new SemanticTextField( + useLegacyFormat, + name(), + null, + new SemanticTextField.InferenceResult(inferenceId, modelSettings, chunkMap), + source.sourceContentType() + ) + ); + } - if (++index < pathElements.length) { - key += "." + pathElements[index]; + private BytesReference rawEmbeddings(CheckedConsumer writer, XContentType xContentType) + throws IOException { + try (var result = XContentFactory.contentBuilder(xContentType)) { + try (var builder = XContentFactory.contentBuilder(xContentType)) { + builder.startObject(); + writer.accept(builder); + builder.endObject(); + try ( + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY, + BytesReference.bytes(builder), + xContentType + ) + ) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); + parser.nextToken(); + result.copyCurrentStructure(parser); + } + return BytesReference.bytes(result); + } } } - if (suffixMaps.isEmpty()) { - // We checked for all remaining elements in the path, and they do not exist. This means we found a leaf map that we should - // add the value to. 
- suffixMaps.add(new SuffixMap(key, map)); + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; } - - return suffixMaps; - } else { - throw new IllegalArgumentException( - "Path [" - + String.join(".", Arrays.copyOfRange(pathElements, 0, index)) - + "] has value [" - + currentValue - + "] of type [" - + currentValue.getClass().getSimpleName() - + "], which cannot be traversed into further" - ); } } private static ObjectMapper createInferenceField( MapperBuilderContext context, IndexVersion indexVersionCreated, + boolean useLegacyFormat, @Nullable SemanticTextField.ModelSettings modelSettings, Function bitSetProducer, IndexSettings indexSettings ) { return new ObjectMapper.Builder(INFERENCE_FIELD, Optional.of(ObjectMapper.Subobjects.ENABLED)).dynamic(ObjectMapper.Dynamic.FALSE) - .add(createChunksField(indexVersionCreated, modelSettings, bitSetProducer, indexSettings)) + .add(createChunksField(indexVersionCreated, useLegacyFormat, modelSettings, bitSetProducer, indexSettings)) .build(context); } private static NestedObjectMapper.Builder createChunksField( IndexVersion indexVersionCreated, + boolean useLegacyFormat, @Nullable SemanticTextField.ModelSettings modelSettings, Function bitSetProducer, IndexSettings indexSettings ) { NestedObjectMapper.Builder chunksField = new NestedObjectMapper.Builder( - CHUNKS_FIELD, - indexVersionCreated, + SemanticTextField.CHUNKS_FIELD, + indexSettings.getIndexVersionCreated(), bitSetProducer, indexSettings ); chunksField.dynamic(ObjectMapper.Dynamic.FALSE); - KeywordFieldMapper.Builder chunkTextField = new KeywordFieldMapper.Builder(CHUNKED_TEXT_FIELD, indexVersionCreated).indexed(false) - .docValues(false); if (modelSettings != null) { - chunksField.add(createEmbeddingsField(indexVersionCreated, modelSettings)); + chunksField.add(createEmbeddingsField(indexSettings.getIndexVersionCreated(), modelSettings)); + } + if (useLegacyFormat) { + var chunkTextField = new KeywordFieldMapper.Builder(TEXT_FIELD, indexVersionCreated).indexed(false).docValues(false); + chunksField.add(chunkTextField); + } else { + chunksField.add(new OffsetSourceFieldMapper.Builder(CHUNKED_OFFSET_FIELD)); } - chunksField.add(chunkTextField); return chunksField; } private static Mapper.Builder createEmbeddingsField(IndexVersion indexVersionCreated, SemanticTextField.ModelSettings modelSettings) { return switch (modelSettings.taskType()) { - case SPARSE_EMBEDDING -> new SparseVectorFieldMapper.Builder(CHUNKED_EMBEDDINGS_FIELD); + case SPARSE_EMBEDDING -> new SparseVectorFieldMapper.Builder(CHUNKED_EMBEDDINGS_FIELD).setStored(true); case TEXT_EMBEDDING -> { DenseVectorFieldMapper.Builder denseVectorMapperBuilder = new DenseVectorFieldMapper.Builder( CHUNKED_EMBEDDINGS_FIELD, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextUtils.java new file mode 100644 index 000000000000..4d3c2e875236 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextUtils.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +public interface SemanticTextUtils { + /** + * This method converts the given {@code valueObj} into a list of strings. + */ + static List nodeStringValues(String field, Object valueObj) { + if (valueObj instanceof Number || valueObj instanceof Boolean) { + return List.of(valueObj.toString()); + } else if (valueObj instanceof String value) { + return List.of(value); + } else if (valueObj instanceof Collection values) { + List valuesString = new ArrayList<>(); + for (var v : values) { + if (v instanceof Number || v instanceof Boolean) { + valuesString.add(v.toString()); + } else if (v instanceof String value) { + valuesString.add(value); + } else { + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String|Number|Boolean] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + } + return valuesString; + } + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String|Number|Boolean] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + + /** + *

<p>
+ * Insert or replace the path's value in the map with the provided new value. The map will be modified in-place. + * If the complete path does not exist in the map, it will be added to the deepest (sub-)map possible. + * </p>
+ * <p>
+ * For example, given the map: + * </p>
+ * <pre>
+     * {
+     *   "path1": {
+     *     "path2": {
+     *       "key1": "value1"
+     *     }
+     *   }
+     * }
+     * </pre>
+ * <p>
+ * And the caller wanted to insert {@code "path1.path2.path3.key2": "value2"}, the method would emit the modified map: + * </p>
+ * <pre>
+     * {
+     *   "path1": {
+     *     "path2": {
+     *       "key1": "value1",
+     *       "path3.key2": "value2"
+     *     }
+     *   }
+     * }
+     * </pre>
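+ * <p>
+ * Illustrative sketch (hypothetical keys, mirroring the ambiguity tests): mixing dotted and nested notation for the same + * logical path makes the insertion point ambiguous. Given: + * </p>
+     * <pre>
+     * {
+     *   "path1.path2": { "path3": { "test1": "value1" } },
+     *   "path1": { "path2.path3": { "test2": "value2" } }
+     * }
+     * </pre>
+ * <p>
+ * inserting {@code "path1.path2.path3.test1"} would throw an {@link IllegalArgumentException}, since the path could be + * inserted in two distinct ways and it is ambiguous which one to use. + * </p>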
+ * + * @param path the value's path in the map. + * @param map the map to search and modify in-place. + * @param newValue the new value to assign to the path. + * + * @throws IllegalArgumentException If either the path cannot be fully traversed or there is ambiguity about where to insert the new + * value. + */ + static void insertValue(String path, Map map, Object newValue) { + String[] pathElements = path.split("\\."); + if (pathElements.length == 0) { + return; + } + + List suffixMaps = extractSuffixMaps(pathElements, 0, map); + if (suffixMaps.isEmpty()) { + // This should never happen. Throw in case it does for some reason. + throw new IllegalStateException("extractSuffixMaps returned an empty suffix map list"); + } else if (suffixMaps.size() == 1) { + SuffixMap suffixMap = suffixMaps.getFirst(); + suffixMap.map().put(suffixMap.suffix(), newValue); + } else { + throw new IllegalArgumentException( + "Path [" + path + "] could be inserted in " + suffixMaps.size() + " distinct ways, it is ambiguous which one to use" + ); + } + } + + record SuffixMap(String suffix, Map map) {} + + private static List extractSuffixMaps(String[] pathElements, int index, Object currentValue) { + if (currentValue instanceof List valueList) { + List suffixMaps = new ArrayList<>(valueList.size()); + for (Object o : valueList) { + suffixMaps.addAll(extractSuffixMaps(pathElements, index, o)); + } + + return suffixMaps; + } else if (currentValue instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) currentValue; + List suffixMaps = new ArrayList<>(map.size()); + + String key = pathElements[index]; + while (index < pathElements.length) { + if (map.containsKey(key)) { + if (index + 1 == pathElements.length) { + // We found the complete path + suffixMaps.add(new SuffixMap(key, map)); + } else { + // We've matched that path partially, keep traversing to try to match it fully + suffixMaps.addAll(extractSuffixMaps(pathElements, index + 1, map.get(key))); + } + } + + if (++index < pathElements.length) { + key += "." + pathElements[index]; + } + } + + if (suffixMaps.isEmpty()) { + // We checked for all remaining elements in the path, and they do not exist. This means we found a leaf map that we should + // add the value to. 
+ suffixMaps.add(new SuffixMap(key, map)); + } + + return suffixMaps; + } else { + throw new IllegalArgumentException( + "Path [" + + String.join(".", Arrays.copyOfRange(pathElements, 0, index)) + + "] has value [" + + currentValue + + "] of type [" + + currentValue.getClass().getSimpleName() + + "], which cannot be traversed into further" + ); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java index 340a5a65f096..817079fd54cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java @@ -17,7 +17,7 @@ public enum AmazonBedrockProvider { META, MISTRAL; - public static String NAME = "amazon_bedrock_provider"; + public static final String NAME = "amazon_bedrock_provider"; public static AmazonBedrockProvider fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioEndpointType.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioEndpointType.java index ece63f4bbf0c..8639ba5b3aed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioEndpointType.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioEndpointType.java @@ -13,7 +13,7 @@ public enum AzureAiStudioEndpointType { TOKEN, REALTIME; - public static String NAME = "azure_ai_studio_endpoint_type"; + public static final String NAME = "azure_ai_studio_endpoint_type"; public static AzureAiStudioEndpointType fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioProvider.java index 6b3efca0888f..55f3f557797d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioProvider.java @@ -17,7 +17,7 @@ public enum AzureAiStudioProvider { COHERE, DATABRICKS; - public static String NAME = "azure_ai_studio_provider"; + public static final String NAME = "azure_ai_studio_provider"; public static AzureAiStudioProvider fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java index 3bcaa57827fd..264d694703d8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java @@ -22,7 
+22,7 @@ public class ElserMlNodeTaskSettings implements TaskSettings { public static final String NAME = "elser_mlnode_task_settings"; - public static ElserMlNodeTaskSettings DEFAULT = new ElserMlNodeTaskSettings(); + public static final ElserMlNodeTaskSettings DEFAULT = new ElserMlNodeTaskSettings(); public ElserMlNodeTaskSettings() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java index 37f528ea3a75..c1764b93bfc8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java @@ -16,7 +16,7 @@ public class ElserModels { public static final String ELSER_V2_MODEL = ".elser_model_2"; public static final String ELSER_V2_MODEL_LINUX_X86 = ".elser_model_2_linux-x86_64"; - public static Set VALID_ELSER_MODEL_IDS = Set.of( + public static final Set VALID_ELSER_MODEL_IDS = Set.of( ElserModels.ELSER_V1_MODEL, ElserModels.ELSER_V2_MODEL, ElserModels.ELSER_V2_MODEL_LINUX_X86 diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index 0b7d136ffb04..3a668cb96604 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.action.filter; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkItemRequest; @@ -16,10 +18,19 @@ import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.ProjectMetadata; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.InferenceService; @@ -65,14 +76,26 @@ import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.to import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.ArgumentMatchers.anyString; 
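// Illustrative note for the parameterized assertions below (a sketch inferred from this test's expected values, not part
// of the change itself): with the legacy format, inference results are written inline under the semantic_text field, so
// the raw text is read back from "field1.text"; with the new format, the original value stays at "field1" and the
// results move under the InferenceMetadataFieldsMapper.NAME metadata field. Roughly:
//
//   legacy: { "field1": { "text": "I am a success", "inference": { ... } } }
//   new:    { "field1": "I am a success", <InferenceMetadataFieldsMapper.NAME>: { "field1": { ... } } }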
import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; public class ShardBulkInferenceActionFilterTests extends ESTestCase { + private final boolean useLegacyFormat; private ThreadPool threadPool; + public ShardBulkInferenceActionFilterTests(boolean useLegacyFormat) { + this.useLegacyFormat = useLegacyFormat; + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return List.of(new Object[] { true }, new Object[] { false }); + } + @Before public void setupThreadPool() { threadPool = new TestThreadPool(getTestName()); @@ -85,7 +108,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { @SuppressWarnings({ "unchecked", "rawtypes" }) public void testFilterNoop() throws Exception { - ShardBulkInferenceActionFilter filter = createFilter(threadPool, Map.of(), DEFAULT_BATCH_SIZE); + ShardBulkInferenceActionFilter filter = createFilter(threadPool, Map.of(), DEFAULT_BATCH_SIZE, useLegacyFormat); CountDownLatch chainExecuted = new CountDownLatch(1); ActionFilterChain actionFilterChain = (task, action, request, listener) -> { try { @@ -114,7 +137,8 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { ShardBulkInferenceActionFilter filter = createFilter( threadPool, Map.of(model.getInferenceEntityId(), model), - randomIntBetween(1, 10) + randomIntBetween(1, 10), + useLegacyFormat ); CountDownLatch chainExecuted = new CountDownLatch(1); ActionFilterChain actionFilterChain = (task, action, request, listener) -> { @@ -144,7 +168,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { ); BulkItemRequest[] items = new BulkItemRequest[10]; for (int i = 0; i < items.length; i++) { - items[i] = randomBulkItemRequest(Map.of(), inferenceFieldMap)[0]; + items[i] = randomBulkItemRequest(useLegacyFormat, Map.of(), inferenceFieldMap)[0]; } BulkShardRequest request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); request.setInferenceFieldMap(inferenceFieldMap); @@ -155,10 +179,12 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { @SuppressWarnings({ "unchecked", "rawtypes" }) public void testItemFailures() throws Exception { StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( threadPool, Map.of(model.getInferenceEntityId(), model), - randomIntBetween(1, 10) + randomIntBetween(1, 10), + useLegacyFormat ); model.putResult("I am a failure", new ChunkedInferenceError(new IllegalArgumentException("boom"))); model.putResult("I am a success", randomChunkedInferenceEmbeddingSparse(List.of("I am a success"))); @@ -178,7 +204,10 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { // item 1 is a success assertNull(bulkShardRequest.items()[1].getPrimaryResponse()); IndexRequest actualRequest = getIndexRequestOrNull(bulkShardRequest.items()[1].request()); - assertThat(XContentMapValues.extractValue("field1.text", actualRequest.sourceAsMap()), equalTo("I am a success")); + assertThat( + XContentMapValues.extractValue(useLegacyFormat ? 
"field1.text" : "field1", actualRequest.sourceAsMap()), + equalTo("I am a success") + ); // item 2 is a failure assertNotNull(bulkShardRequest.items()[2].getPrimaryResponse()); @@ -227,12 +256,12 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { BulkItemRequest[] originalRequests = new BulkItemRequest[numRequests]; BulkItemRequest[] modifiedRequests = new BulkItemRequest[numRequests]; for (int id = 0; id < numRequests; id++) { - BulkItemRequest[] res = randomBulkItemRequest(inferenceModelMap, inferenceFieldMap); + BulkItemRequest[] res = randomBulkItemRequest(useLegacyFormat, inferenceModelMap, inferenceFieldMap); originalRequests[id] = res[0]; modifiedRequests[id] = res[1]; } - ShardBulkInferenceActionFilter filter = createFilter(threadPool, inferenceModelMap, randomIntBetween(10, 30)); + ShardBulkInferenceActionFilter filter = createFilter(threadPool, inferenceModelMap, randomIntBetween(10, 30), useLegacyFormat); CountDownLatch chainExecuted = new CountDownLatch(1); ActionFilterChain actionFilterChain = (task, action, request, listener) -> { try { @@ -263,7 +292,12 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { } @SuppressWarnings("unchecked") - private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool, Map modelMap, int batchSize) { + private static ShardBulkInferenceActionFilter createFilter( + ThreadPool threadPool, + Map modelMap, + int batchSize, + boolean useLegacyFormat + ) { ModelRegistry modelRegistry = mock(ModelRegistry.class); Answer unparsedModelAnswer = invocationOnMock -> { String id = (String) invocationOnMock.getArguments()[0]; @@ -319,24 +353,53 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { InferenceServiceRegistry inferenceServiceRegistry = mock(InferenceServiceRegistry.class); when(inferenceServiceRegistry.getService(any())).thenReturn(Optional.of(inferenceService)); - ShardBulkInferenceActionFilter filter = new ShardBulkInferenceActionFilter(inferenceServiceRegistry, modelRegistry, batchSize); - return filter; + + return new ShardBulkInferenceActionFilter( + createClusterService(useLegacyFormat), + inferenceServiceRegistry, + modelRegistry, + batchSize + ); + } + + private static ClusterService createClusterService(boolean useLegacyFormat) { + IndexMetadata indexMetadata = mock(IndexMetadata.class); + var settings = Settings.builder() + .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), IndexVersion.current()) + .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) + .build(); + when(indexMetadata.getSettings()).thenReturn(settings); + + ProjectMetadata project = spy(ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID).build()); + when(project.index(anyString())).thenReturn(indexMetadata); + + Metadata metadata = mock(Metadata.class); + when(metadata.getProject()).thenReturn(project); + + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(metadata).build(); + ClusterService clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(clusterState); + + return clusterService; } private static BulkItemRequest[] randomBulkItemRequest( + boolean useLegacyFormat, Map modelMap, Map fieldInferenceMap ) throws IOException { Map docMap = new LinkedHashMap<>(); Map expectedDocMap = new LinkedHashMap<>(); XContentType requestContentType = randomFrom(XContentType.values()); + + Map inferenceMetadataFields = new HashMap<>(); for (var entry : fieldInferenceMap.values()) { String 
field = entry.getName(); var model = modelMap.get(entry.getInferenceId()); Object inputObject = randomSemanticTextInput(); String inputText = inputObject.toString(); docMap.put(field, inputObject); - expectedDocMap.put(field, inputText); + expectedDocMap.put(field, useLegacyFormat ? inputText : inputObject); if (model == null) { // ignore results, the doc should fail with a resource not found exception continue; @@ -349,6 +412,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { if (model.hasResult(inputText)) { var results = model.getResults(inputText); semanticTextField = semanticTextFieldFromChunkedInferenceResults( + useLegacyFormat, field, model, List.of(inputText), @@ -356,11 +420,19 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { requestContentType ); } else { - semanticTextField = randomSemanticText(field, model, List.of(inputText), requestContentType); - model.putResult(inputText, toChunkedResult(semanticTextField)); + Map> inputTextMap = Map.of(field, List.of(inputText)); + semanticTextField = randomSemanticText(useLegacyFormat, field, model, List.of(inputText), requestContentType); + model.putResult(inputText, toChunkedResult(useLegacyFormat, inputTextMap, semanticTextField)); } - expectedDocMap.put(field, semanticTextField); + if (useLegacyFormat) { + expectedDocMap.put(field, semanticTextField); + } else { + inferenceMetadataFields.put(field, semanticTextField); + } + } + if (useLegacyFormat == false) { + expectedDocMap.put(InferenceMetadataFieldsMapper.NAME, inferenceMetadataFields); } int requestId = randomIntBetween(0, Integer.MAX_VALUE); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java index b4fa5c912225..a4228d50eae4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java @@ -56,7 +56,7 @@ public class WordBoundaryChunkerTests extends ESTestCase { + " خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين " + "چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔" }; - public static int NUM_WORDS_IN_TEST_TEXT; + public static final int NUM_WORDS_IN_TEST_TEXT; static { var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); wordIterator.setText(TEST_TEXT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighterTests.java index 78743409ca17..20119bf933e1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighterTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.SourceToParse; @@ -166,7 +167,8 @@ public class SemanticTextHighlighterTests extends MapperServiceTestCase { private MapperService createDefaultMapperService() throws IOException { var mappings = Streams.readFully(SemanticTextHighlighterTests.class.getResourceAsStream("mappings.json")); - return createMapperService(mappings.utf8ToString()); + var settings = Settings.builder().put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), true).build(); + return createMapperService(settings, mappings.utf8ToString()); } private float[] readDenseVector(Object value) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldMapperTests.java new file mode 100644 index 000000000000..6504ccc4dd39 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldMapperTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperServiceTestCase; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.inference.InferencePlugin; + +import java.util.Collection; +import java.util.Collections; + +public class SemanticInferenceMetadataFieldMapperTests extends MapperServiceTestCase { + @Override + protected Collection getPlugins() { + return Collections.singletonList(new InferencePlugin(Settings.EMPTY)); + } + + @Override + public void testFieldHasValue() { + assertTrue( + getMappedFieldType().fieldHasValue( + new FieldInfos(new FieldInfo[] { getFieldInfoWithName(SemanticInferenceMetadataFieldsMapper.NAME) }) + ) + ); + } + + @Override + public void testFieldHasValueWithEmptyFieldInfos() { + assertFalse(getMappedFieldType().fieldHasValue(FieldInfos.EMPTY)); + } + + @Override + public MappedFieldType getMappedFieldType() { + return new SemanticInferenceMetadataFieldsMapper.FieldType(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index c6a492dfcf4e..11362c3cedd0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.mapper; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexableField; @@ -29,10 +31,12 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import 
org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; @@ -57,35 +61,30 @@ import org.elasticsearch.search.LeafNestedDocuments; import org.elasticsearch.search.NestedDocuments; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryWrapper; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.model.TestModel; import org.junit.AssumptionViolatedException; import java.io.IOException; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.BiConsumer; -import java.util.stream.Stream; -import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; -import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_ID_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.MODEL_SETTINGS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.SEARCH_INFERENCE_ID_FIELD; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getChunksFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getEmbeddingsFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper.DEFAULT_ELSER_2_INFERENCE_ID; @@ -95,9 +94,35 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class SemanticTextFieldMapperTests extends MapperTestCase { + private final boolean useLegacyFormat; + + public SemanticTextFieldMapperTests(boolean useLegacyFormat) { + this.useLegacyFormat = useLegacyFormat; + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return List.of(new Object[] { true }, new Object[] { false }); + } + @Override protected Collection getPlugins() { - return singletonList(new InferencePlugin(Settings.EMPTY)); + return List.of(new InferencePlugin(Settings.EMPTY), new XPackClientPlugin()); + } + + private MapperService createMapperService(XContentBuilder mappings, boolean useLegacyFormat) throws IOException { + var settings = 
Settings.builder() + .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) + .build(); + return createMapperService(settings, mappings); + } + + @Override + protected Settings getIndexSettings() { + return Settings.builder() + .put(super.getIndexSettings()) + .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) + .build(); } @Override @@ -158,7 +183,8 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { null, null, null, - IndexVersion.current(), + getVersion(), + false, Map.of() ); } @@ -175,7 +201,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping); final XContentBuilder expectedMapping = fieldMapping(this::metaMapping); - MapperService mapperService = createMapperService(fieldMapping); + MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat); DocumentMapper mapper = mapperService.documentMapper(); assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString()); assertSemanticTextField(mapperService, fieldName, false); @@ -207,7 +233,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { { final XContentBuilder fieldMapping = fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, inferenceId)); - final MapperService mapperService = createMapperService(fieldMapping); + final MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); assertSerialization.accept(fieldMapping, mapperService); @@ -221,7 +247,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { .field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID) .field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) ); - final MapperService mapperService = createMapperService(fieldMapping); + final MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); assertSerialization.accept(expectedMapping, mapperService); @@ -232,7 +258,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { .field(INFERENCE_ID_FIELD, inferenceId) .field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) ); - MapperService mapperService = createMapperService(fieldMapping); + MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId); assertSerialization.accept(fieldMapping, mapperService); @@ -243,7 +269,10 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { { Exception e = expectThrows( MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, (String) null))) + () -> createMapperService( + fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, (String) null)), + useLegacyFormat + ) ); assertThat( e.getMessage(), @@ -253,14 +282,20 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { { Exception e = expectThrows( MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", 
"semantic_text").field(INFERENCE_ID_FIELD, ""))) + () -> createMapperService( + fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, "")), + useLegacyFormat + ) ); assertThat(e.getMessage(), containsString("[inference_id] on mapper [field] of type [semantic_text] must not be empty")); } { Exception e = expectThrows( MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) + () -> createMapperService( + fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, "")), + useLegacyFormat + ) ); assertThat(e.getMessage(), containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty")); } @@ -275,14 +310,15 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { b.field("inference_id", "my_inference_id"); b.endObject(); b.endObject(); - }))); + }), useLegacyFormat)); assertThat(e.getMessage(), containsString("Field [semantic] of type [semantic_text] can't be used in multifields")); } public void testUpdatesToInferenceIdNotSupported() throws IOException { String fieldName = randomAlphaOfLengthBetween(5, 15); MapperService mapperService = createMapperService( - mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()) + mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()), + useLegacyFormat ); assertSemanticTextField(mapperService, fieldName, false); Exception e = expectThrows( @@ -326,7 +362,8 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { for (int depth = 1; depth < 5; depth++) { String fieldName = randomFieldName(depth); MapperService mapperService = createMapperService( - mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()) + mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()), + useLegacyFormat ); assertSemanticTextField(mapperService, fieldName, false); { @@ -415,7 +452,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { for (int depth = 1; depth < 5; depth++) { String fieldName = randomFieldName(depth); - MapperService mapperService = createMapperService(buildMapping.apply(fieldName, null)); + MapperService mapperService = createMapperService(buildMapping.apply(fieldName, null), useLegacyFormat); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); @@ -471,12 +508,20 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { .get(getChunksFieldName(fieldName)); assertThat(chunksMapper, equalTo(semanticFieldMapper.fieldType().getChunksField())); assertThat(chunksMapper.fullPath(), equalTo(getChunksFieldName(fieldName))); - Mapper textMapper = chunksMapper.getMapper(CHUNKED_TEXT_FIELD); - assertNotNull(textMapper); - assertThat(textMapper, instanceOf(KeywordFieldMapper.class)); - KeywordFieldMapper textFieldMapper = (KeywordFieldMapper) textMapper; - assertFalse(textFieldMapper.fieldType().isIndexed()); - assertFalse(textFieldMapper.fieldType().hasDocValues()); + + Mapper textMapper = chunksMapper.getMapper(TEXT_FIELD); + if (semanticTextFieldType.useLegacyFormat()) { + assertNotNull(textMapper); + assertThat(textMapper, instanceOf(KeywordFieldMapper.class)); + KeywordFieldMapper textFieldMapper = (KeywordFieldMapper) 
textMapper; + assertFalse(textFieldMapper.fieldType().isIndexed()); + assertFalse(textFieldMapper.fieldType().hasDocValues()); + } else { + assertNull(textMapper); + var offsetMapper = semanticTextFieldType.getOffsetsField(); + assertThat(offsetMapper, instanceOf(OffsetSourceFieldMapper.class)); + } + if (expectedModelSettings) { assertNotNull(semanticFieldMapper.fieldType().getModelSettings()); Mapper embeddingsMapper = chunksMapper.getMapper(CHUNKED_EMBEDDINGS_FIELD); @@ -523,7 +568,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { addSemanticTextMapping(b, fieldName2, model2.getInferenceEntityId(), setSearchInferenceId ? searchInferenceId : null); }); - MapperService mapperService = createMapperService(mapping); + MapperService mapperService = createMapperService(mapping, useLegacyFormat); assertSemanticTextField(mapperService, fieldName1, false); assertInferenceEndpoints( mapperService, @@ -543,10 +588,11 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { ParsedDocument doc = documentMapper.parse( source( b -> addSemanticTextInferenceResults( + useLegacyFormat, b, List.of( - randomSemanticText(fieldName1, model1, List.of("a b", "c"), XContentType.JSON), - randomSemanticText(fieldName2, model2, List.of("d e f"), XContentType.JSON) + randomSemanticText(useLegacyFormat, fieldName1, model1, List.of("a b", "c"), XContentType.JSON), + randomSemanticText(useLegacyFormat, fieldName2, model2, List.of("d e f"), XContentType.JSON) ) ) ) @@ -626,52 +672,64 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { } public void testMissingInferenceId() throws IOException { - DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null))); + final MapperService mapperService = createMapperService( + mapping(b -> addSemanticTextMapping(b, "field", "my_id", null)), + useLegacyFormat + ); + IllegalArgumentException ex = expectThrows( DocumentParsingException.class, IllegalArgumentException.class, - () -> documentMapper.parse( - source( - b -> b.startObject("field") - .startObject(INFERENCE_FIELD) - .field(MODEL_SETTINGS_FIELD, new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null)) - .field(CHUNKS_FIELD, List.of()) - .endObject() - .endObject() + () -> mapperService.documentMapper() + .parse( + semanticTextInferenceSource( + useLegacyFormat, + b -> b.startObject("field") + .startObject(INFERENCE_FIELD) + .field(MODEL_SETTINGS_FIELD, new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null)) + .field(CHUNKS_FIELD, useLegacyFormat ? 
List.of() : Map.of()) + .endObject() + .endObject() + ) ) - ) ); assertThat(ex.getCause().getMessage(), containsString("Required [inference_id]")); } public void testMissingModelSettings() throws IOException { - DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null))); + MapperService mapperService = createMapperService(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null)), useLegacyFormat); IllegalArgumentException ex = expectThrows( DocumentParsingException.class, IllegalArgumentException.class, - () -> documentMapper.parse( - source(b -> b.startObject("field").startObject(INFERENCE_FIELD).field(INFERENCE_ID_FIELD, "my_id").endObject().endObject()) - ) + () -> mapperService.documentMapper() + .parse( + semanticTextInferenceSource( + useLegacyFormat, + b -> b.startObject("field").startObject(INFERENCE_FIELD).field(INFERENCE_ID_FIELD, "my_id").endObject().endObject() + ) + ) ); assertThat(ex.getCause().getMessage(), containsString("Required [model_settings, chunks]")); } public void testMissingTaskType() throws IOException { - DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null))); + MapperService mapperService = createMapperService(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null)), useLegacyFormat); IllegalArgumentException ex = expectThrows( DocumentParsingException.class, IllegalArgumentException.class, - () -> documentMapper.parse( - source( - b -> b.startObject("field") - .startObject(INFERENCE_FIELD) - .field(INFERENCE_ID_FIELD, "my_id") - .startObject(MODEL_SETTINGS_FIELD) - .endObject() - .endObject() - .endObject() + () -> mapperService.documentMapper() + .parse( + semanticTextInferenceSource( + useLegacyFormat, + b -> b.startObject("field") + .startObject(INFERENCE_FIELD) + .field(INFERENCE_ID_FIELD, "my_id") + .startObject(MODEL_SETTINGS_FIELD) + .endObject() + .endObject() + .endObject() + ) ) - ) ); assertThat(ex.getCause().getMessage(), containsString("failed to parse field [model_settings]")); } @@ -731,7 +789,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { mappingParams += ",search_inference_id=" + searchInferenceId; } - MapperService mapperService = createMapperService(mapping(b -> {})); + MapperService mapperService = createMapperService(mapping(b -> {}), useLegacyFormat); mapperService.merge( "_doc", new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(fieldName, mappingParams))), @@ -739,14 +797,19 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { ); SemanticTextField semanticTextField = new SemanticTextField( + useLegacyFormat, fieldName, List.of(), - new SemanticTextField.InferenceResult(inferenceId, modelSettings, List.of()), + new SemanticTextField.InferenceResult(inferenceId, modelSettings, Map.of()), XContentType.JSON ); XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - builder.field(semanticTextField.fieldName()); - builder.value(semanticTextField); + if (useLegacyFormat) { + builder.field(semanticTextField.fieldName()); + builder.value(semanticTextField); + } else { + builder.field(InferenceMetadataFieldsMapper.NAME, Map.of(semanticTextField.fieldName(), semanticTextField)); + } builder.endObject(); SourceToParse sourceToParse = new SourceToParse("test", BytesReference.bytes(builder), XContentType.JSON); @@ -798,266 +861,6 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { assertThat(existsQuery, 
instanceOf(ESToParentBlockJoinQuery.class)); } - public void testInsertValueMapTraversal() throws IOException { - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("test", "value").endObject(); - - Map map = toSourceMap(Strings.toString(builder)); - SemanticTextFieldMapper.insertValue("test", map, "value2"); - assertThat(getMapValue(map, "test"), equalTo("value2")); - SemanticTextFieldMapper.insertValue("something.else", map, "something_else_value"); - assertThat(getMapValue(map, "something\\.else"), equalTo("something_else_value")); - } - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - builder.startObject("path1").startObject("path2").field("test", "value").endObject().endObject(); - builder.endObject(); - - Map map = toSourceMap(Strings.toString(builder)); - SemanticTextFieldMapper.insertValue("path1.path2.test", map, "value2"); - assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); - SemanticTextFieldMapper.insertValue("path1.path2.test_me", map, "test_me_value"); - assertThat(getMapValue(map, "path1.path2.test_me"), equalTo("test_me_value")); - SemanticTextFieldMapper.insertValue("path1.non_path2.test", map, "test_value"); - assertThat(getMapValue(map, "path1.non_path2\\.test"), equalTo("test_value")); - - SemanticTextFieldMapper.insertValue("path1.path2", map, Map.of("path3", "bar")); - assertThat(getMapValue(map, "path1.path2"), equalTo(Map.of("path3", "bar"))); - - SemanticTextFieldMapper.insertValue("path1", map, "baz"); - assertThat(getMapValue(map, "path1"), equalTo("baz")); - - SemanticTextFieldMapper.insertValue("path3.path4", map, Map.of("test", "foo")); - assertThat(getMapValue(map, "path3\\.path4"), equalTo(Map.of("test", "foo"))); - } - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - builder.startObject("path1").array("test", "value1", "value2").endObject(); - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - - SemanticTextFieldMapper.insertValue("path1.test", map, List.of("value3", "value4", "value5")); - assertThat(getMapValue(map, "path1.test"), equalTo(List.of("value3", "value4", "value5"))); - - SemanticTextFieldMapper.insertValue("path2.test", map, List.of("value6", "value7", "value8")); - assertThat(getMapValue(map, "path2\\.test"), equalTo(List.of("value6", "value7", "value8"))); - } - } - - public void testInsertValueListTraversal() throws IOException { - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - { - builder.startObject("path1"); - { - builder.startArray("path2"); - builder.startObject().field("test", "value1").endObject(); - builder.endArray(); - } - builder.endObject(); - } - { - builder.startObject("path3"); - { - builder.startArray("path4"); - builder.startObject().field("test", "value1").endObject(); - builder.endArray(); - } - builder.endObject(); - } - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - - SemanticTextFieldMapper.insertValue("path1.path2.test", map, "value2"); - assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); - SemanticTextFieldMapper.insertValue("path1.path2.test2", map, "value3"); - assertThat(getMapValue(map, "path1.path2.test2"), equalTo("value3")); - assertThat(getMapValue(map, "path1.path2"), equalTo(List.of(Map.of("test", "value2", "test2", "value3")))); - - SemanticTextFieldMapper.insertValue("path3.path4.test", map, "value4"); - assertThat(getMapValue(map, "path3.path4.test"), equalTo("value4")); - } - { - XContentBuilder builder 
= XContentFactory.jsonBuilder().startObject(); - { - builder.startObject("path1"); - { - builder.startArray("path2"); - builder.startArray(); - builder.startObject().field("test", "value1").endObject(); - builder.endArray(); - builder.endArray(); - } - builder.endObject(); - } - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - - SemanticTextFieldMapper.insertValue("path1.path2.test", map, "value2"); - assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); - SemanticTextFieldMapper.insertValue("path1.path2.test2", map, "value3"); - assertThat(getMapValue(map, "path1.path2.test2"), equalTo("value3")); - assertThat(getMapValue(map, "path1.path2"), equalTo(List.of(List.of(Map.of("test", "value2", "test2", "value3"))))); - } - } - - public void testInsertValueFieldsWithDots() throws IOException { - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("xxx.yyy", "value1").endObject(); - Map map = toSourceMap(Strings.toString(builder)); - - SemanticTextFieldMapper.insertValue("xxx.yyy", map, "value2"); - assertThat(getMapValue(map, "xxx\\.yyy"), equalTo("value2")); - - SemanticTextFieldMapper.insertValue("xxx", map, "value3"); - assertThat(getMapValue(map, "xxx"), equalTo("value3")); - } - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - { - builder.startObject("path1.path2"); - { - builder.startObject("path3.path4"); - builder.field("test", "value1"); - builder.endObject(); - } - builder.endObject(); - } - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - - SemanticTextFieldMapper.insertValue("path1.path2.path3.path4.test", map, "value2"); - assertThat(getMapValue(map, "path1\\.path2.path3\\.path4.test"), equalTo("value2")); - - SemanticTextFieldMapper.insertValue("path1.path2.path3.path4.test2", map, "value3"); - assertThat(getMapValue(map, "path1\\.path2.path3\\.path4.test2"), equalTo("value3")); - assertThat(getMapValue(map, "path1\\.path2.path3\\.path4"), equalTo(Map.of("test", "value2", "test2", "value3"))); - } - } - - public void testInsertValueAmbiguousPath() throws IOException { - // Mixed dotted object notation - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - { - builder.startObject("path1.path2"); - { - builder.startObject("path3"); - builder.field("test1", "value1"); - builder.endObject(); - } - builder.endObject(); - } - { - builder.startObject("path1"); - { - builder.startObject("path2.path3"); - builder.field("test2", "value2"); - builder.endObject(); - } - builder.endObject(); - } - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); - - IllegalArgumentException ex = assertThrows( - IllegalArgumentException.class, - () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test1", map, "value3") - ); - assertThat( - ex.getMessage(), - equalTo("Path [path1.path2.path3.test1] could be inserted in 2 distinct ways, it is ambiguous which one to use") - ); - - ex = assertThrows( - IllegalArgumentException.class, - () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test3", map, "value4") - ); - assertThat( - ex.getMessage(), - equalTo("Path [path1.path2.path3.test3] could be inserted in 2 distinct ways, it is ambiguous which one to use") - ); - - assertThat(map, equalTo(originalMap)); - } - - // traversal through lists - { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - { - 
builder.startObject("path1.path2"); - { - builder.startArray("path3"); - builder.startObject().field("test1", "value1").endObject(); - builder.endArray(); - } - builder.endObject(); - } - { - builder.startObject("path1"); - { - builder.startArray("path2.path3"); - builder.startObject().field("test2", "value2").endObject(); - builder.endArray(); - } - builder.endObject(); - } - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); - - IllegalArgumentException ex = assertThrows( - IllegalArgumentException.class, - () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test1", map, "value3") - ); - assertThat( - ex.getMessage(), - equalTo("Path [path1.path2.path3.test1] could be inserted in 2 distinct ways, it is ambiguous which one to use") - ); - - ex = assertThrows( - IllegalArgumentException.class, - () -> SemanticTextFieldMapper.insertValue("path1.path2.path3.test3", map, "value4") - ); - assertThat( - ex.getMessage(), - equalTo("Path [path1.path2.path3.test3] could be inserted in 2 distinct ways, it is ambiguous which one to use") - ); - - assertThat(map, equalTo(originalMap)); - } - } - - public void testInsertValueCannotTraversePath() throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - { - builder.startObject("path1"); - { - builder.startArray("path2"); - builder.startArray(); - builder.startObject().field("test", "value1").endObject(); - builder.endArray(); - builder.endArray(); - } - builder.endObject(); - } - builder.endObject(); - Map map = toSourceMap(Strings.toString(builder)); - final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); - - IllegalArgumentException ex = assertThrows( - IllegalArgumentException.class, - () -> SemanticTextFieldMapper.insertValue("path1.path2.test.test2", map, "value2") - ); - assertThat( - ex.getMessage(), - equalTo("Path [path1.path2.test] has value [value1] of type [String], which cannot be traversed into further") - ); - - assertThat(map, equalTo(originalMap)); - } - @Override protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { // Until a doc is indexed, the query is rewritten as match no docs @@ -1079,11 +882,23 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { mappingBuilder.endObject(); } - private static void addSemanticTextInferenceResults(XContentBuilder sourceBuilder, List semanticTextInferenceResults) - throws IOException { - for (var field : semanticTextInferenceResults) { - sourceBuilder.field(field.fieldName()); - sourceBuilder.value(field); + private static void addSemanticTextInferenceResults( + boolean useLegacyFormat, + XContentBuilder sourceBuilder, + List semanticTextInferenceResults + ) throws IOException { + if (useLegacyFormat) { + for (var field : semanticTextInferenceResults) { + sourceBuilder.field(field.fieldName()); + sourceBuilder.value(field); + } + } else { + // Use a linked hash map to maintain insertion-order iteration over the inference fields + Map inferenceMetadataFields = new LinkedHashMap<>(); + for (var field : semanticTextInferenceResults) { + inferenceMetadataFields.put(field.fieldName(), field); + } + sourceBuilder.field(InferenceMetadataFieldsMapper.NAME, inferenceMetadataFields); } } @@ -1119,6 +934,19 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { ); } + private static SourceToParse semanticTextInferenceSource(boolean 
useLegacyFormat, CheckedConsumer build) + throws IOException { + return source(b -> { + if (useLegacyFormat == false) { + b.startObject(InferenceMetadataFieldsMapper.NAME); + } + build.accept(b); + if (useLegacyFormat == false) { + b.endObject(); + } + }); + } + private static void assertChildLeafNestedDocument( LeafNestedDocuments leaf, int advanceToDoc, @@ -1143,68 +971,4 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { } assertThat(count, equalTo(expectedCount)); } - - private Map toSourceMap(String source) throws IOException { - try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { - return parser.map(); - } - } - - private static Object getMapValue(Map map, String key) { - // Split the path on unescaped "." chars and then unescape the escaped "." chars - final String[] pathElements = Arrays.stream(key.split("(? k.replace("\\.", ".")).toArray(String[]::new); - - Object value = null; - Object nextLayer = map; - for (int i = 0; i < pathElements.length; i++) { - if (nextLayer instanceof Map nextMap) { - value = nextMap.get(pathElements[i]); - } else if (nextLayer instanceof List nextList) { - final String pathElement = pathElements[i]; - List values = nextList.stream().flatMap(v -> { - Stream.Builder streamBuilder = Stream.builder(); - if (v instanceof List innerList) { - traverseList(innerList, streamBuilder); - } else { - streamBuilder.add(v); - } - return streamBuilder.build(); - }).filter(v -> v instanceof Map).map(v -> ((Map) v).get(pathElement)).filter(Objects::nonNull).toList(); - - if (values.isEmpty()) { - return null; - } else if (values.size() > 1) { - throw new AssertionError("List " + nextList + " contains multiple values for [" + pathElement + "]"); - } else { - value = values.getFirst(); - } - } else if (nextLayer == null) { - break; - } else { - throw new AssertionError( - "Path [" - + String.join(".", Arrays.copyOfRange(pathElements, 0, i)) - + "] has value [" - + value - + "] of type [" - + value.getClass().getSimpleName() - + "], which cannot be traversed into further" - ); - } - - nextLayer = value; - } - - return value; - } - - private static void traverseList(List list, Stream.Builder streamBuilder) { - for (Object value : list) { - if (value instanceof List innerList) { - traverseList(innerList, streamBuilder); - } else { - streamBuilder.add(value); - } - } - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index dcdd9b3d4234..29ca71d38e1b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -7,9 +7,10 @@ package org.elasticsearch.xpack.inference.mapper; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.Model; @@ -27,18 +28,31 @@ import org.elasticsearch.xpack.inference.model.TestModel; import java.io.IOException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; +import java.util.ListIterator; import 
java.util.Map; import java.util.function.Predicate; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; -import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSemanticTextFieldChunks; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSemanticTextFieldChunk; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class SemanticTextFieldTests extends AbstractXContentTestCase { private static final String NAME = "field"; + private final boolean useLegacyFormat; + + public SemanticTextFieldTests(boolean useLegacyFormat) { + this.useLegacyFormat = useLegacyFormat; + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return List.of(new Object[] { true }, new Object[] { false }); + } + @Override protected Predicate getRandomFieldsExcludeFilter() { return n -> n.endsWith(CHUNKED_EMBEDDINGS_FIELD); @@ -46,39 +60,45 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase { - double[] expectedVector = parseDenseVector( - expectedInstance.inference().chunks().get(i).rawEmbeddings(), - modelSettings.dimensions(), - expectedInstance.contentType() - ); - double[] newVector = parseDenseVector( - newInstance.inference().chunks().get(i).rawEmbeddings(), - modelSettings.dimensions(), - newInstance.contentType() - ); - assertArrayEquals(expectedVector, newVector, 0.0000001f); + for (var entry : newInstance.inference().chunks().entrySet()) { + var expectedChunks = expectedInstance.inference().chunks().get(entry.getKey()); + assertNotNull(expectedChunks); + assertThat(entry.getValue().size(), equalTo(expectedChunks.size())); + for (int i = 0; i < entry.getValue().size(); i++) { + var actualChunk = entry.getValue().get(i); + assertThat(actualChunk.text(), equalTo(expectedChunks.get(i).text())); + assertThat(actualChunk.startOffset(), equalTo(expectedChunks.get(i).startOffset())); + assertThat(actualChunk.endOffset(), equalTo(expectedChunks.get(i).endOffset())); + switch (modelSettings.taskType()) { + case TEXT_EMBEDDING -> { + double[] expectedVector = parseDenseVector( + expectedChunks.get(i).rawEmbeddings(), + modelSettings.dimensions(), + expectedInstance.contentType() + ); + double[] newVector = parseDenseVector( + actualChunk.rawEmbeddings(), + modelSettings.dimensions(), + newInstance.contentType() + ); + assertArrayEquals(expectedVector, newVector, 0.0000001f); + } + case SPARSE_EMBEDDING -> { + List expectedTokens = parseWeightedTokens( + expectedChunks.get(i).rawEmbeddings(), + expectedInstance.contentType() + ); + List newTokens = parseWeightedTokens(actualChunk.rawEmbeddings(), newInstance.contentType()); + assertThat(newTokens, equalTo(expectedTokens)); + } + default -> throw new AssertionError("Invalid task type " + modelSettings.taskType()); } - case SPARSE_EMBEDDING -> { - List expectedTokens = parseWeightedTokens( - expectedInstance.inference().chunks().get(i).rawEmbeddings(), - expectedInstance.contentType() - ); - List newTokens = parseWeightedTokens( - newInstance.inference().chunks().get(i).rawEmbeddings(), - newInstance.contentType() - ); - assertThat(newTokens, equalTo(expectedTokens)); - } - default -> throw new AssertionError("Invalid task type " + modelSettings.taskType()); } } } @@ -87,7 +107,13 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase rawValues = randomList(1, 5, () -> randomSemanticTextInput().toString()); try { // try catch required for override - return 
@@ -87,7 +107,13 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase<SemanticTextField> { @Override protected SemanticTextField createTestInstance() { List<String> rawValues = randomList(1, 5, () -> randomSemanticTextInput().toString()); try { // try catch required for override - return randomSemanticText(NAME, TestModel.createRandomInstance(), rawValues, randomFrom(XContentType.values())); + return randomSemanticText( + useLegacyFormat, + NAME, + TestModel.createRandomInstance(), + rawValues, + randomFrom(XContentType.values()) + ); } catch (IOException e) { fail("Failed to create random SemanticTextField instance"); } @@ -96,12 +122,12 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase<SemanticTextField> { @Override protected SemanticTextField doParseInstance(XContentParser parser) throws IOException { - return SemanticTextField.parse(parser, new Tuple<>(NAME, parser.contentType())); + return SemanticTextField.parse(parser, new SemanticTextField.ParserContext(useLegacyFormat, NAME, parser.contentType())); } @Override protected boolean supportsUnknownFields() { - return true; + return false; } public void testModelSettingsValidation() { @@ -185,30 +211,60 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase<SemanticTextField> { - public static SemanticTextField randomSemanticText(String fieldName, Model model, List<String> inputs, XContentType contentType) - throws IOException { + public static SemanticTextField randomSemanticText( + boolean useLegacyFormat, + String fieldName, + Model model, + List<String> inputs, + XContentType contentType + ) throws IOException { ChunkedInference results = switch (model.getTaskType()) { case TEXT_EMBEDDING -> randomChunkedInferenceEmbeddingFloat(model, inputs); case SPARSE_EMBEDDING -> randomChunkedInferenceEmbeddingSparse(inputs); default -> throw new AssertionError("invalid task type: " + model.getTaskType().name()); }; - return semanticTextFieldFromChunkedInferenceResults(fieldName, model, inputs, results, contentType); + return semanticTextFieldFromChunkedInferenceResults(useLegacyFormat, fieldName, model, inputs, results, contentType); } public static SemanticTextField semanticTextFieldFromChunkedInferenceResults( + boolean useLegacyFormat, String fieldName, Model model, List<String> inputs, ChunkedInference results, XContentType contentType ) throws IOException { + + // In this test framework, we don't perform "real" chunking; each input generates one chunk. Thus, we can assume there is a + // one-to-one relationship between inputs and chunks. Iterate over the inputs and chunks to match each input with its + // corresponding chunk. + final List<SemanticTextField.Chunk> chunks = new ArrayList<>(inputs.size()); + int offsetAdjustment = 0; + Iterator<String> inputsIt = inputs.iterator(); + Iterator<ChunkedInference.Chunk> chunkIt = results.chunksAsMatchedTextAndByteReference(contentType.xContent()); + while (inputsIt.hasNext() && chunkIt.hasNext()) { + String input = inputsIt.next(); + var chunk = chunkIt.next(); + chunks.add(toSemanticTextFieldChunk(input, offsetAdjustment, chunk, useLegacyFormat)); + + // When using the inference metadata fields format, all the input values are concatenated so that the + // chunk text offsets are expressed in the context of a single string. Calculate the offset adjustment + // to apply to account for this. + offsetAdjustment = input.length() + 1; // Add one for separator char length + } + + if (inputsIt.hasNext() || chunkIt.hasNext()) { + throw new IllegalArgumentException("Input list size and chunk count do not match"); + } + return new SemanticTextField( + useLegacyFormat, fieldName, - inputs, + useLegacyFormat ? inputs : null, new SemanticTextField.InferenceResult( model.getInferenceEntityId(), new SemanticTextField.ModelSettings(model), - toSemanticTextFieldChunks(List.of(results), contentType) + Map.of(fieldName, chunks) ), contentType ); }
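As a concrete walk-through of the offset bookkeeping above (a sketch with two hypothetical inputs, not part of the test):

    // With the inference metadata fields format, inputs behave as if concatenated with a
    // single separator char: "a test" + " " + "more text".
    // Iteration 1: offsetAdjustment == 0, so the chunk for "a test" spans [0, 6).
    // Iteration 2: offsetAdjustment == "a test".length() + 1 == 7, so the chunk for
    // "more text" is shifted to [7, 16). In the legacy format no offsets are written
    // to the chunk, so the adjustment has no effect there.
    int adjustmentForSecondInput = "a test".length() + 1; // == 7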
@@ -232,37 +288,53 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase<SemanticTextField> { - public static ChunkedInference toChunkedResult(SemanticTextField field) throws IOException { + public static ChunkedInference toChunkedResult( + boolean useLegacyFormat, + Map<String, List<String>> matchedTextMap, + SemanticTextField field + ) { switch (field.inference().modelSettings().taskType()) { case SPARSE_EMBEDDING -> { List<ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk> chunks = new ArrayList<>(); - for (var chunk : field.inference().chunks()) { - var tokens = parseWeightedTokens(chunk.rawEmbeddings(), field.contentType()); - chunks.add( - new ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk( - tokens, - chunk.text(), - new ChunkedInference.TextOffset(0, chunk.text().length()) - ) - ); + for (var entry : field.inference().chunks().entrySet()) { + String entryField = entry.getKey(); + List<SemanticTextField.Chunk> entryChunks = entry.getValue(); + List<String> entryFieldMatchedText = validateAndGetMatchedTextForField(matchedTextMap, entryField, entryChunks.size()); + + ListIterator<String> matchedTextIt = entryFieldMatchedText.listIterator(); + for (var chunk : entryChunks) { + String matchedText = matchedTextIt.next(); + ChunkedInference.TextOffset offset = createOffset(useLegacyFormat, chunk, matchedText); + var tokens = parseWeightedTokens(chunk.rawEmbeddings(), field.contentType()); + chunks.add(new ChunkedInferenceEmbeddingSparse.SparseEmbeddingChunk(tokens, matchedText, offset)); + } } return new ChunkedInferenceEmbeddingSparse(chunks); } case TEXT_EMBEDDING -> { List<ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk> chunks = new ArrayList<>(); - for (var chunk : field.inference().chunks()) { - double[] values = parseDenseVector( - chunk.rawEmbeddings(), - field.inference().modelSettings().dimensions(), - field.contentType() - ); - chunks.add( - new ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk( - FloatConversionUtils.floatArrayOf(values), - chunk.text(), - new ChunkedInference.TextOffset(0, chunk.text().length()) - ) - ); + for (var entry : field.inference().chunks().entrySet()) { + String entryField = entry.getKey(); + List<SemanticTextField.Chunk> entryChunks = entry.getValue(); + List<String> entryFieldMatchedText = validateAndGetMatchedTextForField(matchedTextMap, entryField, entryChunks.size()); + + ListIterator<String> matchedTextIt = entryFieldMatchedText.listIterator(); + for (var chunk : entryChunks) { + String matchedText = matchedTextIt.next(); + ChunkedInference.TextOffset offset = createOffset(useLegacyFormat, chunk, matchedText); + double[] values = parseDenseVector( + chunk.rawEmbeddings(), + field.inference().modelSettings().dimensions(), + field.contentType() + ); + chunks.add( + new ChunkedInferenceEmbeddingFloat.FloatEmbeddingChunk( + FloatConversionUtils.floatArrayOf(values), + matchedText, + offset + ) + ); + } } return new ChunkedInferenceEmbeddingFloat(chunks); } @@ -270,6 +342,38 @@ public class SemanticTextFieldTests extends AbstractXContentTestCase<SemanticTextField> { + private static List<String> validateAndGetMatchedTextForField( + Map<String, List<String>> matchedTextMap, + String fieldName, + int chunkCount + ) { + List<String> fieldMatchedText = matchedTextMap.get(fieldName); + if (fieldMatchedText == null) { + throw new IllegalStateException("No matched text list exists for field [" + fieldName + "]"); + } else if (fieldMatchedText.size() != chunkCount) { + throw new IllegalStateException("Matched text list size does not equal chunk count for field [" + fieldName + "]"); + } + + return fieldMatchedText; + } + + /** + * Create a {@link ChunkedInference.TextOffset} instance with valid offset values. 
When using the legacy semantic text format, the + * offset values are not written to {@link SemanticTextField.Chunk}, so we cannot read them from there. Instead, use the knowledge that + * the matched text corresponds to one complete input value (i.e. one input value -> one chunk) to calculate the offset values. + * + * @param useLegacyFormat Whether the old format should be used + * @param chunk The chunk to get/calculate offset values for + * @param matchedText The matched text to calculate offset values for + * @return A {@link ChunkedInference.TextOffset} instance with valid offset values + */ + private static ChunkedInference.TextOffset createOffset(boolean useLegacyFormat, SemanticTextField.Chunk chunk, String matchedText) { + final int startOffset = useLegacyFormat ? 0 : chunk.startOffset(); + final int endOffset = useLegacyFormat ? matchedText.length() : chunk.endOffset(); + + return new ChunkedInference.TextOffset(startOffset, endOffset); + } + private static double[] parseDenseVector(BytesReference value, int numDims, XContentType contentType) { try (XContentParser parser = XContentHelper.createParserNotCompressed(XContentParserConfiguration.EMPTY, value, contentType)) { parser.nextToken(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextUtilsTests.java new file mode 100644 index 000000000000..e334335d6c78 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextUtilsTests.java @@ -0,0 +1,351 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class SemanticTextUtilsTests extends ESTestCase { + public void testInsertValueMapTraversal() throws IOException { + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("test", "value").endObject(); + + Map<String, Object> map = toSourceMap(Strings.toString(builder)); + SemanticTextUtils.insertValue("test", map, "value2"); + assertThat(getMapValue(map, "test"), equalTo("value2")); + SemanticTextUtils.insertValue("something.else", map, "something_else_value"); + assertThat(getMapValue(map, "something\\.else"), equalTo("something_else_value")); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.startObject("path1").startObject("path2").field("test", "value").endObject().endObject(); + builder.endObject(); + + Map<String, Object> map = toSourceMap(Strings.toString(builder)); + SemanticTextUtils.insertValue("path1.path2.test", map, "value2"); + assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); + SemanticTextUtils.insertValue("path1.path2.test_me", map, "test_me_value"); + assertThat(getMapValue(map, "path1.path2.test_me"), equalTo("test_me_value")); + SemanticTextUtils.insertValue("path1.non_path2.test", map, "test_value"); + assertThat(getMapValue(map, "path1.non_path2\\.test"), equalTo("test_value")); + + SemanticTextUtils.insertValue("path1.path2", map, Map.of("path3", "bar")); + assertThat(getMapValue(map, "path1.path2"), equalTo(Map.of("path3", "bar"))); + + SemanticTextUtils.insertValue("path1", map, "baz"); + assertThat(getMapValue(map, "path1"), equalTo("baz")); + + SemanticTextUtils.insertValue("path3.path4", map, Map.of("test", "foo")); + assertThat(getMapValue(map, "path3\\.path4"), equalTo(Map.of("test", "foo"))); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.startObject("path1").array("test", "value1", "value2").endObject(); + builder.endObject(); + Map<String, Object> map = toSourceMap(Strings.toString(builder)); + + SemanticTextUtils.insertValue("path1.test", map, List.of("value3", "value4", "value5")); + assertThat(getMapValue(map, "path1.test"), equalTo(List.of("value3", "value4", "value5"))); + + SemanticTextUtils.insertValue("path2.test", map, List.of("value6", "value7", "value8")); + assertThat(getMapValue(map, "path2\\.test"), equalTo(List.of("value6", "value7", "value8"))); + } + }
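The dotted-path semantics these assertions pin down can be summarized in a short sketch (hypothetical maps; same insertValue contract as in the test above):

    // insertValue first tries to traverse existing nested objects; only when no nested
    // structure matches the remaining path does it store a flattened key with literal
    // dots, which getMapValue then addresses with escaped dots (e.g. "path2\\.test").
    Map<String, Object> doc = new HashMap<>();
    doc.put("path1", new HashMap<>(Map.of("test", "value")));
    SemanticTextUtils.insertValue("path1.test", doc, "value2"); // updates the nested key
    SemanticTextUtils.insertValue("path2.test", doc, "other"); // no "path2" object: stored under the literal key "path2.test"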
SemanticTextUtils.insertValue("path1.path2.test", map, "value2"); + assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); + SemanticTextUtils.insertValue("path1.path2.test2", map, "value3"); + assertThat(getMapValue(map, "path1.path2.test2"), equalTo("value3")); + assertThat(getMapValue(map, "path1.path2"), equalTo(List.of(Map.of("test", "value2", "test2", "value3")))); + + SemanticTextUtils.insertValue("path3.path4.test", map, "value4"); + assertThat(getMapValue(map, "path3.path4.test"), equalTo("value4")); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1"); + { + builder.startArray("path2"); + builder.startArray(); + builder.startObject().field("test", "value1").endObject(); + builder.endArray(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextUtils.insertValue("path1.path2.test", map, "value2"); + assertThat(getMapValue(map, "path1.path2.test"), equalTo("value2")); + SemanticTextUtils.insertValue("path1.path2.test2", map, "value3"); + assertThat(getMapValue(map, "path1.path2.test2"), equalTo("value3")); + assertThat(getMapValue(map, "path1.path2"), equalTo(List.of(List.of(Map.of("test", "value2", "test2", "value3"))))); + } + } + + public void testInsertValueFieldsWithDots() throws IOException { + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("xxx.yyy", "value1").endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextUtils.insertValue("xxx.yyy", map, "value2"); + assertThat(getMapValue(map, "xxx\\.yyy"), equalTo("value2")); + + SemanticTextUtils.insertValue("xxx", map, "value3"); + assertThat(getMapValue(map, "xxx"), equalTo("value3")); + } + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1.path2"); + { + builder.startObject("path3.path4"); + builder.field("test", "value1"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + + SemanticTextUtils.insertValue("path1.path2.path3.path4.test", map, "value2"); + assertThat(getMapValue(map, "path1\\.path2.path3\\.path4.test"), equalTo("value2")); + + SemanticTextUtils.insertValue("path1.path2.path3.path4.test2", map, "value3"); + assertThat(getMapValue(map, "path1\\.path2.path3\\.path4.test2"), equalTo("value3")); + assertThat(getMapValue(map, "path1\\.path2.path3\\.path4"), equalTo(Map.of("test", "value2", "test2", "value3"))); + } + } + + public void testInsertValueAmbiguousPath() throws IOException { + // Mixed dotted object notation + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1.path2"); + { + builder.startObject("path3"); + builder.field("test1", "value1"); + builder.endObject(); + } + builder.endObject(); + } + { + builder.startObject("path1"); + { + builder.startObject("path2.path3"); + builder.field("test2", "value2"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextUtils.insertValue("path1.path2.path3.test1", map, "value3") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test1] could be inserted in 2 distinct 
ways, it is ambiguous which one to use") + ); + + ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextUtils.insertValue("path1.path2.path3.test3", map, "value4") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test3] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + assertThat(map, equalTo(originalMap)); + } + + // traversal through lists + { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1.path2"); + { + builder.startArray("path3"); + builder.startObject().field("test1", "value1").endObject(); + builder.endArray(); + } + builder.endObject(); + } + { + builder.startObject("path1"); + { + builder.startArray("path2.path3"); + builder.startObject().field("test2", "value2").endObject(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextUtils.insertValue("path1.path2.path3.test1", map, "value3") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test1] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextUtils.insertValue("path1.path2.path3.test3", map, "value4") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.path3.test3] could be inserted in 2 distinct ways, it is ambiguous which one to use") + ); + + assertThat(map, equalTo(originalMap)); + } + } + + public void testInsertValueCannotTraversePath() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + { + builder.startObject("path1"); + { + builder.startArray("path2"); + builder.startArray(); + builder.startObject().field("test", "value1").endObject(); + builder.endArray(); + builder.endArray(); + } + builder.endObject(); + } + builder.endObject(); + Map map = toSourceMap(Strings.toString(builder)); + final Map originalMap = Collections.unmodifiableMap(toSourceMap(Strings.toString(builder))); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> SemanticTextUtils.insertValue("path1.path2.test.test2", map, "value2") + ); + assertThat( + ex.getMessage(), + equalTo("Path [path1.path2.test] has value [value1] of type [String], which cannot be traversed into further") + ); + + assertThat(map, equalTo(originalMap)); + } + + private Map toSourceMap(String source) throws IOException { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + return parser.map(); + } + } + + private static Object getMapValue(Map map, String key) { + // Split the path on unescaped "." chars and then unescape the escaped "." chars + final String[] pathElements = Arrays.stream(key.split("(? 
k.replace("\\.", ".")).toArray(String[]::new); + + Object value = null; + Object nextLayer = map; + for (int i = 0; i < pathElements.length; i++) { + if (nextLayer instanceof Map nextMap) { + value = nextMap.get(pathElements[i]); + } else if (nextLayer instanceof List nextList) { + final String pathElement = pathElements[i]; + List values = nextList.stream().flatMap(v -> { + Stream.Builder streamBuilder = Stream.builder(); + if (v instanceof List innerList) { + traverseList(innerList, streamBuilder); + } else { + streamBuilder.add(v); + } + return streamBuilder.build(); + }).filter(v -> v instanceof Map).map(v -> ((Map) v).get(pathElement)).filter(Objects::nonNull).toList(); + + if (values.isEmpty()) { + return null; + } else if (values.size() > 1) { + throw new AssertionError("List " + nextList + " contains multiple values for [" + pathElement + "]"); + } else { + value = values.getFirst(); + } + } else if (nextLayer == null) { + break; + } else { + throw new AssertionError( + "Path [" + + String.join(".", Arrays.copyOfRange(pathElements, 0, i)) + + "] has value [" + + value + + "] of type [" + + value.getClass().getSimpleName() + + "], which cannot be traversed into further" + ); + } + + nextLayer = value; + } + + return value; + } + + private static void traverseList(List list, Stream.Builder streamBuilder) { + for (Object value : list) { + if (value instanceof List innerList) { + traverseList(innerList, streamBuilder); + } else { + streamBuilder.add(value); + } + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index 36aa2200ecea..d5042643013e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.queries; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -27,6 +29,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -41,7 +44,6 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -65,11 +67,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Map; import static org.apache.lucene.search.BooleanClause.Occur.FILTER; import static org.apache.lucene.search.BooleanClause.Occur.MUST; import static org.apache.lucene.search.BooleanClause.Occur.SHOULD; -import static 
 diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index 36aa2200ecea..d5042643013e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.queries; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -27,6 +29,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -41,7 +44,6 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -65,11 +67,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Map; import static org.apache.lucene.search.BooleanClause.Occur.FILTER; import static org.apache.lucene.search.BooleanClause.Occur.MUST; import static org.apache.lucene.search.BooleanClause.Occur.SHOULD; -import static org.elasticsearch.index.IndexVersions.NEW_SPARSE_VECTOR; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -86,6 +88,7 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase<SemanticQueryBuilder> { + private final boolean useLegacyFormat; + + public SemanticQueryBuilderTests(boolean useLegacyFormat) { + this.useLegacyFormat = useLegacyFormat; + } + + @ParametersFactory + public static Iterable<Object[]> parameters() throws Exception { + return List.of(new Object[] { true }, new Object[] { false }); + } + @BeforeClass public static void setInferenceResultType() { // These are class variables because they are used when initializing additional mappings, which happens once per test suite run in @@ -121,12 +133,10 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase<SemanticQueryBuilder> { null; @@ -350,15 +365,21 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase<SemanticQueryBuilder> { setup: + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "similarity": "cosine", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + +--- +"Calculates sparse embedding and text embedding results for new documents": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Calculates sparse embedding and text embedding results for new documents with integer value": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: 75 + dense_field: 100 + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "75" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "75" } + - match: { _source.dense_field.text: "100" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "100" } + +--- +"Calculates sparse embedding and text embedding results for new documents with boolean value": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: true + dense_field: false + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "true" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "true" } + - match: { _source.dense_field.text: "false" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { 
_source.dense_field.inference.chunks.0.text: "false" } + +--- +"Calculates sparse embedding and text embedding results for new documents with a collection of mixed data types": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: [false, 75, "inference test", 13.49] + dense_field: [true, 49.99, "another inference test", 5654] + + - do: + get: + index: test-index + id: doc_1 + + - length: { _source.sparse_field.text: 4 } + - match: { _source.sparse_field.text.0: "false" } + - match: { _source.sparse_field.text.1: "75" } + - match: { _source.sparse_field.text.2: "inference test" } + - match: { _source.sparse_field.text.3: "13.49" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - exists: _source.sparse_field.inference.chunks.1.embeddings + - exists: _source.sparse_field.inference.chunks.2.embeddings + - exists: _source.sparse_field.inference.chunks.3.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "false" } + - match: { _source.sparse_field.inference.chunks.1.text: "75" } + - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.3.text: "13.49" } + + - length: { _source.dense_field.text: 4 } + - match: { _source.dense_field.text.0: "true" } + - match: { _source.dense_field.text.1: "49.99" } + - match: { _source.dense_field.text.2: "another inference test" } + - match: { _source.dense_field.text.3: "5654" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - exists: _source.dense_field.inference.chunks.1.embeddings + - exists: _source.dense_field.inference.chunks.2.embeddings + - exists: _source.dense_field.inference.chunks.3.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "true" } + - match: { _source.dense_field.inference.chunks.1.text: "49.99" } + - match: { _source.dense_field.inference.chunks.2.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.3.text: "5654" } + +--- +"Sparse vector results are indexed as nested chunks and searchable": + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{}} + {"sparse_field": ["you know, for testing", "now with chunks"]} + {"index":{}} + {"sparse_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + nested: + path: sparse_field.inference.chunks + query: + sparse_vector: + field: sparse_field.inference.chunks.embeddings + inference_id: sparse-inference-id + query: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source.sparse_field.inference.chunks: 2 } + - length: { hits.hits.1._source.sparse_field.inference.chunks: 2 } + + # Search with inner hits + - do: + search: + _source: false + index: test-index + body: + query: + nested: + path: sparse_field.inference.chunks + inner_hits: + _source: false + fields: [ sparse_field.inference.chunks.text ] + query: + sparse_vector: + field: sparse_field.inference.chunks.embeddings + inference_id: sparse-inference-id + query: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.relation: eq } + + - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { 
hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } + +--- +"Dense vector results are indexed as nested chunks and searchable": + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + nested: + path: dense_field.inference.chunks + query: + knn: + field: dense_field.inference.chunks.embeddings + query_vector_builder: + text_embedding: + model_id: dense-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source.dense_field.inference.chunks: 2 } + - length: { hits.hits.1._source.dense_field.inference.chunks: 2 } + + # Search with inner hits + - do: + search: + _source: false + index: test-index + body: + query: + nested: + path: dense_field.inference.chunks + inner_hits: + _source: false + fields: [ dense_field.inference.chunks.text ] + query: + knn: + field: dense_field.inference.chunks.embeddings + query_vector_builder: + text_embedding: + model_id: dense-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.relation: eq } + + - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } + +--- +"Reindex works for semantic_text fields": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } + - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + + - do: + indices.refresh: { } + + - do: + indices.create: + index: destination-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + reindex: + wait_for_completion: true + body: + source: + index: test-index + dest: + index: destination-index + - do: + get: + index: destination-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { 
_source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } + - match: { _source.non_inference_field: "non inference test" } + +--- +"semantic_text copy_to calculates embeddings for source fields": + - do: + indices.create: + index: test-copy-to-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + source_field: + type: text + copy_to: sparse_field + another_source_field: + type: text + copy_to: sparse_field + + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + source_field: "copy_to inference test" + sparse_field: "inference test" + another_source_field: "another copy_to inference test" + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - length: { _source.sparse_field.inference.chunks: 3 } + - match: { _source.sparse_field.inference.chunks.0.text: "another copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.1.text: "copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.1.embeddings + - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.2.embeddings + +--- +"Calculates embeddings for bulk operations - index": + - do: + bulk: + body: + - '{"index": {"_index": "test-index", "_id": "doc_1"}}' + - '{"sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test"}' + + - match: { errors: false } + - match: { items.0.index.result: "created" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Update by query picks up new semantic_text fields": + - do: + indices.create: + index: mapping-update-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + dynamic: false + properties: + non_inference_field: + type: text + + - do: + index: + index: mapping-update-index + id: doc_1 + refresh: true + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + indices.put_mapping: + index: mapping-update-index + body: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + update_by_query: + wait_for_completion: true + index: mapping-update-index + + - match: { updated: 1 } + + - do: + get: + index: mapping-update-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - 
match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Update by query works for scripts": + - do: + index: + index: test-index + id: doc_1 + refresh: true + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + update_by_query: + wait_for_completion: true + index: test-index + body: { "script": "ctx._source.sparse_field = 'updated inference test'; ctx._source.dense_field = 'another updated inference test'" } + + - match: { updated: 1 } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + +--- +"Calculates embeddings using the default ELSER 2 endpoint": + - requires: + reason: "default ELSER 2 inference ID is enabled via a capability" + test_runner_features: [capabilities] + capabilities: + - method: GET + path: /_inference + capabilities: [default_elser_2] + + - do: + indices.create: + index: test-elser-2-default-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + + - do: + index: + index: test-elser-2-default-index + id: doc_1 + body: + sparse_field: "inference test" + + - do: + get: + index: test-elser-2-default-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + +--- +"Can be used inside an object field": + - requires: + cluster_features: "semantic_text.in_object_field_fix" + reason: object field fix added in 8.16.0 & 8.15.4 + + - do: + indices.create: + index: test-in-object-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + level_1: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + + - do: + index: + index: test-in-object-index + id: doc_1 + body: + level_1: + sparse_field: "inference test" + dense_field: "another inference test" + + - do: + get: + index: test-in-object-index + id: doc_1 + + - match: { _source.level_1.sparse_field.text: "inference test" } + - exists: _source.level_1.sparse_field.inference.chunks.0.embeddings + - match: { _source.level_1.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.level_1.dense_field.text: "another inference test" } + - exists: _source.level_1.dense_field.inference.chunks.0.embeddings + - match: { _source.level_1.dense_field.inference.chunks.0.text: "another inference test" } + +--- +"Deletes on bulk operation": + - requires: + cluster_features: semantic_text.delete_fix + reason: Delete operations are properly applied when subsequent operations include a 
semantic text field. + + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{"_id": "1"}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{"_id": "2"}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: ["you know, for testing", "now with chunks"] } + - match: { hits.hits.1._source.dense_field.text: ["some more tests", "that include chunks"] } + + - do: + bulk: + index: test-index + refresh: true + body: | + {"delete":{ "_id": "2"}} + {"update":{"_id": "1"}} + {"doc":{"dense_field": "updated text"}} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: "updated text" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 3d3790d879ef..2f3bcfae600e 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -120,7 +120,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Numeric query using a sparse embedding model": @@ -146,7 +145,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Boolean query using a sparse embedding model": @@ -172,7 +170,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - - length: { hits.hits.0._source.inference_field.inference.chunks: 1 } --- "Query using a sparse embedding model via a search inference ID": @@ -217,7 +214,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Query using a dense embedding model": @@ -248,7 +244,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Numeric query using a dense embedding model": @@ -274,7 +269,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Boolean query using a dense embedding model": @@ -300,7 +294,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - - length: { hits.hits.0._source.inference_field.inference.chunks: 1 } --- "Query using a dense embedding model that uses byte embeddings": @@ -361,7 +354,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - - length: { 
hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Query using a dense embedding model via a search inference ID": @@ -406,7 +398,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Apply boost and query name": @@ -439,7 +430,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 3.783733e19, error: 1e13 } } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } --- @@ -476,7 +466,6 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } - - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } --- "Query the wrong field type": diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update.yml index 294761608ee8..660d3e37f424 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update.yml @@ -39,6 +39,11 @@ setup: indices.create: index: test-index body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: false mappings: properties: sparse_field: @@ -52,6 +57,9 @@ setup: --- "Updating non semantic_text fields does not recalculate embeddings": + - skip: + features: [ "headers" ] + - do: index: index: test-index @@ -60,14 +68,23 @@ setup: sparse_field: "inference test" dense_field: "another inference test" non_inference_field: "non inference test" + refresh: true - do: - get: + headers: + # Force JSON content type so that we use a parser that interprets the embeddings as doubles + Content-Type: application/json + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } - - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - set: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings: sparse_field_embedding } + - set: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings: dense_field_embedding } - do: update: @@ -76,19 +93,23 @@ setup: body: doc: non_inference_field: "another non inference test" + refresh: true - do: - get: + headers: + # Force JSON content type so that we use a parser that interprets the embeddings as doubles + Content-Type: application/json + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } - - match: { _source.dense_field.text: "another inference test" } - - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - - 
match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } - - match: { _source.non_inference_field: "another non inference test" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings: $sparse_field_embedding } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings: $dense_field_embedding } --- "Updating semantic_text fields recalculates embeddings": @@ -100,17 +121,22 @@ setup: sparse_field: "inference test" dense_field: "another inference test" non_inference_field: "non inference test" + refresh: true - do: - get: + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.dense_field.text: "another inference test" } - - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - - match: { _source.non_inference_field: "non inference test" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 14 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 22 } - do: bulk: @@ -118,17 +144,24 @@ setup: body: - '{"update": {"_id": "doc_1"}}' - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + refresh: true - do: - get: + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "I am a test" } - - match: { _source.sparse_field.inference.chunks.0.text: "I am a test" } - - match: { _source.dense_field.text: "I am a teapot" } - - match: { _source.dense_field.inference.chunks.0.text: "I am a teapot" } - - match: { _source.non_inference_field: "non inference test" } + # We can't directly check that the embeddings are different since there isn't a "does not match" assertion in the + # YAML test framework. Check that the start and end offsets change as expected as a proxy. 
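 + # Each field value here yields exactly one chunk, so the expected end_offset is just + # the input length: "I am a test".length() == 11 and "I am a teapot".length() == 13.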
+ - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 11 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 13 } - do: update: @@ -138,17 +171,22 @@ setup: doc: sparse_field: "updated inference test" dense_field: "another updated inference test" + refresh: true - do: - get: + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "updated inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } - - match: { _source.dense_field.text: "another updated inference test" } - - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } - - match: { _source.non_inference_field: "non inference test" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 22 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 30 } - do: bulk: @@ -156,17 +194,22 @@ setup: body: - '{"update": {"_id": "doc_1"}}' - '{"doc":{"sparse_field": "bulk inference test", "dense_field": "bulk updated inference test"}}' + refresh: true - do: - get: + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "bulk inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "bulk inference test" } - - match: { _source.dense_field.text: "bulk updated inference test" } - - match: { _source.dense_field.inference.chunks.0.text: "bulk updated inference test" } - - match: { _source.non_inference_field: "non inference test" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 19 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 27 } --- "Update logic handles source fields in object fields": @@ -174,6 +217,11 @@ setup: indices.create: index: test-copy-to-index body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: false mappings: properties: sparse_field: @@ -198,18 +246,22 @@ setup: body: sparse_field: "sparse data 1" dense_field: "dense data 1" + refresh: true - do: - get: + search: index: test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "sparse data 1" } - - length: { _source.sparse_field.inference.chunks: 1 } - - match: { 
_source.sparse_field.inference.chunks.0.text: "sparse data 1" } - - match: { _source.dense_field.text: "dense data 1" } - - length: { _source.dense_field.inference.chunks: 1 } - - match: { _source.dense_field.inference.chunks.0.text: "dense data 1" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field - do: bulk: @@ -217,28 +269,38 @@ setup: body: - '{"update": {"_id": "doc_1"}}' - > - { - "doc": { - "sparse_field": "sparse data 1", - "object_source.sparse_field": "sparse data 2", - "dense_field": "dense data 1", - "object_source.dense_field": "dense data 2" - } - } + { + "doc": { + "object_source.sparse_field": "sparse data two", + "object_source.dense_field": "dense data two" + } + } + refresh: true - do: - get: + search: index: test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "sparse data 1" } - - length: { _source.sparse_field.inference.chunks: 2 } - - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 2" } - - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 1" } - - match: { _source.dense_field.text: "dense data 1" } - - length: { _source.dense_field.inference.chunks: 2 } - - match: { _source.dense_field.inference.chunks.0.text: "dense data 1" } - - match: { _source.dense_field.inference.chunks.1.text: "dense data 2" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field.0.end_offset: 15 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field.0.end_offset: 14 } - do: update: @@ -247,25 +309,35 @@ setup: body: doc: { - "sparse_field": "sparse data 1", - "object_source.sparse_field": "sparse data 3", - "dense_field": "dense data 1", - "object_source.dense_field": "dense data 3" + "object_source.sparse_field": "sparse data three", + "object_source.dense_field": "dense data three" } + refresh: true - do: - get: + search: index: 
test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "sparse data 1" } - - length: { _source.sparse_field.inference.chunks: 2 } - - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 3" } - - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 1" } - - match: { _source.dense_field.text: "dense data 1" } - - length: { _source.dense_field.inference.chunks: 2 } - - match: { _source.dense_field.inference.chunks.0.text: "dense data 1" } - - match: { _source.dense_field.inference.chunks.1.text: "dense data 3" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.object_source\\.sparse_field.0.end_offset: 17 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.object_source\\.dense_field.0.end_offset: 16 } --- "Updates fail when the updated value is invalid": @@ -278,17 +350,6 @@ setup: dense_field: "another inference test" non_inference_field: "non inference test" - - do: - get: - index: test-index - id: doc_1 - - - match: { _source.sparse_field.text: "inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.dense_field.text: "another inference test" } - - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - - match: { _source.non_inference_field: "non inference test" } - - do: bulk: index: test-index @@ -317,6 +378,11 @@ setup: indices.create: index: test-copy-to-index body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: false mappings: properties: sparse_field: @@ -338,42 +404,68 @@ setup: id: doc_1 body: sparse_field: "sparse data 1" - sparse_source_field: "sparse data 2" + sparse_source_field: "sparse data two" dense_field: "dense data 1" - dense_source_field: "dense data 2" + dense_source_field: "dense data two" + refresh: true - do: - get: + search: index: test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - length: { _source.sparse_field.inference.chunks: 2 } - - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 2" } - - exists: _source.sparse_field.inference.chunks.1.embeddings - - length: { _source.dense_field.inference.chunks: 2 } - - match: { _source.dense_field.inference.chunks.1.text: "dense 
data 2" } - - exists: _source.dense_field.inference.chunks.1.embeddings + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.end_offset: 15 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.end_offset: 14 } - do: update: index: test-copy-to-index id: doc_1 body: - doc: { "sparse_source_field": "sparse data 3", "dense_source_field": "dense data 3" } + doc: { "sparse_source_field": "sparse data three", "dense_source_field": "dense data three" } + refresh: true - do: - get: + search: index: test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: "sparse data 1" } - - length: { _source.sparse_field.inference.chunks: 2 } - - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 3" } - - exists: _source.sparse_field.inference.chunks.1.embeddings - - match: { _source.dense_field.text: "dense data 1" } - - length: { _source.dense_field.inference.chunks: 2 } - - match: { _source.dense_field.inference.chunks.1.text: "dense data 3" } - - exists: _source.dense_field.inference.chunks.1.embeddings + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.end_offset: 17 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 2 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.start_offset: 0 } + - match: { 
hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.end_offset: 16 } --- "Partial updates work when using the update API and the semantic_text field's original value is null": @@ -381,6 +473,11 @@ setup: indices.create: index: test-copy-to-index body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: false mappings: properties: sparse_field: @@ -402,83 +499,142 @@ setup: index: test-copy-to-index id: doc_1 body: - sparse_source_field: "sparse data 2" - dense_source_field: "dense data 2" + sparse_source_field: "sparse data two" + dense_source_field: "dense data two" + refresh: true - do: - get: + search: index: test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: null } - - length: { _source.sparse_field.inference.chunks: 1 } - - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 2" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.dense_field.text: null } - - length: { _source.dense_field.inference.chunks: 1 } - - match: { _source.dense_field.inference.chunks.0.text: "dense data 2" } - - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.end_offset: 15 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.end_offset: 14 } - do: update: index: test-copy-to-index id: doc_1 body: - doc: { "sparse_source_field": "sparse data 3", "dense_source_field": "dense data 3" } + doc: { "sparse_source_field": "sparse data three", "dense_source_field": "dense data three" } + refresh: true - do: - get: + search: index: test-copy-to-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: {} - - match: { _source.sparse_field.text: null } - - length: { _source.sparse_field.inference.chunks: 1 } - - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 3" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.dense_field.text: null } - - length: { _source.dense_field.inference.chunks: 1 } - - match: { _source.dense_field.inference.chunks.0.text: "dense data 3" } - - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field: 1 } + 
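# For orientation, a rough sketch (illustrative only, not asserted verbatim) of the
# _inference_fields metadata these assertion paths walk when use_legacy_format is
# false: chunks are keyed by the source field that produced them, and each entry
# carries character offsets into that field's text plus the embeddings, e.g.
#
#   _inference_fields:
#     sparse_field:
#       inference:
#         chunks:
#           sparse_source_field:
#             - start_offset: 0
#               end_offset: 15
#               embeddings: { ... }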
- exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_source_field.0.end_offset: 17 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_source_field.0.end_offset: 16 } --- -"Updates with script are not allowed": +"Updates with script via bulk API are not allowed": - do: bulk: index: test-index body: - '{"index": {"_id": "doc_1"}}' - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + refresh: true + + - match: { errors: false } - do: bulk: index: test-index body: - '{"update": {"_id": "doc_1"}}' - - '{"script": "ctx._source.new_field = \"hello\"", "scripted_upsert": true}' + - '{"script": "ctx._source.sparse_field = \"sparse data two\"", "scripted_upsert": true}' + refresh: true - match: { errors: true } - match: { items.0.update.status: 400 } - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } +--- +"Updates with script work when using the update API": - do: + bulk: + index: test-index + body: + - '{"index": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + refresh: true + + - match: { errors: false } + + - do: - catch: bad_request update: index: test-index id: doc_1 body: script: - source: "ctx._source.new_field = \"hello\"" + source: "ctx._source.sparse_field = \"sparse data two\"; ctx._source.dense_field = \"dense data two\"" lang: "painless" + refresh: true - - match: { error.type: "status_exception" } - - match: { error.reason: "Cannot apply update with a script on indices that contain inference field(s)" } + - do: + search: + index: test-index + body: + fields: [ _inference_fields ] + query: + match_all: { } + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 15 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 14 } --- -"semantic_text copy_to needs values for every source field for bulk updates": +"semantic_text copy_to does not need values for every source field for bulk updates": - do: indices.create: index: 
test-copy-to-index body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: false mappings: properties: sparse_field: @@ -491,7 +647,6 @@ setup: type: text copy_to: sparse_field - # Not every source field needed on creation - do: index: index: test-copy-to-index @@ -500,15 +655,35 @@ setup: source_field: "a single source field provided" sparse_field: "inference test" - # Every source field needed on bulk updates - do: bulk: body: - '{"update": {"_index": "test-copy-to-index", "_id": "doc_1"}}' - - '{"doc": {"source_field": "a single source field is kept as provided via bulk", "sparse_field": "updated inference test" }}' + - '{"doc": {"source_field": "updated source field value"}}' + refresh: true - - match: { items.0.update.status: 400 } - - match: { items.0.update.error.reason: "Field [another_source_field] must be specified on an update request to calculate inference for field [sparse_field]" } + - match: { errors: false } + + - do: + search: + index: test-copy-to-index + body: + fields: [ _inference_fields ] + query: + match_all: { } + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 2 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 14 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.source_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.source_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.source_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.source_field.0.end_offset: 26 } --- "Calculates embeddings for bulk operations - update": @@ -526,22 +701,33 @@ setup: body: - '{"update": {"_index": "test-index", "_id": "doc_1"}}' - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }}' + refresh: true - match: { errors: false } - match: { items.0.update.result: "updated" } - do: - get: + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: { } - - match: { _source.sparse_field.text: "updated inference test" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } - - match: { _source.dense_field.text: "another updated inference test" } - - exists: _source.dense_field.inference.chunks.0.embeddings - - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } - - match: { _source.non_inference_field: "updated non inference test" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings + - match: { 
hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 22 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 30 } # Script update not supported - do: @@ -572,97 +758,145 @@ setup: body: - '{"update": {"_index": "test-index", "_id": "doc_1"}}' - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' + refresh: true - match: { errors: false } - match: { items.0.update.result: "created" } - do: - get: + search: index: test-index - id: doc_1 + body: + fields: [ _inference_fields ] + query: + match_all: { } - - match: { _source.sparse_field.text: "inference test" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.dense_field.text: "another inference test" } - - exists: _source.dense_field.inference.chunks.0.embeddings - - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - - match: { _source.non_inference_field: "non inference test" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 14 } + + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 22 } - do: bulk: body: - '{"update": {"_index": "test-index", "_id": "doc_1"}}' - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }, "doc_as_upsert": true}' + refresh: true - match: { errors: false } - match: { items.0.update.result: "updated" } - do: - get: + search: index: test-index - id: doc_1 - - - match: { _source.sparse_field.text: "updated inference test" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } - - match: { _source.dense_field.text: "another updated inference test" } - - exists: 
_source.dense_field.inference.chunks.0.embeddings - - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } - - match: { _source.non_inference_field: "updated non inference test" } - ---- -"Bypass inference on bulk update operation": - - requires: - cluster_features: semantic_text.single_field_update_fix - reason: Standalone semantic text fields are now optional in a bulk update operation - - # Update as upsert - - do: - bulk: body: - - '{"update": {"_index": "test-index", "_id": "doc_1"}}' - - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' + fields: [ _inference_fields ] + query: + match_all: { } - - match: { errors: false } - - match: { items.0.update.result: "created" } + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } - - do: - bulk: - body: - - '{"update": {"_index": "test-index", "_id": "doc_1"}}' - - '{"doc": { "non_inference_field": "another value" }, "doc_as_upsert": true}' + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 1 } + - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 22 } - - match: { errors: false } - - match: { items.0.update.result: "updated" } + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } + - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field: 1 } + - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } + - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 30 } - - do: - get: - index: test-index - id: doc_1 - - - match: { _source.sparse_field.text: "inference test" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.dense_field.text: "another inference test" } - - exists: _source.dense_field.inference.chunks.0.embeddings - - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - - match: { _source.non_inference_field: "another value" } - - - do: - bulk: - body: - - '{"update": {"_index": "test-index", "_id": "doc_1"}}' - - '{"doc": { "sparse_field": null, "dense_field": null, "non_inference_field": "updated value" }, "doc_as_upsert": true}' - - - match: { errors: false } - - match: { items.0.update.result: "updated" } - - - do: - get: - index: test-index - id: doc_1 - - - match: { _source.sparse_field: null } - - match: { _source.dense_field: null } - - match: { _source.non_inference_field: "updated value" } +# TODO: Uncomment this test once we implement a fix +#--- +#"Bypass inference on bulk update operation": +# # Update as upsert +# - do: +# bulk: +# body: +# - '{"update": {"_index": "test-index", "_id": "doc_1"}}' +# - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' 
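# The commented-out upsert above seeds both semantic_text fields; the follow-up
# partial update touches only non_inference_field, so the expectation once the fix
# lands is that the existing chunks survive unchanged (the end offsets 14 and 22
# asserted below match the original "inference test" and "another inference test"
# values).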
+# +# - match: { errors: false } +# - match: { items.0.update.result: "created" } +# +# - do: +# bulk: +# body: +# - '{"update": {"_index": "test-index", "_id": "doc_1"}}' +# - '{"doc": { "non_inference_field": "another value" }, "doc_as_upsert": true}' +# refresh: true +# +# - match: { errors: false } +# - match: { items.0.update.result: "updated" } +# +# - do: +# search: +# index: test-index +# body: +# fields: [ _inference_fields ] +# query: +# match_all: { } +# +# - match: { hits.total.value: 1 } +# - match: { hits.total.relation: eq } +# +# - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } +# - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 1 } +# - exists: hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.embeddings +# - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.start_offset: 0 } +# - match: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field.0.end_offset: 14 } +# +# - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } +# - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field: 1 } +# - exists: hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.embeddings +# - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.start_offset: 0 } +# - match: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field.0.end_offset: 22 } +# +# - match: { hits.hits.0._source.sparse_field: "inference test" } +# - match: { hits.hits.0._source.dense_field: "another inference test" } +# - match: { hits.hits.0._source.non_inference_field: "another value" } +# +# - do: +# bulk: +# body: +# - '{"update": {"_index": "test-index", "_id": "doc_1"}}' +# - '{"doc": { "sparse_field": null, "dense_field": null, "non_inference_field": "updated value" }, "doc_as_upsert": true}' +# refresh: true +# +# - match: { errors: false } +# - match: { items.0.update.result: "updated" } +# +# - do: +# search: +# index: test-index +# body: +# fields: [ _inference_fields ] +# query: +# match_all: { } +# +# - match: { hits.total.value: 1 } +# - match: { hits.total.relation: eq } +# +# # TODO: BUG! 
Setting sparse_field & dense_field to null does not clear _inference_fields +# - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks: 1 } +# - length: { hits.hits.0._source._inference_fields.sparse_field.inference.chunks.sparse_field: 0 } +# +# - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks: 1 } +# - length: { hits.hits.0._source._inference_fields.dense_field.inference.chunks.dense_field: 0 } +# +# - not_exists: hits.hits.0._source.sparse_field +# - not_exists: hits.hits.0._source.dense_field +# - match: { hits.hits.0._source.non_inference_field: "updated value" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update_bwc.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update_bwc.yml new file mode 100644 index 000000000000..6b494d531b2d --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/60_semantic_text_inference_update_bwc.yml @@ -0,0 +1,651 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: semantic_text introduced in 8.15.0 + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "similarity": "cosine", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + +--- +"Updating non semantic_text fields does not recalculate embeddings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } + - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + + - do: + update: + index: test-index + id: doc_1 + body: + doc: + non_inference_field: "another non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } + - match: { _source.non_inference_field: "another non inference test" } + +--- +"Updating semantic_text fields recalculates embeddings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + 
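# Legacy-format sketch (illustrative only, embeddings elided): with
# use_legacy_format: true the inference results live inline in _source under the
# semantic_text field itself, which is the shape the assertions below navigate:
#
#   sparse_field:
#     text: "inference test"
#     inference:
#       chunks:
#         - text: "inference test"
#           embeddings: { ... }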
- match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "I am a test" } + - match: { _source.sparse_field.inference.chunks.0.text: "I am a test" } + - match: { _source.dense_field.text: "I am a teapot" } + - match: { _source.dense_field.inference.chunks.0.text: "I am a teapot" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + update: + index: test-index + id: doc_1 + body: + doc: + sparse_field: "updated inference test" + dense_field: "another updated inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "bulk inference test", "dense_field": "bulk updated inference test"}}' + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "bulk inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "bulk inference test" } + - match: { _source.dense_field.text: "bulk updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "bulk updated inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Update logic handles source fields in object fields": + - do: + indices.create: + index: test-copy-to-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + object_source: + properties: + sparse_field: + type: text + copy_to: sparse_field + dense_field: + type: text + copy_to: dense_field + + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + sparse_field: "sparse data 1" + dense_field: "dense data 1" + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "sparse data 1" } + - length: { _source.sparse_field.inference.chunks: 1 } + - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 1" } + - match: { _source.dense_field.text: "dense data 1" } + - length: { _source.dense_field.inference.chunks: 1 } + - match: { _source.dense_field.inference.chunks.0.text: "dense data 1" } + + - do: + bulk: + index: test-copy-to-index + body: + - '{"update": {"_id": "doc_1"}}' + - > + { + "doc": { + "sparse_field": "sparse data 1", + "object_source.sparse_field": "sparse data 2", + "dense_field": "dense data 1", + "object_source.dense_field": "dense data 2" + } + } + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "sparse data 
1" } + - length: { _source.sparse_field.inference.chunks: 2 } + - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 2" } + - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 1" } + - match: { _source.dense_field.text: "dense data 1" } + - length: { _source.dense_field.inference.chunks: 2 } + - match: { _source.dense_field.inference.chunks.0.text: "dense data 1" } + - match: { _source.dense_field.inference.chunks.1.text: "dense data 2" } + + - do: + update: + index: test-copy-to-index + id: doc_1 + body: + doc: + { + "sparse_field": "sparse data 1", + "object_source.sparse_field": "sparse data 3", + "dense_field": "dense data 1", + "object_source.dense_field": "dense data 3" + } + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "sparse data 1" } + - length: { _source.sparse_field.inference.chunks: 2 } + - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 3" } + - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 1" } + - match: { _source.dense_field.text: "dense data 1" } + - length: { _source.dense_field.inference.chunks: 2 } + - match: { _source.dense_field.inference.chunks.0.text: "dense data 1" } + - match: { _source.dense_field.inference.chunks.1.text: "dense data 3" } + +--- +"Partial updates work when using the update API": + - do: + indices.create: + index: test-copy-to-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + sparse_source_field: + type: text + copy_to: sparse_field + dense_field: + type: semantic_text + inference_id: dense-inference-id + dense_source_field: + type: text + copy_to: dense_field + + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + sparse_field: "sparse data 1" + sparse_source_field: "sparse data 2" + dense_field: "dense data 1" + dense_source_field: "dense data 2" + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - length: { _source.sparse_field.inference.chunks: 2 } + - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 2" } + - exists: _source.sparse_field.inference.chunks.1.embeddings + - length: { _source.dense_field.inference.chunks: 2 } + - match: { _source.dense_field.inference.chunks.1.text: "dense data 2" } + - exists: _source.dense_field.inference.chunks.1.embeddings + + - do: + update: + index: test-copy-to-index + id: doc_1 + body: + doc: { "sparse_source_field": "sparse data 3", "dense_source_field": "dense data 3" } + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "sparse data 1" } + - length: { _source.sparse_field.inference.chunks: 2 } + - match: { _source.sparse_field.inference.chunks.1.text: "sparse data 3" } + - exists: _source.sparse_field.inference.chunks.1.embeddings + - match: { _source.dense_field.text: "dense data 1" } + - length: { _source.dense_field.inference.chunks: 2 } + - match: { _source.dense_field.inference.chunks.1.text: "dense data 3" } + - exists: _source.dense_field.inference.chunks.1.embeddings + +--- +"Partial updates work when using the update API and the semantic_text field's original value is null": + - do: + indices.create: + index: test-copy-to-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + sparse_source_field: + type: 
text + copy_to: sparse_field + dense_field: + type: semantic_text + inference_id: dense-inference-id + dense_source_field: + type: text + copy_to: dense_field + + # Don't set sparse_field or dense_field so their original value is null + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + sparse_source_field: "sparse data 2" + dense_source_field: "dense data 2" + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: null } + - length: { _source.sparse_field.inference.chunks: 1 } + - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 2" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.dense_field.text: null } + - length: { _source.dense_field.inference.chunks: 1 } + - match: { _source.dense_field.inference.chunks.0.text: "dense data 2" } + - exists: _source.dense_field.inference.chunks.0.embeddings + + - do: + update: + index: test-copy-to-index + id: doc_1 + body: + doc: { "sparse_source_field": "sparse data 3", "dense_source_field": "dense data 3" } + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: null } + - length: { _source.sparse_field.inference.chunks: 1 } + - match: { _source.sparse_field.inference.chunks.0.text: "sparse data 3" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.dense_field.text: null } + - length: { _source.dense_field.inference.chunks: 1 } + - match: { _source.dense_field.inference.chunks.0.text: "dense data 3" } + - exists: _source.dense_field.inference.chunks.0.embeddings + +--- +"Updates with script are not allowed": + - do: + bulk: + index: test-index + body: + - '{"index": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + + - match: { errors: false } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"script": "ctx._source.new_field = \"hello\"", "scripted_upsert": true}' + + - match: { errors: true } + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } + + - do: + catch: bad_request + update: + index: test-index + id: doc_1 + body: + script: + source: "ctx._source.new_field = \"hello\"" + lang: "painless" + + - match: { error.type: "status_exception" } + - match: { error.reason: "Cannot apply update with a script on indices that contain inference field(s)" } + +--- +"semantic_text copy_to needs values for every source field for bulk updates": + - do: + indices.create: + index: test-copy-to-index + body: + settings: + index: + mapping: + semantic_text: + use_legacy_format: true + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + source_field: + type: text + copy_to: sparse_field + another_source_field: + type: text + copy_to: sparse_field + + # Not every source field needed on creation + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + source_field: "a single source field provided" + sparse_field: "inference test" + + # Every source field needed on bulk updates + - do: + bulk: + body: + - '{"update": {"_index": "test-copy-to-index", "_id": "doc_1"}}' + - '{"doc": {"source_field": "a single source field is kept as provided via bulk", "sparse_field": "updated inference test" }}' + + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Field [another_source_field] must be 
specified on an update request to calculate inference for field [sparse_field]" } + +--- +"Calculates embeddings for bulk operations - update": + - do: + bulk: + body: + - '{"index": {"_index": "test-index", "_id": "doc_1"}}' + - '{"sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test"}' + + - match: { errors: false } + - match: { items.0.index.result: "created" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + + # Script update not supported + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"script": {"source": {"ctx.sparse_field": "updated inference test"}}}' + + - match: { errors: true } + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } + +--- +"Calculates embeddings for bulk operations - upsert": + # Initial update fails + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }}' + + - match: { errors: true } + - match: { items.0.update.status: 404 } + + # Update as upsert + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "created" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { 
_source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + +--- +"Bypass inference on bulk update operation": + - requires: + cluster_features: semantic_text.single_field_update_fix + reason: Standalone semantic text fields are now optional in a bulk update operation + + # Update as upsert + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "created" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "non_inference_field": "another value" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "another value" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": null, "dense_field": null, "non_inference_field": "updated value" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field: null } + - match: { _source.dense_field: null } + - match: { _source.non_inference_field: "updated value" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml index 25cd1b5aec48..9e2bd8fefd15 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml @@ -2,6 +2,15 @@ setup: - requires: cluster_features: "semantic_text.highlighter" reason: a new highlighter for semantic text field + test_runner_features: [ capabilities ] + + # TODO: Remove once highlighter supports inference metadata fields + - skip: + reason: Test suite targets semantic text without inference metadata fields + capabilities: + - method: GET + path: /_inference + capabilities: [ inference_metadata_fields ] - do: inference.put: diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 76a4bd5aff77..3d27906b0bc8 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -13,9 +13,9 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.publish' esplugin { - name 'x-pack-kql' - description 'Elasticsearch Expanded Pack Plugin - KQL query' - 
classname 'org.elasticsearch.xpack.kql.KqlPlugin' + name = 'x-pack-kql' + description = 'Elasticsearch Expanded Pack Plugin - KQL query' + classname = 'org.elasticsearch.xpack.kql.KqlPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle index f66dc23ff41b..bef07258a8e3 100644 --- a/x-pack/plugin/logsdb/build.gradle +++ b/x-pack/plugin/logsdb/build.gradle @@ -13,9 +13,9 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - name 'logsdb' - description 'A plugin for logsdb related functionality' - classname 'org.elasticsearch.xpack.logsdb.LogsDBPlugin' + name = 'logsdb' + description = 'A plugin for logsdb related functionality' + classname = 'org.elasticsearch.xpack.logsdb.LogsDBPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/logstash/build.gradle b/x-pack/plugin/logstash/build.gradle index 8e47ebaebdf8..37bd972629aa 100644 --- a/x-pack/plugin/logstash/build.gradle +++ b/x-pack/plugin/logstash/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' esplugin { - name 'x-pack-logstash' - description 'Elasticsearch Expanded Pack Plugin - Logstash' - classname 'org.elasticsearch.xpack.logstash.Logstash' + name = 'x-pack-logstash' + description = 'Elasticsearch Expanded Pack Plugin - Logstash' + classname = 'org.elasticsearch.xpack.logstash.Logstash' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/mapper-aggregate-metric/build.gradle b/x-pack/plugin/mapper-aggregate-metric/build.gradle index 2a7841929b21..cb7dad88ac61 100644 --- a/x-pack/plugin/mapper-aggregate-metric/build.gradle +++ b/x-pack/plugin/mapper-aggregate-metric/build.gradle @@ -10,9 +10,9 @@ evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'x-pack-aggregate-metric' - description 'Module for the aggregate_metric_double field type, which allows pre-aggregated fields to be stored as a single field' - classname 'org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin' + name = 'x-pack-aggregate-metric' + description = 'Module for the aggregate_metric_double field type, which allows pre-aggregated fields to be stored as a single field' + classname = 'org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle index c1e0eb61b611..165001694966 100644 --- a/x-pack/plugin/mapper-constant-keyword/build.gradle +++ b/x-pack/plugin/mapper-constant-keyword/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - name 'constant-keyword' - description 'Module for the constant-keyword field type, which is a specialization of keyword for the case when all documents have the same value.' - classname 'org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin' + name = 'constant-keyword' + description = 'Module for the constant-keyword field type, which is a specialization of keyword for the case when all documents have the same value.' 
+ classname = 'org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin' extendedPlugins = ['x-pack-core', 'lang-painless'] } base { diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index fa5d9428bb0c..fbc89fd563cc 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.constantkeyword.mapper; +import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchAllDocsQuery; @@ -41,6 +42,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.SortedNumericDocValuesSyntheticFieldLoader; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.query.QueryRewriteContext; @@ -339,6 +341,12 @@ public class ConstantKeywordFieldMapper extends FieldMapper { + "]" ); } + + if (context.mappingLookup().isSourceSynthetic()) { + // Remember which documents had a value in source so that it can be correctly + // reconstructed in synthetic source + context.doc().add(new SortedNumericDocValuesField(fieldType().name(), 1)); + } } @Override @@ -348,20 +356,19 @@ public class ConstantKeywordFieldMapper extends FieldMapper { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - String value = fieldType().value(); + String constValue = fieldType().value(); - if (value == null) { + if (constValue == null) { return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); } - /* - If there was no value in the document, synthetic source should not have the value too. - This is consistent with stored source behavior and is important for scenarios - like reindexing into an index that has a different value of this value in the mapping. + var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), false) { + @Override + protected void writeValue(XContentBuilder b, long ignored) throws IOException { + b.value(constValue); + } + }; - In order to do that we use fallback logic which implements exactly such logic (_source only contains value - if it was in the original document). 
- */ - return new SyntheticSourceSupport.Fallback(); + return new SyntheticSourceSupport.Native(loader); } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml index 012b1006b8d2..7ea1937aa39e 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml @@ -59,3 +59,103 @@ constant_keyword: hits.hits.0._source: kwd: foo const_kwd: bar + +--- +constant_keyword update value: + - requires: + cluster_features: [ "mapper.constant_keyword.synthetic_source_write_fix" ] + reason: "Behavior fix" + + - do: + indices.create: + index: test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + const_kwd: + type: constant_keyword + kwd: + type: keyword + + - do: + index: + index: test + id: 1 + refresh: true + body: + kwd: foo + + - do: + index: + index: test + id: 2 + refresh: true + body: + const_kwd: bar + + - do: + index: + index: test + id: 3 + refresh: true + body: + kwd: foo + + - do: + index: + index: test + id: 4 + refresh: true + body: + const_kwd: bar + + - do: + search: + index: test + body: + query: + ids: + values: [1] + + - match: + hits.hits.0._source: + kwd: foo + + - do: + search: + index: test + body: + query: + ids: + values: [2] + + - match: + hits.hits.0._source: + const_kwd: bar + + - do: + search: + index: test + body: + query: + ids: + values: [3] + + - match: + hits.hits.0._source: + kwd: foo + + - do: + search: + index: test + body: + query: + ids: + values: [4] + + - match: + hits.hits.0._source: + const_kwd: bar diff --git a/x-pack/plugin/mapper-counted-keyword/build.gradle b/x-pack/plugin/mapper-counted-keyword/build.gradle index f2e715240696..b69a599628d2 100644 --- a/x-pack/plugin/mapper-counted-keyword/build.gradle +++ b/x-pack/plugin/mapper-counted-keyword/build.gradle @@ -8,9 +8,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'counted-keyword' - description 'Module for the counted-keyword field type, which allows to consider duplicates in an array of values of that type.' - classname 'org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin' + name = 'counted-keyword' + description = 'Module for the counted-keyword field type, which allows to consider duplicates in an array of values of that type.' 
- classname 'org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin' + name = 'counted-keyword' + description = 'Module for the counted-keyword field type, which allows to consider duplicates in an array of values of that type.' + classname = 'org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index 17a4f8a03fa5..71afd2e9082f 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -14,9 +14,9 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { - name 'unsigned-long' - description 'Module for the unsigned long field type' - classname 'org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin' + name = 'unsigned-long' + description = 'Module for the unsigned long field type' + classname = 'org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin' extendedPlugins = ['x-pack-core', 'lang-painless'] } base { diff --git a/x-pack/plugin/mapper-version/build.gradle b/x-pack/plugin/mapper-version/build.gradle index bf78c61523e3..4ee97c2d1a46 100644 --- a/x-pack/plugin/mapper-version/build.gradle +++ b/x-pack/plugin/mapper-version/build.gradle @@ -13,9 +13,9 @@ apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'mapper-version' - description 'A plugin for a field type to store software versions' - classname 'org.elasticsearch.xpack.versionfield.VersionFieldPlugin' + name = 'mapper-version' + description = 'A plugin for a field type to store software versions' + classname = 'org.elasticsearch.xpack.versionfield.VersionFieldPlugin' extendedPlugins = ['x-pack-core', 'lang-painless'] } base { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index 6bf2917c601a..0bdb5fc0101a 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -403,7 +403,7 @@ public class VersionStringFieldMapper extends FieldMapper { return concat; } - public static DocValueFormat VERSION_DOCVALUE = new DocValueFormat() { + public static final DocValueFormat VERSION_DOCVALUE = new DocValueFormat() { @Override public String getWriteableName() { diff --git a/x-pack/plugin/migrate/build.gradle b/x-pack/plugin/migrate/build.gradle index 87ea7a07ab41..283362a637e7 100644 --- a/x-pack/plugin/migrate/build.gradle +++ b/x-pack/plugin/migrate/build.gradle @@ -2,12 +2,12 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-migrate' - description 'Elasticsearch Expanded Pack Plugin - Index and Data Stream Migration' - classname 'org.elasticsearch.xpack.migrate.MigratePlugin' + name = 'x-pack-migrate' + description = 'Elasticsearch Expanded Pack Plugin - Index and Data Stream Migration' + classname = 'org.elasticsearch.xpack.migrate.MigratePlugin' extendedPlugins = ['x-pack-core'] - hasNativeController false - requiresKeystore true + hasNativeController = false + requiresKeystore = true } base { archivesName = 'x-pack-migrate' diff --git a/x-pack/plugin/ml-package-loader/build.gradle b/x-pack/plugin/ml-package-loader/build.gradle index 122ad396b507..fa460bf5605e 100644 --- a/x-pack/plugin/ml-package-loader/build.gradle +++ 
b/x-pack/plugin/ml-package-loader/build.gradle @@ -11,9 +11,9 @@ import org.elasticsearch.gradle.OS apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'ml-package-loader' - description 'Loader for prepackaged Machine Learning Models from Elastic' - classname 'org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader' + name = 'ml-package-loader' + description = 'Loader for prepackaged Machine Learning Models from Elastic' + classname = 'org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 716c401a9fcc..8373d9c6582e 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -11,10 +11,10 @@ apply plugin: 'elasticsearch.internal-test-artifact' apply plugin: 'elasticsearch.dra-artifacts' esplugin { - name 'x-pack-ml' - description 'Elasticsearch Expanded Pack Plugin - Machine Learning' - classname 'org.elasticsearch.xpack.ml.MachineLearning' - hasNativeController true + name = 'x-pack-ml' + description = 'Elasticsearch Expanded Pack Plugin - Machine Learning' + classname = 'org.elasticsearch.xpack.ml.MachineLearning' + hasNativeController = true extendedPlugins = ['x-pack-autoscaling', 'lang-painless'] } @@ -23,8 +23,8 @@ if (useDra == false) { exclusiveContent { forRepository { ivy { - name "ml-cpp" - url providers.systemProperty('build.ml_cpp.repo').orElse('https://prelert-artifacts.s3.amazonaws.com').get() + name = "ml-cpp" + url = providers.systemProperty('build.ml_cpp.repo').orElse('https://prelert-artifacts.s3.amazonaws.com').get() metadataSources { // no repository metadata, look directly for the artifact artifact() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/modelsize/ModelSizeInfo.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/modelsize/ModelSizeInfo.java index 587e7e3815c2..8dca78cafda4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/modelsize/ModelSizeInfo.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/modelsize/ModelSizeInfo.java @@ -30,7 +30,7 @@ public class ModelSizeInfo implements Accountable, ToXContentObject { private static final ParseField TRAINED_MODEL_SIZE = new ParseField("trained_model_size"); @SuppressWarnings("unchecked") - public static ConstructingObjectParser<ModelSizeInfo, Void> PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser<ModelSizeInfo, Void> PARSER = new ConstructingObjectParser<>( "model_size", false, a -> new ModelSizeInfo((EnsembleSizeInfo) a[0], (List) a[1]) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java index 0bfc64c9b002..72fc96683b72 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java @@ -47,7 +47,7 @@ public class Vocabulary implements Writeable, ToXContentObject { return parser; } - public static ConstructingObjectParser<Vocabulary, Void> PARSER = createParser(true); + public static final ConstructingObjectParser<Vocabulary, Void> PARSER = createParser(true); private final List<String> vocab; private final List<String> merges; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/AckResult.java
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/AckResult.java index 06f2679c56c2..5626270c1c6d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/AckResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/AckResult.java @@ -18,7 +18,7 @@ public record AckResult(boolean acknowledged) implements ToXContentObject { public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - public static ConstructingObjectParser<AckResult, Void> PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser<AckResult, Void> PARSER = new ConstructingObjectParser<>( "ack", a -> new AckResult((Boolean) a[0]) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ErrorResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ErrorResult.java index 68fc5cc58923..d5fd64c74c54 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ErrorResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ErrorResult.java @@ -18,7 +18,7 @@ public record ErrorResult(String error) implements ToXContentObject { public static final ParseField ERROR = new ParseField("error"); - public static ConstructingObjectParser<ErrorResult, Void> PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser<ErrorResult, Void> PARSER = new ConstructingObjectParser<>( "error", a -> new ErrorResult((String) a[0]) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchResult.java index 11340d0bf542..3db738e72766 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchResult.java @@ -36,7 +36,7 @@ public record PyTorchResult( private static final ParseField THREAD_SETTINGS = new ParseField("thread_settings"); private static final ParseField ACK = new ParseField("ack"); - public static ConstructingObjectParser<PyTorchResult, Void> PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser<PyTorchResult, Void> PARSER = new ConstructingObjectParser<>( "pytorch_result", a -> new PyTorchResult( (String) a[0], diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ThreadSettings.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ThreadSettings.java index 9154d33c0457..af8818d540eb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ThreadSettings.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/ThreadSettings.java @@ -19,7 +19,7 @@ public record ThreadSettings(int numThreadsPerAllocation, int numAllocations) im private static final ParseField NUM_ALLOCATIONS = new ParseField("num_allocations"); private static final ParseField NUM_THREADS_PER_ALLOCATION = new ParseField("num_threads_per_allocation"); - public static ConstructingObjectParser<ThreadSettings, Void> PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser<ThreadSettings, Void> PARSER = new ConstructingObjectParser<>( "thread_settings", a -> new ThreadSettings((int) a[0], (int) a[1]) ); diff --git
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizer.java index 89533255528f..c5e3754564ee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizer.java @@ -15,7 +15,7 @@ import java.io.IOException; */ public class MlClassicTokenizer extends AbstractMlTokenizer { - public static String NAME = "ml_classic"; + public static final String NAME = "ml_classic"; MlClassicTokenizer() {} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlStandardTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlStandardTokenizer.java index ebb1b646a4c5..a3fb05efb17c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlStandardTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlStandardTokenizer.java @@ -18,7 +18,7 @@ import java.io.Reader; */ public class MlStandardTokenizer extends AbstractMlTokenizer { - public static String NAME = "ml_standard"; + public static final String NAME = "ml_standard"; private int putBackChar = -1; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java index 85219b0e62c6..eb05d84c2140 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java @@ -84,8 +84,8 @@ public class InternalItemSetMapReduceAggregationTests extends InternalAggregatio public void close() throws IOException {} } - public static String MAP_REDUCER_NAME = "word-count-test-aggregation"; - public static String AGG_NAME = "internal-map-reduce-aggregation-test"; + public static final String MAP_REDUCER_NAME = "word-count-test-aggregation"; + public static final String AGG_NAME = "internal-map-reduce-aggregation-test"; WordCountMapReducer() { super(AGG_NAME, MAP_REDUCER_NAME); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index d3225ae53613..7444176595f8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -1559,6 +1559,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { nodeId1, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId1) + .setNodeEphemeralId(nodeId1) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(System.currentTimeMillis()) .setReason("test") @@ -1602,6 +1603,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { nodeId1, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId1) + 
.setNodeEphemeralId(nodeId1) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(System.currentTimeMillis()) .setReason("test") @@ -1648,6 +1650,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { nodeId3, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId3) + .setNodeEphemeralId(nodeId3) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(System.currentTimeMillis()) .setReason("test") @@ -1706,6 +1709,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { nodeId3, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId3) + .setNodeEphemeralId(nodeId3) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(System.currentTimeMillis()) .setReason("test") @@ -1766,6 +1770,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { nodeId3, SingleNodeShutdownMetadata.builder() .setNodeId(nodeId3) + .setNodeEphemeralId(nodeId3) .setType(SingleNodeShutdownMetadata.Type.REMOVE) .setStartedAtMillis(System.currentTimeMillis()) .setReason("test") @@ -1996,6 +2001,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { nodeToShutdown.getId(), SingleNodeShutdownMetadata.builder() .setNodeId(nodeToShutdown.getId()) + .setNodeEphemeralId(nodeToShutdown.getEphemeralId()) .setStartedAtMillis(1L) .setType(SingleNodeShutdownMetadata.Type.RESTART) .setReason("because this cannot be null") @@ -2052,6 +2058,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { .setStartedAtMillis(randomNonNegativeLong()) .setReason("tests") .setNodeId(nodeId) + .setNodeEphemeralId(nodeId) .build() ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java index b0cce14c5911..3e47f0bc6cd0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java @@ -41,7 +41,7 @@ public class XLMRobertaTestVocab { } @SuppressWarnings("checkstyle:linelength") - public static String[] MULTILINUGAL_TEXTS = new String[] { + public static final String[] MULTILINUGAL_TEXTS = new String[] { "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. Tim su materijalom gradili prostorije veličine do 6 četvornih metara. U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. 
Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. ⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", @@ -53,7 +53,7 @@ public class XLMRobertaTestVocab { "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں ’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; - public static int[][] EXPECTED_TOKENS = new int[][] { + public static final int[][] EXPECTED_TOKENS = new int[][] { { 0, 910, diff --git 
a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle index 43f7db22cc7e..c10f33d93653 100644 --- a/x-pack/plugin/monitoring/build.gradle +++ b/x-pack/plugin/monitoring/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'x-pack-monitoring' - description 'Elasticsearch Expanded Pack Plugin - Monitoring' - classname 'org.elasticsearch.xpack.monitoring.Monitoring' + name = 'x-pack-monitoring' + description = 'Elasticsearch Expanded Pack Plugin - Monitoring' + classname = 'org.elasticsearch.xpack.monitoring.Monitoring' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java index fb24c1ad2b88..f280171c046d 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java @@ -28,7 +28,7 @@ import java.util.function.Consumer; public abstract class Exporter implements AutoCloseable { - public static Setting.AffixSettingDependency TYPE_DEPENDENCY = () -> Exporter.TYPE_SETTING; + public static final Setting.AffixSettingDependency TYPE_DEPENDENCY = () -> Exporter.TYPE_SETTING; private static final Setting.AffixSetting<Boolean> ENABLED_SETTING = Setting.affixKeySetting( "xpack.monitoring.exporters.", diff --git a/x-pack/plugin/old-lucene-versions/build.gradle b/x-pack/plugin/old-lucene-versions/build.gradle index ea988fe5e1ff..8d23bbea472d 100644 --- a/x-pack/plugin/old-lucene-versions/build.gradle +++ b/x-pack/plugin/old-lucene-versions/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'old-lucene-versions' - description 'A plugin for accessing older Lucene indices' - classname 'org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions' + name = 'old-lucene-versions' + description = 'A plugin for accessing older Lucene indices' + classname = 'org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/otel-data/build.gradle b/x-pack/plugin/otel-data/build.gradle index 126875bf183f..0ae8022bff61 100644 --- a/x-pack/plugin/otel-data/build.gradle +++ b/x-pack/plugin/otel-data/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-otel-data' - description 'The OTEL plugin defines OTEL data streams and ingest pipelines.' - classname 'org.elasticsearch.xpack.oteldata.OTelPlugin' + name = 'x-pack-otel-data' + description = 'The OTEL plugin defines OTEL data streams and ingest pipelines.' + classname = 'org.elasticsearch.xpack.oteldata.OTelPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/profiling/build.gradle b/x-pack/plugin/profiling/build.gradle index d291a1a25dfb..954cdcb670a8 100644 --- a/x-pack/plugin/profiling/build.gradle +++ b/x-pack/plugin/profiling/build.gradle @@ -8,9 +8,9 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'x-pack-profiling' - description 'The profiling plugin adds support for retrieving data from Universal Profiling.'
- classname 'org.elasticsearch.xpack.profiling.ProfilingPlugin' + name = 'x-pack-profiling' + description = 'The profiling plugin adds support for retrieving data from Universal Profiling.' + classname = 'org.elasticsearch.xpack.profiling.ProfilingPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java index 5d32c39e350a..24f2f287f4cd 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java @@ -11,13 +11,12 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -177,56 +176,89 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXC @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // change casing from Camel Case to Snake Case (requires updates in Kibana as well) @Override public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> { - ob.array("Edges", edges.iterator(), (eb, e) -> eb.array(intValues(e.values()))); - ob.array("FileID", fileIds.toArray(String[]::new)); - ob.array("FrameType", intValues(frameTypes)); - ob.array("Inline", inlineFrames.iterator(), e -> (b, p) -> b.value(e)); - ob.array("ExeFilename", fileNames.toArray(String[]::new)); - ob.array("AddressOrLine", intValues(addressOrLines)); - ob.array("FunctionName", functionNames.toArray(String[]::new)); - ob.array("FunctionOffset", intValues(functionOffsets)); - ob.array("SourceFilename", sourceFileNames.toArray(String[]::new)); - ob.array("SourceLine", intValues(sourceLines)); - ob.array("CountInclusive", longValues(countInclusive)); - ob.array("CountExclusive", longValues(countExclusive)); - ob.array("AnnualCO2TonsInclusive", doubleValues(annualCO2TonsInclusive)); - ob.array("AnnualCO2TonsExclusive", doubleValues(annualCO2TonsExclusive)); - ob.array("AnnualCostsUSDInclusive", doubleValues(annualCostsUSDInclusive)); - ob.array("AnnualCostsUSDExclusive", doubleValues(annualCostsUSDExclusive)); - ob.field("Size", size); - ob.field("SamplingRate", samplingRate); - ob.field("SelfCPU", selfCPU); - ob.field("TotalCPU", totalCPU); - ob.field("TotalSamples", totalSamples); - }); - } - - private static Iterator<ToXContent> intValues(Collection<Integer> values) { - return Iterators.single((b, p) -> { - for (Integer i : values) { - b.value(i); - } - return b; - }); - } - - private static Iterator<ToXContent> longValues(Collection<Long> values) { - return Iterators.single((b, p) -> { - for (Long l : values) { - b.value(l); - } - return b; - }); - } - - private static Iterator<ToXContent> doubleValues(Collection<Double> values) { - return Iterators.single((b, p) -> { - for (Double d : values) { - // write as raw value - we need direct control over the output
representation (here: limit to 4 decimal places) - b.rawValue(NumberUtils.doubleToString(d)); - } - return b; - }); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + ChunkedToXContentHelper.array( + "Edges", + Iterators.flatMap( + edges.iterator(), + perNodeEdges -> Iterators.concat( + ChunkedToXContentHelper.startArray(), + Iterators.map(perNodeEdges.entrySet().iterator(), edge -> (b, p) -> b.value(edge.getValue())), + ChunkedToXContentHelper.endArray() + ) + ) + ), + ChunkedToXContentHelper.array("FileID", Iterators.map(fileIds.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.array("FrameType", Iterators.map(frameTypes.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.array("Inline", Iterators.map(inlineFrames.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.array("ExeFilename", Iterators.map(fileNames.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.array("AddressOrLine", Iterators.map(addressOrLines.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.array("FunctionName", Iterators.map(functionNames.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("FunctionOffset"); + for (int functionOffset : functionOffsets) { + b.value(functionOffset); + } + return b.endArray(); + }), + ChunkedToXContentHelper.array("SourceFilename", Iterators.map(sourceFileNames.iterator(), e -> (b, p) -> b.value(e))), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("SourceLine"); + for (int sourceLine : sourceLines) { + b.value(sourceLine); + } + return b.endArray(); + }), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("CountInclusive"); + for (long countInclusive : countInclusive) { + b.value(countInclusive); + } + return b.endArray(); + }), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("CountExclusive"); + for (long c : countExclusive) { + b.value(c); + } + return b.endArray(); + }), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("AnnualCO2TonsInclusive"); + for (double co2Tons : annualCO2TonsInclusive) { + // write as raw value - we need direct control over the output representation (here: limit to 4 decimal places) + b.rawValue(NumberUtils.doubleToString(co2Tons)); + } + return b.endArray(); + }), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("AnnualCO2TonsExclusive"); + for (double co2Tons : annualCO2TonsExclusive) { + b.rawValue(NumberUtils.doubleToString(co2Tons)); + } + return b.endArray(); + }), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("AnnualCostsUSDInclusive"); + for (double costs : annualCostsUSDInclusive) { + b.rawValue(NumberUtils.doubleToString(costs)); + } + return b.endArray(); + }), + ChunkedToXContentHelper.singleChunk((b, p) -> { + b.startArray("AnnualCostsUSDExclusive"); + for (double costs : annualCostsUSDExclusive) { + b.rawValue(NumberUtils.doubleToString(costs)); + } + return b.endArray(); + }), + Iterators.single((b, p) -> b.field("Size", size)), + Iterators.single((b, p) -> b.field("SamplingRate", samplingRate)), + Iterators.single((b, p) -> b.field("SelfCPU", selfCPU)), + Iterators.single((b, p) -> b.field("TotalCPU", totalCPU)), + Iterators.single((b, p) -> b.field("TotalSamples", totalSamples)), + ChunkedToXContentHelper.endObject() + ); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java index 19747ec6e49f..e9757d3806b6 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java @@ -9,19 +9,19 @@ package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.TriConsumer; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; +import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.Objects; -import java.util.function.Consumer; +import java.util.function.BiFunction; public class GetStackTracesResponse extends ActionResponse implements ChunkedToXContentObject { @Nullable @@ -91,33 +91,34 @@ public class GetStackTracesResponse extends ActionResponse implements ChunkedToX @Override public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(ob -> { - ob.execute(optional("stack_traces", stackTraces, ChunkedToXContentBuilder::xContentObjectFields)); - ob.execute(optional("stack_frames", stackFrames, ChunkedToXContentBuilder::xContentObjectFields)); - ob.execute(optional("executables", executables, ChunkedToXContentBuilder::object)); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + optional("stack_traces", stackTraces, ChunkedToXContentHelper::xContentValuesMap), + optional("stack_frames", stackFrames, ChunkedToXContentHelper::xContentValuesMap), + optional("executables", executables, ChunkedToXContentHelper::map), // render only count for backwards-compatibility - ob.execute( - optional( - "stack_trace_events", - stackTraceEvents, - (steb, n, v) -> steb.object(n, v.entrySet().iterator(), e -> (b, p) -> b.field(e.getKey(), e.getValue().count)) - ) - ); - ob.field("total_frames", totalFrames); - ob.field("sampling_rate", samplingRate); + optional( + "stack_trace_events", + stackTraceEvents, + (n, v) -> ChunkedToXContentHelper.map(n, v, entry -> (b, p) -> b.field(entry.getKey(), entry.getValue().count)) + ), + Iterators.single((b, p) -> b.field("total_frames", totalFrames)), + Iterators.single((b, p) -> b.field("sampling_rate", samplingRate)), // the following fields are intentionally not written to the XContent representation (only needed on the transport layer): + // // * start // * end // * totalSamples - }); + ChunkedToXContentHelper.endObject() + ); } - private static <T> Consumer<ChunkedToXContentBuilder> optional( + private static <T> Iterator<? extends ToXContent> optional( String name, Map<String, T> values, - TriConsumer<ChunkedToXContentBuilder, String, Map<String, T>> function + BiFunction<String, Map<String, T>, Iterator<? extends ToXContent>> supplier ) { - return values != null ? b -> function.apply(b, name, values) : b -> {}; + return (values != null) ?
supplier.apply(name, values) : Collections.emptyIterator(); } @Override diff --git a/x-pack/plugin/ql/build.gradle b/x-pack/plugin/ql/build.gradle index c9d86cdd63e4..947b4d8a0d0c 100644 --- a/x-pack/plugin/ql/build.gradle +++ b/x-pack/plugin/ql/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'x-pack-ql' - description 'Elasticsearch infrastructure plugin for EQL and SQL for Elasticsearch' - classname 'org.elasticsearch.xpack.ql.plugin.QlPlugin' + name = 'x-pack-ql' + description = 'Elasticsearch infrastructure plugin for EQL and SQL for Elasticsearch' + classname = 'org.elasticsearch.xpack.ql.plugin.QlPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/gen/processor/ConstantProcessor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/gen/processor/ConstantProcessor.java index cf87d01b87a5..3d5f63db23c5 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/gen/processor/ConstantProcessor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/gen/processor/ConstantProcessor.java @@ -19,7 +19,7 @@ import java.util.Objects; public class ConstantProcessor implements Processor { - public static String NAME = "c"; + public static final String NAME = "c"; private Object constant; private final Type type; diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java index 318f3888ac9b..7a85bf4c7349 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java @@ -1037,7 +1037,7 @@ public class IndexResolver { /** * Preserve the properties (sub fields) of an existing field even when marking it as invalid. */ - public static ExistingFieldInvalidCallback PRESERVE_PROPERTIES = (oldField, newField) -> { + public static final ExistingFieldInvalidCallback PRESERVE_PROPERTIES = (oldField, newField) -> { var oldProps = oldField.getProperties(); if (oldProps.size() > 0) { newField.getProperties().putAll(oldProps);
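GetFlamegraphResponse and GetStackTracesResponse above (and SingleNodeShutdownStatus later in the patch) move from the ChunkedToXContentBuilder DSL to explicitly concatenated chunk iterators. A minimal sketch of the resulting shape, using only helpers that already appear in this patch; the record and its fields are invented for illustration:

import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.xcontent.ToXContent;

import java.util.Iterator;
import java.util.List;

// Sketch: serializes {"values": [...], "total": n} as a lazy stream of chunks,
// so a large array never has to be materialized as one XContent object.
record ExampleChunkedResponse(List<String> values, long total) implements ChunkedToXContentObject {
    @Override
    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
        return Iterators.concat(
            ChunkedToXContentHelper.startObject(),
            ChunkedToXContentHelper.array("values", Iterators.map(values.iterator(), v -> (b, p) -> b.value(v))),
            Iterators.single((b, p) -> b.field("total", total)),
            ChunkedToXContentHelper.endObject()
        );
    }
}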
diff --git a/x-pack/plugin/rank-rrf/build.gradle b/x-pack/plugin/rank-rrf/build.gradle index b2d470c6618e..216e85f48f56 100644 --- a/x-pack/plugin/rank-rrf/build.gradle +++ b/x-pack/plugin/rank-rrf/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - name 'rank-rrf' - description 'Reciprocal rank fusion in search.' - classname 'org.elasticsearch.xpack.rank.rrf.RRFRankPlugin' + name = 'rank-rrf' + description = 'Reciprocal rank fusion in search.' + classname = 'org.elasticsearch.xpack.rank.rrf.RRFRankPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/redact/build.gradle b/x-pack/plugin/redact/build.gradle index f179ce3a88cf..e03567a4fe5a 100644 --- a/x-pack/plugin/redact/build.gradle +++ b/x-pack/plugin/redact/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-redact' - description 'Elasticsearch Expanded Pack Plugin - Redact' - classname 'org.elasticsearch.xpack.redact.RedactPlugin' + name = 'x-pack-redact' + description = 'Elasticsearch Expanded Pack Plugin - Redact' + classname = 'org.elasticsearch.xpack.redact.RedactPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/repositories-metering-api/build.gradle b/x-pack/plugin/repositories-metering-api/build.gradle index c9c3cbf84256..a9c706741e55 100644 --- a/x-pack/plugin/repositories-metering-api/build.gradle +++ b/x-pack/plugin/repositories-metering-api/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'repositories-metering-api' - description 'Repositories metering API' - classname 'org.elasticsearch.xpack.repositories.metering.RepositoriesMeteringPlugin' + name = 'repositories-metering-api' + description = 'Repositories metering API' + classname = 'org.elasticsearch.xpack.repositories.metering.RepositoriesMeteringPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index 4fca8e15d247..83fd08cb18f9 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -1,8 +1,8 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'x-pack-rollup' - description 'Elasticsearch Expanded Pack Plugin - Rollup' - classname 'org.elasticsearch.xpack.rollup.Rollup' + name = 'x-pack-rollup' + description = 'Elasticsearch Expanded Pack Plugin - Rollup' + classname = 'org.elasticsearch.xpack.rollup.Rollup' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/search-business-rules/build.gradle b/x-pack/plugin/search-business-rules/build.gradle index 11e1207f7f3c..cce6e15aedc0 100644 --- a/x-pack/plugin/search-business-rules/build.gradle +++ b/x-pack/plugin/search-business-rules/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'search-business-rules' - description 'A plugin for applying business rules to search result rankings' - classname 'org.elasticsearch.xpack.searchbusinessrules.SearchBusinessRules' + name = 'search-business-rules' + description = 'A plugin for applying business rules to search result rankings' + classname = 'org.elasticsearch.xpack.searchbusinessrules.SearchBusinessRules' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/searchable-snapshots/build.gradle b/x-pack/plugin/searchable-snapshots/build.gradle index 747e94b0e8d8..571ee3c1101c 100644 --- a/x-pack/plugin/searchable-snapshots/build.gradle +++ b/x-pack/plugin/searchable-snapshots/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'searchable-snapshots' - description 'A plugin for the searchable snapshots functionality' - classname 'org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots' + name = 'searchable-snapshots' +
description = 'A plugin for the searchable snapshots functionality' + classname = 'org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots' extendedPlugins = ['x-pack-core', 'blob-cache'] } base { diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle index 1659c592e5e6..b95e07090baf 100644 --- a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle @@ -13,6 +13,7 @@ apply plugin: 'elasticsearch.rest-resources' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('searchable-snapshots')))) javaRestTestImplementation project(":test:framework") + javaRestTestImplementation project(':test:fixtures:aws-fixture-utils') javaRestTestImplementation project(':test:fixtures:s3-fixture') } diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java index 989e5468c4fb..bb5df5d37dba 100644 --- a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java @@ -36,6 +36,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.function.UnaryOperator; +import static fixture.aws.AwsCredentialsUtils.mutableAccessKey; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.allOf; @@ -46,12 +47,7 @@ public class S3SearchableSnapshotsCredentialsReloadIT extends ESRestTestCase { private static volatile String repositoryAccessKey; - public static final S3HttpFixture s3Fixture = new S3HttpFixture( - true, - BUCKET, - BASE_PATH, - S3HttpFixture.mutableAccessKey(() -> repositoryAccessKey) - ); + public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, mutableAccessKey(() -> repositoryAccessKey)); private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index ebd435bf653a..d3425c1e42e5 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -4,10 +4,10 @@ apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'x-pack-security' - description 'Elasticsearch Expanded Pack Plugin - Security' - classname 'org.elasticsearch.xpack.security.Security' - requiresKeystore true + name = 'x-pack-security' + description = 'Elasticsearch Expanded Pack Plugin - Security' + classname = 'org.elasticsearch.xpack.security.Security' + requiresKeystore = true extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle b/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle index e9abb413402d..e3bad85a4256 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle +++ b/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin {
- name 'operator-privileges-test' - description 'An test plugin for testing hard to get internals' - classname 'org.elasticsearch.xpack.security.operator.OperatorPrivilegesTestPlugin' + name = 'operator-privileges-test' + description = 'A test plugin for testing hard to get internals' + classname = 'org.elasticsearch.xpack.security.operator.OperatorPrivilegesTestPlugin' } dependencies { diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1cb73de4646c..62687e42b091 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -115,6 +115,7 @@ public class Constants { "cluster:admin/transform/schedule_now", "cluster:admin/transform/update", "cluster:admin/transform/upgrade", + "cluster:admin/transform/upgrade_mode", "cluster:admin/transform/validate", // "cluster:admin/voting_config/add_exclusions", // "cluster:admin/voting_config/clear_exclusions", diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle b/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle index f805dc74c4ca..b99bc6594a5e 100644 --- a/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle +++ b/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle @@ -4,9 +4,9 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name 'secondary-auth-actions-extension' - description 'Spi extension plugin for security to enforce custom secondary auth actions' - classname 'org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin' + name = 'secondary-auth-actions-extension' + description = 'Spi extension plugin for security to enforce custom secondary auth actions' + classname = 'org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin' extendedPlugins = ['x-pack-security'] } diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle index e6caf943dc02..766598edc6f0 100644 --- a/x-pack/plugin/security/qa/security-basic/build.gradle +++ b/x-pack/plugin/security/qa/security-basic/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - name 'queryable-reserved-roles-test' - description 'A test plugin for testing that changes to reserved roles are made queryable' - classname 'org.elasticsearch.xpack.security.role.QueryableBuiltInRolesTestPlugin' + name = 'queryable-reserved-roles-test' + description = 'A test plugin for testing that changes to reserved roles are made queryable' + classname = 'org.elasticsearch.xpack.security.role.QueryableBuiltInRolesTestPlugin' extendedPlugins = ['x-pack-core', 'x-pack-security'] } dependencies { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java index 229b517be529..43615f3eb605 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java +++
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java @@ -109,7 +109,7 @@ public class LdapMetadataResolver { public static class LdapMetadataResult { - public static LdapMetadataResult EMPTY = new LdapMetadataResult(null, null, Map.of()); + public static final LdapMetadataResult EMPTY = new LdapMetadataResult(null, null, Map.of()); private final String fullName; private final String email; diff --git a/x-pack/plugin/shutdown/build.gradle b/x-pack/plugin/shutdown/build.gradle index 8eabb51b3e13..df1a7575cdd0 100644 --- a/x-pack/plugin/shutdown/build.gradle +++ b/x-pack/plugin/shutdown/build.gradle @@ -4,9 +4,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-shutdown' - description 'Elasticsearch Expanded Pack Plugin - Shutdown' - classname 'org.elasticsearch.xpack.shutdown.ShutdownPlugin' + name = 'x-pack-shutdown' + description = 'Elasticsearch Expanded Pack Plugin - Shutdown' + classname = 'org.elasticsearch.xpack.shutdown.ShutdownPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java index 95fd97cd5931..6078f1cbfa96 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java @@ -12,10 +12,11 @@ import org.elasticsearch.cluster.metadata.ShutdownPluginsStatus; import org.elasticsearch.cluster.metadata.ShutdownShardMigrationStatus; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -24,6 +25,10 @@ import java.io.IOException; import java.util.Iterator; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject; + public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObject { private final SingleNodeShutdownMetadata metadata; @@ -111,27 +116,27 @@ public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObj @Override public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) { - return ChunkedToXContent.builder(params).object(b -> { - b.append((builder, p) -> { - builder.field(SingleNodeShutdownMetadata.NODE_ID_FIELD.getPreferredName(), metadata.getNodeId()); - builder.field(SingleNodeShutdownMetadata.TYPE_FIELD.getPreferredName(), metadata.getType()); - builder.field(SingleNodeShutdownMetadata.REASON_FIELD.getPreferredName(), metadata.getReason()); - if (metadata.getAllocationDelay() != null) { - builder.field(
SingleNodeShutdownMetadata.ALLOCATION_DELAY_FIELD.getPreferredName(), - metadata.getAllocationDelay().getStringRep() - ); - } - builder.timestampFieldsFromUnixEpochMillis( - SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(), - SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD, - metadata.getStartedAtMillis() + return Iterators.concat(startObject(), singleChunk((builder, p) -> { + builder.field(SingleNodeShutdownMetadata.NODE_ID_FIELD.getPreferredName(), metadata.getNodeId()); + builder.field(SingleNodeShutdownMetadata.NODE_EPHEMERAL_ID_FIELD.getPreferredName(), metadata.getNodeEphemeralId()); + builder.field(SingleNodeShutdownMetadata.TYPE_FIELD.getPreferredName(), metadata.getType()); + builder.field(SingleNodeShutdownMetadata.REASON_FIELD.getPreferredName(), metadata.getReason()); + if (metadata.getAllocationDelay() != null) { + builder.field( + SingleNodeShutdownMetadata.ALLOCATION_DELAY_FIELD.getPreferredName(), + metadata.getAllocationDelay().getStringRep() ); - builder.field(STATUS.getPreferredName(), overallStatus()); - return builder; - }); - b.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus); - b.append((builder, p) -> { + } + builder.timestampFieldsFromUnixEpochMillis( + SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(), + SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD, + metadata.getStartedAtMillis() + ); + builder.field(STATUS.getPreferredName(), overallStatus()); + return builder; + }), + ChunkedToXContentHelper.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus, params), + singleChunk((builder, p) -> { builder.field(PERSISTENT_TASKS_FIELD.getPreferredName(), persistentTasksStatus); builder.field(PLUGINS_STATUS.getPreferredName(), pluginsStatus); if (metadata.getTargetNodeName() != null) { @@ -144,7 +149,8 @@ public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObj ); } return builder; - }); - }); + }), + endObject() + ); } } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java index 4c163bc1a936..f0197b3c998d 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; @@ -49,7 +50,8 @@ public class TransportPutShutdownNodeAction extends AcknowledgedTransportMasterN private static boolean putShutdownNodeState( Map shutdownMetadata, Predicate nodeExists, - Request request + Request request, + String nodeEphemeralId ) { if (isNoop(shutdownMetadata, request)) { return false; @@ -58,6 +60,7 @@ public class TransportPutShutdownNodeAction extends AcknowledgedTransportMasterN final boolean nodeSeen = nodeExists.test(request.getNodeId()); SingleNodeShutdownMetadata newNodeMetadata = SingleNodeShutdownMetadata.builder() 
.setNodeId(request.getNodeId()) + .setNodeEphemeralId(nodeEphemeralId) .setType(request.getType()) .setReason(request.getReason()) .setStartedAtMillis(System.currentTimeMillis()) @@ -103,8 +106,13 @@ public class TransportPutShutdownNodeAction extends AcknowledgedTransportMasterN boolean needsReroute = false; for (final var taskContext : batchExecutionContext.taskContexts()) { var request = taskContext.getTask().request(); + String nodeEphemeralId = null; + DiscoveryNode discoveryNode = initialState.nodes().getNodes().get(request.getNodeId()); + if (discoveryNode != null) { + nodeEphemeralId = discoveryNode.getEphemeralId(); + } try (var ignored = taskContext.captureResponseHeaders()) { - changed |= putShutdownNodeState(shutdownMetadata, nodeExistsPredicate, request); + changed |= putShutdownNodeState(shutdownMetadata, nodeExistsPredicate, request, nodeEphemeralId); } catch (Exception e) { taskContext.onFailure(e); continue; diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java index 2c68e8d7f39b..e3a8fb4482c3 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java @@ -55,6 +55,7 @@ public class GetShutdownStatusResponseTests extends AbstractWireSerializingTestC final TimeValue gracefulShutdown = type == SIGTERM ? randomPositiveTimeValue() : null; return SingleNodeShutdownMetadata.builder() .setNodeId(randomAlphaOfLength(5)) + .setNodeEphemeralId(randomAlphaOfLength(5)) .setType(type) .setReason(randomAlphaOfLength(5)) .setStartedAtMillis(randomNonNegativeLong()) diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java index 9a1dda99674c..c8f1d8b58a49 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java @@ -603,6 +603,7 @@ public class TransportGetShutdownStatusActionTests extends ESTestCase { .setStartedAtMillis(randomNonNegativeLong()) .setReason(this.getTestName()) .setNodeId(bogusNodeId) + .setNodeEphemeralId(bogusNodeId) .build() ) ) @@ -866,6 +867,7 @@ public class TransportGetShutdownStatusActionTests extends ESTestCase { .setStartedAtMillis(randomNonNegativeLong()) .setReason(this.getTestName()) .setNodeId(SHUTTING_DOWN_NODE_ID) + .setNodeEphemeralId(SHUTTING_DOWN_NODE_ID) .build() ) ) diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java index de5c5d393f39..ed603d30d059 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java @@ -15,6 +15,8 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskExecutor.TaskContext; import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; @@ -32,6 +34,8 @@ import org.mockito.MockitoAnnotations; import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.containsString; @@ -96,14 +100,16 @@ public class TransportPutShutdownNodeActionTests extends ESTestCase { targetNodeName, null ); - action.masterOperation(null, request, ClusterState.EMPTY_STATE, ActionListener.noop()); + var dummyNode = new DiscoveryNode(targetNodeName, "node1", "eph-node1", "abc", "abc", null, Map.of(), Set.of(), null); + var state = ClusterState.builder(ClusterState.EMPTY_STATE).nodes(DiscoveryNodes.builder().add(dummyNode).build()).build(); + action.masterOperation(null, request, state, ActionListener.noop()); var updateTask = ArgumentCaptor.forClass(PutShutdownNodeTask.class); var taskExecutor = ArgumentCaptor.forClass(PutShutdownNodeExecutor.class); verify(clusterService).createTaskQueue(any(), any(), taskExecutor.capture()); verify(taskQueue).submitTask(any(), updateTask.capture(), any()); when(taskContext.getTask()).thenReturn(updateTask.getValue()); ClusterState stableState = taskExecutor.getValue() - .execute(new ClusterStateTaskExecutor.BatchExecutionContext<>(ClusterState.EMPTY_STATE, List.of(taskContext), () -> null)); + .execute(new ClusterStateTaskExecutor.BatchExecutionContext<>(state, List.of(taskContext), () -> null)); // run the request again, there should be no call to submit an update task clearTaskQueueInvocations();
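The shutdown changes above record the node's ephemeral ID in SingleNodeShutdownMetadata, so a shutdown entry can be tied to one incarnation of a node rather than only its persistent ID, which survives restarts. A sketch of the registration step, mirroring the null handling in TransportPutShutdownNodeAction; the enclosing class and method are illustrative:

import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;

final class ShutdownRecords {
    // Sketch: register a REMOVE shutdown for nodeId, capturing the ephemeral ID
    // of the current incarnation when the node is still in the cluster state.
    static SingleNodeShutdownMetadata removeRecord(String nodeId, DiscoveryNode currentNode) {
        String nodeEphemeralId = currentNode != null ? currentNode.getEphemeralId() : null; // node may have already left
        return SingleNodeShutdownMetadata.builder()
            .setNodeId(nodeId)
            .setNodeEphemeralId(nodeEphemeralId)
            .setType(SingleNodeShutdownMetadata.Type.REMOVE)
            .setReason("example")
            .setStartedAtMillis(System.currentTimeMillis())
            .build();
    }
}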
diff --git a/x-pack/plugin/slm/build.gradle b/x-pack/plugin/slm/build.gradle index b54e31315f70..a827891e4b16 100644 --- a/x-pack/plugin/slm/build.gradle +++ b/x-pack/plugin/slm/build.gradle @@ -9,12 +9,12 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-slm' - description 'Elasticsearch Expanded Pack Plugin - Snapshot Lifecycle Management' - classname 'org.elasticsearch.xpack.slm.SnapshotLifecycle' + name = 'x-pack-slm' + description = 'Elasticsearch Expanded Pack Plugin - Snapshot Lifecycle Management' + classname = 'org.elasticsearch.xpack.slm.SnapshotLifecycle' extendedPlugins = ['x-pack-core'] - hasNativeController false - requiresKeystore true + hasNativeController = false + requiresKeystore = true } base { archivesName = 'x-pack-slm' diff --git a/x-pack/plugin/snapshot-based-recoveries/build.gradle b/x-pack/plugin/snapshot-based-recoveries/build.gradle index f1bed57838c4..d11f78a13f95 100644 --- a/x-pack/plugin/snapshot-based-recoveries/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'snapshot-based-recoveries' - description 'A plugin that enables snapshot based recoveries' - classname 'org.elasticsearch.xpack.snapshotbasedrecoveries.SnapshotBasedRecoveriesPlugin' + name = 'snapshot-based-recoveries' + description = 'A plugin that enables snapshot based recoveries' + classname = 'org.elasticsearch.xpack.snapshotbasedrecoveries.SnapshotBasedRecoveriesPlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/snapshot-repo-test-kit/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/build.gradle index 45839a049f45..58afbeb237e4 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/build.gradle @@ -3,9 +3,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'snapshot-repo-test-kit' - description 'A plugin for a test kit for snapshot repositories' - classname 'org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKit' + name = 'snapshot-repo-test-kit' + description = 'A plugin for a test kit for snapshot repositories' + classname = 'org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKit' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java index eaae913fe9c6..dd3193cd1d04 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java @@ -59,7 +59,7 @@ public class RepositoryVerifyIntegrityTask extends CancellableTask { long throttledNanos ) implements org.elasticsearch.tasks.Task.Status { - public static String NAME = "verify_repository_integrity_status"; + public static final String NAME = "verify_repository_integrity_status"; public Status(StreamInput in) throws IOException { this( diff --git a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle index 6299908f0dc1..49af925c0a42 100644 --- a/x-pack/plugin/spatial/build.gradle +++ b/x-pack/plugin/spatial/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'spatial' - description 'A plugin for Basic Spatial features' - classname 'org.elasticsearch.xpack.spatial.SpatialPlugin' + name = 'spatial' + description = 'A plugin for Basic Spatial features' + classname = 'org.elasticsearch.xpack.spatial.SpatialPlugin' extendedPlugins = ['x-pack-core', 'legacy-geo', 'lang-painless'] } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java index f9481bb8169e..4621929e8c62 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java @@ -27,7 +27,7 @@ import java.io.IOException; */ public abstract class CartesianShapeValues extends ShapeValues { - public static CartesianShapeValues EMPTY = new CartesianShapeValues() { + public static final CartesianShapeValues EMPTY = new CartesianShapeValues() { private final CartesianShapeValuesSourceType DEFAULT_VALUES_SOURCE_TYPE = CartesianShapeValuesSourceType.instance(); @Override
diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java index c5193d5412ec..d7099c904085 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java @@ -28,7 +28,7 @@ import java.io.IOException; */ public abstract class GeoShapeValues extends ShapeValues { - public static GeoShapeValues EMPTY = new GeoShapeValues() { + public static final GeoShapeValues EMPTY = new GeoShapeValues() { private final GeoShapeValuesSourceType DEFAULT_VALUES_SOURCE_TYPE = GeoShapeValuesSourceType.instance(); @Override diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 50221d838e05..44e68c2ebea7 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -121,7 +121,7 @@ public class PointFieldMapper extends AbstractPointGeometryFieldMapper<CartesianPoint> - public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, IGNORE_MALFORMED_SETTING.get(c.getSettings()))); + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, IGNORE_MALFORMED_SETTING.get(c.getSettings()))); private final Builder builder; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 2d586ac8eb86..f1140093f236 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -126,7 +126,7 @@ public class ShapeFieldMapper extends AbstractShapeGeometryFieldMapper<Geometry> } } - public static TypeParser PARSER = new TypeParser( + public static final TypeParser PARSER = new TypeParser( (n, c) -> new Builder( n, c.indexVersionCreated(), diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index 69468bf57495..74936d119492 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.internal-cluster-test' esplugin { name = 'x-pack-sql' - description 'The Elasticsearch plugin that powers SQL for Elasticsearch' + description = 'The Elasticsearch plugin that powers SQL for Elasticsearch' classname = 'org.elasticsearch.xpack.sql.plugin.SqlPlugin' extendedPlugins = ['x-pack-ql', 'lang-painless'] }
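The `static final` one-liners in this stretch (the EMPTY shape-values singletons, the mapper PARSERs, and the JDBC defaults just below) all close the same hole: a public static field without final is a mutable global that any code path can silently reassign. A plain-Java illustration of the difference, not Elasticsearch code:

import java.net.URI;

public final class Defaults {
    // With final, accidental reassignment becomes a compile-time error
    // instead of a process-wide behaviour change at runtime.
    public static final URI DEFAULT_URI = URI.create("http://localhost:9200/");

    public static void main(String[] args) {
        // Defaults.DEFAULT_URI = URI.create("http://example:9200/"); // no longer compiles
        System.out.println(Defaults.DEFAULT_URI);
    }
}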
diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java index e12661122f3a..298ee54b89e9 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java @@ -40,7 +40,7 @@ import static org.elasticsearch.xpack.sql.client.UriUtils.removeQuery; public class JdbcConfiguration extends ConnectionConfiguration { static final String URL_PREFIX = "jdbc:es://"; static final String URL_FULL_PREFIX = "jdbc:elasticsearch://"; - public static URI DEFAULT_URI = URI.create("http://localhost:9200/"); + public static final URI DEFAULT_URI = URI.create("http://localhost:9200/"); static final String DEBUG = "debug"; static final String DEBUG_DEFAULT = "false"; diff --git a/x-pack/plugin/sql/qa/jdbc/security/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/build.gradle index b40a8aaaa558..4248423d4ff4 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/build.gradle @@ -20,7 +20,7 @@ subprojects { def clusterPath = getPath() // Use tests from the root security qa project in subprojects - configurations.create('testArtifacts').transitive(false) + configurations.create('testArtifacts').transitive = false dependencies { javaRestTestImplementation project(":x-pack:plugin:core") diff --git a/x-pack/plugin/sql/qa/server/security/build.gradle b/x-pack/plugin/sql/qa/server/security/build.gradle index ef81ebda5cb8..37ae3edaf51d 100644 --- a/x-pack/plugin/sql/qa/server/security/build.gradle +++ b/x-pack/plugin/sql/qa/server/security/build.gradle @@ -20,7 +20,7 @@ Project mainProject = project subprojects { // Use tests from the root security qa project in subprojects - configurations.create('testArtifacts').transitive(false) + configurations.create('testArtifacts').transitive = false def clusterPath = getPath() diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java index 3d6324bc4455..9886d3b2637d 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java @@ -24,8 +24,8 @@ import static org.elasticsearch.xpack.sql.client.UriUtils.removeQuery; * Connection Builder.
Can interactively ask users for the password if it is not provided */ public class ConnectionBuilder { - public static String DEFAULT_CONNECTION_STRING = "http://localhost:9200/"; - public static URI DEFAULT_URI = URI.create(DEFAULT_CONNECTION_STRING); + public static final String DEFAULT_CONNECTION_STRING = "http://localhost:9200/"; + public static final URI DEFAULT_URI = URI.create(DEFAULT_CONNECTION_STRING); private CliTerminal cliTerminal; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java index 5f9cd8443d50..092fa78f8a3e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -31,9 +31,9 @@ public class Histogram extends GroupingFunction { private final Literal interval; private final ZoneId zoneId; - public static String YEAR_INTERVAL = DateHistogramInterval.YEAR.toString(); - public static String MONTH_INTERVAL = DateHistogramInterval.MONTH.toString(); - public static String DAY_INTERVAL = DateHistogramInterval.DAY.toString(); + public static final String YEAR_INTERVAL = DateHistogramInterval.YEAR.toString(); + public static final String MONTH_INTERVAL = DateHistogramInterval.MONTH.toString(); + public static final String DAY_INTERVAL = DateHistogramInterval.DAY.toString(); public Histogram(Source source, Expression field, Expression interval, ZoneId zoneId) { super(source, field, Collections.singletonList(interval)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index 049895bc9f31..e100f30717ae 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -504,52 +504,3 @@ double nested declared in mapping: # The `nested` field is not visible, nor are any of it's subfields. 
- match: { columns: [{name: name, type: keyword}] } - --- -semantic_text declared in mapping: - - requires: - test_runner_features: [ capabilities ] - capabilities: - - method: POST - path: /_query - parameters: [ ] - capabilities: [ semantic_text_type ] - reason: "support for semantic_text type" - - do: - indices.create: - index: test_semantic_text - body: - settings: - number_of_shards: 5 - mappings: - properties: - semantic_text_field: - type: semantic_text - inference_id: my_inference_id - - do: - bulk: - index: test_semantic_text - refresh: true - body: - - { "index": { } } - - { - "semantic_text_field": { - "text": "be excellent to each other", - "inference": { - "inference_id": "my_inference_id", - "model_settings": { - "task_type": "sparse_embedding" - }, - "chunks": [{ "text": "be excellent to each other", "embeddings": { "a": 1,"b": 2 } }] - } - } - } - - do: - allowed_warnings_regex: - - "No limit defined, adding default limit of \\[.*\\]" - esql.query: - body: - query: 'FROM test_semantic_text' - - match: { columns: [{name: semantic_text_field, type: semantic_text}] } - - length: { values: 1 } - - match: { values.0: ["be excellent to each other"] } diff --git a/x-pack/plugin/stack/build.gradle b/x-pack/plugin/stack/build.gradle index c4b950ad9cb5..1796ba334cda 100644 --- a/x-pack/plugin/stack/build.gradle +++ b/x-pack/plugin/stack/build.gradle @@ -2,12 +2,12 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { - name 'x-pack-stack' - description 'Elasticsearch Expanded Pack Plugin - Stack' - classname 'org.elasticsearch.xpack.stack.StackPlugin' + name = 'x-pack-stack' + description = 'Elasticsearch Expanded Pack Plugin - Stack' + classname = 'org.elasticsearch.xpack.stack.StackPlugin' extendedPlugins = ['x-pack-core'] - hasNativeController false - requiresKeystore true + hasNativeController = false + requiresKeystore = true } base { diff --git a/x-pack/plugin/text-structure/build.gradle b/x-pack/plugin/text-structure/build.gradle index 5bb6d8ef5027..4ebe380e7c72 100644 --- a/x-pack/plugin/text-structure/build.gradle +++ b/x-pack/plugin/text-structure/build.gradle @@ -1,8 +1,8 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { - name 'x-pack-text-structure' - description 'Elasticsearch Expanded Pack Plugin - Text Structure' - classname 'org.elasticsearch.xpack.textstructure.TextStructurePlugin' + name = 'x-pack-text-structure' + description = 'Elasticsearch Expanded Pack Plugin - Text Structure' + classname = 'org.elasticsearch.xpack.textstructure.TextStructurePlugin' extendedPlugins = ['x-pack-core'] } base { diff --git a/x-pack/plugin/transform/build.gradle b/x-pack/plugin/transform/build.gradle index 9e50aea3a8fc..02c63b06ac08 100644 --- a/x-pack/plugin/transform/build.gradle +++ b/x-pack/plugin/transform/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'transform' - description 'A plugin to transform data' - classname 'org.elasticsearch.xpack.transform.Transform' + name = 'transform' + description = 'A plugin to transform data' + classname = 'org.elasticsearch.xpack.transform.Transform' extendedPlugins = ['x-pack-core'] }
diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java index d73dc2a4e3aa..570feaf28cc5 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java @@ -37,11 +37,6 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; public class TransformGetAndGetStatsIT extends TransformRestTestCase { - - private static final String TEST_USER_NAME = "transform_user"; - private static final String BASIC_AUTH_VALUE_TRANSFORM_USER = basicAuthHeaderValue(TEST_USER_NAME, TEST_PASSWORD_SECURE_STRING); - private static final String TEST_ADMIN_USER_NAME = "transform_admin"; - private static final String BASIC_AUTH_VALUE_TRANSFORM_ADMIN = basicAuthHeaderValue(TEST_ADMIN_USER_NAME, TEST_PASSWORD_SECURE_STRING); private static final String DANGLING_TASK_ERROR_MESSAGE = "Found task for transform [pivot_continuous], but no configuration for it. To delete this transform use DELETE with force=true."; diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 09fbea29d4b1..537f50a30b5d 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -39,9 +39,16 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public abstract class TransformRestTestCase extends TransformCommonRestTestCase { - protected static final String TEST_PASSWORD = "x-pack-test-password"; protected static final SecureString TEST_PASSWORD_SECURE_STRING = new SecureString(TEST_PASSWORD.toCharArray()); + + protected static final String TEST_USER_NAME = "transform_user"; + protected static final String BASIC_AUTH_VALUE_TRANSFORM_USER = basicAuthHeaderValue(TEST_USER_NAME, TEST_PASSWORD_SECURE_STRING); + protected static final String TEST_ADMIN_USER_NAME = "transform_admin"; + protected static final String BASIC_AUTH_VALUE_TRANSFORM_ADMIN = basicAuthHeaderValue( + TEST_ADMIN_USER_NAME, + TEST_PASSWORD_SECURE_STRING + ); private static final String BASIC_AUTH_VALUE_SUPER_USER = basicAuthHeaderValue("x_pack_rest_user", TEST_PASSWORD_SECURE_STRING); protected static final String REVIEWS_INDEX_NAME = "reviews";
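The auth constants hoisted into TransformRestTestCase above are just precomputed HTTP Basic headers. A plain-Java sketch of what basicAuthHeaderValue produces (the real helper accepts a SecureString and avoids keeping String copies of the password, which this sketch omits):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

// RFC 7617 Basic credentials: "Basic " + base64(user + ":" + password).
static String basicAuthHeader(String user, String password) {
    String token = Base64.getEncoder().encodeToString((user + ":" + password).getBytes(StandardCharsets.UTF_8));
    return "Basic " + token;
}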
diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUpgradeModeIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUpgradeModeIT.java new file mode 100644 index 000000000000..437d7aa5e26d --- /dev/null +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUpgradeModeIT.java @@ -0,0 +1,197 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.CheckedRunnable; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.transforms.TransformStats; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class TransformUpgradeModeIT extends TransformRestTestCase { + private static boolean indicesCreated = false; + + // preserve indices in order to reuse source indices in several test cases + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @Before + public void createIndexes() throws IOException { + setupUser(TEST_USER_NAME, singletonList("transform_user")); + setupUser(TEST_ADMIN_USER_NAME, singletonList("transform_admin")); + // it's not possible to run it as @BeforeClass as clients aren't initialized then, so we need this little hack + if (indicesCreated) { + return; + } + + createReviewsIndex(); + indicesCreated = true; + } + + @After + public void clearOutTransforms() throws Exception { + adminClient().performRequest(new Request("POST", "/_features/_reset")); + } + + public void testUpgradeMode() throws Exception { + var transformId = startTransform(); + + assertAcknowledged(setUpgradeMode(BASIC_AUTH_VALUE_TRANSFORM_ADMIN, true)); + + var statsAndState = getTransformStateAndStats(transformId); + assertThat(readString(statsAndState, "state"), equalTo(TransformStats.State.WAITING.value())); + assertThat(readString(statsAndState, "reason"), equalTo("Transform task will not be assigned while upgrade mode is enabled.")); + + assertAcknowledged(setUpgradeMode(BASIC_AUTH_VALUE_TRANSFORM_ADMIN, false)); + + // upgrade mode only waits for the assignment block to be removed, not for the transform to restart + // so we need to wait until the transform restarts + assertBusy(() -> { + var nextStatsAndState = getTransformStateAndStats(transformId); + assertThat(readString(nextStatsAndState, "state"), equalTo(TransformStats.State.STARTED.value())); + assertThat(readString(nextStatsAndState, "reason"), nullValue()); + }, 30, TimeUnit.SECONDS); + }
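A note on the valueless `?enabled` query parameter used by the setUpgradeMode helper below: Elasticsearch's REST layer treats a boolean parameter that is present but empty as true, while an absent parameter falls back to its default (false here), so `set_upgrade_mode?enabled` enables upgrade mode and bare `set_upgrade_mode` disables it. A sketch of the same round-trip against the low-level REST client, assuming the `_transform/set_upgrade_mode` endpoint that the new RestSetTransformUpgradeModeAction registers:

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical standalone variant of the test helper below; auth headers omitted.
static Response setTransformUpgradeMode(RestClient client, boolean enabled) throws Exception {
    String endpoint = "/_transform/set_upgrade_mode" + (enabled ? "?enabled" : "");
    return client.performRequest(new Request("POST", endpoint));
}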
"set_upgrade_mode?enabled" : "set_upgrade_mode"; + var setUpgradeMode = createRequestWithAuth("POST", getTransformEndpoint() + path, auth); + return client().performRequest(setUpgradeMode); + } + + private String readString(Map statsAndState, String key) { + return statsAndState == null ? null : (String) XContentMapValues.extractValue(key, statsAndState); + } + + public void testUpgradeModeFailsWithNonAdminUser() throws Exception { + assertRestException( + () -> setUpgradeMode(TEST_USER_NAME, true), + e -> assertThat( + "Expected security error using non-admin transform user", + e.getResponse().getStatusLine().getStatusCode(), + is(401) + ) + ); + } + + @SuppressWarnings("unchecked") + public void testUpgradeModeBlocksRestApi() throws Exception { + var stoppedTransform = "stopped-transform"; + createContinuousPivotReviewsTransform(stoppedTransform, "pivot_reviews_continuous", null); + + var startedTransform = startTransform(); + + var uncreatedTransform = "some-new-transform"; + + assertAcknowledged(setUpgradeMode(BASIC_AUTH_VALUE_TRANSFORM_ADMIN, true)); + + // start an existing transform + assertConflict(() -> startTransform(stoppedTransform), "Cannot start any Transform while the Transform feature is upgrading."); + + // stop an existing transform + assertConflict(() -> stopTransform(startedTransform, false), "Cannot stop any Transform while the Transform feature is upgrading."); + + // create a new transform + assertConflict( + () -> createContinuousPivotReviewsTransform(uncreatedTransform, "pivot_reviews_continuous", null), + "Cannot create new Transform while the Transform feature is upgrading." + ); + + // update an existing transform + assertConflict( + () -> updateTransform(stoppedTransform, "{ \"settings\": { \"max_page_search_size\": 123 } }", false), + "Cannot update any Transform while the Transform feature is upgrading." + ); + + // upgrade all transforms + assertConflict(this::upgrade, "Cannot upgrade Transforms while the Transform feature is upgrading."); + + // schedule transform + assertConflict( + () -> scheduleNowTransform(startedTransform), + "Cannot schedule any Transform while the Transform feature is upgrading." + ); + + // reset transform + assertConflict( + () -> resetTransform(startedTransform, false), + "Cannot reset any Transform while the Transform feature is upgrading." 
+ ); + + assertAcknowledged(setUpgradeMode(BASIC_AUTH_VALUE_TRANSFORM_ADMIN, false)); + + // started transform should go back into started + assertBusy(() -> { + var statsAndState = getTransformStateAndStats(startedTransform); + assertThat(readString(statsAndState, "state"), equalTo(TransformStats.State.STARTED.value())); + assertThat(readString(statsAndState, "reason"), nullValue()); + }, 30, TimeUnit.SECONDS); + + // stopped transform should have never started + var statsAndState = getTransformStateAndStats(stoppedTransform); + assertThat(readString(statsAndState, "state"), equalTo(TransformStats.State.STOPPED.value())); + assertThat(readString(statsAndState, "reason"), nullValue()); + + // new transform shouldn't exist + assertRestException(() -> getTransformConfig(uncreatedTransform, BASIC_AUTH_VALUE_TRANSFORM_ADMIN), e -> { + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(404)); + assertThat(e.getMessage(), containsString("Transform with id [" + uncreatedTransform + "] could not be found")); + }); + + // stopped transform should not have been updated + var stoppedConfig = getTransformConfig(stoppedTransform, BASIC_AUTH_VALUE_TRANSFORM_ADMIN); + assertThat((Map<String, Object>) stoppedConfig.get("settings"), is(anEmptyMap())); + } + + private void assertConflict(CheckedRunnable<Exception> runnable, String message) throws Exception { + assertRestException(runnable, e -> { + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(409)); + assertThat(e.getMessage(), containsString(message)); + }); + } + + private void assertRestException(CheckedRunnable<Exception> runnable, Consumer<ResponseException> assertThat) throws Exception { + try { + runnable.run(); + fail("Expected code to throw ResponseException."); + } catch (ResponseException e) { + assertThat.accept(e); + } + } + + private void upgrade() throws IOException { + client().performRequest( + createRequestWithAuth("POST", TransformField.REST_BASE_PATH_TRANSFORMS + "_upgrade", BASIC_AUTH_VALUE_TRANSFORM_ADMIN) + ); + } +} diff --git a/x-pack/plugin/transform/src/main/java/module-info.java b/x-pack/plugin/transform/src/main/java/module-info.java index 33a722725d5d..246e207f1c36 100644 --- a/x-pack/plugin/transform/src/main/java/module-info.java +++ b/x-pack/plugin/transform/src/main/java/module-info.java @@ -22,6 +22,7 @@ module org.elasticsearch.transform { requires org.apache.logging.log4j; requires org.apache.lucene.core; requires org.apache.lucene.join; + requires org.elasticsearch.plugin; exports org.elasticsearch.xpack.transform; exports org.elasticsearch.xpack.transform.action; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 732d64059d73..9243eac0457f 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -48,11 +48,13 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry.Entry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.SetResetModeActionRequest; +import org.elasticsearch.xpack.core.action.SetUpgradeModeActionRequest; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; import
org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.TransformMessages; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; @@ -65,6 +67,7 @@ import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.ResetTransformAction; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction; import org.elasticsearch.xpack.core.transform.action.SetResetModeAction; +import org.elasticsearch.xpack.core.transform.action.SetTransformUpgradeModeAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction; @@ -82,6 +85,7 @@ import org.elasticsearch.xpack.transform.action.TransportPutTransformAction; import org.elasticsearch.xpack.transform.action.TransportResetTransformAction; import org.elasticsearch.xpack.transform.action.TransportScheduleNowTransformAction; import org.elasticsearch.xpack.transform.action.TransportSetTransformResetModeAction; +import org.elasticsearch.xpack.transform.action.TransportSetTransformUpgradeModeAction; import org.elasticsearch.xpack.transform.action.TransportStartTransformAction; import org.elasticsearch.xpack.transform.action.TransportStopTransformAction; import org.elasticsearch.xpack.transform.action.TransportUpdateTransformAction; @@ -101,6 +105,7 @@ import org.elasticsearch.xpack.transform.rest.action.RestPreviewTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestPutTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestResetTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestScheduleNowTransformAction; +import org.elasticsearch.xpack.transform.rest.action.RestSetTransformUpgradeModeAction; import org.elasticsearch.xpack.transform.rest.action.RestStartTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestStopTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestUpdateTransformAction; @@ -177,6 +182,45 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa return XPackPlugin.getSharedLicenseState(); } + @Override + public void prepareForIndicesMigration(ClusterService clusterService, Client client, ActionListener<Map<String, Object>> listener) { + if (TransformMetadata.upgradeMode(clusterService.state())) { + // Transform is already in upgrade mode, so nothing will write to the Transform system indices during their upgrade + listener.onResponse(Map.of("already_in_upgrade_mode", true)); + return; + } + + // Enable Transform upgrade mode before upgrading the system indices to ensure nothing writes to them during the upgrade + var originClient = new OriginSettingClient(client, TRANSFORM_ORIGIN); + originClient.execute( + SetTransformUpgradeModeAction.INSTANCE, + new SetUpgradeModeActionRequest(true), + listener.delegateFailureAndWrap((l, r) -> l.onResponse(Map.of("already_in_upgrade_mode", false))) + ); + }
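These two hooks form a round-trip: the map that prepareForIndicesMigration hands to its listener is returned verbatim to indicesMigrationComplete (just below) once the system-index migration finishes, which is how the already_in_upgrade_mode flag prevents switching off an upgrade mode that an operator had enabled by hand. A toy simulation of that contract in plain Java (names ours):

import java.util.Map;

public final class MigrationFlagDemo {
    public static void main(String[] args) {
        // As if TransformMetadata.upgradeMode(state) had been true before the migration.
        Map<String, Object> preUpgradeMetadata = Map.of("already_in_upgrade_mode", true);

        // ... system indices are migrated here ...

        boolean wasAlreadyInUpgradeMode = (boolean) preUpgradeMetadata.getOrDefault("already_in_upgrade_mode", false);
        System.out.println(wasAlreadyInUpgradeMode ? "leave upgrade mode enabled" : "disable upgrade mode");
    }
}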
+ + @Override + public void indicesMigrationComplete( + Map<String, Object> preUpgradeMetadata, + ClusterService clusterService, + Client client, + ActionListener<Boolean> listener + ) { + var wasAlreadyInUpgradeMode = (boolean) preUpgradeMetadata.getOrDefault("already_in_upgrade_mode", false); + if (wasAlreadyInUpgradeMode) { + // Transform was already in upgrade mode before system indices upgrade started - we shouldn't disable it + listener.onResponse(true); + return; + } + + var originClient = new OriginSettingClient(client, TRANSFORM_ORIGIN); + originClient.execute( + SetTransformUpgradeModeAction.INSTANCE, + new SetUpgradeModeActionRequest(false), + listener.delegateFailureAndWrap((l, r) -> l.onResponse(r.isAcknowledged())) + ); + } + @Override public List<RestHandler> getRestHandlers( final Settings unused, @@ -203,7 +247,8 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa new RestUpgradeTransformsAction(), new RestResetTransformAction(), new RestScheduleNowTransformAction(), - new RestGetTransformNodeStatsAction() + new RestGetTransformNodeStatsAction(), + new RestSetTransformUpgradeModeAction() ); } @@ -224,6 +269,7 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa new ActionHandler<>(ResetTransformAction.INSTANCE, TransportResetTransformAction.class), new ActionHandler<>(ScheduleNowTransformAction.INSTANCE, TransportScheduleNowTransformAction.class), new ActionHandler<>(GetTransformNodeStatsAction.INSTANCE, TransportGetTransformNodeStatsAction.class), + new ActionHandler<>(SetTransformUpgradeModeAction.INSTANCE, TransportSetTransformUpgradeModeAction.class), // internal, no rest endpoint new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index aad3cc4dc6ac..13aaf10793e0 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; @@ -80,6 +81,15 @@ public class TransportDeleteTransformAction extends AcknowledgedTransportMasterN @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { + if (TransformMetadata.upgradeMode(state)) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot delete any Transform while the Transform feature is upgrading.", + RestStatus.CONFLICT + ) + ); + return; + } final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); final boolean transformIsRunning = TransformTask.getTransformTask(request.getId(), state) != null; if (transformIsRunning && request.isForce() == false) {
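The 409 guard added to TransportDeleteTransformAction above reappears nearly verbatim in the put, reset, schedule-now, start, stop, update, and upgrade actions that follow. A hypothetical shared helper makes the shape of the pattern explicit (the actual change inlines it in each action, and the message wording varies per verb):

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.transform.TransformMetadata;

// Returns true when the caller should bail out because the listener has
// already been failed with a CONFLICT (409) response.
static boolean failIfUpgrading(ClusterState state, ActionListener<?> listener, String verb) {
    if (TransformMetadata.upgradeMode(state)) {
        listener.onFailure(
            new ElasticsearchStatusException(
                "Cannot " + verb + " any Transform while the Transform feature is upgrading.",
                RestStatus.CONFLICT
            )
        );
        return true;
    }
    return false;
}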
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 35c97f50b682..2c9b3ebac1bd 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.transform.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -25,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -35,6 +37,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; import org.elasticsearch.xpack.core.transform.TransformMessages; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; @@ -94,6 +97,16 @@ public class TransportPutTransformAction extends AcknowledgedTransportMasterNode protected void masterOperation(Task task, Request request, ClusterState clusterState, ActionListener listener) { XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); + if (TransformMetadata.upgradeMode(clusterState)) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot create new Transform while the Transform feature is upgrading.", + RestStatus.CONFLICT + ) + ); + return; + } + TransformConfig config = request.getConfig().setCreateTime(Instant.now()).setVersion(TransformConfigVersion.CURRENT); config.setHeaders(getSecurityHeadersPreferringSecondary(threadPool, securityContext, clusterState)); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index e817463385a1..6dd126628d72 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.ResetTransformAction; import org.elasticsearch.xpack.core.transform.action.ResetTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; @@ -96,6 +97,16 @@ public class TransportResetTransformAction extends AcknowledgedTransportMasterNo @Override protected void masterOperation(Task task, Request request, ClusterState state,
ActionListener listener) { + if (TransformMetadata.upgradeMode(state)) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot reset any Transform while the Transform feature is upgrading.", + RestStatus.CONFLICT + ) + ); + return; + } + final boolean transformIsRunning = TransformTask.getTransformTask(request.getId(), state) != null; if (transformIsRunning && request.isForce() == false) { listener.onFailure( diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java index ee82c2891314..62582e885c33 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.transform.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; @@ -21,11 +22,13 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction.Response; @@ -72,6 +75,15 @@ public class TransportScheduleNowTransformAction extends TransportTasksAction<TransformTask, Request, Response, Response> protected void doExecute(Task task, Request request, ActionListener<Response> listener) { final ClusterState clusterState = clusterService.state(); XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); + if (TransformMetadata.upgradeMode(clusterState)) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot schedule any Transform while the Transform feature is upgrading.", + RestStatus.CONFLICT + ) + ); + return; + } ActionListener getTransformListener = ActionListener.wrap(unusedConfig -> { PersistentTasksCustomMetadata.PersistentTask transformTask = TransformTask.getTransformTask(request.getId(), clusterState); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformResetModeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformResetModeAction.java index 6c890119ca09..06093c3e3876 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformResetModeAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformResetModeAction.java @@ -34,7 +34,7 @@ public class TransportSetTransformResetModeAction extends
AbstractTransportSetRe @Override protected boolean isResetMode(ClusterState clusterState) { - return TransformMetadata.getTransformMetadata(clusterState).isResetMode(); + return TransformMetadata.getTransformMetadata(clusterState).resetMode(); } @Override @@ -51,7 +51,7 @@ public class TransportSetTransformResetModeAction extends AbstractTransportSetRe } else { TransformMetadata.Builder builder = TransformMetadata.Builder.from( oldState.metadata().getProject().custom(TransformMetadata.TYPE) - ).isResetMode(request.isEnabled()); + ).resetMode(request.isEnabled()); newState.metadata(Metadata.builder(oldState.getMetadata()).putCustom(TransformMetadata.TYPE, builder.build()).build()); } return newState.build(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeAction.java new file mode 100644 index 000000000000..7a10704bcc19 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeAction.java @@ -0,0 +1,199 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksClusterService; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.AbstractTransportSetUpgradeModeAction; +import org.elasticsearch.xpack.core.action.SetUpgradeModeActionRequest; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.TransformMetadata; +import org.elasticsearch.xpack.core.transform.action.SetTransformUpgradeModeAction; + +import java.util.Comparator; +import java.util.stream.Collectors; + +import static org.elasticsearch.ExceptionsHelper.rethrowAndSuppress; +import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; +import static org.elasticsearch.xpack.core.transform.TransformField.AWAITING_UPGRADE; + +public class TransportSetTransformUpgradeModeAction extends AbstractTransportSetUpgradeModeAction { + private static final Logger logger = 
LogManager.getLogger(TransportSetTransformUpgradeModeAction.class); + private final PersistentTasksClusterService persistentTasksClusterService; + private final PersistentTasksService persistentTasksService; + private final OriginSettingClient client; + + @Inject + public TransportSetTransformUpgradeModeAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + PersistentTasksClusterService persistentTasksClusterService, + PersistentTasksService persistentTasksService, + Client client + ) { + super( + SetTransformUpgradeModeAction.NAME, + "transform", + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.persistentTasksClusterService = persistentTasksClusterService; + this.persistentTasksService = persistentTasksService; + this.client = new OriginSettingClient(client, TRANSFORM_ORIGIN); + } + + @Override + protected String featureName() { + return "transform-set-upgrade-mode"; + } + + @Override + protected boolean upgradeMode(ClusterState state) { + return TransformMetadata.upgradeMode(state); + } + + @Override + protected ClusterState createUpdatedState(SetUpgradeModeActionRequest request, ClusterState state) { + var updatedTransformMetadata = TransformMetadata.getTransformMetadata(state).builder().upgradeMode(request.enabled()).build(); + var updatedClusterMetadata = state.metadata().copyAndUpdate(b -> b.putCustom(TransformMetadata.TYPE, updatedTransformMetadata)); + return state.copyAndUpdate(b -> b.metadata(updatedClusterMetadata)); + }
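createUpdatedState above is the standard immutable-update idiom for cluster state: nothing is mutated in place, so flipping one flag means rebuilding the custom metadata section, the metadata around it, and the state around that. The same steps as a free-standing helper, for seeing the idiom in isolation (method name ours, APIs as used in this diff):

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.xpack.core.transform.TransformMetadata;

// Rebuild inside-out: custom section, then Metadata, then ClusterState.
static ClusterState withUpgradeMode(ClusterState state, boolean enabled) {
    var transformMetadata = TransformMetadata.getTransformMetadata(state).builder().upgradeMode(enabled).build();
    var metadata = state.metadata().copyAndUpdate(b -> b.putCustom(TransformMetadata.TYPE, transformMetadata));
    return state.copyAndUpdate(b -> b.metadata(metadata));
}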
+ + @Override + protected void upgradeModeSuccessfullyChanged( + Task task, + SetUpgradeModeActionRequest request, + ClusterState state, + ActionListener listener + ) { + PersistentTasksCustomMetadata tasksCustomMetadata = state.metadata().getProject().custom(PersistentTasksCustomMetadata.TYPE); + if (tasksCustomMetadata == null + || tasksCustomMetadata.tasks().isEmpty() + || tasksCustomMetadata.tasks().stream().noneMatch(this::isTransformTask)) { + logger.info("No tasks to worry about after state update"); + listener.onResponse(AcknowledgedResponse.TRUE); + return; + } + + if (request.enabled()) { + stopTransforms(request, state, listener); + } else { + waitForTransformsToRestart(request, listener); + } + } + + private void stopTransforms(SetUpgradeModeActionRequest request, ClusterState state, ActionListener listener) { + unassignTransforms(state, listener.delegateFailureAndWrap((l, r) -> waitForTransformsToStop(request, l))); + } + + private void unassignTransforms(ClusterState state, ActionListener listener) { + PersistentTasksCustomMetadata tasksCustomMetadata = state.metadata().getProject().custom(PersistentTasksCustomMetadata.TYPE); + var transformTasks = tasksCustomMetadata.tasks() + .stream() + .filter(this::isTransformTask) + // We want to always have the same ordering of which tasks we un-allocate first. + // However, the order in which the distributed tasks handle the un-allocation event is not guaranteed. + .sorted(Comparator.comparing(PersistentTasksCustomMetadata.PersistentTask::getTaskName)) + .toList(); + + logger.info( + "Un-assigning persistent tasks : {}", + () -> transformTasks.stream() + .map(PersistentTasksCustomMetadata.PersistentTask::getId) + .collect(Collectors.joining(", ", "[ ", " ]")) + ); + + // chain each call one at a time + // because that is what we are doing for ML, and that is all that is supported in the persistentTasksClusterService (for now) + SubscribableListener<PersistentTasksCustomMetadata.PersistentTask<?>> chainListener = SubscribableListener.newSucceeded(null); + for (var task : transformTasks) { + chainListener = chainListener.andThen(executor, threadPool.getThreadContext(), (l, unused) -> { + persistentTasksClusterService.unassignPersistentTask( + task.getId(), + task.getAllocationId(), + AWAITING_UPGRADE.getExplanation(), + l.delegateResponse((ll, ex) -> { + // ignore ResourceNotFoundException since a delete task is already unassigned + if (ExceptionsHelper.unwrapCause(ex) instanceof ResourceNotFoundException) { + ll.onResponse(null); + } else { + ll.onFailure(ex); + } + }) + ); + }); + } + chainListener.addListener(listener.delegateFailure((l, ignored) -> l.onResponse(null)), executor, threadPool.getThreadContext()); + } + + private void waitForTransformsToStop(SetUpgradeModeActionRequest request, ActionListener listener) { + client.admin() + .cluster() + .prepareListTasks() + .setActions(TransformField.TASK_NAME + "[c]") + // There is a chance that we failed un-allocating a task due to allocation_id being changed + // This call will timeout in that case and return an error + .setWaitForCompletion(true) + .setTimeout(request.ackTimeout()) + .execute(listener.delegateFailureAndWrap((l, r) -> { + try { + // Handle potential node timeouts, + // these should be considered failures as tasks as still potentially executing + logger.info("Waited for tasks to be unassigned"); + if (r.getNodeFailures().isEmpty() == false) { + logger.info("There were node failures waiting for tasks", r.getNodeFailures().get(0)); + } + rethrowAndSuppress(r.getNodeFailures()); + l.onResponse(AcknowledgedResponse.TRUE); + } catch (ElasticsearchException ex) { + logger.info("Caught node failures waiting for tasks to be unassigned", ex); + l.onFailure(ex); + } + })); + } + + private boolean isTransformTask(PersistentTasksCustomMetadata.PersistentTask<?> task) { + return TransformField.TASK_NAME.equals(task.getTaskName()); + } + + private void waitForTransformsToRestart(SetUpgradeModeActionRequest request, ActionListener listener) { + logger.info("Disabling upgrade mode for Transforms, must wait for tasks to not have AWAITING_UPGRADE assignment"); + persistentTasksService.waitForPersistentTasksCondition( + persistentTasksCustomMetadata -> persistentTasksCustomMetadata.tasks() + .stream() + .noneMatch(t -> isTransformTask(t) && t.getAssignment().equals(AWAITING_UPGRADE)), + request.ackTimeout(), + listener.delegateFailureAndWrap((d, r) -> d.onResponse(AcknowledgedResponse.TRUE)) + ); + } +}
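unassignTransforms above chains one unassignment at a time rather than fanning out, by folding each step onto a SubscribableListener. The idiom in isolation, with toy payload types and the andThen/addListener calls as used in this diff:

import java.util.List;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.SubscribableListener;

// Start from an already-completed listener and append one async step per
// item; step N+1 only begins after step N's response arrives.
static void runSequentially(List<String> taskIds, ActionListener<Void> done) {
    SubscribableListener<Void> chain = SubscribableListener.newSucceeded(null);
    for (String taskId : taskIds) {
        chain = chain.andThen((l, ignored) -> {
            // the per-task async call would complete l when finished
            l.onResponse(null);
        });
    }
    chain.addListener(done);
}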
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java index cd9b79d92154..69afce887222 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.transform.TransformMessages; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.core.transform.transforms.AuthorizationState; @@ -135,6 +136,15 @@ public class TransportStartTransformAction extends TransportMasterNodeAction<StartTransformAction.Request, StartTransformAction.Response> protected void masterOperation( Task task, StartTransformAction.Request request, ClusterState state, ActionListener<StartTransformAction.Response> listener ) { TransformNodes.warnIfNoTransformNodes(state); + if (TransformMetadata.upgradeMode(state)) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot start any Transform while the Transform feature is upgrading.", + RestStatus.CONFLICT + ) + ); + return; + } var transformTaskParamsHolder = new SetOnce<TransformTaskParams>(); var transformConfigHolder = new SetOnce<TransformConfig>(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index deae931fa0c0..2de797d47fb3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.transform.TransformMessages; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Response; @@ -125,6 +126,13 @@ public class TransportStopTransformAction extends TransportTasksAction<TransformTask, Request, Response, Response> protected void doExecute(Task task, Request request, ActionListener<Response> listener) { final ClusterState state = clusterService.state(); + if (TransformMetadata.upgradeMode(state)) { + listener.onFailure( + new ElasticsearchStatusException("Cannot stop any Transform while the Transform feature is upgrading.", RestStatus.CONFLICT) + ); + return; + } + final DiscoveryNodes nodes = state.nodes(); if (nodes.isLocalNodeElectedMaster() == false) {
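On the SetOnce holders introduced in TransportStartTransformAction above: SetOnce is Lucene's write-once box, handy when several async callbacks must each publish exactly one value and a second write should fail loudly instead of racing. A minimal demonstration:

import org.apache.lucene.util.SetOnce;

static String demoSetOnce() {
    SetOnce<String> holder = new SetOnce<>();
    holder.set("first");
    // holder.set("second"); // would throw SetOnce.AlreadySetException
    return holder.get();
}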
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index 6251dab36b2c..ecd93ae1ae72 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.transform.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.FailedNodeException; @@ -26,6 +27,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -34,6 +36,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Response; @@ -106,6 +109,15 @@ public class TransportUpdateTransformAction extends TransportTasksAction<TransformTask, Request, Response, Response> protected void doExecute(Task task, Request request, ActionListener<Response> listener) { final ClusterState clusterState = clusterService.state(); XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); + if (TransformMetadata.upgradeMode(clusterState)) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot update any Transform while the Transform feature is upgrading.", + RestStatus.CONFLICT + ) + ); + return; + } final DiscoveryNodes nodes = clusterState.nodes(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java index 5e2fa7f7a590..80a84c812309 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.UpgradeTransformsAction; import org.elasticsearch.xpack.core.transform.action.UpgradeTransformsAction.Request; import org.elasticsearch.xpack.core.transform.action.UpgradeTransformsAction.Response; @@ -99,6 +100,12 @@ public class TransportUpgradeTransformsAction extends TransportMasterNodeAction<Request, Response> protected void masterOperation(Task ignoredTask, Request request, ClusterState state, ActionListener<Response> listener) throws Exception { TransformNodes.warnIfNoTransformNodes(state); + if (TransformMetadata.upgradeMode(state)) { + listener.onFailure( + new ElasticsearchStatusException("Cannot upgrade Transforms while the Transform feature is upgrading.", RestStatus.CONFLICT) + ); + return; + } // do not allow in mixed clusters if (TransportVersionUtils.isMinTransportVersionSameAsCurrent(state) == false) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 6da7b6190bd4..7a1f874da66a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ 
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 6da7b6190bd4..7a1f874da66a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -64,7 +64,7 @@ public class TransformAuditor extends AbstractAuditor<TransformAuditMessage> { clusterService.addListener(event -> { if (event.metadataChanged()) { boolean oldIsResetMode = isResetMode; - boolean newIsResetMode = TransformMetadata.getTransformMetadata(event.state()).isResetMode(); + boolean newIsResetMode = TransformMetadata.getTransformMetadata(event.state()).resetMode(); if (oldIsResetMode != newIsResetMode) { logger.debug("TransformAuditor has noticed change of isResetMode bit from {} to {}", oldIsResetMode, newIsResetMode); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index 9d5175922c89..f78868112e8c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -68,6 +68,7 @@ import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.TransformMessages; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; @@ -129,6 +130,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void putTransformCheckpoint(TransformCheckpoint checkpoint, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot store Transform while the Transform feature is upgrading.")); + return; + } try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = checkpoint.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); @@ -148,6 +153,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void putTransformConfiguration(TransformConfig transformConfig, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot store Transform while the Transform feature is upgrading.")); + return; + } putTransformConfiguration(transformConfig, DocWriteRequest.OpType.CREATE, null, listener); } @@ -157,6 +166,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex, ActionListener<Boolean> listener ) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot update Transform while the Transform feature is upgrading.")); + return; + } if (seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndexConstants.LATEST_INDEX_NAME)) { // update the config in the same, current index using optimistic concurrency control putTransformConfiguration(transformConfig, DocWriteRequest.OpType.INDEX, seqNoPrimaryTermAndIndex, listener); @@ -169,6 +182,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void deleteOldTransformConfigurations(String transformId, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot delete Transform while the Transform feature is upgrading.")); + return; + } DeleteByQueryRequest deleteByQueryRequest = createDeleteByQueryRequest(); deleteByQueryRequest.indices( TransformInternalIndexConstants.INDEX_NAME_PATTERN, @@ -196,6 +213,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void deleteOldTransformStoredDocuments(String transformId, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot delete Transform while the Transform feature is upgrading.")); + return; + } DeleteByQueryRequest deleteByQueryRequest = createDeleteByQueryRequest(); deleteByQueryRequest.indices( TransformInternalIndexConstants.INDEX_NAME_PATTERN, @@ -213,6 +234,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void deleteOldCheckpoints(String transformId, long deleteCheckpointsBelow, long deleteOlderThan, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot delete checkpoints while the Transform feature is upgrading.")); + return; + } DeleteByQueryRequest deleteByQueryRequest = createDeleteByQueryRequest(); deleteByQueryRequest.indices( TransformInternalIndexConstants.INDEX_NAME_PATTERN, @@ -246,6 +271,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void deleteOldIndices(ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot delete internal indices while the Transform feature is upgrading.")); + return; + } ClusterState state = clusterService.state(); Set<String> indicesToDelete = new HashSet<>(); @@ -568,6 +597,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void resetTransform(String transformId, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot reset Transform while the Transform feature is upgrading.")); + return; + } ActionListener<BulkByScrollResponse> deleteListener = ActionListener.wrap(dbqResponse -> listener.onResponse(true), e -> { if (e.getClass() == IndexNotFoundException.class) { listener.onFailure( @@ -613,6 +646,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager @Override public void deleteTransform(String transformId, ActionListener<Boolean> listener) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot delete Transform while the Transform feature is upgrading.")); + return; + } DeleteByQueryRequest request = createDeleteByQueryRequest(); request.indices(TransformInternalIndexConstants.INDEX_NAME_PATTERN, TransformInternalIndexConstants.INDEX_NAME_PATTERN_DEPRECATED); @@ -645,6 +682,10 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex, ActionListener<SeqNoPrimaryTermAndIndex> listener ) { + if (isUpgrading()) { + listener.onFailure(conflictStatusException("Cannot store Transform while the Transform feature is upgrading.")); + return; + } try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = storedDoc.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); @@ -940,6 +981,14 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager }), client::search); } + private boolean isUpgrading() { + return TransformMetadata.upgradeMode(clusterService.state()); + } + + private Exception conflictStatusException(String message) { + return new ElasticsearchStatusException(message, RestStatus.CONFLICT); + } + private static Tuple<RestStatus, Throwable> getStatusAndReason(final BulkByScrollResponse response) { RestStatus status = RestStatus.OK; Throwable reason = new Exception("Unknown error");
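With isUpgrading() checked on every write path, the config manager rejects uniformly, so a caller that wants to treat the upgrade-mode rejection as retryable can key off the status; roughly (a usage sketch only, configManager and the retry policy are assumptions):

configManager.putTransformConfiguration(config, ActionListener.wrap(stored -> {
    // stored == true: the configuration was written
}, e -> {
    if (e instanceof ElasticsearchStatusException ese && ese.status() == RestStatus.CONFLICT) {
        // rejected while upgrade mode is enabled; safe to retry once the mode is cleared
    } else {
        // a real failure, surface it
    }
}));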
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestSetTransformUpgradeModeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestSetTransformUpgradeModeAction.java new file mode 100644 index 000000000000..f4e7606c1c1d --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestSetTransformUpgradeModeAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.rest.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.action.SetUpgradeModeActionRequest; +import org.elasticsearch.xpack.core.transform.action.SetTransformUpgradeModeAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestUtils.getAckTimeout; +import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; +import static org.elasticsearch.xpack.core.transform.TransformField.REST_BASE_PATH_TRANSFORMS; + +@ServerlessScope(Scope.INTERNAL) +public class RestSetTransformUpgradeModeAction extends BaseRestHandler { + + @Override + public List<Route> routes() { + return List.of(new Route(POST, REST_BASE_PATH_TRANSFORMS + "set_upgrade_mode")); + } + + @Override + public String getName() { + return "transform_set_upgrade_mode_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + SetUpgradeModeActionRequest request = new SetUpgradeModeActionRequest(restRequest.paramAsBoolean("enabled", false)); + request.ackTimeout(getAckTimeout(restRequest)); + request.masterNodeTimeout(getMasterNodeTimeout(restRequest)); + return channel -> client.execute(SetTransformUpgradeModeAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +}
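The handler wires POST _transform/set_upgrade_mode to the transport action; toggling the flag from a node client looks roughly like this (a usage sketch, not code from this patch; the logger is assumed):

// enabled=true mirrors POST _transform/set_upgrade_mode?enabled=true
SetUpgradeModeActionRequest request = new SetUpgradeModeActionRequest(true);
client.execute(SetTransformUpgradeModeAction.INSTANCE, request, ActionListener.wrap(
    response -> logger.info("transform upgrade mode enabled: {}", response.isAcknowledged()),
    e -> logger.error("failed to enable transform upgrade mode", e)
));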
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index df8c3f62034e..4a0c4c2921ba 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -47,6 +47,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.core.transform.TransformMetadata; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -436,6 +437,15 @@ class ClientTransformIndexer extends TransformIndexer { super.afterFinishOrFailure(); } + @Override + public boolean maybeTriggerAsyncJob(long now) { + if (TransformMetadata.upgradeMode(clusterService.state())) { + logger.debug("[{}] schedule was triggered but the Transform is upgrading. Ignoring trigger.", getJobId()); + return false; + } + return super.maybeTriggerAsyncJob(now); + } + @Override protected void onStop() { closePointInTime();
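Since the scheduler keeps firing while upgrade mode is on, the override turns each trigger into a no-op instead of deregistering the job. A hedged unit-test sketch of that behaviour (stateWithUpgradeMode and the mocked collaborators are hypothetical):

public void testTriggerIgnoredWhileUpgrading() {
    when(clusterService.state()).thenReturn(stateWithUpgradeMode(true)); // hypothetical helper
    assertFalse(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()));
    verifyNoInteractions(client); // no search or bulk cycle was started
}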
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodes.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodes.java index 56e5fd5900cf..6aea39b34752 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodes.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodes.java @@ -136,7 +136,7 @@ public final class TransformNodes { * @param clusterState state */ public static void warnIfNoTransformNodes(ClusterState clusterState) { - if (TransformMetadata.getTransformMetadata(clusterState).isResetMode() == false) { + if (TransformMetadata.getTransformMetadata(clusterState).resetMode() == false) { if (hasAnyTransformNode(clusterState.getNodes()) == false) { HeaderWarning.addWarning(TransformMessages.REST_WARN_NO_TRANSFORM_NODES); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index 377476011465..5878e1a5e5c3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -68,6 +68,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.common.notifications.Level.ERROR; import static org.elasticsearch.xpack.core.common.notifications.Level.INFO; +import static org.elasticsearch.xpack.core.transform.TransformField.AWAITING_UPGRADE; +import static org.elasticsearch.xpack.core.transform.TransformField.RESET_IN_PROGRESS; import static org.elasticsearch.xpack.transform.transforms.TransformNodes.nodeCanRunThisTransform; public class TransformPersistentTasksExecutor extends PersistentTasksExecutor<TransformTaskParams> { @@ -119,11 +121,12 @@ public class TransformPersistentTasksExecutor extends PersistentTasksExecutor<TransformTaskParams> List<String> unavailableIndices = verifyIndicesPrimaryShardsAreActive(clusterState, resolver); if (unavailableIndices.size() != 0) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformMetadataTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformMetadataTests.java index 39ea10ef570f..108bbab85935 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformMetadataTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformMetadataTests.java @@ -16,7 +16,7 @@ public class TransformMetadataTests extends AbstractChunkedSerializingTestCase<TransformMetadata> { + ActionListener<AcknowledgedResponse> listener = invocationOnMock.getArgument(2); + listener.onResponse(AcknowledgedResponse.TRUE); + return null; + }).when(client).execute(same(SetTransformUpgradeModeAction.INSTANCE), any(), any()); + + try (var transformPlugin = new Transform(Settings.EMPTY)) { + SetOnce<Map<String, Object>> response = new SetOnce<>(); + transformPlugin.prepareForIndicesMigration(clusterService, client, ActionTestUtils.assertNoFailureListener(response::set)); + + assertThat(response.get(), equalTo(Collections.singletonMap("already_in_upgrade_mode", false))); + verify(client).execute(same(SetTransformUpgradeModeAction.INSTANCE), eq(new SetUpgradeModeActionRequest(true)), any()); + + transformPlugin.indicesMigrationComplete( + response.get(), + clusterService, + client, + ActionTestUtils.assertNoFailureListener(ESTestCase::assertTrue) + ); + + verify(client).execute(same(SetTransformUpgradeModeAction.INSTANCE), eq(new SetUpgradeModeActionRequest(false)), any()); + } finally { + terminate(threadPool); + } + } + + public void testIgnoreSetTransformUpgradeMode() { + ClusterService clusterService = mock(); + when(clusterService.state()).thenReturn( + ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().putCustom(TransformMetadata.TYPE, new TransformMetadata.Builder().upgradeMode(true).build())) + .build() + ); + Client client = mock(); + + try (var transformPlugin = new Transform(Settings.EMPTY)) { + SetOnce<Map<String, Object>> response = new SetOnce<>(); + transformPlugin.prepareForIndicesMigration(clusterService, client, ActionTestUtils.assertNoFailureListener(response::set)); + + assertThat(response.get(), equalTo(Collections.singletonMap("already_in_upgrade_mode", true))); + verifyNoMoreInteractions(client); + + transformPlugin.indicesMigrationComplete( + response.get(), + clusterService, + client, + ActionTestUtils.assertNoFailureListener(ESTestCase::assertTrue) + ); + + verifyNoMoreInteractions(client); + } + } +}
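The assertions pin down the system-index migration hook contract: whatever map prepareForIndicesMigration returns is handed back to indicesMigrationComplete, which lets the plugin skip disabling upgrade mode when it did not enable it. A plausible shape for the override, consistent with these tests but hedged, since Transform.java itself is not in this excerpt:

@Override
public void prepareForIndicesMigration(ClusterService clusterService, Client client, ActionListener<Map<String, Object>> listener) {
    if (TransformMetadata.upgradeMode(clusterService.state())) {
        // already upgrading (e.g. set by an operator); remember not to switch it off afterwards
        listener.onResponse(Map.of("already_in_upgrade_mode", true));
        return;
    }
    client.execute(
        SetTransformUpgradeModeAction.INSTANCE,
        new SetUpgradeModeActionRequest(true),
        ActionListener.wrap(r -> listener.onResponse(Map.of("already_in_upgrade_mode", false)), listener::onFailure)
    );
}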
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java new file mode 100644 index 000000000000..97ef367dca8e --- /dev/null +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java @@ -0,0 +1,202 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.AdminClient; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.persistent.PersistentTasksClusterService; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.action.SetUpgradeModeActionRequest; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.TransformMetadata; +import org.junit.Before; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.action.support.ActionTestUtils.assertNoFailureListener; +import static org.elasticsearch.xpack.core.action.AbstractTransportSetUpgradeModeActionTests.clusterService; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.when; + +public class TransportSetTransformUpgradeModeActionTests extends ESTestCase { + private ClusterService clusterService; + private PersistentTasksService persistentTasksService; + private Client client; + private TransportSetTransformUpgradeModeAction action; + + @Before + public void setUp() throws Exception { + super.setUp(); + clusterService = clusterService(); + doAnswer(ans -> { + ClusterStateUpdateTask task = ans.getArgument(1); + task.clusterStateProcessed(ClusterState.EMPTY_STATE, ClusterState.EMPTY_STATE); + return null; + }).when(clusterService).submitUnbatchedStateUpdateTask(any(), any(ClusterStateUpdateTask.class)); + when(clusterService.getClusterSettings()).thenReturn(ClusterSettings.createBuiltInClusterSettings()); + PersistentTasksClusterService persistentTasksClusterService = new PersistentTasksClusterService( + Settings.EMPTY, + mock(), + clusterService, + mock() + ); + persistentTasksService = mock(); + client = mock(); + ThreadPool threadPool = mock(); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(client.threadPool()).thenReturn(threadPool); + action = new TransportSetTransformUpgradeModeAction( + mock(), + clusterService, + threadPool, + mock(), + mock(), + persistentTasksClusterService, + persistentTasksService, + client + ); + } + + public void testUpgradeMode() { + assertTrue(action.upgradeMode(state(true))); + assertFalse(action.upgradeMode(state(false))); + } + + private ClusterState state(boolean upgradeMode) { + return ClusterState.EMPTY_STATE.copyAndUpdate( + u -> u.metadata( +
ClusterState.EMPTY_STATE.metadata() + .copyAndUpdate( + b -> b.putCustom( + TransformMetadata.TYPE, + TransformMetadata.getTransformMetadata(ClusterState.EMPTY_STATE).builder().upgradeMode(upgradeMode).build() + ) + ) + ) + ); + } + + public void testCreateUpdatedState() { + var updatedState = action.createUpdatedState(new SetUpgradeModeActionRequest(true), state(false)); + assertTrue(TransformMetadata.upgradeMode(updatedState)); + + updatedState = action.createUpdatedState(new SetUpgradeModeActionRequest(false), state(true)); + assertFalse(TransformMetadata.upgradeMode(updatedState)); + } + + public void testUpgradeModeWithNoMetadata() throws InterruptedException { + upgradeModeSuccessfullyChanged(ClusterState.EMPTY_STATE, assertNoFailureListener(r -> { + assertThat(r, is(AcknowledgedResponse.TRUE)); + verifyNoInteractions(persistentTasksService); + verify(client, never()).admin(); + })); + } + + public void testUpgradeModeWithNoTasks() throws InterruptedException { + upgradeModeSuccessfullyChanged( + ClusterState.EMPTY_STATE.copyAndUpdateMetadata( + m -> m.putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(1, Map.of())) + ), + assertNoFailureListener(r -> { + assertThat(r, is(AcknowledgedResponse.TRUE)); + verifyNoInteractions(persistentTasksService); + verify(client, never()).admin(); + }) + ); + } + + public void testUpgradeModeWithNoTransformTasks() throws InterruptedException { + upgradeModeSuccessfullyChanged( + ClusterState.EMPTY_STATE.copyAndUpdateMetadata( + m -> m.putCustom( + PersistentTasksCustomMetadata.TYPE, + new PersistentTasksCustomMetadata(1, Map.of("not a transform", mock())) + ) + ), + assertNoFailureListener(r -> { + assertThat(r, is(AcknowledgedResponse.TRUE)); + verifyNoInteractions(persistentTasksService); + verify(client, never()).admin(); + }) + ); + } + + private void upgradeModeSuccessfullyChanged(ClusterState state, ActionListener<AcknowledgedResponse> listener) + throws InterruptedException { + upgradeModeSuccessfullyChanged(new SetUpgradeModeActionRequest(true), state, listener); + } + + private void upgradeModeSuccessfullyChanged( + SetUpgradeModeActionRequest request, + ClusterState state, + ActionListener<AcknowledgedResponse> listener + ) throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + action.upgradeModeSuccessfullyChanged(mock(), request, state, ActionListener.runAfter(listener, latch::countDown)); + assertTrue("Failed to finish test after 10s", latch.await(10, TimeUnit.SECONDS)); + } + + public void testEnableUpgradeMode() throws InterruptedException { + doAnswer(ans -> { + ActionListener<ListTasksResponse> listener = ans.getArgument(2); + ListTasksResponse response = mock(); + when(response.getNodeFailures()).thenReturn(List.of()); + listener.onResponse(response); + return null; + }).when(client).execute(any(), any(), any()); + + when(client.admin()).thenReturn(new AdminClient(client)); + + upgradeModeSuccessfullyChanged(stateWithTransformTask(), assertNoFailureListener(r -> { + assertThat(r, is(AcknowledgedResponse.TRUE)); + verify(clusterService).submitUnbatchedStateUpdateTask(eq("unassign persistent task from any node"), any()); + })); + } + + private ClusterState stateWithTransformTask() { + PersistentTasksCustomMetadata.PersistentTask<?> task = mock(); + when(task.getTaskName()).thenReturn(TransformField.TASK_NAME); + return ClusterState.EMPTY_STATE.copyAndUpdateMetadata( + m -> m.putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(1, Map.of("a transform", task))) + ); + } +
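testEnableUpgradeMode expects exactly one unbatched cluster-state update named "unassign persistent task from any node": enabling the mode vacates transform persistent tasks rather than cancelling them, so they resume where they left off once the mode is cleared. The enable path presumably loops over the transform tasks along these lines (a sketch; it assumes TransformField.AWAITING_UPGRADE is an Assignment constant analogous to the ML one, and perTaskListener is hypothetical):

for (PersistentTasksCustomMetadata.PersistentTask<?> task : transformTasks) {
    persistentTasksClusterService.unassignPersistentTask(
        task.getId(),
        task.getAllocationId(),
        AWAITING_UPGRADE.getExplanation(), // reason shown while the task waits out the upgrade
        perTaskListener // hypothetical listener collecting the unassignment acknowledgements
    );
}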
+ public void testDisableUpgradeMode() throws InterruptedException { + doAnswer(ans -> { + ActionListener<Boolean> listener = ans.getArgument(2); + listener.onResponse(true); + return null; + }).when(persistentTasksService).waitForPersistentTasksCondition(any(), any(), any()); + upgradeModeSuccessfullyChanged(new SetUpgradeModeActionRequest(false), stateWithTransformTask(), assertNoFailureListener(r -> { + assertThat(r, is(AcknowledgedResponse.TRUE)); + verify(clusterService, never()).submitUnbatchedStateUpdateTask(eq("unassign persistent task from any node"), any()); + })); + } +} diff --git a/x-pack/plugin/vector-tile/build.gradle b/x-pack/plugin/vector-tile/build.gradle index 5855757b417f..4ff3240d239e 100644 --- a/x-pack/plugin/vector-tile/build.gradle +++ b/x-pack/plugin/vector-tile/build.gradle @@ -16,9 +16,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' esplugin { - name 'vector-tile' - description 'A plugin for mapbox vector tile features' - classname 'org.elasticsearch.xpack.vectortile.VectorTilePlugin' + name = 'vector-tile' + description = 'A plugin for mapbox vector tile features' + classname = 'org.elasticsearch.xpack.vectortile.VectorTilePlugin' extendedPlugins = ['spatial'] } diff --git a/x-pack/plugin/voting-only-node/build.gradle b/x-pack/plugin/voting-only-node/build.gradle index 60dd79ef7f6b..1202f90977da 100644 --- a/x-pack/plugin/voting-only-node/build.gradle +++ b/x-pack/plugin/voting-only-node/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-voting-only-node' - description 'Elasticsearch Expanded Pack Plugin - Voting-only node' - classname 'org.elasticsearch.cluster.coordination.votingonly.VotingOnlyNodePlugin' + name = 'x-pack-voting-only-node' + description = 'Elasticsearch Expanded Pack Plugin - Voting-only node' + classname = 'org.elasticsearch.cluster.coordination.votingonly.VotingOnlyNodePlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index edf767fad675..ce7c8e5d6311 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -1,11 +1,11 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { - name 'x-pack-watcher' - description 'Elasticsearch Expanded Pack Plugin - Watcher' - classname 'org.elasticsearch.xpack.watcher.Watcher' - hasNativeController false - requiresKeystore false + name = 'x-pack-watcher' + description = 'Elasticsearch Expanded Pack Plugin - Watcher' + classname = 'org.elasticsearch.xpack.watcher.Watcher' + hasNativeController = false + requiresKeystore = false extendedPlugins = ['x-pack-core', 'lang-painless'] } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java index 724ba58c7e99..d1a2e1c84658 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java @@ -43,7 +43,7 @@ public abstract class WatcherConditionScript { WatcherConditionScript newInstance(Map<String, Object> params, WatchExecutionContext watcherContext); } - public static ScriptContext<Factory> CONTEXT = new ScriptContext<>( + public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>( "watcher_condition", Factory.class, 200,
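This and the following hunks are a constant-hardening sweep: de-facto constants such as CONTEXT here, DataAttachment.DEFAULT, MAX_CLAUSES_IN_APPROXIMATION_QUERY, and PARSER below gain final. The hazard being closed, in two lines:

// Before this sweep the field was mutable, so any code in the process could do:
WatcherConditionScript.CONTEXT = null; // compiled fine, silently broke every later watcher condition
// With final, that reassignment no longer compiles.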
"watcher_condition", Factory.class, 200, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/DataAttachment.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/DataAttachment.java index d22a608e131f..7ce1b9ca8b60 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/DataAttachment.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/DataAttachment.java @@ -56,7 +56,7 @@ public enum DataAttachment implements ToXContentObject { } }; - public static DataAttachment DEFAULT = YAML; + public static final DataAttachment DEFAULT = YAML; public abstract String contentType(); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java index 93e3eefb4aa2..d8c079e1cb00 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java @@ -44,7 +44,7 @@ public abstract class WatcherTransformScript { WatcherTransformScript newInstance(Map params, WatchExecutionContext watcherContext, Payload payload); } - public static ScriptContext CONTEXT = new ScriptContext<>( + public static final ScriptContext CONTEXT = new ScriptContext<>( "watcher_transform", Factory.class, 200, diff --git a/x-pack/plugin/wildcard/build.gradle b/x-pack/plugin/wildcard/build.gradle index 1a4f13340258..d87a461eaf85 100644 --- a/x-pack/plugin/wildcard/build.gradle +++ b/x-pack/plugin/wildcard/build.gradle @@ -9,9 +9,9 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - name 'wildcard' - description 'A plugin for a keyword field type with efficient wildcard search' - classname 'org.elasticsearch.xpack.wildcard.Wildcard' + name = 'wildcard' + description = 'A plugin for a keyword field type with efficient wildcard search' + classname = 'org.elasticsearch.xpack.wildcard.Wildcard' extendedPlugins = ['x-pack-core', 'lang-painless'] } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index f3b01bb89812..f87c73273a7a 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -94,7 +94,7 @@ import static org.elasticsearch.index.IndexSettings.IGNORE_ABOVE_SETTING; public class WildcardFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "wildcard"; - public static short MAX_CLAUSES_IN_APPROXIMATION_QUERY = 10; + public static final short MAX_CLAUSES_IN_APPROXIMATION_QUERY = 10; public static final int NGRAM_SIZE = 3; static final NamedAnalyzer WILDCARD_ANALYZER_7_10 = new NamedAnalyzer("_wildcard_7_10", AnalyzerScope.GLOBAL, new Analyzer() { @@ -252,7 +252,7 @@ public class WildcardFieldMapper extends FieldMapper { } } - public static TypeParser PARSER = new TypeParser( + public static final TypeParser PARSER = new TypeParser( (n, c) -> new Builder(n, IGNORE_ABOVE_SETTING.get(c.getSettings()), 
c.indexVersionCreated()) ); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 0b31e96ece84..108efaa0f769 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -1093,7 +1093,7 @@ public class WildcardFieldMapperTests extends MapperTestCase { IndexFieldData.Builder builder = fieldType.fielddataBuilder(fdc); return builder.build(new IndexFieldDataCache.None(), null); }; - MappingLookup lookup = MappingLookup.fromMapping(Mapping.EMPTY); + MappingLookup lookup = MappingLookup.fromMapping(Mapping.EMPTY, null); return new SearchExecutionContext( 0, 0, diff --git a/x-pack/plugin/write-load-forecaster/build.gradle b/x-pack/plugin/write-load-forecaster/build.gradle index 8a3db5830e83..3eecec6363a3 100644 --- a/x-pack/plugin/write-load-forecaster/build.gradle +++ b/x-pack/plugin/write-load-forecaster/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'elasticsearch.internal-cluster-test' esplugin { name = 'x-pack-write-load-forecaster' - description 'x' + description = 'x' classname = 'org.elasticsearch.xpack.writeloadforecaster.WriteLoadForecasterPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index ecd02ac9d209..f21aad9cdf75 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -50,6 +50,11 @@ if (OS.current() == OS.MAC && Architecture.current() == Architecture.AARCH64) { jdks.legacy.distributionVersion = '8.56.0.23' } +interface Injected { + @Inject + FileSystemOperations getFs() +} + if (OS.current() == OS.WINDOWS) { logger.warn("Disabling repository-old-versions tests because we can't get the pid file on windows") } else { @@ -95,10 +100,12 @@ if (OS.current() == OS.WINDOWS) { setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' } + def oldesFixtureConfiguration = project.configurations.oldesFixture + TaskProvider<AntFixture> fixture = tasks.register("oldES${versionNoDots}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, config executable = "${buildParams.runtimeJavaHome.get()}/bin/java" - env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}" + env 'CLASSPATH', "${-> oldesFixtureConfiguration.asPath}" // old versions of Elasticsearch need JAVA_HOME env 'JAVA_HOME', jdks.legacy.javaHomePath // If we are running on certain arm systems we need to explicitly set the stack size to overcome JDK page size bug @@ -116,11 +123,15 @@ if (OS.current() == OS.WINDOWS) { // We need to explicitly disable ML when running old ES versions on ARM or on systems with newer GLIBC args 'xpack.ml.enabled: false' } + def injected = project.objects.newInstance(Injected) + doFirst { - delete(dataPath) - mkdir(dataPath) + injected.getFs().delete { d -> + d.delete(dataPath) + } + new File(dataPath).mkdirs() } - maxWaitInSeconds 60 + maxWaitInSeconds = 60 waitCondition = { fixture, ant -> // the fixture writes the ports file when Elasticsearch's HTTP service // is ready, so we can just wait for the file to exist @@ -131,9 +142,13 @@ if (OS.current() == OS.WINDOWS) { tasks.register("javaRestTestBeforeRestart#${versionNoDots}", StandaloneRestIntegTestTask) {
useCluster testClusterProvider dependsOn fixture + def injected = project.objects.newInstance(Injected) + doFirst { - delete(repoLocation) - mkdir(repoLocation) + injected.getFs().delete { d -> + d.delete(repoLocation) + } + new File(repoLocation).mkdirs() } systemProperty 'tests.after_restart', 'false' } diff --git a/x-pack/qa/security-example-spi-extension/build.gradle b/x-pack/qa/security-example-spi-extension/build.gradle index 13e1325848f3..66912af4a837 100644 --- a/x-pack/qa/security-example-spi-extension/build.gradle +++ b/x-pack/qa/security-example-spi-extension/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.base-internal-es-plugin' esplugin { - name 'spi-extension' - description 'An example spi extension plugin for security' - classname 'org.elasticsearch.example.SpiExtensionPlugin' + name = 'spi-extension' + description = 'An example spi extension plugin for security' + classname = 'org.elasticsearch.example.SpiExtensionPlugin' extendedPlugins = ['x-pack-security'] }