diff --git a/.buildkite/pipelines/periodic-fwc.template.yml b/.buildkite/pipelines/periodic-fwc.template.yml
new file mode 100644
index 000000000000..b9f00a649a14
--- /dev/null
+++ b/.buildkite/pipelines/periodic-fwc.template.yml
@@ -0,0 +1,15 @@
+steps:
+  - label: $FWC_VERSION / fwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$FWC_VERSION#fwcTest -Dtests.bwc.snapshot=false
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: n1-standard-32
+      buildDirectory: /dev/shm/bk
+      preemptible: true
+    matrix:
+      setup:
+        FWC_VERSION: $FWC_LIST
+    env:
+      FWC_VERSION: $FWC_VERSION
diff --git a/.buildkite/pipelines/periodic-fwc.yml b/.buildkite/pipelines/periodic-fwc.yml
new file mode 100644
index 000000000000..434a091aa1df
--- /dev/null
+++ b/.buildkite/pipelines/periodic-fwc.yml
@@ -0,0 +1,16 @@
+# This file is auto-generated. See .buildkite/pipelines/periodic-fwc.template.yml
+steps:
+  - label: "{{matrix.FWC_VERSION}} / fwc"
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$$FWC_VERSION#fwcTest -Dtests.bwc.snapshot=false
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: n1-standard-32
+      buildDirectory: /dev/shm/bk
+      preemptible: true
+    matrix:
+      setup:
+        FWC_VERSION: []
+    env:
+      FWC_VERSION: "{{matrix.FWC_VERSION}}"
diff --git a/.buildkite/scripts/branches.sh b/.buildkite/scripts/branches.sh
index 886fa59e4d02..8e909b362ef2 100755
--- a/.buildkite/scripts/branches.sh
+++ b/.buildkite/scripts/branches.sh
@@ -2,3 +2,7 @@
 # This determines which branches will have pipelines triggered periodically, for dra workflows.
 BRANCHES=( $(cat branches.json | jq -r '.branches[].branch') )
+
+# Sort them to make ordering predictable
+IFS=$'\n' BRANCHES=($(sort <<<"${BRANCHES[*]}"))
+unset IFS
diff --git a/.buildkite/scripts/periodic.trigger.sh b/.buildkite/scripts/periodic.trigger.sh
index cc10a5ae4186..30e13386f308 100755
--- a/.buildkite/scripts/periodic.trigger.sh
+++ b/.buildkite/scripts/periodic.trigger.sh
@@ -46,4 +46,15 @@ EOF
       branch: "$BRANCH"
       commit: "$LAST_GOOD_COMMIT"
 EOF
+# Include forward compatibility tests only for the bugfix branch
+if [[ "${BRANCH}" == "${BRANCHES[2]}" ]]; then
+  cat <<EOF
+  - trigger: elasticsearch-periodic-fwc
+    label: Trigger periodic-fwc pipeline for $BRANCH
+    async: true
+    build:
+      branch: "$BRANCH"
+      commit: "$LAST_GOOD_COMMIT"
+EOF
+fi
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DateFieldMapperDocValuesSkipperBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DateFieldMapperDocValuesSkipperBenchmark.java
new file mode 100644
--- /dev/null
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DateFieldMapperDocValuesSkipperBenchmark.java
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.index.mapper;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.BytesRef;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+import org.openjdk.jmh.annotations.Threads;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+import org.openjdk.jmh.profile.AsyncProfiler;
+import org.openjdk.jmh.runner.Runner;
+import org.openjdk.jmh.runner.RunnerException;
+import org.openjdk.jmh.runner.options.Options;
+import org.openjdk.jmh.runner.options.OptionsBuilder;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Random;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * <b>Goal:</b> This benchmark is designed to mimic and benchmark the execution of a range query in LogsDB,
+ * with and without a sparse doc values index on the {@code host.name} and {@code @timestamp} fields.
+ *

+ * <p><b>Document Structure:</b>
+ * Each document carries a {@code host.name} field and a {@code @timestamp} field, matching the fields indexed below.
+ *
+ * <p><b>Index Sorting:</b>
+ * The index is sorted primarily by {@code host.name} (ascending) and secondarily by {@code @timestamp} (descending).
+ * Documents are grouped into batches, where each hostname gets a dedicated batch of timestamps.
+ * This is meant to simulate collecting logs from a set of hosts over a certain time interval.
+ *
+ * <p><b>Batched Data Behavior:</b>
+ * Hostnames are generated in batches ("host-0", "host-1", ...), and each batch of {@code batchSize} documents
+ * shares the same base timestamp range, with a small random increment added per document.
+ *
+ * <p><b>Example Output:</b>
+ * <pre>
+ * | Document # | host.name | @timestamp (ms since epoch) |
+ * |------------|-----------|-----------------------------|
+ * | 1          | host-0    | 1704067200005               |
+ * | 2          | host-0    | 1704067201053               |
+ * | 3          | host-0    | 1704067202091               |
+ * | ...        | ...       | ...                         |
+ * | 10000      | host-0    | 1704077199568               |
+ * | 10001      | host-1    | 1704067200042               |
+ * | 10002      | host-1    | 1704067201099               |
+ * | ...        | ...       | ...                         |
+ * </pre>
+ *
+ * <p>
When running the range query, we retrieve only a fraction of the total data, + * simulating a real-world scenario where a dashboard only needs the most recent logs. + */ +@BenchmarkMode(Mode.SampleTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +@Fork(1) +@Threads(1) +@Warmup(iterations = 3) +@Measurement(iterations = 5) +public class DateFieldMapperDocValuesSkipperBenchmark { + + /** + * Total number of documents to index. + */ + @Param("1343120") + private int nDocs; + + /** + * Number of documents per hostname batch. + */ + @Param({ "1340", "121300" }) + private int batchSize; + + /** + * Maximum random increment (in milliseconds) added to each doc's timestamp. + */ + @Param("1000") + private int deltaTime; + + /** + * Fraction of the total time range (derived from {@code batchSize * deltaTime}) that the range query will cover. + */ + @Param({ "0.01", "0.2", "0.8" }) + private double queryRange; + + /** + * Number of docs to index before forcing a commit, thus creating multiple Lucene segments. + */ + @Param({ "7390", "398470" }) + private int commitEvery; + + /** + * Seed for random data generation. + */ + @Param("42") + private int seed; + + private static final String TIMESTAMP_FIELD = "@timestamp"; + private static final String HOSTNAME_FIELD = "host.name"; + private static final long BASE_TIMESTAMP = 1704067200000L; + + private IndexSearcher indexSearcherWithoutDocValuesSkipper; + private IndexSearcher indexSearcherWithDocValuesSkipper; + private ExecutorService executorService; + + /** + * Main entry point for running this benchmark via JMH. + * + * @param args command line arguments (unused) + * @throws RunnerException if the benchmark fails to run + */ + public static void main(String[] args) throws RunnerException { + final Options options = new OptionsBuilder().include(DateFieldMapperDocValuesSkipperBenchmark.class.getSimpleName()) + .addProfiler(AsyncProfiler.class) + .build(); + + new Runner(options).run(); + } + + /** + * Sets up the benchmark by creating Lucene indexes (with and without doc values skipper). + * Sets up a single-threaded executor for searching the indexes and avoid concurrent search threads. + * + * @throws IOException if an error occurs while building the index + */ + @Setup(Level.Trial) + public void setup() throws IOException { + executorService = Executors.newSingleThreadExecutor(); + + final Directory tempDirectoryWithoutDocValuesSkipper = FSDirectory.open(Files.createTempDirectory("temp1-")); + final Directory tempDirectoryWithDocValuesSkipper = FSDirectory.open(Files.createTempDirectory("temp2-")); + + indexSearcherWithoutDocValuesSkipper = createIndex(tempDirectoryWithoutDocValuesSkipper, false, commitEvery); + indexSearcherWithDocValuesSkipper = createIndex(tempDirectoryWithDocValuesSkipper, true, commitEvery); + } + + /** + * Creates an {@link IndexSearcher} after indexing documents in batches. + * Each batch commit forces multiple segments to be created. 
+ * + * @param directory the Lucene {@link Directory} for writing the index + * @param withDocValuesSkipper true if we should enable doc values skipper on certain fields + * @param commitEvery number of documents after which to commit (and thus segment) + * @return an {@link IndexSearcher} for querying the newly built index + * @throws IOException if an I/O error occurs during index writing + */ + private IndexSearcher createIndex(final Directory directory, final boolean withDocValuesSkipper, final int commitEvery) + throws IOException { + + final IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer()); + // NOTE: index sort config matching LogsDB's sort order + config.setIndexSort( + new Sort( + new SortField(HOSTNAME_FIELD, SortField.Type.STRING, false), + new SortedNumericSortField(TIMESTAMP_FIELD, SortField.Type.LONG, true) + ) + ); + + final Random random = new Random(seed); + + try (IndexWriter indexWriter = new IndexWriter(directory, config)) { + int docCountSinceLastCommit = 0; + + for (int i = 0; i < nDocs; i++) { + final Document doc = new Document(); + addFieldsToDocument(doc, i, withDocValuesSkipper, random); + indexWriter.addDocument(doc); + docCountSinceLastCommit++; + + // Force commit periodically to create multiple Lucene segments + if (docCountSinceLastCommit >= commitEvery) { + indexWriter.commit(); + docCountSinceLastCommit = 0; + } + } + + indexWriter.commit(); + + // Open a reader and create a searcher on top of it using a single thread executor. + DirectoryReader reader = DirectoryReader.open(indexWriter); + return new IndexSearcher(reader, executorService); + } + } + + /** + * Populates the given {@link Document} with fields, optionally using doc values skipper. + * + * @param doc the Lucene document to fill + * @param docIndex index of the document being added + * @param withDocValuesSkipper true if doc values skipper is enabled + * @param random seeded {@link Random} for data variation + */ + private void addFieldsToDocument(final Document doc, int docIndex, boolean withDocValuesSkipper, final Random random) { + + final int batchIndex = docIndex / batchSize; + final String hostName = "host-" + batchIndex; + + // Slightly vary the timestamp in each document + final long timestamp = BASE_TIMESTAMP + ((docIndex % batchSize) * deltaTime) + random.nextInt(0, deltaTime); + + if (withDocValuesSkipper) { + // Sparse doc values index on `@timestamp` and `host.name` + doc.add(SortedNumericDocValuesField.indexedField(TIMESTAMP_FIELD, timestamp)); + doc.add(SortedDocValuesField.indexedField(HOSTNAME_FIELD, new BytesRef(hostName))); + } else { + // Standard doc values, points and inverted index + doc.add(new StringField(HOSTNAME_FIELD, hostName, Field.Store.NO)); + doc.add(new SortedDocValuesField(HOSTNAME_FIELD, new BytesRef(hostName))); + doc.add(new LongPoint(TIMESTAMP_FIELD, timestamp)); + doc.add(new SortedNumericDocValuesField(TIMESTAMP_FIELD, timestamp)); + } + } + + /** + * Calculates the upper bound for the timestamp range query based on {@code batchSize}, + * {@code deltaTime}, and {@code queryRange}. + * + * @return the computed upper bound for the timestamp range query + */ + private long rangeEndTimestamp() { + return BASE_TIMESTAMP + (long) (batchSize * deltaTime * queryRange); + } + + /** + * Executes a range query without doc values skipper. 
+ * + * @param bh the blackhole consuming the query result + * @throws IOException if a search error occurs + */ + @Benchmark + public void rangeQueryWithoutDocValuesSkipper(final Blackhole bh) throws IOException { + bh.consume(rangeQuery(indexSearcherWithoutDocValuesSkipper, BASE_TIMESTAMP, rangeEndTimestamp(), true)); + } + + /** + * Executes a range query with doc values skipper enabled. + * + * @param bh the blackhole consuming the query result + * @throws IOException if a search error occurs + */ + @Benchmark + public void rangeQueryWithDocValuesSkipper(final Blackhole bh) throws IOException { + bh.consume(rangeQuery(indexSearcherWithDocValuesSkipper, BASE_TIMESTAMP, rangeEndTimestamp(), false)); + } + + /** + * Runs the actual Lucene range query, optionally combining a {@link LongPoint} index query + * with doc values ({@link SortedNumericDocValuesField}) via {@link IndexOrDocValuesQuery}, + * and then wrapping it with an {@link IndexSortSortedNumericDocValuesRangeQuery} to utilize the index sort. + * + * @param searcher the Lucene {@link IndexSearcher} + * @param rangeStartTimestamp lower bound of the timestamp range + * @param rangeEndTimestamp upper bound of the timestamp range + * @param isIndexed true if we should combine indexed and doc value queries + * @return the total number of matching documents + * @throws IOException if a search error occurs + */ + private long rangeQuery(final IndexSearcher searcher, long rangeStartTimestamp, long rangeEndTimestamp, boolean isIndexed) + throws IOException { + + assert rangeEndTimestamp > rangeStartTimestamp; + + final Query rangeQuery = isIndexed + ? new IndexOrDocValuesQuery( + LongPoint.newRangeQuery(TIMESTAMP_FIELD, rangeStartTimestamp, rangeEndTimestamp), + SortedNumericDocValuesField.newSlowRangeQuery(TIMESTAMP_FIELD, rangeStartTimestamp, rangeEndTimestamp) + ) + : SortedNumericDocValuesField.newSlowRangeQuery(TIMESTAMP_FIELD, rangeStartTimestamp, rangeEndTimestamp); + + final Query query = new IndexSortSortedNumericDocValuesRangeQuery( + TIMESTAMP_FIELD, + rangeStartTimestamp, + rangeEndTimestamp, + rangeQuery + ); + + return searcher.count(query); + } + + /** + * Shuts down the executor service after the trial completes. + */ + @TearDown(Level.Trial) + public void tearDown() { + if (executorService != null) { + executorService.shutdown(); + try { + if (executorService.awaitTermination(30, TimeUnit.SECONDS) == false) { + executorService.shutdownNow(); + } + } catch (InterruptedException e) { + executorService.shutdownNow(); + Thread.currentThread().interrupt(); + } + } + } +} diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle new file mode 100644 index 000000000000..51301b405e51 --- /dev/null +++ b/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+import org.elasticsearch.gradle.VersionProperties
+import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
+
+def fwcVersions = buildParams.bwcVersions.released.findAll { it.major == VersionProperties.elasticsearchVersion.major && it.minor == VersionProperties.elasticsearchVersion.minor }
+def previousMinorSnapshot = buildParams.bwcVersions.unreleased.find { it.major == VersionProperties.elasticsearchVersion.major && it.minor == VersionProperties.elasticsearchVersion.minor - 1 }
+
+fwcVersions.each { fwcVersion ->
+  tasks.register("v${fwcVersion}#fwcTest", StandaloneRestIntegTestTask) {
+    usesBwcDistribution(previousMinorSnapshot)
+    usesBwcDistribution(fwcVersion)
+    systemProperty("tests.old_cluster_version", previousMinorSnapshot)
+    systemProperty("tests.new_cluster_version", fwcVersion)
+    nonInputProperties.systemProperty 'tests.fwc', 'true'
+  }
+}
+
+gradle.taskGraph.whenReady { graph ->
+  if (graph.allTasks.any { it.name.endsWith('#fwcTest') } && Boolean.parseBoolean(System.getProperty("tests.bwc.snapshot", "true"))) {
+    throw new GradleException("Running forward compatibility tests requires passing `-Dtests.bwc.snapshot=false`.")
+  }
+
+  if (graph.allTasks.any { it.name.endsWith('#fwcTest') } && graph.allTasks.any { it.name.endsWith('#bwcTest') }) {
+    throw new GradleException("Backward compatibility and forward compatibility tests cannot be executed in the same build.")
+  }
+}
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
index 45d7c9a033d7..c491d74c589c 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
@@ -145,18 +145,23 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
     }
   }

-  // modifies the idea module config to enable preview features on ':libs:native' module
+  // modifies the idea module config to enable preview features on modules that need them
   tasks.register("enablePreviewFeatures", EnablePreviewFeaturesTask) {
     group = 'ide'
-    description = 'Enables preview features on native library module'
+    description = 'Enables preview features on modules that need them'
     dependsOn tasks.named("enableExternalConfiguration")

     doLast {
       enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.main.iml', 'JDK_21_PREVIEW')
       enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.test.iml', 'JDK_21_PREVIEW')
+      // due to org.elasticsearch.plugins.PluginsLoader
+      enablePreview('.idea/modules/server/elasticsearch.server.main.iml', 'JDK_21_PREVIEW')
+      enablePreview('.idea/modules/server/elasticsearch.server.test.iml', 'JDK_21_PREVIEW')
       enablePreview('.idea/modules/libs/entitlement/elasticsearch.libs.entitlement.main.iml', 'JDK_21_PREVIEW')
       enablePreview('.idea/modules/libs/entitlement/elasticsearch.libs.entitlement.test.iml', 'JDK_21_PREVIEW')
       enablePreview('.idea/modules/libs/entitlement/bridge/elasticsearch.libs.entitlement.bridge.main.iml', 'JDK_21_PREVIEW')
       enablePreview('.idea/modules/libs/entitlement/bridge/elasticsearch.libs.entitlement.bridge.test.iml', 'JDK_21_PREVIEW')
+      enablePreview('.idea/modules/libs/entitlement/qa/entitlement-test-plugin/elasticsearch.libs.entitlement.qa.entitlement-test-plugin.main.iml', 'JDK_21_PREVIEW')
+      enablePreview('.idea/modules/libs/entitlement/qa/entitlement-test-plugin/elasticsearch.libs.entitlement.qa.entitlement-test-plugin.test.iml', 'JDK_21_PREVIEW')
     }
   }
diff --git
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java index 6050e5c9a60d..ccf7a6ea6350 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java @@ -145,7 +145,14 @@ public class BwcSetupExtension { loggedExec.args("-DisCI"); } - loggedExec.args("-Dbuild.snapshot=true", "-Dscan.tag.NESTED"); + loggedExec.args("-Dscan.tag.NESTED"); + + if (System.getProperty("tests.bwc.snapshot", "true").equals("false")) { + loggedExec.args("-Dbuild.snapshot=false", "-Dlicense.key=x-pack/plugin/core/snapshot.key"); + } else { + loggedExec.args("-Dbuild.snapshot=true"); + } + final LogLevel logLevel = project.getGradle().getStartParameter().getLogLevel(); List nonDefaultLogLevels = Arrays.asList(LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG); if (nonDefaultLogLevels.contains(logLevel)) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index b6c36285ca3a..fea895f90f91 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -355,8 +355,9 @@ public class InternalDistributionBwcSetupPlugin implements Plugin { String bwcTaskName = buildBwcTaskName(projectName); bwcSetupExtension.bwcTask(bwcTaskName, c -> { boolean useNativeExpanded = projectArtifact.expandedDistDir != null; + boolean isReleaseBuild = System.getProperty("tests.bwc.snapshot", "true").equals("false"); File expectedOutputFile = useNativeExpanded - ? new File(projectArtifact.expandedDistDir, "elasticsearch-" + bwcVersion.get() + "-SNAPSHOT") + ? new File(projectArtifact.expandedDistDir, "elasticsearch-" + bwcVersion.get() + (isReleaseBuild ? "" : "-SNAPSHOT")) : projectArtifact.distFile; c.getInputs().file(new File(project.getBuildDir(), "refspec")).withPathSensitivity(PathSensitivity.RELATIVE); if (useNativeExpanded) { diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index c49e78c4abc2..45f958d9ec09 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -14,7 +14,7 @@ log4j = 2.19.0 slf4j = 2.0.6 ecsLogging = 1.2.0 jna = 5.12.1 -netty = 4.1.115.Final +netty = 4.1.118.Final commons_lang3 = 3.9 google_oauth_client = 1.34.1 awsv1sdk = 1.12.746 diff --git a/build.gradle b/build.gradle index 1f8f11fe0511..6d4893b21fe4 100644 --- a/build.gradle +++ b/build.gradle @@ -15,6 +15,7 @@ import com.fasterxml.jackson.databind.ObjectMapper import org.elasticsearch.gradle.DistributionDownloadPlugin import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.BaseInternalPluginBuildPlugin import org.elasticsearch.gradle.internal.ResolveAllDependencies import org.elasticsearch.gradle.util.GradleUtils @@ -120,10 +121,10 @@ tasks.register("updateCIBwcVersions") { outputFile.text = "# This file is auto-generated. 
See ${pipelineTemplatePath}\n" + pipeline
  }

-  // Writes a Buildkite pipelime from a template, and replaces $BWC_LIST with an array of versions
+  // Writes a Buildkite pipeline from a template, and replaces a variable with an array of versions
   // Useful for writing a list of versions in a matrix configuration
-  def expandBwcList = { String outputFilePath, String pipelineTemplatePath, List<String> versions ->
-    writeBuildkitePipeline(outputFilePath, pipelineTemplatePath, [new ListExpansion(versions: versions, variable: "BWC_LIST")])
+  def expandList = { String outputFilePath, String pipelineTemplatePath, String variable, List<String> versions ->
+    writeBuildkitePipeline(outputFilePath, pipelineTemplatePath, [new ListExpansion(versions: versions, variable: variable)])
   }

   // Writes a Buildkite pipeline from a template, and replaces $BWC_STEPS with a list of steps, one for each version
@@ -140,11 +141,18 @@ tasks.register("updateCIBwcVersions") {
   doLast {
     writeVersions(file(".ci/bwcVersions"), filterIntermediatePatches(buildParams.bwcVersions.indexCompatible))
     writeVersions(file(".ci/snapshotBwcVersions"), filterIntermediatePatches(buildParams.bwcVersions.unreleasedIndexCompatible))
-    expandBwcList(
+    expandList(
       ".buildkite/pipelines/intake.yml",
       ".buildkite/pipelines/intake.template.yml",
+      "BWC_LIST",
       filterIntermediatePatches(buildParams.bwcVersions.unreleasedIndexCompatible)
     )
+    expandList(
+      ".buildkite/pipelines/periodic-fwc.yml",
+      ".buildkite/pipelines/periodic-fwc.template.yml",
+      "FWC_LIST",
+      buildParams.bwcVersions.released.findAll { it.major == VersionProperties.elasticsearchVersion.major && it.minor == VersionProperties.elasticsearchVersion.minor }
+    )
     writeBuildkitePipeline(
       ".buildkite/pipelines/periodic.yml",
       ".buildkite/pipelines/periodic.template.yml",
diff --git a/docs/changelog/123427.yaml b/docs/changelog/123427.yaml
new file mode 100644
index 000000000000..50c29edb7972
--- /dev/null
+++ b/docs/changelog/123427.yaml
@@ -0,0 +1,5 @@
+pr: 123427
+summary: Reduce iteration complexity for plan traversal
+area: ES|QL
+type: bug
+issues: []
diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc
index 25ac0f3a06a2..b2fadb107f3b 100644
--- a/docs/resiliency/index.asciidoc
+++ b/docs/resiliency/index.asciidoc
@@ -118,9 +118,10 @@ in the case of each type of failure. The plan is to have a test case that valida
 [discrete]
 === Run Jepsen (STATUS: ONGOING)

-We have ported the known scenarios in the Jepsen blogs that check loss of acknowledged writes to our testing infrastructure.
-The new tests are run continuously in our testing farm and are passing. We are also working on running Jepsen independently to verify
-that no failures are found.
+We have ported the known scenarios in the Jepsen blogs that check loss of
+acknowledged writes to our testing infrastructure. The new tests are run
+continuously in our testing farm and are passing. We will also monitor for new
+failure scenarios and adapt our test suite as needed.
== Completed

diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index 3e8810cb5ef0..526891c7abb2 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -1404,114 +1404,74 @@
   (checksum entries for the io.netty 4.1.115.Final artifacts replaced by entries for 4.1.118.Final)
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java
index 609e0ed66cfa..c47b0b93c547 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.entitlement.runtime.policy.entitlements;

+import org.elasticsearch.core.Booleans;
 import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement;
 import org.elasticsearch.entitlement.runtime.policy.PathLookup;
 import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException;
@@ -17,6 +18,7 @@
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.stream.Stream;
@@ -85,12 +87,12 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
         return new RelativePathFileData(relativePath, baseDir, mode, null);
     }

-    static FileData ofPathSetting(String setting, Mode mode) {
-        return new PathSettingFileData(setting, mode, null);
+    static FileData ofPathSetting(String setting, Mode mode, boolean ignoreUrl) {
+        return new PathSettingFileData(setting, mode, ignoreUrl, null);
     }

-    static FileData ofRelativePathSetting(String setting, BaseDir baseDir, Mode mode) {
-        return new RelativePathSettingFileData(setting, baseDir, mode, null);
+    static FileData ofRelativePathSetting(String setting, BaseDir baseDir, Mode mode, boolean ignoreUrl) {
+        return new RelativePathSettingFileData(setting, baseDir, mode, ignoreUrl, null);
     }

     /**
@@ -207,10 +209,10 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
         }
     }

-    private record PathSettingFileData(String setting, Mode mode, Platform platform) implements FileData {
+    private record PathSettingFileData(String setting, Mode mode, boolean ignoreUrl, Platform platform) implements FileData {
         @Override
         public Stream<Path> resolvePaths(PathLookup pathLookup) {
-            return resolvePathSettings(pathLookup, setting);
+            return resolvePathSettings(pathLookup, setting, ignoreUrl);
         }

         @Override
@@ -218,17 +220,17 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
             if (platform == platform()) {
                 return this;
             }
-            return new PathSettingFileData(setting, mode, platform);
+            return new PathSettingFileData(setting, mode, ignoreUrl, platform);
         }
     }

-    private record RelativePathSettingFileData(String setting, BaseDir baseDir, Mode mode, Platform platform)
+    private record RelativePathSettingFileData(String setting, BaseDir baseDir, Mode mode, boolean ignoreUrl, Platform platform)
         implements
             FileData,
             RelativeFileData {
         @Override
         public Stream<Path> resolveRelativePaths(PathLookup pathLookup) {
-            return resolvePathSettings(pathLookup, setting);
+            return
resolvePathSettings(pathLookup, setting, ignoreUrl);
         }

         @Override
@@ -236,16 +238,22 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
             if (platform == platform()) {
                 return this;
             }
-            return new RelativePathSettingFileData(setting, baseDir, mode, platform);
+            return new RelativePathSettingFileData(setting, baseDir, mode, ignoreUrl, platform);
         }
     }

-    private static Stream<Path> resolvePathSettings(PathLookup pathLookup, String setting) {
+    private static Stream<Path> resolvePathSettings(PathLookup pathLookup, String setting, boolean ignoreUrl) {
+        Stream<String> result;
         if (setting.contains("*")) {
-            return pathLookup.settingGlobResolver().apply(setting).map(Path::of);
+            result = pathLookup.settingGlobResolver().apply(setting);
+        } else {
+            String path = pathLookup.settingResolver().apply(setting);
+            result = path == null ? Stream.of() : Stream.of(path);
         }
-        String path = pathLookup.settingResolver().apply(setting);
-        return path == null ? Stream.of() : Stream.of(Path.of(path));
+        if (ignoreUrl) {
+            result = result.filter(s -> s.toLowerCase(Locale.ROOT).startsWith("https://") == false);
+        }
+        return result.map(Path::of);
     }

     private static Mode parseMode(String mode) {
@@ -298,6 +306,7 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
         String relativePathSetting = file.remove("relative_path_setting");
         String modeAsString = file.remove("mode");
         String platformAsString = file.remove("platform");
+        String ignoreUrlAsString = file.remove("ignore_url");

         if (file.isEmpty() == false) {
             throw new PolicyValidationException("unknown key(s) [" + file + "] in a listed file for files entitlement");
@@ -324,6 +333,14 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
             baseDir = parseBaseDir(relativeTo);
         }

+        boolean ignoreUrl = false;
+        if (ignoreUrlAsString != null) {
+            if (relativePathAsString != null || pathAsString != null) {
+                throw new PolicyValidationException("'ignore_url' may only be used with `path_setting` or `relative_path_setting`");
+            }
+            ignoreUrl = Booleans.parseBoolean(ignoreUrlAsString);
+        }
+
         final FileData fileData;
         if (relativePathAsString != null) {
             if (baseDir == null) {
@@ -342,12 +359,12 @@ public record FilesEntitlement(List<FileData> filesData) implements Entitlement
             }
             fileData = FileData.ofPath(path, mode);
         } else if (pathSetting != null) {
-            fileData = FileData.ofPathSetting(pathSetting, mode);
+            fileData = FileData.ofPathSetting(pathSetting, mode, ignoreUrl);
         } else if (relativePathSetting != null) {
             if (baseDir == null) {
                 throw new PolicyValidationException("files entitlement with a 'relative_path_setting' must specify 'relative_to'");
             }
-            fileData = FileData.ofRelativePathSetting(relativePathSetting, baseDir, mode);
+            fileData = FileData.ofRelativePathSetting(relativePathSetting, baseDir, mode, ignoreUrl);
         } else {
             throw new AssertionError("File entry validation error");
         }
diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java
index e0c08d18b8c1..a453d6cf5499 100644
--- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java
+++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java
@@ -20,6 +20,7 @@
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;

+import static
org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.BaseDir.CONFIG; import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ; import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; import static org.hamcrest.Matchers.contains; @@ -94,22 +95,50 @@ public class FilesEntitlementTests extends ESTestCase { public void testPathSettingResolve() { var entitlement = FilesEntitlement.build(List.of(Map.of("path_setting", "foo.bar", "mode", "read"))); var filesData = entitlement.filesData(); - assertThat(filesData, contains(FileData.ofPathSetting("foo.bar", READ))); + assertThat(filesData, contains(FileData.ofPathSetting("foo.bar", READ, false))); - var fileData = FileData.ofPathSetting("foo.bar", READ); + var fileData = FileData.ofPathSetting("foo.bar", READ, false); // empty settings assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), empty()); - fileData = FileData.ofPathSetting("foo.bar", READ); + fileData = FileData.ofPathSetting("foo.bar", READ, false); settings = Settings.builder().put("foo.bar", "/setting/path").build(); assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), contains(Path.of("/setting/path"))); - fileData = FileData.ofPathSetting("foo.*.bar", READ); + fileData = FileData.ofPathSetting("foo.*.bar", READ, false); settings = Settings.builder().put("foo.baz.bar", "/setting/path").build(); assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), contains(Path.of("/setting/path"))); - fileData = FileData.ofPathSetting("foo.*.bar", READ); + fileData = FileData.ofPathSetting("foo.*.bar", READ, false); settings = Settings.builder().put("foo.baz.bar", "/setting/path").put("foo.baz2.bar", "/other/path").build(); assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), containsInAnyOrder(Path.of("/setting/path"), Path.of("/other/path"))); } + + public void testPathSettingIgnoreUrl() { + var fileData = FileData.ofPathSetting("foo.*.bar", READ, true); + settings = Settings.builder().put("foo.nonurl.bar", "/setting/path").put("foo.url.bar", "https://mysite").build(); + assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), contains(Path.of("/setting/path"))); + } + + public void testRelativePathSettingIgnoreUrl() { + var fileData = FileData.ofRelativePathSetting("foo.*.bar", CONFIG, READ, true); + settings = Settings.builder().put("foo.nonurl.bar", "path").put("foo.url.bar", "https://mysite").build(); + assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), contains(Path.of("/config/path"))); + } + + public void testIgnoreUrlValidation() { + var e = expectThrows( + PolicyValidationException.class, + () -> FilesEntitlement.build(List.of(Map.of("path", "/foo", "mode", "read", "ignore_url", "true"))) + ); + assertThat(e.getMessage(), is("'ignore_url' may only be used with `path_setting` or `relative_path_setting`")); + + e = expectThrows( + PolicyValidationException.class, + () -> FilesEntitlement.build( + List.of(Map.of("relative_path", "foo", "relative_to", "config", "mode", "read", "ignore_url", "true")) + ) + ); + assertThat(e.getMessage(), is("'ignore_url' may only be used with `path_setting` or `relative_path_setting`")); + } } diff --git a/modules/repository-azure/src/main/plugin-metadata/plugin-security.policy b/modules/repository-azure/src/main/plugin-metadata/plugin-security.policy index 8a7c62359737..3aeeb6bde391 100644 --- a/modules/repository-azure/src/main/plugin-metadata/plugin-security.policy +++ 
b/modules/repository-azure/src/main/plugin-metadata/plugin-security.policy
@@ -12,6 +12,8 @@ grant {
   permission java.net.SocketPermission "*", "connect";
   // io.netty.util.concurrent.GlobalEventExecutor.startThread
   permission java.lang.RuntimePermission "setContextClassLoader";
+  // io.netty.util.concurrent.GlobalEventExecutor.startThread
+  permission java.lang.RuntimePermission "getClassLoader";
   // Used by jackson bean deserialization
   permission java.lang.RuntimePermission "accessDeclaredMembers";
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java
index 8fdb7051e2be..46f810ed2d9e 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java
@@ -14,7 +14,6 @@ import io.netty.channel.ChannelInboundHandlerAdapter;

 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.common.bytes.ReleasableBytesReference;
 import org.elasticsearch.common.network.ThreadWatchdog;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.transport.InboundPipeline;
@@ -51,8 +50,8 @@ public class Netty4MessageInboundHandler extends ChannelInboundHandlerAdapter {
         final ByteBuf buffer = (ByteBuf) msg;
         Netty4TcpChannel channel = ctx.channel().attr(Netty4Transport.CHANNEL_KEY).get();
         activityTracker.startActivity();
-        try (ReleasableBytesReference reference = Netty4Utils.toReleasableBytesReference(buffer)) {
-            pipeline.handleBytes(channel, reference);
+        try {
+            pipeline.handleBytes(channel, Netty4Utils.toReleasableBytesReference(buffer));
         } finally {
             activityTracker.stopActivity();
         }
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyByteBufSizer.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyByteBufSizer.java
index 2d62f8eb19e0..4a9be0acaaa4 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyByteBufSizer.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyByteBufSizer.java
@@ -12,12 +12,10 @@ package org.elasticsearch.transport.netty4;

 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelHandlerContext;
-import io.netty.handler.codec.MessageToMessageDecoder;
-
-import java.util.List;
+import io.netty.channel.ChannelInboundHandlerAdapter;

 @ChannelHandler.Sharable
-public class NettyByteBufSizer extends MessageToMessageDecoder<ByteBuf> {
+public class NettyByteBufSizer extends ChannelInboundHandlerAdapter {

     public static final NettyByteBufSizer INSTANCE = new NettyByteBufSizer();

@@ -26,14 +24,12 @@ public class NettyByteBufSizer extends ChannelInboundHandlerAdapter {
     }

     @Override
-    protected void decode(ChannelHandlerContext ctx, ByteBuf buf, List<Object> out) {
-        int readableBytes = buf.readableBytes();
-        if (buf.capacity() >= 1024) {
-            ByteBuf resized = buf.discardReadBytes().capacity(readableBytes);
-            assert resized.readableBytes() == readableBytes;
-            out.add(resized.retain());
-        } else {
-            out.add(buf.retain());
+    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
+        if (msg instanceof ByteBuf buf && buf.capacity() >=
1024) { + int readableBytes = buf.readableBytes(); + buf = buf.discardReadBytes().capacity(readableBytes); + assert buf.readableBytes() == readableBytes; } + ctx.fireChannelRead(msg); } } diff --git a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy index ed278af96d92..dbf8e728c160 100644 --- a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy +++ b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy @@ -14,8 +14,9 @@ grant codeBase "${codebase.netty-common}" { // netty makes and accepts socket connections permission java.net.SocketPermission "*", "accept,connect"; - // Netty sets custom classloader for some of its internal threads + // Netty gets and sets classloaders for some of its internal threads permission java.lang.RuntimePermission "setContextClassLoader"; + permission java.lang.RuntimePermission "getClassLoader"; }; grant codeBase "${codebase.netty-transport}" { diff --git a/muted-tests.yml b/muted-tests.yml index ddf4b2e70dec..502b9a3ab063 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -161,9 +161,6 @@ tests: - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT method: testOldSourceOnlyRepoAccess issue: https://github.com/elastic/elasticsearch/issues/120080 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} - issue: https://github.com/elastic/elasticsearch/issues/120332 - class: org.elasticsearch.xpack.ccr.FollowIndexSecurityIT method: testCleanShardFollowTaskAfterDeleteFollower issue: https://github.com/elastic/elasticsearch/issues/120339 @@ -337,15 +334,6 @@ tests: - class: org.elasticsearch.action.admin.indices.diskusage.IndexDiskUsageAnalyzerTests method: testCompletionField issue: https://github.com/elastic/elasticsearch/issues/123269 -- class: org.elasticsearch.index.mapper.IPSyntheticSourceNativeArrayIntegrationTests - method: testSynthesizeArray - issue: https://github.com/elastic/elasticsearch/issues/123417 -- class: org.elasticsearch.index.mapper.IPSyntheticSourceNativeArrayIntegrationTests - method: testSynthesizeArrayRandom - issue: https://github.com/elastic/elasticsearch/issues/123418 -- class: org.elasticsearch.index.mapper.IPSyntheticSourceNativeArrayIntegrationTests - method: testSynthesizeArrayIgnoreMalformed - issue: https://github.com/elastic/elasticsearch/issues/123419 - class: org.elasticsearch.packaging.test.DockerTests method: test151MachineDependentHeapWithSizeOverride issue: https://github.com/elastic/elasticsearch/issues/123437 diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 1d7475427b33..27ae740af7c8 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -12,6 +12,7 @@ import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-test-artifact-base' apply plugin: 'elasticsearch.bwc-test' +apply plugin: 'elasticsearch.fwc-test' testArtifacts { registerTestArtifactFromSourceSet(sourceSets.javaRestTest) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index 9cc3fff5828c..6e936e9b1601 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgra import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.test.XContentTestUtils; +import org.junit.BeforeClass; import java.util.Collections; import java.util.List; @@ -30,6 +31,11 @@ public class FeatureUpgradeIT extends AbstractRollingUpgradeTestCase { super(upgradedNodes); } + @BeforeClass + public static void ensureNotForwardCompatTest() { + assumeFalse("Only supported by bwc tests", Boolean.parseBoolean(System.getProperty("tests.fwc", "false"))); + } + public void testGetFeatureUpgradeStatus() throws Exception { final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct " diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index a20981a119d8..296ee5180d5d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -103,8 +103,12 @@ public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase for (int n = 0; n < requestedUpgradedNodes; n++) { if (upgradedNodes.add(n)) { try { - logger.info("Upgrading node {} to version {}", n, Version.CURRENT); - getUpgradeCluster().upgradeNodeToVersion(n, Version.CURRENT); + Version upgradeVersion = System.getProperty("tests.new_cluster_version") == null + ? Version.CURRENT + : Version.fromString(System.getProperty("tests.new_cluster_version")); + + logger.info("Upgrading node {} to version {}", n, upgradeVersion); + getUpgradeCluster().upgradeNodeToVersion(n, upgradeVersion); } catch (Exception e) { upgradeFailed = true; throw e; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index a99eef48b320..ef7634309ea1 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -205,6 +205,7 @@ public class TransportVersions { public static final TransportVersion VOYAGE_AI_INTEGRATION_ADDED = def(9_014_0_00); public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES = def(9_015_0_00); public static final TransportVersion ESQL_SERIALIZE_SOURCE_FUNCTIONS_WARNINGS = def(9_016_0_00); + public static final TransportVersion ESQL_DRIVER_NODE_DESCRIPTION = def(9_017_0_00); /* * WARNING: DO NOT MERGE INTO MAIN! 
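
The `ParameterizedRollingUpgradeTestCase` change above is what the fwc tasks rely on: nodes are upgraded to the version given by `tests.new_cluster_version` when that property is set (a released patch version passed by the `#fwcTest` tasks), and to `Version.CURRENT` otherwise. A minimal standalone sketch of that fallback, using a simplified stand-in for `org.elasticsearch.Version`:

```java
// Sketch of the upgrade-version fallback added above; "Version" here is a
// simplified stand-in for org.elasticsearch.Version, and the CURRENT value is invented.
record Version(String id) {
    static final Version CURRENT = new Version("9.1.0"); // hypothetical current version

    static Version fromString(String s) {
        return new Version(s);
    }
}

class UpgradeVersionResolver {
    static Version resolveUpgradeVersion() {
        // fwc runs pass -Dtests.new_cluster_version=<released x.y.z>; bwc runs leave it unset
        String requested = System.getProperty("tests.new_cluster_version");
        return requested == null ? Version.CURRENT : Version.fromString(requested);
    }

    public static void main(String[] args) {
        System.out.println(resolveUpgradeVersion());
    }
}
```

Keeping `Version.CURRENT` as the default means existing bwc test invocations behave exactly as before; only builds that explicitly set the property take the forward-compatibility path.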
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java
index 2e45938f3d2c..554dd4ce178e 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java
@@ -18,6 +18,8 @@ import org.elasticsearch.threadpool.Scheduler;
 import org.elasticsearch.threadpool.ThreadPool;

 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicReference;

@@ -83,6 +85,74 @@ public class AllocationBalancingRoundSummaryService {
         });
     }

+    /**
+     * Summarizes the work required to move from an old to new desired balance shard allocation.
+     */
+    public static BalancingRoundSummary createBalancerRoundSummary(DesiredBalance oldDesiredBalance, DesiredBalance newDesiredBalance) {
+        return new BalancingRoundSummary(
+            createWeightsSummary(oldDesiredBalance, newDesiredBalance),
+            DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance)
+        );
+    }
+
+    /**
+     * Creates a summary of the node weight changes from {@code oldDesiredBalance} to {@code newDesiredBalance}.
+     * See {@link BalancingRoundSummary.NodesWeightsChanges} for content details.
+     */
+    private static Map<String, BalancingRoundSummary.NodesWeightsChanges> createWeightsSummary(
+        DesiredBalance oldDesiredBalance,
+        DesiredBalance newDesiredBalance
+    ) {
+        var oldWeightsPerNode = oldDesiredBalance.weightsPerNode();
+        var newWeightsPerNode = newDesiredBalance.weightsPerNode();
+
+        Map<String, BalancingRoundSummary.NodesWeightsChanges> nodeNameToWeightInfo = new HashMap<>(oldWeightsPerNode.size());
+        for (var nodeAndWeights : oldWeightsPerNode.entrySet()) {
+            var discoveryNode = nodeAndWeights.getKey();
+            var oldNodeWeightStats = nodeAndWeights.getValue();
+
+            // The node may no longer exist in the new DesiredBalance. If so, the new weights for that node are effectively zero. New
+            // weights of zero will result in correctly negative weight diffs for the removed node.
+            var newNodeWeightStats = newWeightsPerNode.getOrDefault(discoveryNode, DesiredBalanceMetrics.NodeWeightStats.ZERO);
+
+            nodeNameToWeightInfo.put(
+                discoveryNode.getName(),
+                new BalancingRoundSummary.NodesWeightsChanges(
+                    oldNodeWeightStats,
+                    BalancingRoundSummary.NodeWeightsDiff.create(oldNodeWeightStats, newNodeWeightStats)
+                )
+            );
+        }
+
+        // There may be a new node in the new DesiredBalance that was not in the old DesiredBalance. So we'll need to iterate the nodes in
+        // the new DesiredBalance to check.
+        for (var nodeAndWeights : newWeightsPerNode.entrySet()) {
+            var discoveryNode = nodeAndWeights.getKey();
+            if (nodeNameToWeightInfo.containsKey(discoveryNode.getName()) == false) {
+                // This node is new in the new DesiredBalance, there was no entry added during iteration of the nodes in the old
+                // DesiredBalance. So we'll make a new entry with a base of zero value weights and a weights diff of the new node's weights.
+ nodeNameToWeightInfo.put( + discoveryNode.getName(), + new BalancingRoundSummary.NodesWeightsChanges( + DesiredBalanceMetrics.NodeWeightStats.ZERO, + BalancingRoundSummary.NodeWeightsDiff.create(DesiredBalanceMetrics.NodeWeightStats.ZERO, nodeAndWeights.getValue()) + ) + ); + } + } + + return nodeNameToWeightInfo; + } + + /** + * Creates and saves a balancer round summary for the work to move from {@code oldDesiredBalance} to {@code newDesiredBalance}. If + * balancer round summaries are not enabled in the cluster (see {@link #ENABLE_BALANCER_ROUND_SUMMARIES_SETTING}), then the summary is + * immediately discarded. + */ + public void addBalancerRoundSummary(DesiredBalance oldDesiredBalance, DesiredBalance newDesiredBalance) { + addBalancerRoundSummary(createBalancerRoundSummary(oldDesiredBalance, newDesiredBalance)); + } + /** * Adds the summary of a balancing round. If summaries are enabled, this will eventually be reported (logging, etc.). If balancer round * summaries are not enabled in the cluster, then the summary is immediately discarded (so as not to fill up a data structure that will @@ -110,7 +180,7 @@ public class AllocationBalancingRoundSummaryService { */ private void drainAndReportSummaries() { var combinedSummaries = drainSummaries(); - if (combinedSummaries == CombinedBalancingRoundSummary.EMPTY_RESULTS) { + if (combinedSummaries == BalancingRoundSummary.CombinedBalancingRoundSummary.EMPTY_RESULTS) { return; } @@ -120,14 +190,15 @@ public class AllocationBalancingRoundSummaryService { /** * Returns a combined summary of all unreported allocation round summaries: may summarize a single balancer round, multiple, or none. * - * @return {@link CombinedBalancingRoundSummary#EMPTY_RESULTS} if there are no balancing round summaries waiting to be reported. + * @return {@link BalancingRoundSummary.CombinedBalancingRoundSummary#EMPTY_RESULTS} if there are no balancing round summaries waiting + * to be reported. */ - private CombinedBalancingRoundSummary drainSummaries() { + private BalancingRoundSummary.CombinedBalancingRoundSummary drainSummaries() { ArrayList batchOfSummaries = new ArrayList<>(); while (summaries.isEmpty() == false) { batchOfSummaries.add(summaries.poll()); } - return CombinedBalancingRoundSummary.combine(batchOfSummaries); + return BalancingRoundSummary.CombinedBalancingRoundSummary.combine(batchOfSummaries); } /** @@ -186,7 +257,9 @@ public class AllocationBalancingRoundSummaryService { } } - // @VisibleForTesting + /** + * Checks that the number of entries in {@link #summaries} matches the given {@code numberOfSummaries}. + */ protected void verifyNumberOfSummaries(int numberOfSummaries) { assert numberOfSummaries == summaries.size(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index ef9533ad8da6..cc6f1bcbf347 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -169,6 +169,7 @@ public class BalancedShardsAllocator implements ShardsAllocator { balancer.moveShards(); balancer.balance(); + // Node weights are calculated after each internal balancing round and saved to the RoutingNodes copy. 
collectAndRecordNodeWeightStats(balancer, weightFunction, allocation);
     }

diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java
index 2662825eff48..62331019937b 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java
@@ -9,16 +9,149 @@

 package org.elasticsearch.cluster.routing.allocation.allocator;

+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 /**
  * Summarizes the impact to the cluster as a result of a rebalancing round.
  *
- * @param numberOfShardsToMove The number of shard moves required to move from the previous desired balance to the new one.
+ * @param nodeNameToWeightChanges The shard balance weight changes for each node (by name), comparing a previous DesiredBalance shard
+ *                                allocation to a new DesiredBalance allocation.
+ * @param numberOfShardsToMove The number of shard moves required to move from the previous desired balance to the new one. Does not
+ *                             include new (index creation) or removed (index deletion) shard assignments.
  */
-public record BalancingRoundSummary(long numberOfShardsToMove) {
+public record BalancingRoundSummary(Map<String, NodesWeightsChanges> nodeNameToWeightChanges, long numberOfShardsToMove) {
+
+    /**
+     * Represents the change in weights for a node going from an old DesiredBalance to a new DesiredBalance.
+     * Saves the node weights of an old DesiredBalance, along with a diff against a newer DesiredBalance.
+     *
+     * @param baseWeights The starting {@link DesiredBalanceMetrics.NodeWeightStats} of a previous DesiredBalance.
+     * @param weightsDiff The difference between the {@code baseWeights} and a new DesiredBalance.
+     */
+    record NodesWeightsChanges(DesiredBalanceMetrics.NodeWeightStats baseWeights, NodeWeightsDiff weightsDiff) {}
+
+    /**
+     * Represents the change of shard balance weights for a node, comparing an older DesiredBalance with the latest DesiredBalance.
+     *
+     * @param shardCountDiff How many more, or fewer, shards are assigned to the node in the latest DesiredBalance.
+     * @param diskUsageInBytesDiff How much more, or less, disk is used by shards assigned to the node in the latest DesiredBalance.
+     * @param writeLoadDiff How much more, or less, write load is estimated for shards assigned to the node in the latest DesiredBalance.
+     * @param totalWeightDiff How much more, or less, the total weight is of shards assigned to the node in the latest DesiredBalance.
+     */
+    record NodeWeightsDiff(long shardCountDiff, double diskUsageInBytesDiff, double writeLoadDiff, double totalWeightDiff) {
+
+        /**
+         * Creates a diff where the {@code base} weights will be subtracted from the {@code next} weights, to show the changes made to
+         * reach the {@code next} weights.
+         *
+         * @param base has the original weights
+         * @param next has the new weights
+         * @return The diff of ({@code next} - {@code base})
+         */
+        public static NodeWeightsDiff create(DesiredBalanceMetrics.NodeWeightStats base, DesiredBalanceMetrics.NodeWeightStats next) {
+            return new NodeWeightsDiff(
+                next.shardCount() - base.shardCount(),
+                next.diskUsageInBytes() - base.diskUsageInBytes(),
+                next.writeLoad() - base.writeLoad(),
+                next.nodeWeight() - base.nodeWeight()
+            );
+        }
+
+        /**
+         * Creates a new {@link NodeWeightsDiff} summing this instance's values with {@code otherDiff}'s values.
+         */
+        public NodeWeightsDiff combine(NodeWeightsDiff otherDiff) {
+            return new NodeWeightsDiff(
+                this.shardCountDiff + otherDiff.shardCountDiff,
+                this.diskUsageInBytesDiff + otherDiff.diskUsageInBytesDiff,
+                this.writeLoadDiff + otherDiff.writeLoadDiff,
+                this.totalWeightDiff + otherDiff.totalWeightDiff
+            );
+        }
+    }

     @Override
     public String toString() {
-        return "BalancingRoundSummary{" + "numberOfShardsToMove=" + numberOfShardsToMove + '}';
+        return "BalancingRoundSummary{"
+            + "nodeNameToWeightChanges="
+            + nodeNameToWeightChanges
+            + ", numberOfShardsToMove="
+            + numberOfShardsToMove
+            + '}';
+    }
+
+    /**
+     * Holds combined {@link BalancingRoundSummary} results. Essentially holds a list of the balancing events and the summed up changes
+     * across all those events: what allocation work was done across some period of time.
+     * TODO: WIP ES-10341
+     *
+     * Note that each balancing round summary is the difference between, at the time, latest desired balance and the previous desired
+     * balance. Each summary represents a step towards the next desired balance, which is based on presuming the previous desired balance
+     * is reached. So combining them is roughly the difference between the first summary's previous desired balance and the last summary's
+     * latest desired balance.
+     *
+     * @param numberOfBalancingRounds How many balancing round summaries are combined in this report.
+     * @param nodeNameToWeightChanges The combined per-node weight changes across all the combined summaries.
+     * @param numberOfShardMoves The sum of shard moves for each balancing round being combined into a single summary.
+     */
+    public record CombinedBalancingRoundSummary(
+        int numberOfBalancingRounds,
+        Map<String, NodesWeightsChanges> nodeNameToWeightChanges,
+        long numberOfShardMoves
+    ) {
+
+        public static final CombinedBalancingRoundSummary EMPTY_RESULTS = new CombinedBalancingRoundSummary(0, new HashMap<>(), 0);
+
+        /**
+         * Merges multiple {@link BalancingRoundSummary} summaries into a single {@link CombinedBalancingRoundSummary}.
+         */
+        public static CombinedBalancingRoundSummary combine(List<BalancingRoundSummary> summaries) {
+            if (summaries.isEmpty()) {
+                return EMPTY_RESULTS;
+            }
+
+            // We will loop through the summaries and sum the weight diffs for each node entry.
+            Map<String, NodesWeightsChanges> combinedNodeNameToWeightChanges = new HashMap<>();
+
+            // The number of shard moves is simply summed across summaries. Each new balancing round is built upon the last one, so it is
+            // possible that a shard is reassigned back to a node before it even moves away, and that will still be counted as 2 moves here.
+            long numberOfShardMoves = 0;
+
+            // Total number of summaries that are being combined.
+            int numSummaries = 0;
+
+            var iterator = summaries.iterator();
+            while (iterator.hasNext()) {
+                var summary = iterator.next();
+
+                // We'll build the weight changes by keeping the node weight base from the first summary in which a node appears and then
+                // summing the weight diffs in each summary to get total weight diffs across summaries.
+ for (var nodeNameAndWeights : summary.nodeNameToWeightChanges.entrySet()) { + var combined = combinedNodeNameToWeightChanges.get(nodeNameAndWeights.getKey()); + if (combined == null) { + // Either this is the first summary, and combinedNodeNameToWeightChanges hasn't been initialized yet for this node; + // or a later balancing round had a new node. Either way, initialize the node entry with the weight changes from the + // first summary in which it appears. + combinedNodeNameToWeightChanges.put(nodeNameAndWeights.getKey(), nodeNameAndWeights.getValue()); + } else { + // We have at least two summaries containing this node, so let's combine them. + var newCombinedChanges = new NodesWeightsChanges( + combined.baseWeights, + combined.weightsDiff.combine(nodeNameAndWeights.getValue().weightsDiff()) + ); + combinedNodeNameToWeightChanges.put(nodeNameAndWeights.getKey(), newCombinedChanges); + } + } + + ++numSummaries; + numberOfShardMoves += summary.numberOfShardsToMove(); + } + + return new CombinedBalancingRoundSummary(numSummaries, combinedNodeNameToWeightChanges, numberOfShardMoves); + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java deleted file mode 100644 index 78fa1f6c5f5f..000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.routing.allocation.allocator; - -import java.util.List; - -/** - * Holds combined {@link BalancingRoundSummary} results. Essentially holds a list of the balancing events and the summed up changes - * across all those events: what allocation work was done across some period of time. - * TODO: WIP ES-10341 - * - * Note that each balancing round summary is the difference between, at the time, latest desired balance and the previous desired balance. - * Each summary represents a step towards the next desired balance, which is based on presuming the previous desired balance is reached. So - * combining them is roughly the difference between the first summary's previous desired balance and the last summary's latest desired - * balance. - * - * @param numberOfBalancingRounds How many balancing round summaries are combined in this report. - * @param numberOfShardMoves The sum of shard moves for each balancing round being combined into a single summary. 
- */ -public record CombinedBalancingRoundSummary(int numberOfBalancingRounds, long numberOfShardMoves) { - - public static final CombinedBalancingRoundSummary EMPTY_RESULTS = new CombinedBalancingRoundSummary(0, 0); - - public static CombinedBalancingRoundSummary combine(List<BalancingRoundSummary> summaries) { - if (summaries.isEmpty()) { - return EMPTY_RESULTS; - } - - int numSummaries = 0; - long numberOfShardMoves = 0; - for (BalancingRoundSummary summary : summaries) { - ++numSummaries; - numberOfShardMoves += summary.numberOfShardsToMove(); - } - return new CombinedBalancingRoundSummary(numSummaries, numberOfShardMoves); - } - -} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java index 0eb4d89bd6d3..5771c27c5d5a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java @@ -38,7 +38,9 @@ public class DesiredBalanceMetrics { */ public record AllocationStats(long unassignedShards, long totalAllocations, long undesiredAllocationsExcludingShuttingDownNodes) {} - public record NodeWeightStats(long shardCount, double diskUsageInBytes, double writeLoad, double nodeWeight) {} + public record NodeWeightStats(long shardCount, double diskUsageInBytes, double writeLoad, double nodeWeight) { + public static final NodeWeightStats ZERO = new NodeWeightStats(0, 0, 0, 0); + } // Reconciliation metrics. /** See {@link #unassignedShards} */ diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 71656d693d2f..e8d8d509282a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -324,7 +324,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { } if (currentDesiredBalanceRef.compareAndSet(oldDesiredBalance, newDesiredBalance)) { - balancerRoundSummaryService.addBalancerRoundSummary(calculateBalancingRoundSummary(oldDesiredBalance, newDesiredBalance)); + balancerRoundSummaryService.addBalancerRoundSummary(oldDesiredBalance, newDesiredBalance); if (logger.isTraceEnabled()) { var diff = DesiredBalance.hasChanges(oldDesiredBalance, newDesiredBalance) ? "Diff: " + DesiredBalance.humanReadableDiff(oldDesiredBalance, newDesiredBalance) @@ -339,13 +339,6 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { } } - /** - * Summarizes the work required to move from an old to new desired balance shard allocation. - */ - private BalancingRoundSummary calculateBalancingRoundSummary(DesiredBalance oldDesiredBalance, DesiredBalance newDesiredBalance) { - return new BalancingRoundSummary(DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance)); - } - /** * Submits the desired balance to be reconciled (applies the desired changes to the routing table) and creates and publishes a new * cluster state. The data nodes will receive and apply the new cluster state to start/move/remove shards.
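To make the combine semantics above concrete, here is a minimal, illustrative sketch (not part of the patch) using the records this change introduces: the combined result keeps each node's base weights from the first round the node appears in, sums the per-round diffs, and simply adds up shard moves and round counts. The values are made up, and the sketch assumes assertions are enabled (`java -ea`); it mirrors what BalancingRoundSummaryTests below verifies.

```java
import java.util.List;
import java.util.Map;

import org.elasticsearch.cluster.routing.allocation.allocator.BalancingRoundSummary;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancingRoundSummary.NodeWeightsDiff;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancingRoundSummary.NodesWeightsChanges;
import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceMetrics.NodeWeightStats;

class CombineSemanticsSketch {
    public static void main(String[] args) {
        var base = new NodeWeightStats(10, 20, 30, 40);
        var diff = new NodeWeightsDiff(1, 2, 3, 4);
        // Round two starts from the weights round one was expected to reach (base + diff).
        var nextBase = new NodeWeightStats(11, 22, 33, 44);

        var round1 = new BalancingRoundSummary(Map.of("node1", new NodesWeightsChanges(base, diff)), 5);
        var round2 = new BalancingRoundSummary(Map.of("node1", new NodesWeightsChanges(nextBase, diff)), 7);

        var combined = BalancingRoundSummary.CombinedBalancingRoundSummary.combine(List.of(round1, round2));

        var changes = combined.nodeNameToWeightChanges().get("node1");
        assert changes.baseWeights().equals(base);          // base weights kept from the first round
        assert changes.weightsDiff().shardCountDiff() == 2; // per-round diffs summed: 1 + 1
        assert combined.numberOfShardMoves() == 12;         // 5 + 7
        assert combined.numberOfBalancingRounds() == 2;
    }
}
```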
diff --git a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java index eed3cbd7e824..fdcbd6912bc4 100644 --- a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java +++ b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java @@ -18,12 +18,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import java.io.IOException; import java.io.StreamCorruptedException; -import java.util.function.Consumer; public class InboundDecoder implements Releasable { @@ -53,7 +53,7 @@ public class InboundDecoder implements Releasable { this.channelType = channelType; } - public int decode(ReleasableBytesReference reference, Consumer<Object> fragmentConsumer) throws IOException { + public int decode(ReleasableBytesReference reference, CheckedConsumer<Object, IOException> fragmentConsumer) throws IOException { ensureOpen(); try { return internalDecode(reference, fragmentConsumer); @@ -63,7 +63,8 @@ public class InboundDecoder implements Releasable { } } - public int internalDecode(ReleasableBytesReference reference, Consumer<Object> fragmentConsumer) throws IOException { + public int internalDecode(ReleasableBytesReference reference, CheckedConsumer<Object, IOException> fragmentConsumer) + throws IOException { if (isOnHeader()) { int messageLength = TcpTransport.readMessageLength(reference); if (messageLength == -1) { @@ -104,25 +105,28 @@ } int remainingToConsume = totalNetworkSize - bytesConsumed; int maxBytesToConsume = Math.min(reference.length(), remainingToConsume); - ReleasableBytesReference retainedContent; - if (maxBytesToConsume == remainingToConsume) { - retainedContent = reference.retainedSlice(0, maxBytesToConsume); - } else { - retainedContent = reference.retain(); - } - int bytesConsumedThisDecode = 0; if (decompressor != null) { - bytesConsumedThisDecode += decompress(retainedContent); + bytesConsumedThisDecode += decompressor.decompress( + maxBytesToConsume == remainingToConsume ? reference.slice(0, maxBytesToConsume) : reference + ); bytesConsumed += bytesConsumedThisDecode; ReleasableBytesReference decompressed; while ((decompressed = decompressor.pollDecompressedPage(isDone())) != null) { - fragmentConsumer.accept(decompressed); + try (var buf = decompressed) { + fragmentConsumer.accept(buf); + } } } else { bytesConsumedThisDecode += maxBytesToConsume; bytesConsumed += maxBytesToConsume; - fragmentConsumer.accept(retainedContent); + if (maxBytesToConsume == remainingToConsume) { + try (ReleasableBytesReference retained = reference.retainedSlice(0, maxBytesToConsume)) { + fragmentConsumer.accept(retained); + } + } else { + fragmentConsumer.accept(reference); + } } if (isDone()) { finishMessage(fragmentConsumer); @@ -138,7 +142,7 @@ cleanDecodeState(); } - private void finishMessage(Consumer<Object> fragmentConsumer) { + private void finishMessage(CheckedConsumer<Object, IOException> fragmentConsumer) throws IOException { cleanDecodeState(); fragmentConsumer.accept(END_CONTENT); } @@ -154,12 +158,6 @@ } } - private int decompress(ReleasableBytesReference content) throws IOException { - try (content) { - return decompressor.decompress(content); - } - } - private boolean isDone() { return bytesConsumed == totalNetworkSize; }
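The switch from `java.util.function.Consumer` to `CheckedConsumer` is what lets the decoder hand each fragment straight to the handler instead of buffering fragments in a list first: the handler may throw `IOException`, and the checked exception now propagates through `decode`. Below is a self-contained sketch of the idea; it declares its own `CheckedConsumer` rather than depending on the `org.elasticsearch.core` interface, and the `decode` stand-in is hypothetical.

```java
import java.io.IOException;
import java.util.List;

public class CheckedConsumerSketch {
    // A Consumer whose accept() may throw a checked exception.
    @FunctionalInterface
    interface CheckedConsumer<T, E extends Exception> {
        void accept(T t) throws E;
    }

    // Stand-in for InboundDecoder.decode: forwards each fragment as soon as it is
    // produced; an IOException from the handler propagates to the caller, so no
    // intermediate fragment list is needed.
    static int decode(List<Object> fragments, CheckedConsumer<Object, IOException> consumer) throws IOException {
        int forwarded = 0;
        for (Object fragment : fragments) {
            consumer.accept(fragment); // may throw IOException
            forwarded++;
        }
        return forwarded;
    }

    public static void main(String[] args) throws IOException {
        decode(List.of("header", "content"), fragment -> System.out.println("forwarded: " + fragment));
    }
}
```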
diff --git a/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java b/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java index 35665e95c803..abc3e29727b4 100644 --- a/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java +++ b/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java @@ -11,18 +11,17 @@ package org.elasticsearch.transport; import org.elasticsearch.common.bytes.CompositeBytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import java.io.IOException; import java.util.ArrayDeque; -import java.util.ArrayList; import java.util.function.BiConsumer; import java.util.function.LongSupplier; public class InboundPipeline implements Releasable { - private static final ThreadLocal<ArrayList<Object>> fragmentList = ThreadLocal.withInitial(ArrayList::new); private static final InboundMessage PING_MESSAGE = new InboundMessage(null, true); private final LongSupplier relativeTimeInMillis; @@ -56,81 +55,74 @@ public class InboundPipeline implements Releasable { public void handleBytes(TcpChannel channel, ReleasableBytesReference reference) throws IOException { if (uncaughtException != null) { + reference.close(); throw new IllegalStateException("Pipeline state corrupted by uncaught exception", uncaughtException); } try { - doHandleBytes(channel, reference); + channel.getChannelStats().markAccessed(relativeTimeInMillis.getAsLong()); + statsTracker.markBytesRead(reference.length()); + if (isClosed) { + reference.close(); + return; + } + pending.add(reference); + doHandleBytes(channel); } catch (Exception e) { uncaughtException = e; throw e; } } - public void doHandleBytes(TcpChannel channel, ReleasableBytesReference reference) throws IOException { - channel.getChannelStats().markAccessed(relativeTimeInMillis.getAsLong()); - statsTracker.markBytesRead(reference.length()); - pending.add(reference.retain()); - - final ArrayList<Object> fragments = fragmentList.get(); - boolean continueHandling = true; - - while (continueHandling && isClosed == false) { - boolean continueDecoding = true;
- while (continueDecoding && pending.isEmpty() == false) { - try (ReleasableBytesReference toDecode = getPendingBytes()) { - final int bytesDecoded = decoder.decode(toDecode, fragments::add); - if (bytesDecoded != 0) { - releasePendingBytes(bytesDecoded); - if (fragments.isEmpty() == false && endOfMessage(fragments.get(fragments.size() - 1))) { - continueDecoding = false; - } - } else { - continueDecoding = false; - } + private void doHandleBytes(TcpChannel channel) throws IOException { + do { + CheckedConsumer<Object, IOException> decodeConsumer = f -> forwardFragment(channel, f); + int bytesDecoded = decoder.decode(pending.peekFirst(), decodeConsumer); + if (bytesDecoded == 0 && pending.size() > 1) { + final ReleasableBytesReference[] bytesReferences = new ReleasableBytesReference[pending.size()]; + int index = 0; + for (ReleasableBytesReference pendingReference : pending) { + bytesReferences[index] = pendingReference.retain(); + ++index; + } + try ( + ReleasableBytesReference toDecode = new ReleasableBytesReference( + CompositeBytesReference.of(bytesReferences), + () -> Releasables.closeExpectNoException(bytesReferences) + ) + ) { + bytesDecoded = decoder.decode(toDecode, decodeConsumer); + } } } - - if (fragments.isEmpty()) { - continueHandling = false; + if (bytesDecoded != 0) { + releasePendingBytes(bytesDecoded); } else { - try { - forwardFragments(channel, fragments); - } finally { - for (Object fragment : fragments) { - if (fragment instanceof ReleasableBytesReference) { - ((ReleasableBytesReference) fragment).close(); - } - } - fragments.clear(); - } + break; } - } + } while (pending.isEmpty() == false); } - private void forwardFragments(TcpChannel channel, ArrayList<Object> fragments) throws IOException { - for (Object fragment : fragments) { - if (fragment instanceof Header) { - headerReceived((Header) fragment); - } else if (fragment instanceof Compression.Scheme) { - assert aggregator.isAggregating(); - aggregator.updateCompressionScheme((Compression.Scheme) fragment); - } else if (fragment == InboundDecoder.PING) { - assert aggregator.isAggregating() == false; - messageHandler.accept(channel, PING_MESSAGE); - } else if (fragment == InboundDecoder.END_CONTENT) { - assert aggregator.isAggregating(); - InboundMessage aggregated = aggregator.finishAggregation(); - try { - statsTracker.markMessageReceived(); - messageHandler.accept(channel, aggregated); - } finally { - aggregated.decRef(); - } - } else { - assert aggregator.isAggregating(); - assert fragment instanceof ReleasableBytesReference; - aggregator.aggregate((ReleasableBytesReference) fragment); + private void forwardFragment(TcpChannel channel, Object fragment) throws IOException { + if (fragment instanceof Header) { + headerReceived((Header) fragment); + } else if (fragment instanceof Compression.Scheme) { + assert aggregator.isAggregating(); + aggregator.updateCompressionScheme((Compression.Scheme) fragment); + } else if (fragment == InboundDecoder.PING) { + assert aggregator.isAggregating() == false; + messageHandler.accept(channel, PING_MESSAGE); + } else if (fragment == InboundDecoder.END_CONTENT) { + assert aggregator.isAggregating(); + InboundMessage aggregated = aggregator.finishAggregation(); + try { + statsTracker.markMessageReceived(); + messageHandler.accept(channel, aggregated); + } finally { + aggregated.decRef(); } + } else { + assert aggregator.isAggregating(); + assert fragment instanceof ReleasableBytesReference; + aggregator.aggregate((ReleasableBytesReference) fragment); } } @@ -139,25 +131,6 @@ public class InboundPipeline implements Releasable { aggregator.headerReceived(header); } - private static boolean endOfMessage(Object fragment) { - return fragment == InboundDecoder.PING || fragment == InboundDecoder.END_CONTENT || fragment instanceof Exception; - } - - private ReleasableBytesReference getPendingBytes() { - if (pending.size() == 1) { - return pending.peekFirst().retain(); - } else { - final ReleasableBytesReference[] bytesReferences = new ReleasableBytesReference[pending.size()]; - int index = 0; - for (ReleasableBytesReference pendingReference : pending) { - bytesReferences[index] = pendingReference.retain(); - ++index; - } - final Releasable releasable = () -> Releasables.closeExpectNoException(bytesReferences); - return new ReleasableBytesReference(CompositeBytesReference.of(bytesReferences), releasable); - } - } - private void releasePendingBytes(int bytesConsumed) { int bytesToRelease = bytesConsumed; while (bytesToRelease != 0) {
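The rewritten `doHandleBytes` above follows a fast-path/slow-path pattern: decode from the first pending buffer alone, and only when that yields zero bytes while more buffers are queued, compose the pending buffers and retry. Here is a rough sketch of that pattern using plain `ByteBuffer`s as stand-ins; the `tryDecode` framing (a 4-byte length prefix) is hypothetical, and the real code composes retained slices via `CompositeBytesReference` instead of copying.

```java
import java.nio.ByteBuffer;
import java.util.ArrayDeque;

public class CompositeFallbackSketch {
    // Hypothetical decoder: returns bytes consumed, or 0 if the buffer does not yet
    // hold a complete unit (here: a 4-byte length prefix plus that many payload bytes).
    static int tryDecode(ByteBuffer buffer) {
        if (buffer.remaining() < Integer.BYTES) {
            return 0;
        }
        int length = buffer.getInt(buffer.position());
        return buffer.remaining() >= Integer.BYTES + length ? Integer.BYTES + length : 0;
    }

    static int decodePending(ArrayDeque<ByteBuffer> pending) {
        // Fast path: most reads decode from the head buffer on their own.
        int consumed = tryDecode(pending.peekFirst().duplicate());
        if (consumed == 0 && pending.size() > 1) {
            // Slow path: a unit straddles buffer boundaries, so compose everything
            // pending and retry. Only this path pays the composition cost.
            ByteBuffer composite = ByteBuffer.allocate(pending.stream().mapToInt(ByteBuffer::remaining).sum());
            for (ByteBuffer buffer : pending) {
                composite.put(buffer.duplicate());
            }
            composite.flip();
            consumed = tryDecode(composite);
        }
        return consumed; // caller releases 'consumed' bytes from the front of 'pending'
    }
}
```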
diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index ada61c118ec3..0e206a2005e7 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -120,8 +120,9 @@ grant codeBase "${codebase.httpasyncclient}" { grant codeBase "${codebase.netty-common}" { // for reading the system-wide configuration for the backlog of established sockets permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read"; - // Netty sets custom classloader for some of its internal threads + // Netty gets and sets classloaders for some of its internal threads permission java.lang.RuntimePermission "setContextClassLoader"; + permission java.lang.RuntimePermission "getClassLoader"; permission java.net.SocketPermission "*", "accept,connect"; }; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java index 337fad01f905..8345d3261139 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java @@ -12,38 +12,63 @@ package org.elasticsearch.cluster.routing.allocation.allocator; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; -import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; +import java.util.Map; +import java.util.Set; + public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { private static final Logger logger = LogManager.getLogger(AllocationBalancingRoundSummaryServiceTests.class); private static final String BALANCING_SUMMARY_MSG_PREFIX = "Balancing round summaries:*"; + private static final Map<String, BalancingRoundSummary.NodesWeightsChanges> NODE_NAME_TO_WEIGHT_CHANGES = Map.of(
"node1", + new BalancingRoundSummary.NodesWeightsChanges( + new DesiredBalanceMetrics.NodeWeightStats(1L, 2, 3, 4), + new BalancingRoundSummary.NodeWeightsDiff(1, 2, 3, 4) + ), + "node2", + new BalancingRoundSummary.NodesWeightsChanges( + new DesiredBalanceMetrics.NodeWeightStats(1L, 2, 3, 4), + new BalancingRoundSummary.NodeWeightsDiff(1, 2, 3, 4) + ) + ); + + final DiscoveryNode DUMMY_NODE = new DiscoveryNode("node1Name", "node1Id", "eph-node1", "abc", "abc", null, Map.of(), Set.of(), null); + final DiscoveryNode SECOND_DUMMY_NODE = new DiscoveryNode( + "node2Name", + "node2Id", + "eph-node2", + "def", + "def", + null, + Map.of(), + Set.of(), + null + ); + + final String INDEX_NAME = "index"; + final String INDEX_UUID = "_indexUUID_"; + final Settings enabledSummariesSettings = Settings.builder() .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), true) .build(); final Settings disabledDefaultEmptySettings = Settings.builder().build(); - final Settings enabledButNegativeIntervalSettings = Settings.builder() - .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), true) - .put(AllocationBalancingRoundSummaryService.BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING.getKey(), TimeValue.MINUS_ONE) - .build(); ClusterSettings enabledClusterSettings = new ClusterSettings(enabledSummariesSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterSettings disabledDefaultEmptyClusterSettings = new ClusterSettings( disabledDefaultEmptySettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS ); - ClusterSettings enabledButNegativeIntervalClusterSettings = new ClusterSettings( - enabledButNegativeIntervalSettings, - ClusterSettings.BUILT_IN_CLUSTER_SETTINGS - ); // Construction parameters for the service. @@ -68,7 +93,7 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { * Add a summary and check it is not logged. */ - service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 50)); service.verifyNumberOfSummaries(0); // when summaries are disabled, summaries are not retained when added. mockLog.addExpectation( new MockLog.UnseenEventExpectation( @@ -96,7 +121,7 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { * Add a summary and check the service logs a report on it. */ - service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 50)); service.verifyNumberOfSummaries(1); mockLog.addExpectation( new MockLog.SeenEventExpectation( @@ -116,7 +141,7 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { * Add a second summary, check for more logging. 
*/ - service.addBalancerRoundSummary(new BalancingRoundSummary(200)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 200)); service.verifyNumberOfSummaries(1); mockLog.addExpectation( new MockLog.SeenEventExpectation( @@ -141,8 +166,8 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { - service.addBalancerRoundSummary(new BalancingRoundSummary(50)); - service.addBalancerRoundSummary(new BalancingRoundSummary(100)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 100)); service.verifyNumberOfSummaries(2); mockLog.addExpectation( new MockLog.SeenEventExpectation( @@ -171,7 +196,7 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { * First add some summaries to report, ensuring that the logging is active. */ - service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 50)); service.verifyNumberOfSummaries(1); mockLog.addExpectation( new MockLog.SeenEventExpectation( @@ -224,7 +249,7 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { * summaries waiting to be reported. */ - service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 50)); service.verifyNumberOfSummaries(1); clusterSettings.applySettings(disabledSettingsUpdate); @@ -234,7 +259,7 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { * Verify that any additional summaries are not retained, since the service is disabled. */ - service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(NODE_NAME_TO_WEIGHT_CHANGES, 50)); service.verifyNumberOfSummaries(0); // Check that the service never logged anything. @@ -253,4 +278,173 @@ public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { } } + /** + * Tests the {@link AllocationBalancingRoundSummaryService#createBalancerRoundSummary(DesiredBalance, DesiredBalance)} logic. + */ + public void testCreateBalancerRoundSummary() { + // Initial desired balance allocations and node weights. + DesiredBalance firstDesiredBalance = new DesiredBalance( + 1, + // The shard assignments and node weights don't make sense together, but for summary purposes the first determines the summary's + // number of shards moved, and the second the weight changes: the summary service doesn't need them to make sense together + // because it looks at them separately. They do have to make sense individually across balancing rounds. + Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of("a", "b"), 2, 0, 0)), + Map.of(DUMMY_NODE, new DesiredBalanceMetrics.NodeWeightStats(10, 20, 30, 40)), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + // Move two shards and change the node weights. 
+ DesiredBalance secondDesiredBalance = new DesiredBalance( + 1, + Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of("c", "d"), 2, 0, 0)), + Map.of(DUMMY_NODE, new DesiredBalanceMetrics.NodeWeightStats(20, 40, 60, 80)), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + // Move one shard and change the node weights. + DesiredBalance thirdDesiredBalance = new DesiredBalance( + 1, + Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of("a", "d"), 2, 0, 0)), + Map.of(DUMMY_NODE, new DesiredBalanceMetrics.NodeWeightStats(30, 60, 90, 120)), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + + var firstSummary = AllocationBalancingRoundSummaryService.createBalancerRoundSummary(firstDesiredBalance, secondDesiredBalance); + var secondSummary = AllocationBalancingRoundSummaryService.createBalancerRoundSummary(secondDesiredBalance, thirdDesiredBalance); + + assertEquals(2, firstSummary.numberOfShardsToMove()); + assertEquals(1, firstSummary.nodeNameToWeightChanges().size()); + var firstSummaryWeights = firstSummary.nodeNameToWeightChanges().get(DUMMY_NODE.getName()); + assertEquals(10, firstSummaryWeights.baseWeights().shardCount()); + assertDoublesEqual(20, firstSummaryWeights.baseWeights().diskUsageInBytes()); + assertDoublesEqual(30, firstSummaryWeights.baseWeights().writeLoad()); + assertDoublesEqual(40, firstSummaryWeights.baseWeights().nodeWeight()); + assertEquals(10, firstSummaryWeights.weightsDiff().shardCountDiff()); + assertDoublesEqual(20, firstSummaryWeights.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(30, firstSummaryWeights.weightsDiff().writeLoadDiff()); + assertDoublesEqual(40, firstSummaryWeights.weightsDiff().totalWeightDiff()); + + assertEquals(1, secondSummary.numberOfShardsToMove()); + assertEquals(1, secondSummary.nodeNameToWeightChanges().size()); + var secondSummaryWeights = secondSummary.nodeNameToWeightChanges().get(DUMMY_NODE.getName()); + assertEquals(20, secondSummaryWeights.baseWeights().shardCount()); + assertDoublesEqual(40, secondSummaryWeights.baseWeights().diskUsageInBytes()); + assertDoublesEqual(60, secondSummaryWeights.baseWeights().writeLoad()); + assertDoublesEqual(80, secondSummaryWeights.baseWeights().nodeWeight()); + assertEquals(10, secondSummaryWeights.weightsDiff().shardCountDiff()); + assertDoublesEqual(20, secondSummaryWeights.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(30, secondSummaryWeights.weightsDiff().writeLoadDiff()); + assertDoublesEqual(40, secondSummaryWeights.weightsDiff().totalWeightDiff()); + } + + /** + * Tests that removing a node from old to new DesiredBalance will result in a weights diff of negative values bringing the weights down + * to zero. + */ + public void testCreateBalancerRoundSummaryWithRemovedNode() { + DesiredBalance firstDesiredBalance = new DesiredBalance( + 1, + // The shard assignments and node weights don't make sense together, but for summary purposes the first determines the summary's + // number of shards moved, and the second the weight changes: the summary service doesn't need them to make sense together + // because it looks at them separately. They do have to make sense individually across balancing rounds. 
+ Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of(DUMMY_NODE.getId()), 1, 0, 0)), + Map.of( + DUMMY_NODE, + new DesiredBalanceMetrics.NodeWeightStats(10, 20, 30, 40), + SECOND_DUMMY_NODE, + new DesiredBalanceMetrics.NodeWeightStats(5, 15, 25, 35) + ), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + // Remove a node and don't move any shards. + DesiredBalance secondDesiredBalance = new DesiredBalance( + 1, + Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of(DUMMY_NODE.getId()), 1, 0, 0)), + Map.of(DUMMY_NODE, new DesiredBalanceMetrics.NodeWeightStats(20, 40, 60, 80)), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + + var summary = AllocationBalancingRoundSummaryService.createBalancerRoundSummary(firstDesiredBalance, secondDesiredBalance); + + assertEquals(0, summary.numberOfShardsToMove()); + assertEquals(2, summary.nodeNameToWeightChanges().size()); + + var summaryDummyNodeWeights = summary.nodeNameToWeightChanges().get(DUMMY_NODE.getName()); + assertEquals(10, summaryDummyNodeWeights.baseWeights().shardCount()); + assertDoublesEqual(20, summaryDummyNodeWeights.baseWeights().diskUsageInBytes()); + assertDoublesEqual(30, summaryDummyNodeWeights.baseWeights().writeLoad()); + assertDoublesEqual(40, summaryDummyNodeWeights.baseWeights().nodeWeight()); + assertEquals(10, summaryDummyNodeWeights.weightsDiff().shardCountDiff()); + assertDoublesEqual(20, summaryDummyNodeWeights.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(30, summaryDummyNodeWeights.weightsDiff().writeLoadDiff()); + assertDoublesEqual(40, summaryDummyNodeWeights.weightsDiff().totalWeightDiff()); + + var summarySecondDummyNodeWeights = summary.nodeNameToWeightChanges().get(SECOND_DUMMY_NODE.getName()); + assertEquals(5, summarySecondDummyNodeWeights.baseWeights().shardCount()); + assertDoublesEqual(15, summarySecondDummyNodeWeights.baseWeights().diskUsageInBytes()); + assertDoublesEqual(25, summarySecondDummyNodeWeights.baseWeights().writeLoad()); + assertDoublesEqual(35, summarySecondDummyNodeWeights.baseWeights().nodeWeight()); + assertEquals(-5, summarySecondDummyNodeWeights.weightsDiff().shardCountDiff()); + assertDoublesEqual(-15, summarySecondDummyNodeWeights.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(-25, summarySecondDummyNodeWeights.weightsDiff().writeLoadDiff()); + assertDoublesEqual(-35, summarySecondDummyNodeWeights.weightsDiff().totalWeightDiff()); + } + + /** + * Tests that adding a node from old to new DesiredBalance will result in an entry in the summary for the new node with zero weights and + * a weights diff showing the new allocation weight changes. + */ + public void testCreateBalancerRoundSummaryWithAddedNode() { + DesiredBalance firstDesiredBalance = new DesiredBalance( + 1, + // The shard assignments and node weights don't make sense together, but for summary purposes the first determines the summary's + // number of shards moved, and the second the weight changes: the summary service doesn't need them to make sense together + // because it looks at them separately. They do have to make sense individually across balancing rounds. + Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of(DUMMY_NODE.getId()), 1, 0, 0)), + Map.of(DUMMY_NODE, new DesiredBalanceMetrics.NodeWeightStats(10, 20, 30, 40)), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + // Add a new node and move one shard.
+ DesiredBalance secondDesiredBalance = new DesiredBalance( + 1, + Map.of(new ShardId(INDEX_NAME, INDEX_UUID, 0), new ShardAssignment(Set.of(SECOND_DUMMY_NODE.getId()), 1, 0, 0)), + Map.of( + DUMMY_NODE, + new DesiredBalanceMetrics.NodeWeightStats(20, 40, 60, 80), + SECOND_DUMMY_NODE, + new DesiredBalanceMetrics.NodeWeightStats(5, 15, 25, 35) + ), + DesiredBalance.ComputationFinishReason.CONVERGED + ); + + var summary = AllocationBalancingRoundSummaryService.createBalancerRoundSummary(firstDesiredBalance, secondDesiredBalance); + + assertEquals(1, summary.numberOfShardsToMove()); + assertEquals(2, summary.nodeNameToWeightChanges().size()); + + var summaryDummyNodeWeights = summary.nodeNameToWeightChanges().get(DUMMY_NODE.getName()); + assertEquals(10, summaryDummyNodeWeights.baseWeights().shardCount()); + assertDoublesEqual(20, summaryDummyNodeWeights.baseWeights().diskUsageInBytes()); + assertDoublesEqual(30, summaryDummyNodeWeights.baseWeights().writeLoad()); + assertDoublesEqual(40, summaryDummyNodeWeights.baseWeights().nodeWeight()); + assertEquals(10, summaryDummyNodeWeights.weightsDiff().shardCountDiff()); + assertDoublesEqual(20, summaryDummyNodeWeights.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(30, summaryDummyNodeWeights.weightsDiff().writeLoadDiff()); + assertDoublesEqual(40, summaryDummyNodeWeights.weightsDiff().totalWeightDiff()); + + var summarySecondDummyNodeWeights = summary.nodeNameToWeightChanges().get(SECOND_DUMMY_NODE.getName()); + assertEquals(0, summarySecondDummyNodeWeights.baseWeights().shardCount()); + assertDoublesEqual(0, summarySecondDummyNodeWeights.baseWeights().diskUsageInBytes()); + assertDoublesEqual(0, summarySecondDummyNodeWeights.baseWeights().writeLoad()); + assertDoublesEqual(0, summarySecondDummyNodeWeights.baseWeights().nodeWeight()); + assertEquals(5, summarySecondDummyNodeWeights.weightsDiff().shardCountDiff()); + assertDoublesEqual(15, summarySecondDummyNodeWeights.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(25, summarySecondDummyNodeWeights.weightsDiff().writeLoadDiff()); + assertDoublesEqual(35, summarySecondDummyNodeWeights.weightsDiff().totalWeightDiff()); + } + + /** + * Helper for double type inputs. assertEquals on double type inputs requires a delta. + */ + private void assertDoublesEqual(double expected, double actual) { + assertEquals(expected, actual, 0.00001); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummaryTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummaryTests.java new file mode 100644 index 000000000000..6291c629281d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummaryTests.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.elasticsearch.cluster.routing.allocation.allocator.BalancingRoundSummary.CombinedBalancingRoundSummary; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class BalancingRoundSummaryTests extends ESTestCase { + + /** + * Tests the {@link BalancingRoundSummary.CombinedBalancingRoundSummary#combine(List)} method. + */ + public void testCombine() { + final String NODE_1 = "node1"; + final String NODE_2 = "node2"; + final var node1BaseWeights = new DesiredBalanceMetrics.NodeWeightStats(10, 20, 30, 40); + final var node2BaseWeights = new DesiredBalanceMetrics.NodeWeightStats(100, 200, 300, 400); + final var commonDiff = new BalancingRoundSummary.NodeWeightsDiff(1, 2, 3, 4); + final long shardMovesSummary1 = 50; + final long shardMovesSummary2 = 150; + + // Set up a summaries list with two summary entries for a two-node cluster + List<BalancingRoundSummary> summaries = new ArrayList<>(); + summaries.add( + new BalancingRoundSummary( + Map.of( + NODE_1, + new BalancingRoundSummary.NodesWeightsChanges(node1BaseWeights, commonDiff), + NODE_2, + new BalancingRoundSummary.NodesWeightsChanges(node2BaseWeights, commonDiff) + ), + shardMovesSummary1 + ) + ); + summaries.add( + new BalancingRoundSummary( + Map.of( + NODE_1, + new BalancingRoundSummary.NodesWeightsChanges( + // The base weights for the next BalancingRoundSummary will be the previous BalancingRoundSummary's base + diff + // weights. + new DesiredBalanceMetrics.NodeWeightStats( + node1BaseWeights.shardCount() + commonDiff.shardCountDiff(), + node1BaseWeights.diskUsageInBytes() + commonDiff.diskUsageInBytesDiff(), + node1BaseWeights.writeLoad() + commonDiff.writeLoadDiff(), + node1BaseWeights.nodeWeight() + commonDiff.totalWeightDiff() + ), + commonDiff + ), + NODE_2, + new BalancingRoundSummary.NodesWeightsChanges( + new DesiredBalanceMetrics.NodeWeightStats( + node2BaseWeights.shardCount() + commonDiff.shardCountDiff(), + node2BaseWeights.diskUsageInBytes() + commonDiff.diskUsageInBytesDiff(), + node2BaseWeights.writeLoad() + commonDiff.writeLoadDiff(), + node2BaseWeights.nodeWeight() + commonDiff.totalWeightDiff() + ), + commonDiff + ) + ), + shardMovesSummary2 + ) + ); + + // Combine the summaries. + CombinedBalancingRoundSummary combined = BalancingRoundSummary.CombinedBalancingRoundSummary.combine(summaries); + + assertEquals(2, combined.numberOfBalancingRounds()); + assertEquals(shardMovesSummary1 + shardMovesSummary2, combined.numberOfShardMoves()); + assertEquals(2, combined.nodeNameToWeightChanges().size()); + + var combinedNode1WeightsChanges = combined.nodeNameToWeightChanges().get(NODE_1); + var combinedNode2WeightsChanges = combined.nodeNameToWeightChanges().get(NODE_2); + + // The base weights for each node should match the first BalancingRoundSummary's base weight values. The diff weights will be summed + // across all BalancingRoundSummary entries (in this case, there are two BalancingRoundSummary entries).
+ + assertEquals(node1BaseWeights.shardCount(), combinedNode1WeightsChanges.baseWeights().shardCount()); + assertDoublesEqual(node1BaseWeights.diskUsageInBytes(), combinedNode1WeightsChanges.baseWeights().diskUsageInBytes()); + assertDoublesEqual(node1BaseWeights.writeLoad(), combinedNode1WeightsChanges.baseWeights().writeLoad()); + assertDoublesEqual(node1BaseWeights.nodeWeight(), combinedNode1WeightsChanges.baseWeights().nodeWeight()); + assertEquals(2 * commonDiff.shardCountDiff(), combinedNode1WeightsChanges.weightsDiff().shardCountDiff()); + assertDoublesEqual(2 * commonDiff.diskUsageInBytesDiff(), combinedNode1WeightsChanges.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(2 * commonDiff.writeLoadDiff(), combinedNode1WeightsChanges.weightsDiff().writeLoadDiff()); + assertDoublesEqual(2 * commonDiff.totalWeightDiff(), combinedNode1WeightsChanges.weightsDiff().totalWeightDiff()); + + assertEquals(node2BaseWeights.shardCount(), combinedNode2WeightsChanges.baseWeights().shardCount()); + assertDoublesEqual(node2BaseWeights.diskUsageInBytes(), combinedNode2WeightsChanges.baseWeights().diskUsageInBytes()); + assertDoublesEqual(node2BaseWeights.writeLoad(), combinedNode2WeightsChanges.baseWeights().writeLoad()); + assertDoublesEqual(node2BaseWeights.nodeWeight(), combinedNode2WeightsChanges.baseWeights().nodeWeight()); + assertEquals(2 * commonDiff.shardCountDiff(), combinedNode2WeightsChanges.weightsDiff().shardCountDiff()); + assertDoublesEqual(2 * commonDiff.diskUsageInBytesDiff(), combinedNode2WeightsChanges.weightsDiff().diskUsageInBytesDiff()); + assertDoublesEqual(2 * commonDiff.writeLoadDiff(), combinedNode2WeightsChanges.weightsDiff().writeLoadDiff()); + assertDoublesEqual(2 * commonDiff.totalWeightDiff(), combinedNode2WeightsChanges.weightsDiff().totalWeightDiff()); + } + + /** + * Helper for double type inputs. assertEquals on double type inputs requires a delta.
+ */ + private void assertDoublesEqual(double expected, double actual) { + assertEquals(expected, actual, 0.00001); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java index e074bf883ae1..7283dc822e12 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java @@ -141,7 +141,7 @@ public abstract class NativeArrayIntegrationTestCase extends ESSingleNodeTestCas } else { var copyExpectedStoredFields = new String[expectedStoredFields.length + 1]; System.arraycopy(expectedStoredFields, 0, copyExpectedStoredFields, 0, expectedStoredFields.length); - copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_ignored_source"; + copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_recovery_source"; assertThat(storedFieldNames, containsInAnyOrder(copyExpectedStoredFields)); } } diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index cfb3cc68e035..97ca7d2ecd98 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -117,8 +117,6 @@ public class InboundDecoderTests extends ESTestCase { assertEquals(messageBytes, content); // Ref count is incremented since the bytes are forwarded as a fragment assertTrue(releasable2.hasReferences()); - releasable2.decRef(); - assertTrue(releasable2.hasReferences()); assertTrue(releasable2.decRef()); assertEquals(InboundDecoder.END_CONTENT, endMarker); } @@ -335,7 +333,12 @@ public class InboundDecoderTests extends ESTestCase { final BytesReference bytes2 = totalBytes.slice(bytesConsumed, totalBytes.length() - bytesConsumed); final ReleasableBytesReference releasable2 = wrapAsReleasable(bytes2); - int bytesConsumed2 = decoder.decode(releasable2, fragments::add); + int bytesConsumed2 = decoder.decode(releasable2, e -> { + fragments.add(e); + if (e instanceof ReleasableBytesReference reference) { + reference.retain(); + } + }); assertEquals(totalBytes.length() - totalHeaderSize, bytesConsumed2); final Object compressionScheme = fragments.get(0); diff --git a/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java b/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java index 282cb720f52f..d0c6cd8b00ff 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java @@ -159,12 +159,11 @@ public class InboundPipelineTests extends ESTestCase { final int remainingBytes = networkBytes.length() - currentOffset; final int bytesToRead = Math.min(randomIntBetween(1, 32 * 1024), remainingBytes); final BytesReference slice = networkBytes.slice(currentOffset, bytesToRead); - try (ReleasableBytesReference reference = new ReleasableBytesReference(slice, () -> {})) { - toRelease.add(reference); - bytesReceived += reference.length(); - pipeline.handleBytes(channel, reference); - currentOffset += bytesToRead; - } + ReleasableBytesReference reference = new ReleasableBytesReference(slice, () -> {}); + toRelease.add(reference); + bytesReceived += reference.length(); + pipeline.handleBytes(channel, reference); + currentOffset += 
bytesToRead; } } final int messages = expected.size(); @@ -288,13 +287,12 @@ public class InboundPipelineTests extends ESTestCase { final Releasable releasable = () -> bodyReleased.set(true); final int from = totalHeaderSize - 1; final BytesReference partHeaderPartBody = reference.slice(from, reference.length() - from - 1); - try (ReleasableBytesReference slice = new ReleasableBytesReference(partHeaderPartBody, releasable)) { - pipeline.handleBytes(new FakeTcpChannel(), slice); - } + pipeline.handleBytes(new FakeTcpChannel(), new ReleasableBytesReference(partHeaderPartBody, releasable)); assertFalse(bodyReleased.get()); - try (ReleasableBytesReference slice = new ReleasableBytesReference(reference.slice(reference.length() - 1, 1), releasable)) { - pipeline.handleBytes(new FakeTcpChannel(), slice); - } + pipeline.handleBytes( + new FakeTcpChannel(), + new ReleasableBytesReference(reference.slice(reference.length() - 1, 1), releasable) + ); assertTrue(bodyReleased.get()); } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSystemPropertyProvider.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSystemPropertyProvider.java index 62bbd10bcf85..fa1f38600719 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSystemPropertyProvider.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSystemPropertyProvider.java @@ -11,13 +11,21 @@ package org.elasticsearch.test.cluster.local; import org.elasticsearch.test.cluster.SystemPropertyProvider; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; -import static java.util.Map.entry; - public class DefaultSystemPropertyProvider implements SystemPropertyProvider { @Override public Map<String, String> get(LocalClusterSpec.LocalNodeSpec nodeSpec) { - return Map.ofEntries(entry("ingest.geoip.downloader.enabled.default", "false"), entry("tests.testfeatures.enabled", "true")); + Map<String, String> properties = new HashMap<>(); + properties.put("ingest.geoip.downloader.enabled.default", "false"); + + // enable test features unless we are running forwards compatibility tests + if (Boolean.parseBoolean(System.getProperty("tests.fwc", "false")) == false) { + properties.put("tests.testfeatures.enabled", "true"); + } + + return Collections.unmodifiableMap(properties); } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java index a84211674f8c..db71f8cec9ab 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java @@ -39,7 +39,8 @@ public class SnapshotDistributionResolver implements DistributionResolver { // Snapshot distributions are never release builds and always use the default distribution Version realVersion = Version.fromString(System.getProperty("tests.bwc.main.version", version.toString())); - return new DefaultDistributionDescriptor(realVersion, true, distributionDir, DistributionType.DEFAULT); + boolean isSnapshot = System.getProperty("tests.bwc.snapshot", "true").equals("false") == false; + return new DefaultDistributionDescriptor(realVersion, isSnapshot, distributionDir, DistributionType.DEFAULT); } return
delegate.resolve(version, type); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json index fb7f96cd223d..ffdf43099b0e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json @@ -102,6 +102,9 @@ "unprivileged": { "type": "boolean" }, + "fips": { + "type": "boolean" + }, "version": { "type": "text", "fields": { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java index b0c000a3ffa6..840a1652c540 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -31,6 +32,7 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.E * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsCollection implements Writeable, ToXContentObject { private static final ObjectParser PARSER = ObjectParser.fromBuilder( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java index 8e1a960fce66..6c7020bc5f4e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import java.util.ArrayList; @@ -31,6 +32,7 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.E * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsCollectionResolver { private final IndexNameExpressionResolver indexNameExpressionResolver; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java index 322b3bdad036..28640939e068 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java @@ -19,6 +19,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -36,6 +37,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsCollectionService { private static final Logger logger = LogManager.getLogger(AnalyticsCollectionService.class); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java index 59c6e445365a..4159df2a2ada 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java @@ -7,10 +7,13 @@ package org.elasticsearch.xpack.application.analytics; +import org.elasticsearch.core.UpdateForV10; + /** * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsConstants { private AnalyticsConstants() {} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java index 8df0a1a7f3bc..3ddf56b95ced 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.application.analytics; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.xpack.application.analytics.action.PostAnalyticsEventAction; import org.elasticsearch.xpack.application.analytics.ingest.AnalyticsEventEmitter; @@ -20,6 +21,7 @@ import java.util.Objects; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsEventIngestService { private final AnalyticsCollectionResolver collectionResolver; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java index f77236393927..18a335378efb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -35,6 +36,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsTemplateRegistry extends IndexTemplateRegistry { // This number must be incremented when we make changes to built-in templates. diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index aace5620bd64..193fe6237391 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,6 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class DeleteAnalyticsCollectionAction { public static final String NAME = "cluster:admin/xpack/application/analytics/delete"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index 8e68a99b7418..cd4d34a7ae84 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; @@ -29,6 +30,7 @@ import java.util.Objects; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class GetAnalyticsCollectionAction { public static final String NAME = "cluster:admin/xpack/application/analytics/get"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java index 21056239648f..a7bf5d62de22 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -40,6 +41,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class PostAnalyticsEventAction { public static final String NAME = "cluster:admin/xpack/application/analytics/post_event"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index db77c9cca417..1f102fa46710 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -27,6 +28,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class PutAnalyticsCollectionAction { public static final String NAME = "cluster:admin/xpack/application/analytics/put"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java index 439905aac848..d27417fdfcc7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.action; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestUtils; @@ -27,6 +28,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) @ServerlessScope(Scope.PUBLIC) public class RestDeleteAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler { public 
RestDeleteAnalyticsCollectionAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java index 1413a9b6c0a3..d768df445ae9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.application.analytics.action; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestUtils; @@ -27,6 +28,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) @ServerlessScope(Scope.PUBLIC) public class RestGetAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler { public RestGetAnalyticsCollectionAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java index bdfd1afb321a..88de0ab3bfff 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; @@ -33,6 +34,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) @ServerlessScope(Scope.PUBLIC) public class RestPostAnalyticsEventAction extends EnterpriseSearchBaseRestHandler { public RestPostAnalyticsEventAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java index 05358cdadb00..b184d7134fb9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.action; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.license.XPackLicenseState; 
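Every hunk in this stretch of the patch applies the same two-line change: each behavioral-analytics class that is already marked @Deprecated additionally gains an @UpdateForV10 annotation naming the team that owns the eventual removal. As a hedged sketch of the resulting shape (the class name here is illustrative, not from the patch):

import org.elasticsearch.core.UpdateForV10;

/**
 * ...
 * @deprecated in 9.0
 */
@Deprecated
@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH)
public class SomeAnalyticsComponent {
    // Behavioral-analytics code slated for removal when v10 work starts.
}

Keeping @Deprecated for compile-time warnings and adding @UpdateForV10 as a searchable marker lets tooling enumerate everything Enterprise Search must delete in the next major version.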
import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; @@ -28,6 +29,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) @ServerlessScope(Scope.PUBLIC) public class RestPutAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler { public RestPutAnalyticsCollectionAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java index 88dc3ff2889c..72bfdea0b818 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -31,6 +32,7 @@ import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_AN * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransportMasterNodeAction< DeleteAnalyticsCollectionAction.Request> { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java index 62a038de76ae..d32d99a0f147 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -29,6 +30,7 @@ import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_AN * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeReadAction< GetAnalyticsCollectionAction.Request, GetAnalyticsCollectionAction.Response> { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java index 
a260ff25cf5f..6f743db0dccb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -29,6 +30,7 @@ import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_AN * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class TransportPostAnalyticsEventAction extends HandledTransportAction< PostAnalyticsEventAction.Request, PostAnalyticsEventAction.Response> { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java index b02bf3edbad2..1189fa188f3a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -31,6 +32,7 @@ import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_AN * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAction< PutAnalyticsCollectionAction.Request, PutAnalyticsCollectionAction.Response> { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java index cdf3415d43bd..a964a46c63d6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -34,6 +35,7 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.E * 
@deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsEvent implements Writeable, ToXContentObject { public static final ParseField TIMESTAMP_FIELD = new ParseField("@timestamp"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java index 6cf1c0c094bc..85cf88a4a414 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.application.analytics.event; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -32,6 +33,7 @@ import static org.elasticsearch.xpack.application.analytics.event.AnalyticsEvent * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsEventFactory { public static final AnalyticsEventFactory INSTANCE = new AnalyticsEventFactory(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java index ae0191cae019..25027dae8172 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.event; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.application.analytics.event.AnalyticsEvent; @@ -26,6 +27,7 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.U * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class PageViewAnalyticsEvent { private static final ObjectParser PARSER = ObjectParser.fromBuilder( "page_view_event", diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java index 16ae078e079b..a10d786ca162 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.event; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import 
org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.application.analytics.event.AnalyticsEvent; @@ -24,6 +25,7 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.U * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SearchAnalyticsEvent { private static final ObjectParser PARSER = ObjectParser.fromBuilder( "search_event", diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java index f5ae40b1d06b..1f5ab4ba5ef0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.event; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.application.analytics.event.AnalyticsEvent; @@ -28,6 +29,7 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.U * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SearchClickAnalyticsEvent { private static final ObjectParser PARSER = ObjectParser.fromBuilder( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java index af55af7b480f..cddb98a4e905 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -22,6 +23,7 @@ import static org.elasticsearch.common.Strings.requireNonBlank; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class DocumentAnalyticsEventField { public static final ParseField DOCUMENT_FIELD = new ParseField("document"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java index 07f4a6fd0556..4fc19108eeed 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; +import 
org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -20,6 +21,7 @@ import java.util.Map; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class PageAnalyticsEventField { public static final ParseField PAGE_FIELD = new ParseField("page"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java index 68b9d880a3a8..0732a1da0d91 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -21,6 +22,7 @@ import java.util.Map; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class PaginationAnalyticsEventField { public static final ParseField PAGINATION_FIELD = new ParseField("page"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java index 1595dc398e6f..fe552f70bd31 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -24,6 +25,7 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.S * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SearchAnalyticsEventField { public static final ParseField SEARCH_FIELD = new ParseField("search"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java index b3023920eb2b..f1e7b969390d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; import 
org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -22,6 +23,7 @@ import java.util.Map; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SearchFiltersAnalyticsEventField { public static final ParseField SEARCH_FILTERS_FIELD = new ParseField("filters"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java index 602f8b8f5a95..b08eb89783bc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -23,6 +24,7 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.P * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SearchResultAnalyticsEventField { public static final ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java index 96c4fcf83c85..7d605b2265ce 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -24,6 +25,7 @@ import static org.elasticsearch.common.Strings.requireNonBlank; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SessionAnalyticsEventField { public static final ParseField SESSION_FIELD = new ParseField("session"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java index 5ff7ebd843fc..81578fc646a9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -22,6 +23,7 @@ import static org.elasticsearch.common.Strings.requireNonBlank; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class SortOrderAnalyticsEventField { public static final ParseField SORT_FIELD = new ParseField("sort"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java index 76ff15d900bf..d602f16c5569 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.event.parser.field; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -23,6 +24,7 @@ import static org.elasticsearch.common.Strings.requireNonBlank; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class UserAnalyticsEventField { public static final ParseField USER_FIELD = new ParseField("user"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java index cb2df7f16db6..3842a24cfd2b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java @@ -16,6 +16,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -38,6 +39,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsEventEmitter extends AbstractLifecycleComponent { private static final Logger logger = LogManager.getLogger(AnalyticsEventEmitter.class); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java index fbf34e5e3fa4..851604c3e9eb 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; /** @@ -23,6 +24,7 @@ import org.elasticsearch.injection.guice.Inject; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class AnalyticsEventIngestConfig { private static final String SETTING_ROOT_PATH = "xpack.applications.behavioral_analytics.ingest"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java index 595d4147e9f1..7be10a998e57 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -29,6 +30,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; * @deprecated in 9.0 */ @Deprecated +@UpdateForV10(owner = UpdateForV10.Owner.ENTERPRISE_SEARCH) public class BulkProcessorFactory { private static final Logger logger = LogManager.getLogger(AnalyticsEventEmitter.class); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 2f72c309b5f2..7547e2da3e18 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -34,6 +34,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -231,12 +232,20 @@ public abstract class LuceneOperator extends SourceOperator { public String toString() { StringBuilder sb = new StringBuilder(); sb.append(this.getClass().getSimpleName()).append("["); - sb.append("maxPageSize = ").append(maxPageSize); + sb.append("shards = ").append(sortedUnion(processedShards, sliceQueue.remainingShardsIdentifiers())); + sb.append(", maxPageSize = ").append(maxPageSize); describe(sb); sb.append("]"); return sb.toString(); } + private static Set<String> sortedUnion(Collection<String> a, Collection<String> b) { + var result = new TreeSet<String>(); + result.addAll(a); + result.addAll(b); + return result; + } + protected abstract void describe(StringBuilder sb); @Override @@ -254,7 +263,7 @@ public abstract class LuceneOperator
extends SourceOperator { private final int processedSlices; private final Set<String> processedQueries; private final Set<String> processedShards; - private final long processingNanos; + private final long processNanos; private final int totalSlices; private final int pagesEmitted; private final int sliceIndex; @@ -266,7 +275,7 @@ public abstract class LuceneOperator extends SourceOperator { private Status(LuceneOperator operator) { processedSlices = operator.processedSlices; processedQueries = operator.processedQueries.stream().map(Query::toString).collect(Collectors.toCollection(TreeSet::new)); - processingNanos = operator.processingNanos; + processNanos = operator.processingNanos; processedShards = new TreeSet<>(operator.processedShards); sliceIndex = operator.sliceIndex; totalSlices = operator.sliceQueue.totalSlices(); @@ -293,7 +302,7 @@ public abstract class LuceneOperator extends SourceOperator { int processedSlices, Set<String> processedQueries, Set<String> processedShards, - long processingNanos, + long processNanos, int sliceIndex, int totalSlices, int pagesEmitted, @@ -305,7 +314,7 @@ public abstract class LuceneOperator extends SourceOperator { this.processedSlices = processedSlices; this.processedQueries = processedQueries; this.processedShards = processedShards; - this.processingNanos = processingNanos; + this.processNanos = processNanos; this.sliceIndex = sliceIndex; this.totalSlices = totalSlices; this.pagesEmitted = pagesEmitted; @@ -324,7 +333,7 @@ public abstract class LuceneOperator extends SourceOperator { processedQueries = Collections.emptySet(); processedShards = Collections.emptySet(); } - processingNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0; + processNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ?
in.readVLong() : 0; sliceIndex = in.readVInt(); totalSlices = in.readVInt(); pagesEmitted = in.readVInt(); @@ -346,7 +355,7 @@ public abstract class LuceneOperator extends SourceOperator { out.writeCollection(processedShards, StreamOutput::writeString); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { - out.writeVLong(processingNanos); + out.writeVLong(processNanos); } out.writeVInt(sliceIndex); out.writeVInt(totalSlices); @@ -377,7 +386,7 @@ public abstract class LuceneOperator extends SourceOperator { } public long processNanos() { - return processingNanos; + return processNanos; } public int sliceIndex() { @@ -414,9 +423,9 @@ public abstract class LuceneOperator extends SourceOperator { builder.field("processed_slices", processedSlices); builder.field("processed_queries", processedQueries); builder.field("processed_shards", processedShards); - builder.field("processing_nanos", processingNanos); + builder.field("process_nanos", processNanos); if (builder.humanReadable()) { - builder.field("processing_time", TimeValue.timeValueNanos(processingNanos)); + builder.field("process_time", TimeValue.timeValueNanos(processNanos)); } builder.field("slice_index", sliceIndex); builder.field("total_slices", totalSlices); @@ -436,7 +445,7 @@ public abstract class LuceneOperator extends SourceOperator { return processedSlices == status.processedSlices && processedQueries.equals(status.processedQueries) && processedShards.equals(status.processedShards) - && processingNanos == status.processingNanos + && processNanos == status.processNanos && sliceIndex == status.sliceIndex && totalSlices == status.totalSlices && pagesEmitted == status.pagesEmitted diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 9633051781f4..0407e0f72604 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Nullable; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Queue; @@ -45,8 +46,8 @@ public final class LuceneSliceQueue { return totalSlices; } - public Iterable<LuceneSlice> getSlices() { - return slices; + public Collection<String> remainingShardsIdentifiers() { + return slices.stream().map(slice -> slice.shardContext().shardIdentifier()).toList(); } public static LuceneSliceQueue create( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index d25cb3a870da..193307ae4b81 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -50,6 +50,7 @@ import static org.apache.lucene.search.ScoreMode.TOP_DOCS; * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ public final class LuceneTopNSourceOperator extends LuceneOperator { + public static class Factory extends LuceneOperator.Factory { private final int maxPageSize; private final List<SortBuilder<?>> sorts;
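The new shards = entry in LuceneOperator#toString merges the shards the operator has already processed with those still waiting in the slice queue, so a dump of a running operator shows its full shard footprint. A minimal standalone sketch of that union logic (the shard identifier strings are illustrative; real values come from ShardContext#shardIdentifier()):

import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

class SortedUnionSketch {
    // Mirrors the sortedUnion helper above: TreeSet yields a de-duplicated, sorted view.
    static Set<String> sortedUnion(Collection<String> a, Collection<String> b) {
        var result = new TreeSet<String>();
        result.addAll(a);
        result.addAll(b);
        return result;
    }

    public static void main(String[] args) {
        Set<String> processed = Set.of("logs[0]", "logs[1]");
        List<String> remaining = List.of("logs[1]", "logs[2]");
        System.out.println(sortedUnion(processed, remaining)); // [logs[0], logs[1], logs[2]]
    }
}

Sorting also keeps the toString output stable, which matters for anything that asserts on operator descriptions.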
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index c0d220fda5d4..06cbcbd2efc8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -77,7 +77,7 @@ public class Driver implements Releasable, Describable { private final DriverContext driverContext; private final Supplier<String> description; private final List<Operator> activeOperators; - private final List<DriverStatus.OperatorStatus> statusOfCompletedOperators = new ArrayList<>(); + private final List<OperatorStatus> statusOfCompletedOperators = new ArrayList<>(); private final Releasable releasable; private final long statusNanos; @@ -117,6 +117,8 @@ public class Driver implements Releasable, Describable { public Driver( String sessionId, String taskDescription, + String clusterName, + String nodeName, long startTime, long startNanos, DriverContext driverContext, @@ -143,6 +145,8 @@ public class Driver implements Releasable, Describable { new DriverStatus( sessionId, taskDescription, + clusterName, + nodeName, startTime, System.currentTimeMillis(), 0, @@ -155,37 +159,6 @@ public class Driver implements Releasable, Describable { ); } - /** - * Creates a new driver with a chain of operators. - * @param driverContext the driver context - * @param source source operator - * @param intermediateOperators the chain of operators to execute - * @param sink sink operator - * @param releasable a {@link Releasable} to invoked once the chain of operators has run to completion - */ - public Driver( - String taskDescription, - DriverContext driverContext, - SourceOperator source, - List<Operator> intermediateOperators, - SinkOperator sink, - Releasable releasable - ) { - this( - "unset", - taskDescription, - System.currentTimeMillis(), - System.nanoTime(), - driverContext, - () -> null, - source, - intermediateOperators, - sink, - DEFAULT_STATUS_INTERVAL, - releasable - ); - } - public DriverContext driverContext() { return driverContext; } @@ -329,7 +302,7 @@ public class Driver implements Releasable, Describable { Iterator<Operator> itr = finishedOperators.iterator(); while (itr.hasNext()) { Operator op = itr.next(); - statusOfCompletedOperators.add(new DriverStatus.OperatorStatus(op.toString(), op.status())); + statusOfCompletedOperators.add(new OperatorStatus(op.toString(), op.status())); op.close(); itr.remove(); } @@ -502,6 +475,8 @@ public class Driver implements Releasable, Describable { } return new DriverProfile( status.taskDescription(), + status.clusterName(), + status.nodeName(), status.started(), status.lastUpdated(), finishNanos - startNanos, @@ -549,13 +524,15 @@ public class Driver implements Releasable, Describable { return new DriverStatus( sessionId, taskDescription, + prev.clusterName(), + prev.nodeName(), startTime, now, prev.cpuNanos() + extraCpuNanos, prev.iterations() + extraIterations, status, statusOfCompletedOperators, - activeOperators.stream().map(op -> new DriverStatus.OperatorStatus(op.toString(), op.status())).toList(), + activeOperators.stream().map(op -> new OperatorStatus(op.toString(), op.status())).toList(), sleeps ); });
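With the convenience constructor gone, every Driver call site now threads the full identity through explicitly, and the status update copies the immutable cluster/node names forward from the previous snapshot while accumulating the counters. A hedged, self-contained sketch of that snapshot-update pattern (the simplified Status record and all values here are illustrative, not the real DriverStatus):

import java.util.concurrent.atomic.AtomicReference;

class StatusUpdateSketch {
    // Simplified stand-in for DriverStatus: identity fields plus counters.
    record Status(String clusterName, String nodeName, long cpuNanos, long iterations) {}

    private final AtomicReference<Status> status = new AtomicReference<>(new Status("test-cluster", "node-0", 0, 0));

    Status update(long extraCpuNanos, long extraIterations) {
        return status.updateAndGet(
            prev -> new Status(
                prev.clusterName(), // identity carried forward unchanged
                prev.nodeName(),
                prev.cpuNanos() + extraCpuNanos,
                prev.iterations() + extraIterations
            )
        );
    }

    public static void main(String[] args) {
        var sketch = new StatusUpdateSketch();
        System.out.println(sketch.update(1_000, 1)); // Status[clusterName=test-cluster, nodeName=node-0, cpuNanos=1000, iterations=1]
    }
}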
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index 9cc9f2335ffd..b677fcfd30da 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -21,98 +21,48 @@ import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; import java.util.Iterator; import java.util.List; -import java.util.Objects; /** * Profile results from a single {@link Driver}. + * + * @param taskDescription Description of the task this driver is running. This description should be short and meaningful + * as a grouping identifier. We use the phase of the query right now: "data", "node_reduce", "final". + * @param clusterName The name of the cluster this driver is running on. + * @param nodeName The name of the node this driver is running on. + * @param startMillis Millis since epoch when the driver started. + * @param stopMillis Millis since epoch when the driver stopped. + * @param tookNanos Nanos between creation and completion of the {@link Driver}. + * @param cpuNanos Nanos this {@link Driver} has been running on the cpu. Does not include async or waiting time. + * @param iterations The number of times the driver has moved a single page up the chain of operators as far as it'll go. + * @param operators Status of each {@link Operator} in the driver when it finished. */ -public class DriverProfile implements Writeable, ChunkedToXContentObject { - /** - * Description of the task this driver is running. This description should be - * short and meaningful as a grouping identifier. We use the phase of the - * query right now: "data", "node_reduce", "final". - */ - private final String taskDescription; +public record DriverProfile( + String taskDescription, + String clusterName, + String nodeName, + long startMillis, + long stopMillis, + long tookNanos, + long cpuNanos, + long iterations, + List<OperatorStatus> operators, + DriverSleeps sleeps +) implements Writeable, ChunkedToXContentObject { - /** - * Millis since epoch when the driver started. - */ - private final long startMillis; - - /** - * Millis since epoch when the driver stopped. - */ - private final long stopMillis; - - /** - * Nanos between creation and completion of the {@link Driver}. - */ - private final long tookNanos; - - /** - * Nanos this {@link Driver} has been running on the cpu. Does not - * include async or waiting time. - */ - private final long cpuNanos; - - /** - * The number of times the driver has moved a single page up the - * chain of operators as far as it'll go. - */ - private final long iterations; - - /** - * Status of each {@link Operator} in the driver when it finished.
- */ - private final List<DriverStatus.OperatorStatus> operators; - - private final DriverSleeps sleeps; - - public DriverProfile( - String taskDescription, - long startMillis, - long stopMillis, - long tookNanos, - long cpuNanos, - long iterations, - List<DriverStatus.OperatorStatus> operators, - DriverSleeps sleeps - ) { - this.taskDescription = taskDescription; - this.startMillis = startMillis; - this.stopMillis = stopMillis; - this.tookNanos = tookNanos; - this.cpuNanos = cpuNanos; - this.iterations = iterations; - this.operators = operators; - this.sleeps = sleeps; - } - - public DriverProfile(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) - || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { - this.taskDescription = in.readString(); - } else { - this.taskDescription = ""; - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { - this.startMillis = in.readVLong(); - this.stopMillis = in.readVLong(); - } else { - this.startMillis = 0; - this.stopMillis = 0; - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { - this.tookNanos = in.readVLong(); - this.cpuNanos = in.readVLong(); - this.iterations = in.readVLong(); - } else { - this.tookNanos = 0; - this.cpuNanos = 0; - this.iterations = 0; - } - this.operators = in.readCollectionAsImmutableList(DriverStatus.OperatorStatus::new); - this.sleeps = DriverSleeps.read(in); + public static DriverProfile readFrom(StreamInput in) throws IOException { + return new DriverProfile( + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) + || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90) ? in.readString() : "", + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_NODE_DESCRIPTION) ? in.readString() : "", + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_NODE_DESCRIPTION) ? in.readString() : "", + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0, + in.readCollectionAsImmutableList(OperatorStatus::readFrom), + DriverSleeps.read(in) + ); } @Override @@ -121,6 +71,10 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { out.writeString(taskDescription); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_NODE_DESCRIPTION)) { + out.writeString(clusterName); + out.writeString(nodeName); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(startMillis); out.writeVLong(stopMillis); @@ -134,68 +88,12 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { sleeps.writeTo(out); }
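The conversion pattern here turns a hand-rolled immutable class into a record: the fields, constructor, accessors, equals, hashCode and toString that follow are deleted because the record generates them, while the deserializing constructor becomes a static readFrom factory so the version-gated defaults stay in one expression per component. A minimal sketch of the same pattern using plain java.io streams in place of StreamInput/StreamOutput (names here are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

record ProfileSketch(String taskDescription, long tookNanos) {
    // The old stream-reading constructor becomes a static factory building the record in one shot.
    static ProfileSketch readFrom(DataInput in) throws IOException {
        return new ProfileSketch(in.readUTF(), in.readLong());
    }

    void writeTo(DataOutput out) throws IOException {
        out.writeUTF(taskDescription);
        out.writeLong(tookNanos);
    }
}

class ProfileSketchRoundTrip {
    public static void main(String[] args) throws IOException {
        var bytes = new ByteArrayOutputStream();
        new ProfileSketch("data", 42L).writeTo(new DataOutputStream(bytes));
        var in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        // equals/hashCode/toString come for free with the record.
        System.out.println(ProfileSketch.readFrom(in)); // ProfileSketch[taskDescription=data, tookNanos=42]
    }
}

An auxiliary record constructor must delegate to the canonical one in its first statement, so the old multi-statement stream constructor maps more naturally onto a static factory.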
- /** - * Description of the task this driver is running. - */ - public String taskDescription() { - return taskDescription; - } - - /** - * Millis since epoch when the driver started. - */ - public long startMillis() { - return startMillis; - } - - /** - * Millis since epoch when the driver stopped. - */ - public long stopMillis() { - return stopMillis; - } - - /** - * Nanos between creation and completion of the {@link Driver}. - */ - public long tookNanos() { - return tookNanos; - } - - /** - * Nanos this {@link Driver} has been running on the cpu. Does not - * include async or waiting time. - */ - public long cpuNanos() { - return cpuNanos; - } - - /** - * The number of times the driver has moved a single page up the - * chain of operators as far as it'll go. - */ - public long iterations() { - return iterations; - } - - /** - * Status of each {@link Operator} in the driver when it finished. - */ - public List<DriverStatus.OperatorStatus> operators() { - return operators; - } - - /** - * Records of the times the driver has slept. - */ - public DriverSleeps sleeps() { - return sleeps; - } - @Override public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) { return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { b.field("task_description", taskDescription); + b.field("cluster_name", clusterName); + b.field("node_name", nodeName); b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); b.field("took_nanos", tookNanos); @@ -215,30 +113,6 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { ); } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - DriverProfile that = (DriverProfile) o; - return taskDescription.equals(that.taskDescription) - && startMillis == that.startMillis - && stopMillis == that.stopMillis - && tookNanos == that.tookNanos - && cpuNanos == that.cpuNanos - && iterations == that.iterations - && Objects.equals(operators, that.operators) - && sleeps.equals(that.sleeps); - } - - @Override - public int hashCode() { - return Objects.hash(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); - } - @Override public String toString() { return Strings.toString(this); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java index 41d0aee14fe6..1eb570e1b034 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java @@ -12,124 +12,71 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ToXContentFragment; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.List; import java.util.Locale; -import java.util.Objects; /** * {@link Task.Status} reported from a {@link Driver} to be reported by the tasks api. + * + * @param sessionId The session for this driver. + * @param taskDescription Description of the task this driver is running.
+ * @param clusterName The name of the cluster this driver is running on. + * @param nodeName The name of the node this driver is running on. + * @param started When this {@link Driver} was started. + * @param lastUpdated When this status was generated. + * @param cpuNanos Nanos this {@link Driver} has been running on the cpu. Does not include async or waiting time. + * @param iterations The number of times the driver has moved a single page up the chain of operators as far as it'll go. + * @param status The state of the overall driver - queue, starting, running, finished. + * @param completedOperators Status of each completed {@link Operator} in the driver. + * @param activeOperators Status of each active {@link Operator} in the driver. */ -public class DriverStatus implements Task.Status { +public record DriverStatus( + String sessionId, + String taskDescription, + String clusterName, + String nodeName, + long started, + long lastUpdated, + long cpuNanos, + long iterations, + Status status, + List<OperatorStatus> completedOperators, + List<OperatorStatus> activeOperators, + DriverSleeps sleeps +) implements Task.Status { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Task.Status.class, "driver", - DriverStatus::new + DriverStatus::readFrom ); - /** - * The session for this driver. - */ - private final String sessionId; - - /** - * Description of the task this driver is running. - */ - private final String taskDescription; - - /** - * Milliseconds since epoch when this driver started. - */ - private final long started; - - /** - * When this status was generated. - */ - private final long lastUpdated; - - /** - * Nanos this {@link Driver} has been running on the cpu. Does not - * include async or waiting time. - */ - private final long cpuNanos; - - /** - * The number of times the driver has moved a single page up the - * chain of operators as far as it'll go. - */ - private final long iterations; - - /** - * The state of the overall driver - queue, starting, running, finished. - */ - private final Status status; - - /** - * Status of each completed {@link Operator} in the driver. - */ - private final List<OperatorStatus> completedOperators; - - /** - * Status of each active {@link Operator} in the driver. - */ - private final List<OperatorStatus> activeOperators; - - private final DriverSleeps sleeps; - - DriverStatus( - String sessionId, - String taskDescription, - long started, - long lastUpdated, - long cpuTime, - long iterations, - Status status, - List<OperatorStatus> completedOperators, - List<OperatorStatus> activeOperators, - DriverSleeps sleeps - ) { - this.sessionId = sessionId; - this.taskDescription = taskDescription; - this.started = started; - this.lastUpdated = lastUpdated; - this.cpuNanos = cpuTime; - this.iterations = iterations; - this.status = status; - this.completedOperators = completedOperators; - this.activeOperators = activeOperators; - this.sleeps = sleeps; - } - - public DriverStatus(StreamInput in) throws IOException { - this.sessionId = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) - || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { - this.taskDescription = in.readString(); - } else { - this.taskDescription = ""; - } - this.started = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readLong() : 0; - this.lastUpdated = in.readLong(); - this.cpuNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ?
in.readVLong() : 0; - this.iterations = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0; - this.status = Status.read(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - this.completedOperators = in.readCollectionAsImmutableList(OperatorStatus::new); - } else { - this.completedOperators = List.of(); - } - this.activeOperators = in.readCollectionAsImmutableList(OperatorStatus::new); - this.sleeps = DriverSleeps.read(in); + public static DriverStatus readFrom(StreamInput in) throws IOException { + return new DriverStatus( + in.readString(), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) + || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90) ? in.readString() : "", + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_NODE_DESCRIPTION) ? in.readString() : "", + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_NODE_DESCRIPTION) ? in.readString() : "", + in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readLong() : 0, + in.readLong(), + in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0, + Status.read(in), + in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0) + ? in.readCollectionAsImmutableList(OperatorStatus::readFrom) + : List.of(), + in.readCollectionAsImmutableList(OperatorStatus::readFrom), + DriverSleeps.read(in) + ); } @Override @@ -139,6 +86,10 @@ public class DriverStatus implements Task.Status { || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { out.writeString(taskDescription); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_NODE_DESCRIPTION)) { + out.writeString(clusterName); + out.writeString(nodeName); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { out.writeLong(started); } @@ -160,85 +111,13 @@ public class DriverStatus implements Task.Status { return ENTRY.name; } - /** - * The session for this driver. - */ - public String sessionId() { - return sessionId; - } - - /** - * Description of the task this driver is running. This description should be - * short and meaningful as a grouping identifier. We use the phase of the - * query right now: "data", "node_reduce", "final". - */ - public String taskDescription() { - return taskDescription; - } - - /** - * When this {@link Driver} was started. - */ - public long started() { - return started; - } - - /** - * When this status was generated. - */ - public long lastUpdated() { - return lastUpdated; - } - - /** - * Nanos this {@link Driver} has been running on the cpu. Does not - * include async or waiting time. - */ - public long cpuNanos() { - return cpuNanos; - } - - /** - * The number of times the driver has moved a single page up the - * chain of operators as far as it'll go. - */ - public long iterations() { - return iterations; - } - - /** - * The state of the overall driver - queue, starting, running, finished. - */ - public Status status() { - return status; - } - - /** - * Status of each completed {@link Operator} in the driver. - */ - public List<OperatorStatus> completedOperators() { - return completedOperators; - } - - /** - * Records of the times the driver has slept. - */ - public DriverSleeps sleeps() { - return sleeps; - }
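Every new wire field follows the backwards-compatibility discipline visible in readFrom/writeTo above: a field is only read or written when the remote node's transport version knows about it (here gated on ESQL_DRIVER_NODE_DESCRIPTION), and readers substitute a default when talking to an older peer. A self-contained sketch of the idea, using a plain int where Elasticsearch uses TransportVersion (the version number and field names are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

class VersionGatedSketch {
    static final int NODE_DESCRIPTION_VERSION = 9; // illustrative stand-in for a TransportVersion id

    record Status(String nodeName, long cpuNanos) {
        static Status readFrom(DataInput in, int peerVersion) throws IOException {
            return new Status(
                peerVersion >= NODE_DESCRIPTION_VERSION ? in.readUTF() : "", // default when the peer predates the field
                in.readLong()
            );
        }

        void writeTo(DataOutput out, int peerVersion) throws IOException {
            if (peerVersion >= NODE_DESCRIPTION_VERSION) {
                out.writeUTF(nodeName); // never send bytes an old peer cannot parse
            }
            out.writeLong(cpuNanos);
        }
    }

    public static void main(String[] args) throws IOException {
        for (int peer : new int[] { 8, 9 }) {
            var bytes = new ByteArrayOutputStream();
            new Status("node-0", 7L).writeTo(new DataOutputStream(bytes), peer);
            var in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            System.out.println("peer v" + peer + " -> " + Status.readFrom(in, peer));
        }
    }
}

Both sides must apply the same gate: the writer skips the field for old peers and the reader only consumes it from new ones, otherwise the stream desynchronizes.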
- */ - public List<OperatorStatus> activeOperators() { - return activeOperators; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("session_id", sessionId); builder.field("task_description", taskDescription); + builder.field("cluster_name", clusterName); + builder.field("node_name", nodeName); builder.field("started", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(started)); builder.field("last_updated", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(lastUpdated)); builder.field("cpu_nanos", cpuNanos); @@ -261,112 +140,11 @@ public class DriverStatus implements Task.Status { return builder.endObject(); } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DriverStatus that = (DriverStatus) o; - return sessionId.equals(that.sessionId) - && taskDescription.equals(that.taskDescription) - && started == that.started - && lastUpdated == that.lastUpdated - && cpuNanos == that.cpuNanos - && iterations == that.iterations - && status == that.status - && completedOperators.equals(that.completedOperators) - && activeOperators.equals(that.activeOperators) - && sleeps.equals(that.sleeps); - } - - @Override - public int hashCode() { - return Objects.hash( - sessionId, - taskDescription, - started, - lastUpdated, - cpuNanos, - iterations, - status, - completedOperators, - activeOperators, - sleeps - ); - } - @Override public String toString() { return Strings.toString(this); } - /** - * Status of an {@link Operator}. - */ - public static class OperatorStatus implements Writeable, ToXContentObject { - /** - * String representation of the {@link Operator}. Literally just the - * {@link Object#toString()} of it. - */ - private final String operator; - /** - * Status as reported by the {@link Operator}. - */ - @Nullable - private final Operator.Status status; - - public OperatorStatus(String operator, Operator.Status status) { - this.operator = operator; - this.status = status; - } - - OperatorStatus(StreamInput in) throws IOException { - operator = in.readString(); - status = in.readOptionalNamedWriteable(Operator.Status.class); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(operator); - out.writeOptionalNamedWriteable(status != null && VersionedNamedWriteable.shouldSerialize(out, status) ?
status : null); - } - - public String operator() { - return operator; - } - - public Operator.Status status() { - return status; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("operator", operator); - if (status != null) { - builder.field("status", status); - } - return builder.endObject(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OperatorStatus that = (OperatorStatus) o; - return operator.equals(that.operator) && Objects.equals(status, that.status); - } - - @Override - public int hashCode() { - return Objects.hash(operator, status); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - public enum Status implements Writeable, ToXContentFragment { QUEUED, STARTING, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OperatorStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OperatorStatus.java new file mode 100644 index 000000000000..7576661decf2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OperatorStatus.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * Status of an {@link Operator}. + * + * @param operator String representation of the {@link Operator}. + * @param status Status as reported by the {@link Operator}. + */ +public record OperatorStatus(String operator, @Nullable Operator.Status status) implements Writeable, ToXContentObject { + + public static OperatorStatus readFrom(StreamInput in) throws IOException { + return new OperatorStatus(in.readString(), in.readOptionalNamedWriteable(Operator.Status.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(operator); + out.writeOptionalNamedWriteable(status != null && VersionedNamedWriteable.shouldSerialize(out, status) ? 
status : null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("operator", operator); + if (status != null) { + builder.field("status", status); + } + return builder.endObject(); + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 401fa0d14cd9..5a1679a3ed01 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -63,6 +63,7 @@ import org.elasticsearch.compute.operator.ShuffleDocsOperator; import org.elasticsearch.compute.test.BlockTestUtils; import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -123,7 +124,7 @@ public class OperatorTests extends MapperServiceTestCase { } }); DriverContext driverContext = driverContext(); - drivers.add(new Driver("test", driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); + drivers.add(TestDriverFactory.create(driverContext, factory.get(driverContext), List.of(), docCollector)); } OperatorTestCase.runDriver(drivers); Set expectedDocIds = searchForDocIds(reader, query); @@ -214,8 +215,7 @@ public class OperatorTests extends MapperServiceTestCase { driverContext ) ); - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), operators, @@ -228,8 +228,7 @@ public class OperatorTests extends MapperServiceTestCase { actualCounts.put(BytesRef.deepCopyOf(spare), counts.getLong(i)); } page.releaseBlocks(); - }), - () -> {} + }) ); OperatorTestCase.runDriver(driver); assertThat(actualCounts, equalTo(expectedCounts)); @@ -248,8 +247,7 @@ public class OperatorTests extends MapperServiceTestCase { var results = new ArrayList(); DriverContext driverContext = driverContext(); try ( - var driver = new Driver( - "test", + var driver = TestDriverFactory.create( driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), @@ -258,8 +256,7 @@ public class OperatorTests extends MapperServiceTestCase { for (int i = 0; i < page.getPositionCount(); i++) { results.add(block.getLong(i)); } - }), - () -> {} + }) ) ) { OperatorTestCase.runDriver(driver); @@ -336,8 +333,7 @@ public class OperatorTests extends MapperServiceTestCase { var actualValues = new ArrayList<>(); var actualPrimeOrds = new ArrayList<>(); try ( - var driver = new Driver( - "test", + var driver = TestDriverFactory.create( driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of( @@ -354,8 +350,7 @@ public class OperatorTests extends MapperServiceTestCase { } finally { page.releaseBlocks(); } - }), - () -> {} + }) ) ) { OperatorTestCase.runDriver(driver); diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index abac7a4cd47e..3f47be19b22e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -31,6 +31,7 @@ import org.elasticsearch.compute.operator.PositionMergingSourceOperator; import org.elasticsearch.compute.test.BlockTestUtils; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.hamcrest.Matcher; @@ -110,13 +111,11 @@ public abstract class AggregatorFunctionTestCase extends ForkingOperatorTestCase List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new NullInsertingSourceOperator(new CannedSourceOperator(input.iterator()), blockFactory), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 8657caafef40..5259331a7a81 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import java.util.List; import java.util.stream.LongStream; @@ -64,13 +65,11 @@ public class CountDistinctIntAggregatorFunctionTests extends AggregatorFunctionT DriverContext driverContext = driverContext(); BlockFactory blockFactory = driverContext.blockFactory(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), - new PageConsumerOperator(page -> fail("shouldn't have made it this far")), - () -> {} + new PageConsumerOperator(page -> fail("shouldn't have made it this far")) ) ) { expectThrows(Exception.class, () -> runDriver(d)); // ### find a more specific exception type diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 55f522f31b28..a9aeeda7e04d 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import java.util.List; import java.util.stream.LongStream; @@ -65,13 +66,11 @@ public class CountDistinctLongAggregatorFunctionTests extends AggregatorFunction DriverContext driverContext = driverContext(); BlockFactory blockFactory = driverContext.blockFactory(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), - new PageConsumerOperator(page -> fail("shouldn't have made it this far")), - () -> {} + new PageConsumerOperator(page -> fail("shouldn't have made it this far")) ) ) { expectThrows(Exception.class, () -> runDriver(d)); // ### find a more specific exception type diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index a64ec4e155ad..9332c44de893 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.test.ESTestCase; @@ -52,13 +53,11 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase DriverContext driverContext = driverContext(); List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(Double.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -71,16 +70,14 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase DriverContext driverContext = driverContext(); List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceDoubleBlockSourceOperator( driverContext.blockFactory(), DoubleStream.of(0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7) ), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -101,13 +98,11 @@ public class 
SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase } driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(values)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -124,13 +119,11 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase } driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -144,13 +137,11 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase } driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java index 11205907acb2..69187e40fd72 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SequenceFloatBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.test.ESTestCase; @@ -52,13 +53,11 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase DriverContext driverContext = driverContext(); List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(Float.MAX_VALUE - 1, 2f)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -71,16 +70,14 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase DriverContext driverContext = driverContext(); List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceFloatBlockSourceOperator( driverContext.blockFactory(), Stream.of(0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f, 1.0f, 1.1f, 1.2f, 1.3f, 1.4f, 1.5f, 1.6f, 1.7f) ), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) 
{ runDriver(d); @@ -101,13 +98,11 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase } driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(values)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -124,13 +119,11 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase } driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -144,13 +137,11 @@ public class SumFloatAggregatorFunctionTests extends AggregatorFunctionTestCase } driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index 6484382d5ff5..d7df7f211008 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import java.util.List; import java.util.stream.LongStream; @@ -51,13 +52,11 @@ public class SumIntAggregatorFunctionTests extends AggregatorFunctionTestCase { DriverContext driverContext = driverContext(); BlockFactory blockFactory = driverContext.blockFactory(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), - new PageConsumerOperator(page -> fail("shouldn't have made it this far")), - () -> {} + new PageConsumerOperator(page -> fail("shouldn't have made it this far")) ) ) { expectThrows(Exception.class, () -> runDriver(d)); // ### find a more specific exception type diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index c2b805291f4f..73f0837edabd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import java.util.List; import java.util.stream.LongStream; @@ -50,13 +51,11 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { public void testOverflowFails() { DriverContext driverContext = driverContext(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), - new PageConsumerOperator(page -> fail("shouldn't have made it this far")), - () -> {} + new PageConsumerOperator(page -> fail("shouldn't have made it this far")) ) ) { Exception e = expectThrows(ArithmeticException.class, () -> runDriver(d)); @@ -68,13 +67,11 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { DriverContext driverContext = driverContext(); BlockFactory blockFactory = driverContext.blockFactory(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), - new PageConsumerOperator(page -> fail("shouldn't have made it this far")), - () -> {} + new PageConsumerOperator(page -> fail("shouldn't have made it this far")) ) ) { expectThrows(Exception.class, () -> runDriver(d)); // ### find a more specific exception type diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index 42e9fc8deafc..b1319e65e098 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -415,8 +416,7 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { List intermediateOutput = new ArrayList<>(); - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new LocalSourceOperator(input1), List.of( @@ -431,13 +431,11 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { analysisRegistry ).get(driverContext) ), - new PageConsumerOperator(intermediateOutput::add), - () -> {} + new PageConsumerOperator(intermediateOutput::add) ); runDriver(driver); - driver = new Driver( - "test", + driver = TestDriverFactory.create( driverContext, new 
LocalSourceOperator(input2), List.of( @@ -452,15 +450,13 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { analysisRegistry ).get(driverContext) ), - new PageConsumerOperator(intermediateOutput::add), - () -> {} + new PageConsumerOperator(intermediateOutput::add) ); runDriver(driver); List finalOutput = new ArrayList<>(); - driver = new Driver( - "test", + driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(intermediateOutput.iterator()), List.of( @@ -475,8 +471,7 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { analysisRegistry ).get(driverContext) ), - new PageConsumerOperator(finalOutput::add), - () -> {} + new PageConsumerOperator(finalOutput::add) ); runDriver(driver); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java index 9c89317e4c35..734b0660d24a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -136,8 +137,7 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { List intermediateOutput = new ArrayList<>(); - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new LocalSourceOperator(input1), List.of( @@ -149,13 +149,11 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { analysisRegistry ).get(driverContext) ), - new PageConsumerOperator(intermediateOutput::add), - () -> {} + new PageConsumerOperator(intermediateOutput::add) ); runDriver(driver); - driver = new Driver( - "test", + driver = TestDriverFactory.create( driverContext, new LocalSourceOperator(input2), List.of( @@ -167,15 +165,13 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { analysisRegistry ).get(driverContext) ), - new PageConsumerOperator(intermediateOutput::add), - () -> {} + new PageConsumerOperator(intermediateOutput::add) ); runDriver(driver); List finalOutput = new ArrayList<>(); - driver = new Driver( - "test", + driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(intermediateOutput.iterator()), List.of( @@ -187,8 +183,7 @@ public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { analysisRegistry ).get(driverContext) ), - new PageConsumerOperator(finalOutput::add), - () -> {} + new PageConsumerOperator(finalOutput::add) ); runDriver(driver); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 61c7582c7424..87636ed4c05b 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; @@ -94,7 +95,7 @@ public class LuceneCountOperatorTests extends AnyOperatorTestCase { @Override protected Matcher expectedToStringOfSimple() { - return matchesRegex("LuceneCountOperator\\[maxPageSize = \\d+, remainingDocs=100]"); + return matchesRegex("LuceneCountOperator\\[shards = \\[test], maxPageSize = \\d+, remainingDocs=100]"); } @Override @@ -151,7 +152,7 @@ public class LuceneCountOperatorTests extends AnyOperatorTestCase { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(TestDriverFactory.create(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add))); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java index f6fba20a2888..4fee40f8e147 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; @@ -166,7 +167,7 @@ public abstract class LuceneMaxOperatorTestCase extends AnyOperatorTestCase { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(TestDriverFactory.create(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add))); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); @@ -194,7 +195,7 @@ public abstract class LuceneMaxOperatorTestCase extends AnyOperatorTestCase { @Override protected final Matcher expectedToStringOfSimple() { - return matchesRegex("LuceneMinMaxOperator\\[maxPageSize = \\d+, remainingDocs=100]"); + return matchesRegex("LuceneMinMaxOperator\\[shards = \\[test\\], maxPageSize = \\d+, remainingDocs=100]"); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java index 3033efa50f37..4449a653945b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; @@ -166,7 +167,7 @@ public abstract class LuceneMinOperatorTestCase extends AnyOperatorTestCase { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(TestDriverFactory.create(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add))); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); @@ -194,7 +195,7 @@ public abstract class LuceneMinOperatorTestCase extends AnyOperatorTestCase { @Override protected final Matcher expectedToStringOfSimple() { - return matchesRegex("LuceneMinMaxOperator\\[maxPageSize = \\d+, remainingDocs=100]"); + return matchesRegex("LuceneMinMaxOperator\\[shards = \\[test], maxPageSize = \\d+, remainingDocs=100]"); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index 4a628d596f14..551bdaad7fe9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.ShuffleDocsOperator; import org.elasticsearch.compute.test.ComputeTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.mapper.BlockDocValuesReader; @@ -209,13 +210,11 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase { ); operators.add(new EvalOperator(blockFactory, luceneQueryEvaluator)); List results = new ArrayList<>(); - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT, scoring).get(driverContext), operators, - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ); OperatorTestCase.runDriver(driver); OperatorTests.assertDriverContext(driverContext); diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java index 28aa9e7976c7..f45a2c645e9d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java @@ -46,8 +46,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest "a:0", "a:1" ], - "processing_nanos" : 1002, - "processing_time" : "1micros", + "process_nanos" : 1002, + "process_time" : "1micros", "slice_index" : 0, "total_slices" : 1, "pages_emitted" : 5, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index 42c9f49a2db7..82012ae36e32 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -22,12 +22,12 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; @@ -99,7 +99,7 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase { @Override protected Matcher expectedToStringOfSimple() { - return matchesRegex("LuceneSourceOperator\\[maxPageSize = \\d+, remainingDocs = \\d+]"); + return matchesRegex("LuceneSourceOperator\\[shards = \\[test], maxPageSize = \\d+, remainingDocs = \\d+]"); } @Override @@ -182,7 +182,7 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + TestDriverFactory.create(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add)) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java index 3af21ba37d08..da867dba61e8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java @@ -22,10 +22,10 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType; @@ -109,7 +109,9 @@ public class LuceneTopNSourceOperatorScoringTests extends LuceneTopNSourceOperat @Override protected Matcher expectedToStringOfSimple() { - return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, scoreMode = COMPLETE, sorts = \\[\\{.+}]]"); + return matchesRegex( + "LuceneTopNSourceOperator\\[shards = \\[test], maxPageSize = \\d+, limit = 100, scoreMode = COMPLETE, sorts = \\[\\{.+}]]" + ); } @Override @@ -127,7 +129,7 @@ public class LuceneTopNSourceOperatorScoringTests extends LuceneTopNSourceOperat List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + TestDriverFactory.create(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add)) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 92eaa78eedcd..95d5cbc65d75 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -25,11 +25,11 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType; @@ -115,7 +115,9 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { @Override protected Matcher expectedToStringOfSimple() { var s = scoring ? 
"COMPLETE" : "TOP_DOCS"; - return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, scoreMode = " + s + ", sorts = \\[\\{.+}]]"); + return matchesRegex( + "LuceneTopNSourceOperator\\[shards = \\[test], maxPageSize = \\d+, limit = 100, scoreMode = " + s + ", sorts = \\[\\{.+}]]" + ); } @Override @@ -187,7 +189,7 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + TestDriverFactory.create(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add)) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 934fbcc0b897..ce0f5c1ccf2c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -33,11 +33,11 @@ import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; @@ -173,13 +173,11 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { List results = new ArrayList<>(); var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( - new Driver( - "test", + TestDriverFactory.create( driverContext, timeSeriesFactory.get(driverContext), List.of(ValuesSourceReaderOperatorTests.factory(reader, metricField, ElementType.LONG).get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ); docs.sort(Comparator.comparing(Doc::host).thenComparing(Comparator.comparingLong(Doc::timestamp).reversed())); @@ -248,13 +246,11 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { var driverContext = driverContext(); List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver( - "test", + TestDriverFactory.create( driverContext, timeSeriesFactory.get(driverContext), List.of(), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ); assertThat(results, empty()); @@ -307,16 +303,14 @@ public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { var voltageField = new NumberFieldMapper.NumberFieldType("voltage", NumberFieldMapper.NumberType.LONG); var hostnameField = new KeywordFieldMapper.KeywordFieldType("hostname"); OperatorTestCase.runDriver( - new Driver( - "test", + TestDriverFactory.create( ctx, timeSeriesFactory.get(ctx), List.of( 
ValuesSourceReaderOperatorTests.factory(reader, voltageField, ElementType.LONG).get(ctx), ValuesSourceReaderOperatorTests.factory(reader, hostnameField, ElementType.BYTES_REF).get(ctx) ), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java index 32164c7954dd..f589da34a86e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; @@ -1298,8 +1299,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { ); var vsShardContext = new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, luceneFactory.get(driverContext), List.of( @@ -1328,8 +1328,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { } finally { page.releaseBlocks(); } - }), - () -> {} + }) ) ) { runDriver(driver); @@ -1376,8 +1375,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { List shardContexts = initShardContexts(); int[] pages = new int[] { 0 }; try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, simpleInput(driverContext, 10), List.of( @@ -1400,8 +1398,7 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { } finally { page.releaseBlocks(); } - }), - () -> {} + }) ) ) { runDriver(d); @@ -1498,13 +1495,11 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { List results = new ArrayList<>(); boolean success = false; try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(input), operators, - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -1526,22 +1521,15 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase { int dummyDrivers = between(0, 10); for (int i = 0; i < dummyDrivers; i++) { drivers.add( - new Driver( - "test", - "dummy-session", - 0, - 0, + TestDriverFactory.create( new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, TestBlockFactory.getNonBreakingInstance()), - () -> "dummy-driver", new SequenceLongBlockSourceOperator( TestBlockFactory.getNonBreakingInstance(), LongStream.range(0, between(1, 100)), between(1, 100) ), List.of(), - new PageConsumerOperator(Page::releaseBlocks), - Driver.DEFAULT_STATUS_INTERVAL, - () -> {} + new PageConsumerOperator(Page::releaseBlocks) ) ); } diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 07a66a473f3b..f3c67a25cc6f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -1306,8 +1307,7 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase { false // no scoring ); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, luceneFactory.get(driverContext), List.of( @@ -1336,8 +1336,7 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase { } finally { page.releaseBlocks(); } - }), - () -> {} + }) ) ) { runDriver(driver); @@ -1409,8 +1408,7 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase { DriverContext driverContext = driverContext(); int[] pages = new int[] { 0 }; try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, simpleInput(driverContext.blockFactory(), 10), List.of( @@ -1433,8 +1431,7 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase { } finally { page.releaseBlocks(); } - }), - () -> {} + }) ) ) { runDriver(d); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index acc62de0884c..bee30fb8a622 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -165,8 +166,7 @@ public class AsyncOperatorTests extends ESTestCase { } }); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, sourceOperator, intermediateOperators, @@ -302,7 +302,7 @@ public class AsyncOperatorTests extends ESTestCase { }; SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver("test", driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); + Driver driver = TestDriverFactory.create(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); 
Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future); assertBusy(() -> assertTrue(future.isDone())); if (failed.get()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java index a39aa10af5f3..3b4579d27ea2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -28,14 +28,16 @@ public class DriverProfileTests extends AbstractWireSerializingTestCase<DriverProfile> { @Override protected Writeable.Reader<DriverProfile> instanceReader() { - return DriverProfile::new; + return DriverProfile::readFrom; } @Override protected DriverProfile createTestInstance() { return new DriverProfile( - DriverStatusTests.randomTaskDescription(), + randomIdentifier(), + randomIdentifier(), + randomIdentifier(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), @@ -117,6 +123,8 @@ public class DriverProfileTests extends AbstractWireSerializingTestCase<DriverProfile> { - switch (between(0, 7)) { - case 0 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription); - case 1 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); - case 2 -> stopMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); - case 3 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong); - case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); - case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); - case 6 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses); - case 7 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); + switch (between(0, 9)) { + case 0 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomIdentifier); + case 1 -> clusterName = randomValueOtherThan(clusterName, DriverStatusTests::randomIdentifier); + case 2 -> nodeName = randomValueOtherThan(nodeName, DriverStatusTests::randomIdentifier); + case 3 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); + case 4 -> stopMillis = randomValueOtherThan(stopMillis, ESTestCase::randomNonNegativeLong); + case 5 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong); + case 6 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); + case 7 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); + case 8 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses); + case 9 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); default -> throw new UnsupportedOperationException(); } - return new DriverProfile(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); + return new DriverProfile( + taskDescription, + clusterName, + nodeName, + startMillis, + stopMillis, + tookNanos, + cpuNanos, + iterations, + operators, + sleeps + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index 83deb57a3ba7..549a97f65739 100644 ---
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -7,8 +7,6 @@ package org.elasticsearch.compute.operator; -import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -33,16 +31,18 @@ public class DriverStatusTests extends AbstractWireSerializingTestCase instanceReader() { - return DriverStatus::new; + return DriverStatus::readFrom; } @Override protected DriverStatus createTestInstance() { return new DriverStatus( - randomSessionId(), - randomTaskDescription(), + randomIdentifier(), + randomIdentifier(), + randomIdentifier(), + randomIdentifier(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), @@ -128,36 +132,30 @@ public class DriverStatusTests extends AbstractWireSerializingTestCase randomOperatorStatuses() { + static List randomOperatorStatuses() { return randomList(0, 5, DriverStatusTests::randomOperatorStatus); } - private static DriverStatus.OperatorStatus randomOperatorStatus() { + private static OperatorStatus randomOperatorStatus() { Supplier status = randomFrom( new LuceneSourceOperatorStatusTests()::createTestInstance, new ValuesSourceReaderOperatorStatusTests()::createTestInstance, new ExchangeSinkOperatorStatusTests()::createTestInstance, () -> null ); - return new DriverStatus.OperatorStatus(randomAlphaOfLength(3), status.get()); + return new OperatorStatus(randomAlphaOfLength(3), status.get()); } @Override protected DriverStatus mutateInstance(DriverStatus instance) throws IOException { var sessionId = instance.sessionId(); var taskDescription = instance.taskDescription(); + var clusterName = instance.clusterName(); + var nodeName = instance.nodeName(); long started = instance.started(); long lastUpdated = instance.lastUpdated(); long cpuNanos = instance.cpuNanos(); @@ -166,22 +164,26 @@ public class DriverStatusTests extends AbstractWireSerializingTestCase sessionId = randomValueOtherThan(sessionId, this::randomSessionId); - case 1 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription); - case 2 -> started = randomValueOtherThan(started, ESTestCase::randomNonNegativeLong); - case 3 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong); - case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); - case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); - case 6 -> status = randomValueOtherThan(status, this::randomStatus); - case 7 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); - case 8 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); - case 9 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); + switch (between(0, 11)) { + case 0 -> sessionId = randomValueOtherThan(sessionId, ESTestCase::randomIdentifier); + case 1 -> taskDescription = randomValueOtherThan(taskDescription, ESTestCase::randomIdentifier); + case 2 -> clusterName = randomValueOtherThan(clusterName, ESTestCase::randomIdentifier); + case 3 -> nodeName = randomValueOtherThan(nodeName, ESTestCase::randomIdentifier); + case 4 -> started = randomValueOtherThan(started, 
ESTestCase::randomNonNegativeLong); + case 5 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong); + case 6 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); + case 7 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); + case 8 -> status = randomValueOtherThan(status, this::randomStatus); + case 9 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); + case 10 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); + case 11 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); default -> throw new UnsupportedOperationException(); } return new DriverStatus( sessionId, taskDescription, + clusterName, + nodeName, started, lastUpdated, cpuNanos, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index 35ccf0da4296..23f21e67e14c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -65,19 +66,7 @@ public class DriverTests extends ESTestCase { long waitTime = randomLongBetween(1000, 100000); long tickTime = randomLongBetween(1, 10000); - Driver driver = new Driver( - "unset", - "test", - startEpoch, - startNanos, - driverContext, - () -> "unset", - new CannedSourceOperator(inPages.iterator()), - List.of(), - new TestResultPageSinkOperator(outPages::add), - TimeValue.timeValueDays(10), - () -> {} - ); + Driver driver = createDriver(startEpoch, startNanos, driverContext, inPages, outPages, TimeValue.timeValueDays(10)); NowSupplier nowSupplier = new NowSupplier(startNanos, waitTime, tickTime); @@ -115,19 +104,7 @@ public class DriverTests extends ESTestCase { long waitTime = randomLongBetween(1000, 100000); long tickTime = randomLongBetween(1, 10000); - Driver driver = new Driver( - "unset", - "test", - startEpoch, - startNanos, - driverContext, - () -> "unset", - new CannedSourceOperator(inPages.iterator()), - List.of(), - new TestResultPageSinkOperator(outPages::add), - TimeValue.timeValueDays(10), - () -> {} - ); + Driver driver = createDriver(startEpoch, startNanos, driverContext, inPages, outPages, TimeValue.timeValueDays(10)); NowSupplier nowSupplier = new NowSupplier(startNanos, waitTime, tickTime); for (int i = 0; i < inPages.size(); i++) { @@ -166,19 +143,7 @@ public class DriverTests extends ESTestCase { long waitTime = randomLongBetween(1000, 100000); long tickTime = randomLongBetween(1, 10000); - Driver driver = new Driver( - "unset", - "test", - startEpoch, - startNanos, - driverContext, - () -> "unset", - new CannedSourceOperator(inPages.iterator()), - List.of(), - new TestResultPageSinkOperator(outPages::add), - TimeValue.timeValueNanos(tickTime), - () -> {} - ); + Driver driver = 
createDriver(startEpoch, startNanos, driverContext, inPages, outPages, TimeValue.timeValueNanos(tickTime)); NowSupplier nowSupplier = new NowSupplier(startNanos, waitTime, tickTime); for (int i = 0; i < inPages.size(); i++) { @@ -202,7 +167,32 @@ public class DriverTests extends ESTestCase { assertThat(driver.profile().iterations(), equalTo((long) inPages.size())); } - class NowSupplier implements LongSupplier { + private static Driver createDriver( + long startEpoch, + long startNanos, + DriverContext driverContext, + List inPages, + List outPages, + TimeValue statusInterval + ) { + return new Driver( + "unset", + "test", + "test", + "test", + startEpoch, + startNanos, + driverContext, + () -> "unset", + new CannedSourceOperator(inPages.iterator()), + List.of(), + new TestResultPageSinkOperator(outPages::add), + statusInterval, + () -> {} + ); + } + + static class NowSupplier implements LongSupplier { private final long startNanos; private final long waitTime; private final long tickTime; @@ -234,7 +224,7 @@ public class DriverTests extends ESTestCase { WarningsOperator warning1 = new WarningsOperator(threadPool); WarningsOperator warning2 = new WarningsOperator(threadPool); CyclicBarrier allPagesProcessed = new CyclicBarrier(2); - Driver driver = new Driver("test", driverContext, new CannedSourceOperator(inPages.iterator()) { + Driver driver = TestDriverFactory.create(driverContext, new CannedSourceOperator(inPages.iterator()) { @Override public Page getOutput() { assertRunningWithRegularUser(threadPool); @@ -250,7 +240,7 @@ public class DriverTests extends ESTestCase { throw new AssertionError(e); } } - }), () -> {}); + })); ThreadContext threadContext = threadPool.getThreadContext(); CountDownLatch driverCompleted = new CountDownLatch(1); try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { @@ -318,7 +308,7 @@ public class DriverTests extends ESTestCase { } }); - Driver driver = new Driver("test", driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {}); + Driver driver = TestDriverFactory.create(driverContext, sourceOperator, List.of(delayOperator), sinkOperator); ThreadContext threadContext = threadPool.getThreadContext(); PlainActionFuture future = new PlainActionFuture<>(); @@ -338,7 +328,7 @@ public class DriverTests extends ESTestCase { var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), between(1, 5), System::currentTimeMillis); var sourceOperator = new ExchangeSourceOperator(sourceHandler.createExchangeSource()); var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {}), Function.identity()); - Driver driver = new Driver("test", driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); + Driver driver = TestDriverFactory.create(driverContext, sourceOperator, List.of(), sinkOperator); PlainActionFuture future = new PlainActionFuture<>(); Driver.start(threadPool.getThreadContext(), threadPool.executor("esql"), driver, between(1, 1000), future); assertBusy( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index f08552913963..bc3024ac4f45 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -23,6 +23,7 @@ import 
org.elasticsearch.compute.test.BlockTestUtils; import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -67,13 +68,11 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(input.iterator()), List.of(simpleWithMode(AggregatorMode.INITIAL).get(driverContext), simpleWithMode(AggregatorMode.FINAL).get(driverContext)), - new TestResultPageSinkOperator(page -> results.add(page)), - () -> {} + new TestResultPageSinkOperator(page -> results.add(page)) ) ) { runDriver(d); @@ -89,13 +88,11 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(partials.iterator()), List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -111,8 +108,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(input.iterator()), List.of( @@ -120,8 +116,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { simpleWithMode(AggregatorMode.INTERMEDIATE).get(driverContext), simpleWithMode(AggregatorMode.FINAL).get(driverContext) ), - new TestResultPageSinkOperator(page -> results.add(page)), - () -> {} + new TestResultPageSinkOperator(page -> results.add(page)) ) ) { runDriver(d); @@ -144,13 +139,11 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { List results = new ArrayList<>(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(intermediates.iterator()), List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -239,8 +232,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { for (List pages : splitInput) { DriverContext driver1Context = driverContext(); drivers.add( - new Driver( - "test", + TestDriverFactory.create( driver1Context, new CannedSourceOperator(pages.iterator()), List.of( @@ -250,15 +242,13 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { simpleWithMode(AggregatorMode.INTERMEDIATE).get(driver1Context), intermediateOperatorItr.next() ), - new ExchangeSinkOperator(sinkExchanger.createExchangeSink(() -> {}), Function.identity()), - () -> {} + new ExchangeSinkOperator(sinkExchanger.createExchangeSink(() -> {}), Function.identity()) ) ); } DriverContext driver2Context = driverContext(); drivers.add( - new Driver( - "test", + 
TestDriverFactory.create( driver2Context, new ExchangeSourceOperator(sourceExchanger.createExchangeSource()), List.of( @@ -268,8 +258,7 @@ public abstract class ForkingOperatorTestCase extends OperatorTestCase { simpleWithMode(AggregatorMode.FINAL).get(driver2Context), intermediateOperatorItr.next() ), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ); assert intermediateOperatorItr.hasNext() == false; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index 103a6a35651c..6956e9ba59d9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; import org.elasticsearch.compute.test.ComputeTestCase; import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; @@ -303,13 +304,11 @@ public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver( - "test", + TestDriverFactory.create( ctx, sourceOperatorFactory.get(ctx), CollectionUtils.concatLists(intermediateOperators, List.of(intialAgg, intermediateAgg, finalAgg)), - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ); List> values = new ArrayList<>(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 57dfe65ca485..2b2959516478 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -300,40 +300,24 @@ public class ExchangeServiceTests extends ESTestCase { int numSources = randomIntBetween(1, 8); List drivers = new ArrayList<>(numSinks + numSources); for (int i = 0; i < numSinks; i++) { - String description = "sink-" + i; - ExchangeSinkOperator sinkOperator = new ExchangeSinkOperator(exchangeSink.get(), Function.identity()); DriverContext dc = driverContext(); - Driver d = new Driver( + Driver d = createDriver( "test-session:1", - "test", - 0, - 0, + "sink-" + i, dc, - () -> description, seqNoGenerator.get(dc), - List.of(), - sinkOperator, - Driver.DEFAULT_STATUS_INTERVAL, - () -> {} + new ExchangeSinkOperator(exchangeSink.get(), Function.identity()) ); drivers.add(d); } for (int i = 0; i < numSources; i++) { - String description = "source-" + i; - ExchangeSourceOperator sourceOperator = new ExchangeSourceOperator(exchangeSource.get()); DriverContext dc = driverContext(); - Driver d = new Driver( + Driver d = createDriver( "test-session:2", - "test", - 0, - 0, + "source-" + i, dc, - () -> description, - sourceOperator, - List.of(), - seqNoCollector.get(dc), - 
Driver.DEFAULT_STATUS_INTERVAL, - () -> {} + new ExchangeSourceOperator(exchangeSource.get()), + seqNoCollector.get(dc) ); drivers.add(d); } @@ -348,6 +332,30 @@ public class ExchangeServiceTests extends ESTestCase { return seqNoCollector.receivedSeqNos; } + private static Driver createDriver( + String sessionId, + String description, + DriverContext dc, + SourceOperator sourceOperator, + SinkOperator sinkOperator + ) { + return new Driver( + sessionId, + "test", + "unset", + "unset", + 0, + 0, + dc, + () -> description, + sourceOperator, + List.of(), + sinkOperator, + Driver.DEFAULT_STATUS_INTERVAL, + () -> {} + ); + } + public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 49d91df556d1..753a546256cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.test.TestBlockBuilder; import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.Tuple; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -542,8 +543,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actualTop = new ArrayList<>(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -557,8 +557,7 @@ public class TopNOperatorTests extends OperatorTestCase { randomPageSize() ) ), - new PageConsumerOperator(page -> readInto(actualTop, page)), - () -> {} + new PageConsumerOperator(page -> readInto(actualTop, page)) ) ) { runDriver(driver); @@ -633,8 +632,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actualTop = new ArrayList<>(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -648,8 +646,7 @@ public class TopNOperatorTests extends OperatorTestCase { randomPageSize() ) ), - new PageConsumerOperator(page -> readInto(actualTop, page)), - () -> {} + new PageConsumerOperator(page -> readInto(actualTop, page)) ) ) { runDriver(driver); @@ -669,8 +666,7 @@ public class TopNOperatorTests extends OperatorTestCase { ) { List> outputValues = new ArrayList<>(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( @@ -691,8 +687,7 @@ public class TopNOperatorTests extends OperatorTestCase { outputValues.add(tuple(block1.isNull(i) ? null : block1.getLong(i), block2.isNull(i) ? 
null : block2.getLong(i))); } page.releaseBlocks(); - }), - () -> {} + }) ) ) { runDriver(driver); @@ -940,8 +935,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actualValues = new ArrayList<>(); int topCount = randomIntBetween(1, values.size()); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( @@ -955,8 +949,7 @@ public class TopNOperatorTests extends OperatorTestCase { randomPageSize() ) ), - new PageConsumerOperator(p -> readInto(actualValues, p)), - () -> {} + new PageConsumerOperator(p -> readInto(actualValues, p)) ) ) { runDriver(driver); @@ -1115,8 +1108,7 @@ public class TopNOperatorTests extends OperatorTestCase { DriverContext driverContext = driverContext(); List> actual = new ArrayList<>(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1130,8 +1122,7 @@ public class TopNOperatorTests extends OperatorTestCase { randomPageSize() ) ), - new PageConsumerOperator(p -> readInto(actual, p)), - () -> {} + new PageConsumerOperator(p -> readInto(actual, p)) ) ) { runDriver(driver); @@ -1243,8 +1234,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actual = new ArrayList<>(); DriverContext driverContext = driverContext(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1258,8 +1248,7 @@ public class TopNOperatorTests extends OperatorTestCase { randomPageSize() ) ), - new PageConsumerOperator(p -> readInto(actual, p)), - () -> {} + new PageConsumerOperator(p -> readInto(actual, p)) ) ) { runDriver(driver); @@ -1332,8 +1321,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actual = new ArrayList<>(); DriverContext driverContext = driverContext(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -1350,8 +1338,7 @@ public class TopNOperatorTests extends OperatorTestCase { randomPageSize() ) ), - new PageConsumerOperator(p -> readInto(actual, p)), - () -> {} + new PageConsumerOperator(p -> readInto(actual, p)) ) ) { runDriver(driver); @@ -1373,8 +1360,7 @@ public class TopNOperatorTests extends OperatorTestCase { List> actual = new ArrayList<>(); DriverContext driverContext = driverContext(); try ( - Driver driver = new Driver( - "test", + Driver driver = TestDriverFactory.create( driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), List.of( @@ -1396,8 +1382,7 @@ public class TopNOperatorTests extends OperatorTestCase { p.releaseBlocks(); throw new RuntimeException("boo"); } - }), - () -> {} + }) ) ) { Exception e = expectThrows(RuntimeException.class, () -> runDriver(driver)); diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java index 876461cafcf8..0fad56dc99b8 100644 --- a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java +++ 
b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java @@ -188,13 +188,11 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { while (source.hasNext()) { List in = source.next(); try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext(), new CannedSourceOperator(in.iterator()), operators.get(), - new PageConsumerOperator(result::add), - () -> {} + new PageConsumerOperator(result::add) ) ) { runDriver(d); @@ -263,13 +261,11 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { List results = new ArrayList<>(); boolean success = false; try ( - Driver d = new Driver( - "test", + Driver d = TestDriverFactory.create( driverContext, new CannedSourceOperator(input), operators, - new TestResultPageSinkOperator(results::add), - () -> {} + new TestResultPageSinkOperator(results::add) ) ) { runDriver(d); @@ -291,22 +287,15 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { int dummyDrivers = between(0, 10); for (int i = 0; i < dummyDrivers; i++) { drivers.add( - new Driver( - "test", - "dummy-session", - 0, - 0, + TestDriverFactory.create( new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, TestBlockFactory.getNonBreakingInstance()), - () -> "dummy-driver", new SequenceLongBlockSourceOperator( TestBlockFactory.getNonBreakingInstance(), LongStream.range(0, between(1, 100)), between(1, 100) ), List.of(), - new PageConsumerOperator(page -> page.releaseBlocks()), - Driver.DEFAULT_STATUS_INTERVAL, - () -> {} + new PageConsumerOperator(Page::releaseBlocks) ) ); } diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestDriverFactory.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestDriverFactory.java new file mode 100644 index 000000000000..bd1d4d5fc53d --- /dev/null +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestDriverFactory.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.test; + +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.SinkOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Releasable; + +import java.util.List; + +public class TestDriverFactory { + + /** + * This is a convenience factory to create a test driver instance + * that accepts fewer parameters + */ + public static Driver create( + DriverContext driverContext, + SourceOperator source, + List intermediateOperators, + SinkOperator sink + ) { + return create(driverContext, source, intermediateOperators, sink, () -> {}); + } + + public static Driver create( + DriverContext driverContext, + SourceOperator source, + List intermediateOperators, + SinkOperator sink, + Releasable releasable + ) { + return new Driver( + "unset", + "test-task", + "unset", + "unset", + System.currentTimeMillis(), + System.nanoTime(), + driverContext, + () -> null, + source, + intermediateOperators, + sink, + Driver.DEFAULT_STATUS_INTERVAL, + releasable + ); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 58c82d800954..4365292c621f 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -531,7 +531,10 @@ public class RestEsqlIT extends RestEsqlTestCase { } private MapMatcher commonProfile() { - return matchesMap().entry("task_description", any(String.class)) + return matchesMap() // + .entry("task_description", any(String.class)) + .entry("cluster_name", any(String.class)) + .entry("node_name", any(String.class)) .entry("start_millis", greaterThan(0L)) .entry("stop_millis", greaterThan(0L)) .entry("iterations", greaterThan(0L)) @@ -565,7 +568,7 @@ public class RestEsqlIT extends RestEsqlTestCase { .entry("current", DocIdSetIterator.NO_MORE_DOCS) .entry("pages_emitted", greaterThan(0)) .entry("rows_emitted", greaterThan(0)) - .entry("processing_nanos", greaterThan(0)) + .entry("process_nanos", greaterThan(0)) .entry("processed_queries", List.of("*:*")); case "ValuesSourceReaderOperator" -> basicProfile().entry("readers_built", matchesMap().extraOk()); case "AggregationOperator" -> matchesMap().entry("pages_processed", greaterThan(0)) @@ -599,7 +602,7 @@ public class RestEsqlIT extends RestEsqlTestCase { .entry("total_slices", greaterThan(0)) .entry("slice_max", 0) .entry("slice_min", 0) - .entry("processing_nanos", greaterThan(0)) + .entry("process_nanos", greaterThan(0)) .entry("processed_queries", List.of("*:*")) .entry("slice_index", 0); default -> throw new AssertionError("unexpected status: " + o); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/semantic_text.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/semantic_text.csv index bd5fe7fad3a4..f79e44ab67ca 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/semantic_text.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/semantic_text.csv @@ -1,4 +1,4 @@ 
-_id:keyword,semantic_text_field:semantic_text,st_bool:semantic_text,st_cartesian_point:semantic_text,st_cartesian_shape:semantic_text,st_datetime:semantic_text,st_double:semantic_text,st_geopoint:semantic_text,st_geoshape:semantic_text,st_integer:semantic_text,st_ip:semantic_text,st_long:semantic_text,st_unsigned_long:semantic_text,st_version:semantic_text,st_multi_value:semantic_text,st_unicode:semantic_text,host:keyword,description:text,value:long,st_base64:semantic_text,st_logs:semantic_text -1,live long and prosper,false,"POINT(4297.11 -1475.53)",,1953-09-02T00:00:00.000Z,5.20128E11,"POINT(42.97109630194 14.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",23,1.1.1.1,2147483648,2147483648,1.2.3,["Hello there!", "This is a random value", "for testing purposes"],你吃饭了吗,"host1","some description1",1001,ZWxhc3RpYw==,"2024-12-23T12:15:00.000Z 1.2.3.4 example@example.com 4553" -2,all we have to decide is what to do with the time that is given to us,true,"POINT(7580.93 2272.77)",,2023-09-24T15:57:00.000Z,4541.11,"POINT(37.97109630194 21.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",122,1.1.2.1,123,2147483648.2,9.0.0,["nice to meet you", "bye bye!"],["谢谢", "对不起我的中文不好"],"host2","some description2",1002,aGVsbG8=,"2024-01-23T12:15:00.000Z 1.2.3.4 foo@example.com 42" -3,be excellent to each other,,,,,,,,,,,,,,,"host3","some description3",1003,,"2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" +_id:keyword,semantic_text_field:semantic_text,st_bool:semantic_text,st_cartesian_point:semantic_text,st_cartesian_shape:semantic_text,st_datetime:semantic_text,st_double:semantic_text,st_geopoint:semantic_text,st_geoshape:semantic_text,st_integer:semantic_text,st_ip:semantic_text,st_long:semantic_text,st_unsigned_long:semantic_text,st_version:semantic_text,st_multi_value:semantic_text,st_unicode:semantic_text,host:keyword,description:text,value:long,st_base64:semantic_text,st_logs:semantic_text,language_name:keyword +1,live long and prosper,false,"POINT(4297.11 -1475.53)",,1953-09-02T00:00:00.000Z,5.20128E11,"POINT(42.97109630194 14.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",23,1.1.1.1,2147483648,2147483648,1.2.3,["Hello there!", "This is a random value", "for testing purposes"],你吃饭了吗,"host1","some description1",1001,ZWxhc3RpYw==,"2024-12-23T12:15:00.000Z 1.2.3.4 example@example.com 4553",English +2,all we have to decide is what to do with the time that is given to us,true,"POINT(7580.93 2272.77)",,2023-09-24T15:57:00.000Z,4541.11,"POINT(37.97109630194 21.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",122,1.1.2.1,123,2147483648.2,9.0.0,["nice to meet you", "bye bye!"],["谢谢", "对不起我的中文不好"],"host2","some description2",1002,aGVsbG8=,"2024-01-23T12:15:00.000Z 1.2.3.4 foo@example.com 42",French +3,be excellent to each other,,,,,,,,,,,,,,,"host3","some description3",1003,,"2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42",Spanish diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json index db15133f036b..5fa25e01ef0e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json @@ -76,6 +76,9 @@ "st_logs": { "type": "semantic_text", "inference_id": "test_sparse_inference" + }, + "language_name": { + "type": "keyword" } } } diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec index e6a63d1078d9..bf27ee0d3316 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec @@ -721,3 +721,39 @@ book_no:keyword 7140 2714 ; + +testMatchWithSemanticTextAndLookupJoin +required_capability: match_operator_colon +required_capability: semantic_text_field_caps +required_capability: join_lookup_v12 + +from semantic_text METADATA _id +| where semantic_text_field:"something" +| LOOKUP JOIN languages_lookup ON language_name +| KEEP _id, language_name, language_code +; + +ignoreOrder: true + +_id:keyword | language_name:keyword | language_code:integer +1 | English | 1 +2 | French | 2 +3 | Spanish | 3 +; + +testMatchWithSemanticTextKqlAndLookupJoin +required_capability: match_operator_colon +required_capability: semantic_text_type +required_capability: kql_function +required_capability: semantic_text_field_caps +required_capability: join_lookup_v12 + +from semantic_text +| where kql("host:host1") AND semantic_text_field:"something" +| LOOKUP JOIN languages_lookup ON language_name +| KEEP host, semantic_text_field, language_name, language_code +; + +host:keyword | semantic_text_field:text | language_name:keyword | language_code:integer +"host1" | live long and prosper | English | 1 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java index c4da0bf32ef9..19157b636dff 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.DriverProfile; -import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.compute.operator.OperatorStatus; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -350,7 +350,7 @@ public class EnrichIT extends AbstractEsqlIntegTestCase { assertNotNull(profile); List drivers = profile.drivers(); assertThat(drivers.size(), greaterThanOrEqualTo(2)); - List enrichOperators = drivers.stream() + List enrichOperators = drivers.stream() .flatMap(d -> d.operators().stream()) .filter(status -> status.operator().startsWith("EnrichOperator")) .toList(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index b15e4cfe739f..fd8cc5325ed1 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import 
org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.compute.operator.DriverTaskRunner; +import org.elasticsearch.compute.operator.OperatorStatus; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; @@ -104,9 +105,9 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { DriverStatus status = (DriverStatus) task.status(); assertThat(status.sessionId(), not(emptyOrNullString())); String taskDescription = status.taskDescription(); - for (DriverStatus.OperatorStatus o : status.activeOperators()) { + for (OperatorStatus o : status.activeOperators()) { logger.info("status {}", o); - if (o.operator().startsWith("LuceneSourceOperator[maxPageSize = " + pageSize())) { + if (o.operator().startsWith("LuceneSourceOperator[")) { assertThat(taskDescription, equalTo("data")); LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices())); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 1bbcc46c0555..da18eaa3a6a4 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.DriverRunner; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.TestDriverFactory; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; @@ -226,8 +227,7 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { ); DriverContext driverContext = driverContext(); try ( - var driver = new Driver( - "test", + var driver = TestDriverFactory.create( driverContext, source.get(driverContext), List.of(reader.get(driverContext), lookup.get(driverContext)), @@ -247,8 +247,7 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { } finally { page.releaseBlocks(); } - }), - () -> {} + }) ) ) { PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index a3e89716ee6a..7c5e2538e2e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -354,7 +354,7 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action. 
} public Profile(StreamInput in) throws IOException { - this.drivers = in.readCollectionAsImmutableList(DriverProfile::new); + this.drivers = in.readCollectionAsImmutableList(DriverProfile::readFrom); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 2b08a60e1622..92a27ba9cbef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -333,6 +333,8 @@ public abstract class AbstractLookupService(); - forEachFromRelation(plan, relation -> indices.addAll(relation.concreteIndices())); + forEachRelation(plan, relation -> indices.addAll(relation.concreteIndices())); return indices; } @@ -122,42 +118,16 @@ public class PlannerUtils { return Strings.EMPTY_ARRAY; } var indices = new LinkedHashSet(); - forEachFromRelation(plan, relation -> indices.addAll(asList(Strings.commaDelimitedListToStringArray(relation.indexPattern())))); + forEachRelation(plan, relation -> indices.addAll(asList(Strings.commaDelimitedListToStringArray(relation.indexPattern())))); return indices.toArray(String[]::new); } - /** - * Iterates over the plan and applies the action to each {@link EsRelation} node. - *
<p>
- * This method ignores the right side of joins. - *
</p>
- */ - private static void forEachFromRelation(PhysicalPlan plan, Consumer action) { - // Take the non-join-side fragments - forEachUpWithChildren(plan, FragmentExec.class, fragment -> { - // Take the non-join-side relations - forEachUpWithChildren( - fragment.fragment(), - EsRelation.class, - action, - node -> node instanceof Join join ? List.of(join.left()) : node.children() - ); - }, node -> node instanceof LookupJoinExec join ? List.of(join.left()) : node.children()); - } - - /** - * Similar to {@link Node#forEachUp(Class, Consumer)}, but with a custom callback to get the node children. - */ - private static , E extends T> void forEachUpWithChildren( - T node, - Class typeToken, - Consumer action, - Function> childrenGetter - ) { - childrenGetter.apply(node).forEach(c -> forEachUpWithChildren(c, typeToken, action, childrenGetter)); - if (typeToken.isInstance(node)) { - action.accept(typeToken.cast(node)); - } + private static void forEachRelation(PhysicalPlan plan, Consumer action) { + plan.forEachDown(FragmentExec.class, f -> f.fragment().forEachDown(EsRelation.class, r -> { + if (r.indexMode() != IndexMode.LOOKUP) { + action.accept(r); + } + })); } public static PhysicalPlan localPlan( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java index 8d2e092cd414..72d5187bfd7c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java @@ -54,7 +54,7 @@ final class ComputeResponse extends TransportResponse { super(in); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { if (in.readBoolean()) { - profiles = in.readCollectionAsImmutableList(DriverProfile::new); + profiles = in.readCollectionAsImmutableList(DriverProfile::readFrom); } else { profiles = null; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java index b1eb41ffc99d..1313db9e7044 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java @@ -32,7 +32,7 @@ final class DataNodeComputeResponse extends TransportResponse { DataNodeComputeResponse(StreamInput in) throws IOException { if (DataNodeComputeHandler.supportShardLevelRetryFailure(in.getTransportVersion())) { - this.profiles = in.readCollectionAsImmutableList(DriverProfile::new); + this.profiles = in.readCollectionAsImmutableList(DriverProfile::readFrom); this.shardLevelFailures = in.readMap(ShardId::new, StreamInput::readException); } else { this.profiles = Objects.requireNonNullElse(new ComputeResponse(in).getProfiles(), List.of()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index cc4e70632d67..c9af81c8e35b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -7,16 +7,14 @@ package org.elasticsearch.xpack.esql.action; 
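The net effect of the profile changes above: DriverProfile, like DriverStatus, now records which cluster and node a driver ran on, and is materialized through the static readFrom reader instead of a bare StreamInput constructor. A consolidated sketch of the ten-field constructor these tests exercise, with placeholder values; the argument order mirrors the constructor call in DriverProfileTests.mutateInstance above, and DriverSleeps.empty() is assumed here as the no-sleeps value:

import java.util.List;

import org.elasticsearch.compute.operator.DriverProfile;
import org.elasticsearch.compute.operator.DriverSleeps;

// Sketch with illustrative values, not a production call site.
static DriverProfile exampleProfile(long startMillis, long stopMillis, long tookNanos, long cpuNanos, long iterations) {
    return new DriverProfile(
        "data",         // taskDescription
        "test-cluster", // clusterName, new in this patch
        "node-1",       // nodeName, new in this patch
        startMillis,
        stopMillis,
        tookNanos,
        cpuNanos,
        iterations,
        List.of(),             // operator statuses, now the top-level OperatorStatus type
        DriverSleeps.empty()   // assumed no-sleeps helper
    );
}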
-import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverSleeps; -import org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.OperatorStatus; import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.util.List; @@ -51,7 +49,9 @@ public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCa private DriverProfile randomDriverProfile() { return new DriverProfile( - RandomStrings.randomAsciiLettersOfLength(random(), 5), + randomIdentifier(), + randomIdentifier(), + randomIdentifier(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), @@ -62,7 +62,7 @@ public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCa ); } - private DriverStatus.OperatorStatus randomOperatorStatus() { + private OperatorStatus randomOperatorStatus() { String name = randomAlphaOfLength(4); Operator.Status status = randomBoolean() ? null @@ -72,6 +72,6 @@ public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCa randomNonNegativeLong(), randomNonNegativeLong() ); - return new DriverStatus.OperatorStatus(name, status); + return new OperatorStatus(name, status); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 065495cbad93..726b5a959653 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverSleeps; -import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.compute.operator.OperatorStatus; import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; @@ -724,12 +724,14 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase rootCauseExceptions = ConcurrentCollections.newQueue(); IntStream.range(0, between(1, 100)) .forEach( diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java index 91afa4da560b..cf55fc0e0ac5 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.xpack.ilm; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.WarningFailureException; -import org.elasticsearch.cluster.metadata.DataStream; import 
org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.xcontent.XContentHelper; @@ -81,20 +80,15 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { indexDocument(client(), dataStream, true); - assertBusy(() -> assertTrue(indexExists(DataStream.getDefaultBackingIndexName(dataStream, 2)))); - assertBusy( - () -> assertTrue( - Boolean.parseBoolean( - (String) getIndexSettingsAsMap(DataStream.getDefaultBackingIndexName(dataStream, 2)).get("index.hidden") - ) - ) - ); - assertBusy( - () -> assertThat( - getStepKeyForIndex(client(), DataStream.getDefaultBackingIndexName(dataStream, 1)), - equalTo(PhaseCompleteStep.finalStep("hot").getKey()) - ) - ); + assertBusy(() -> { + final var backingIndices = getBackingIndices(client(), dataStream); + assertEquals(2, backingIndices.size()); + assertTrue(Boolean.parseBoolean((String) getIndexSettingsAsMap(backingIndices.getLast()).get("index.hidden"))); + }); + assertBusy(() -> { + final var backingIndices = getBackingIndices(client(), dataStream); + assertEquals(PhaseCompleteStep.finalStep("hot").getKey(), getStepKeyForIndex(client(), backingIndices.getFirst())); + }); } public void testRolloverIsSkippedOnManualDataStreamRollover() throws Exception { @@ -104,7 +98,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { indexDocument(client(), dataStream, true); - String firstGenerationIndex = DataStream.getDefaultBackingIndexName(dataStream, 1); + String firstGenerationIndex = getBackingIndices(client(), dataStream).getFirst(); assertBusy( () -> assertThat(getStepKeyForIndex(client(), firstGenerationIndex).name(), equalTo(WaitForRolloverReadyStep.NAME)), 30, @@ -112,7 +106,10 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { ); rolloverMaxOneDocCondition(client(), dataStream); - assertBusy(() -> assertThat(indexExists(DataStream.getDefaultBackingIndexName(dataStream, 2)), is(true)), 30, TimeUnit.SECONDS); + assertBusy(() -> { + final var backingIndices = getBackingIndices(client(), dataStream); + assertEquals(2, backingIndices.size()); + }, 30, TimeUnit.SECONDS); // even though the first index doesn't have 2 documents to fulfill the rollover condition, it should complete the rollover action // because it's not the write index anymore @@ -129,7 +126,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName)); indexDocument(client(), dataStream, true); - String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1); + String backingIndexName = getBackingIndices(client(), dataStream).getFirst(); assertBusy( () -> assertThat( "original index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore", @@ -143,8 +140,11 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { // Manual rollover the original index such that it's not the write index in the data stream anymore rolloverMaxOneDocCondition(client(), dataStream); // Wait for rollover to happen - String rolloverIndex = DataStream.getDefaultBackingIndexName(dataStream, 2); - assertBusy(() -> assertTrue("the rollover action created the rollover index", indexExists(rolloverIndex)), 30, TimeUnit.SECONDS); + assertBusy( + () -> assertEquals("the rollover action created the rollover index", 2, getBackingIndices(client(), dataStream).size()), + 30, + TimeUnit.SECONDS + ); String shrunkenIndex = waitAndGetShrinkIndexName(client(), 
backingIndexName); assertBusy(() -> assertTrue(indexExists(shrunkenIndex)), 30, TimeUnit.SECONDS); @@ -160,7 +160,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName)); indexDocument(client(), dataStream, true); - String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1); + String backingIndexName = getBackingIndices(client(), dataStream).getFirst(); String restoredIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + backingIndexName; assertBusy( @@ -191,7 +191,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName)); indexDocument(client(), dataStream, true); - String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1); + String backingIndexName = getBackingIndices(client(), dataStream).getFirst(); assertBusy( () -> assertThat( "index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore", @@ -226,7 +226,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { indexDocument(client(), dataStream, true); // The freeze action is a noop action with only noop steps and should pass through to complete the phase asap. - String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1); + String backingIndexName = getBackingIndices(client(), dataStream).getFirst(); assertBusy(() -> { try { assertThat(explainIndex(client(), backingIndexName).get("step"), is(PhaseCompleteStep.NAME)); @@ -248,7 +248,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase { createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName)); indexDocument(client(), dataStream, true); - String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1); + String backingIndexName = getBackingIndices(client(), dataStream).getFirst(); assertBusy( () -> assertThat( "index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore", diff --git a/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy index d814dfbb1c11..b4791207a15b 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy @@ -46,8 +46,9 @@ grant { grant codeBase "${codebase.netty-common}" { // for reading the system-wide configuration for the backlog of established sockets permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read"; - // Netty sets custom classloader for some of its internal threads + // Netty gets and sets classloaders for some of its internal threads permission java.lang.RuntimePermission "setContextClassLoader"; + permission java.lang.RuntimePermission "getClassLoader"; }; grant codeBase "${codebase.netty-transport}" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java index 2ac2d4ebf0c3..4f64b780e1f9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.Nullable; import 
org.elasticsearch.core.PathUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ssl.SSLService; import org.junit.Before; @@ -362,6 +363,11 @@ public class SSLErrorMessageFileTests extends ESTestCase { String configKey, BiConsumer configure ) throws Exception { + assumeTrue( + "Requires Security Manager to block access; entitlements are not checked for unit tests", + RuntimeVersionFeature.isSecurityManagerAvailable() + ); + final String prefix = randomSslPrefix(); final Settings.Builder settings = Settings.builder(); configure.accept(prefix, settings); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml index ec73081cd424..917645d9e1d4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml @@ -113,7 +113,10 @@ setup: snapshot: test_snapshot_2 wait_for_completion: true body: | - { "indices": "test_synthetic" } + { + "indices": "test_synthetic", + "include_global_state": false + } - match: { snapshot.snapshot: test_snapshot_2 } - match: { snapshot.state : PARTIAL } @@ -130,7 +133,10 @@ setup: snapshot: test_snapshot_3 wait_for_completion: true body: | - { "indices": "test_*" } + { + "indices": "test_*", + "include_global_state": false + } - match: { snapshot.snapshot: test_snapshot_3 } - match: { snapshot.state : PARTIAL }
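For the REST layer, the RestEsqlIT changes earlier in this patch pin down how the new fields surface: each driver entry of an ESQL profile reports cluster_name and node_name alongside task_description, and the Lucene operator statuses expose process_nanos (renamed from processing_nanos). A hedged sketch of a driver-level matcher in that test's MapMatcher style, keeping only the identity entries and letting the timing and operator entries pass through via extraOk():

import static org.elasticsearch.test.MapMatcher.matchesMap;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.greaterThan;

import org.elasticsearch.test.MapMatcher;

// Sketch: the driver-level identity entries asserted in RestEsqlIT, with the
// remaining timing and operator entries allowed through rather than spelled out.
private MapMatcher driverIdentityMatcher() {
    return matchesMap() //
        .entry("task_description", any(String.class))
        .entry("cluster_name", any(String.class)) // new in this patch
        .entry("node_name", any(String.class))    // new in this patch
        .entry("start_millis", greaterThan(0L))
        .entry("stop_millis", greaterThan(0L))
        .extraOk();
}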