From 7563e71e90f55f385a8cfdc1b3880d7e5e7e9ba1 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Thu, 23 Jan 2025 09:14:16 +0000
Subject: [PATCH 01/29] Release ref on cancellation in
 `GetSnapshotInfoExecutor` (#120529)

Not a bug today because we also throttle calls to `getSnapshotInfo` so we
never run out of refs before processing all the cancelled tasks, but still
we should release `ref` on every path through this method.
---
 .../cluster/snapshots/get/TransportGetSnapshotsAction.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
index b5eeb2b0f7f2..d9fef7e0af8a 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
@@ -721,7 +721,9 @@ public class TransportGetSnapshotsAction extends TransportMasterNodeAction listener) {
         enqueueTask(listener.delegateFailure((l, ref) -> {
             if (isCancelledSupplier.getAsBoolean()) {
-                l.onFailure(new TaskCancelledException("task cancelled"));
+                try (ref) {
+                    l.onFailure(new TaskCancelledException("task cancelled"));
+                }
             } else {
                 repository.getSnapshotInfo(snapshotId, ActionListener.releaseAfter(l, ref));
             }

From a1fd7bc374858a0ce60bbc8bef1d271f18afdbda Mon Sep 17 00:00:00 2001
From: David Turner
Date: Thu, 23 Jan 2025 09:35:45 +0000
Subject: [PATCH 02/29] Fix trappy timeouts in persistent tasks requests
 (#120514)

Ensure that callers constructing these master-node requests pass in an
explicit timeout.

Relates #107984
---
 .../PersistentTaskCreationFailureIT.java      |  2 +-
 ...PersistentTaskInitializationFailureIT.java |  2 +-
 .../PersistentTasksExecutorFullRestartIT.java |  2 +-
 .../persistent/PersistentTasksExecutorIT.java | 87 ++++++++++++++++---
 .../decider/EnableAssignmentDeciderIT.java    |  2 +-
 .../TransportPostFeatureUpgradeAction.java    |  3 +-
 .../selection/HealthNodeTaskExecutor.java     |  3 +-
 .../persistent/AllocatedPersistentTask.java   | 10 ++-
 .../CompletionPersistentTaskAction.java       |  5 +-
 .../PersistentTasksNodeService.java           |  5 +-
 .../persistent/PersistentTasksService.java    | 73 +++++++---------
 .../RemovePersistentTaskAction.java           |  5 +-
 .../persistent/StartPersistentTaskAction.java |  5 +-
 .../UpdatePersistentTaskStatusAction.java     |  5 +-
 .../HealthNodeTaskExecutorTests.java          |  5 +-
 .../CancelPersistentTaskRequestTests.java     |  2 +-
 .../CompletionPersistentTaskRequestTests.java |  4 +-
 .../PersistentTasksNodeServiceTests.java      | 14 +--
 .../StartPersistentActionRequestTests.java    |  2 +-
 .../UpdatePersistentTaskRequestTests.java     |  2 +-
 .../ccr/action/ShardFollowTaskCleaner.java    |  2 +
 .../downsample/TransportDownsampleAction.java |  2 +-
 .../ReindexDataStreamTransportAction.java     |  3 +-
 .../integration/MlDistributedFailureIT.java   |  1 +
 .../action/TransportPutRollupJobAction.java   |  2 +-
 .../action/PutJobStateMachineTests.java       |  9 +-
 .../xpack/security/Security.java              |  3 +-
 .../xpack/shutdown/NodeShutdownTasksIT.java   | 18 ++--
 .../TransportStopTransformActionTests.java    | 18 +++-
 .../transforms/TransformTaskTests.java        |  3 +-
 30 files changed, 188 insertions(+), 111 deletions(-)
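The recurring change in the hunks that follow: persistent-task requests now
require an explicit master-node timeout instead of inheriting the trappy
implicit default. A minimal caller-side sketch; the helper class, task name
and the 30-second value are illustrative, only the `sendStartRequest`
signature comes from this patch:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.core.TimeValue;
    import org.elasticsearch.persistent.PersistentTaskParams;
    import org.elasticsearch.persistent.PersistentTasksService;

    class StartWithExplicitTimeout {
        // The TimeValue argument is now non-null (enforced via Objects.requireNonNull),
        // so every caller must decide how long it is prepared to wait for the master.
        static <P extends PersistentTaskParams> void start(
            PersistentTasksService service,
            String taskId,
            String taskName,
            P params
        ) {
            service.sendStartRequest(taskId, taskName, params, TimeValue.timeValueSeconds(30), ActionListener.wrap(task -> {
                // the task was accepted into the cluster state
            }, e -> {
                // covers failures, including the master not responding within the timeout
            }));
        }
    }

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java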
b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java index 8a4d1ceda784..6452968f2467 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java @@ -113,7 +113,7 @@ public class PersistentTaskCreationFailureIT extends ESIntegTestCase { UUIDs.base64UUID(), FailingCreationPersistentTaskExecutor.TASK_NAME, new FailingCreationTaskParams(), - null, + TEST_REQUEST_TIMEOUT, l.map(ignored -> null) ) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskInitializationFailureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskInitializationFailureIT.java index 2f0896925859..6e739b12f406 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskInitializationFailureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskInitializationFailureIT.java @@ -50,7 +50,7 @@ public class PersistentTaskInitializationFailureIT extends ESIntegTestCase { UUIDs.base64UUID(), FailingInitializationPersistentTaskExecutor.TASK_NAME, new FailingInitializationTaskParams(), - null, + TEST_REQUEST_TIMEOUT, startPersistentTaskFuture ); startPersistentTaskFuture.actionGet(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorFullRestartIT.java b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorFullRestartIT.java index b710a05ec378..684f6b71d0ef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorFullRestartIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorFullRestartIT.java @@ -45,7 +45,7 @@ public class PersistentTasksExecutorFullRestartIT extends ESIntegTestCase { PlainActionFuture> future = new PlainActionFuture<>(); futures.add(future); taskIds[i] = UUIDs.base64UUID(); - service.sendStartRequest(taskIds[i], TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future); + service.sendStartRequest(taskIds[i], TestPersistentTasksExecutor.NAME, new TestParams("Blah"), TEST_REQUEST_TIMEOUT, future); } for (int i = 0; i < numberOfTasks; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorIT.java index 203ad831bd2e..8af2111afd5b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTasksExecutorIT.java @@ -69,7 +69,13 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { public void testPersistentActionFailure() throws Exception { PersistentTasksService persistentTasksService = internalCluster().getInstance(PersistentTasksService.class); PlainActionFuture> future = new PlainActionFuture<>(); - persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future); + persistentTasksService.sendStartRequest( + UUIDs.base64UUID(), + TestPersistentTasksExecutor.NAME, + new TestParams("Blah"), + TEST_REQUEST_TIMEOUT, + future + ); long allocationId = future.get().getAllocationId(); waitForTaskToStart(); TaskInfo 
firstRunningTask = clusterAdmin().prepareListTasks() @@ -100,7 +106,13 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { PersistentTasksService persistentTasksService = internalCluster().getInstance(PersistentTasksService.class); PlainActionFuture> future = new PlainActionFuture<>(); String taskId = UUIDs.base64UUID(); - persistentTasksService.sendStartRequest(taskId, TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future); + persistentTasksService.sendStartRequest( + taskId, + TestPersistentTasksExecutor.NAME, + new TestParams("Blah"), + TEST_REQUEST_TIMEOUT, + future + ); long allocationId = future.get().getAllocationId(); waitForTaskToStart(); TaskInfo firstRunningTask = clusterAdmin().prepareListTasks() @@ -119,7 +131,14 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { logger.info("Simulating errant completion notification"); // try sending completion request with incorrect allocation id PlainActionFuture> failedCompletionNotificationFuture = new PlainActionFuture<>(); - persistentTasksService.sendCompletionRequest(taskId, Long.MAX_VALUE, null, null, null, failedCompletionNotificationFuture); + persistentTasksService.sendCompletionRequest( + taskId, + Long.MAX_VALUE, + null, + null, + TEST_REQUEST_TIMEOUT, + failedCompletionNotificationFuture + ); assertFutureThrows(failedCompletionNotificationFuture, ResourceNotFoundException.class); // Make sure that the task is still running assertThat( @@ -141,7 +160,13 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { PlainActionFuture> future = new PlainActionFuture<>(); TestParams testParams = new TestParams("Blah"); testParams.setExecutorNodeAttr("test"); - persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, testParams, null, future); + persistentTasksService.sendStartRequest( + UUIDs.base64UUID(), + TestPersistentTasksExecutor.NAME, + testParams, + TEST_REQUEST_TIMEOUT, + future + ); String taskId = future.get().getId(); Settings nodeSettings = Settings.builder().put(nodeSettings(0, Settings.EMPTY)).put("node.attr.test_attr", "test").build(); @@ -165,7 +190,7 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { // Remove the persistent task PlainActionFuture> removeFuture = new PlainActionFuture<>(); - persistentTasksService.sendRemoveRequest(taskId, null, removeFuture); + persistentTasksService.sendRemoveRequest(taskId, TEST_REQUEST_TIMEOUT, removeFuture); assertEquals(removeFuture.get().getId(), taskId); } @@ -182,7 +207,13 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { PersistentTasksService persistentTasksService = internalCluster().getInstance(PersistentTasksService.class); PlainActionFuture> future = new PlainActionFuture<>(); TestParams testParams = new TestParams("Blah"); - persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, testParams, null, future); + persistentTasksService.sendStartRequest( + UUIDs.base64UUID(), + TestPersistentTasksExecutor.NAME, + testParams, + TEST_REQUEST_TIMEOUT, + future + ); String taskId = future.get().getId(); assertThat(clusterAdmin().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks(), empty()); @@ -197,14 +228,20 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { // Remove the persistent task PlainActionFuture> removeFuture = new PlainActionFuture<>(); - persistentTasksService.sendRemoveRequest(taskId, null, removeFuture); + 
persistentTasksService.sendRemoveRequest(taskId, TEST_REQUEST_TIMEOUT, removeFuture); assertEquals(removeFuture.get().getId(), taskId); } public void testPersistentActionStatusUpdate() throws Exception { PersistentTasksService persistentTasksService = internalCluster().getInstance(PersistentTasksService.class); PlainActionFuture> future = new PlainActionFuture<>(); - persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future); + persistentTasksService.sendStartRequest( + UUIDs.base64UUID(), + TestPersistentTasksExecutor.NAME, + new TestParams("Blah"), + TEST_REQUEST_TIMEOUT, + future + ); String taskId = future.get().getId(); waitForTaskToStart(); TaskInfo firstRunningTask = clusterAdmin().prepareListTasks() @@ -250,7 +287,7 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { assertFutureThrows(future1, IllegalStateException.class, "timed out after 10ms"); PlainActionFuture> failedUpdateFuture = new PlainActionFuture<>(); - persistentTasksService.sendUpdateStateRequest(taskId, -2, new State("should fail"), null, failedUpdateFuture); + persistentTasksService.sendUpdateStateRequest(taskId, -2, new State("should fail"), TEST_REQUEST_TIMEOUT, failedUpdateFuture); assertFutureThrows( failedUpdateFuture, ResourceNotFoundException.class, @@ -275,11 +312,23 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { PersistentTasksService persistentTasksService = internalCluster().getInstance(PersistentTasksService.class); PlainActionFuture> future = new PlainActionFuture<>(); String taskId = UUIDs.base64UUID(); - persistentTasksService.sendStartRequest(taskId, TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future); + persistentTasksService.sendStartRequest( + taskId, + TestPersistentTasksExecutor.NAME, + new TestParams("Blah"), + TEST_REQUEST_TIMEOUT, + future + ); future.get(); PlainActionFuture> future2 = new PlainActionFuture<>(); - persistentTasksService.sendStartRequest(taskId, TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future2); + persistentTasksService.sendStartRequest( + taskId, + TestPersistentTasksExecutor.NAME, + new TestParams("Blah"), + TEST_REQUEST_TIMEOUT, + future2 + ); assertFutureThrows(future2, ResourceAlreadyExistsException.class); waitForTaskToStart(); @@ -315,7 +364,13 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { PlainActionFuture> future = new PlainActionFuture<>(); TestParams testParams = new TestParams("Blah"); testParams.setExecutorNodeAttr("test"); - persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, testParams, null, future); + persistentTasksService.sendStartRequest( + UUIDs.base64UUID(), + TestPersistentTasksExecutor.NAME, + testParams, + TEST_REQUEST_TIMEOUT, + future + ); PersistentTask task = future.get(); String taskId = task.getId(); @@ -366,7 +421,13 @@ public class PersistentTasksExecutorIT extends ESIntegTestCase { persistentTasksClusterService.setRecheckInterval(TimeValue.timeValueMillis(1)); PersistentTasksService persistentTasksService = internalCluster().getInstance(PersistentTasksService.class); PlainActionFuture> future = new PlainActionFuture<>(); - persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, new TestParams("Blah"), null, future); + persistentTasksService.sendStartRequest( + UUIDs.base64UUID(), + TestPersistentTasksExecutor.NAME, + new TestParams("Blah"), + TEST_REQUEST_TIMEOUT, + future + ); String taskId 
= future.get().getId(); long allocationId = future.get().getAllocationId(); waitForTaskToStart(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java index 7038dca99249..b75156763c9e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java @@ -52,7 +52,7 @@ public class EnableAssignmentDeciderIT extends ESIntegTestCase { "task_" + i, TestPersistentTasksExecutor.NAME, new TestParams(randomAlphaOfLength(10)), - null, + TEST_REQUEST_TIMEOUT, ActionListener.running(latch::countDown) ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java index 57ebe8ef626f..ecf7bab6a21c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksService; @@ -95,7 +96,7 @@ public class TransportPostFeatureUpgradeAction extends TransportMasterNodeAction SYSTEM_INDEX_UPGRADE_TASK_NAME, SYSTEM_INDEX_UPGRADE_TASK_NAME, new SystemIndexMigrationTaskParams(), - null, + TimeValue.THIRTY_SECONDS /* TODO should this be configurable? longer by default? infinite? 
*/, ActionListener.wrap(startedTask -> { listener.onResponse(new PostFeatureUpgradeResponse(true, featuresToMigrate, null, null)); }, ex -> { diff --git a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java index 5991bc248ba7..7c37a0ce5d92 100644 --- a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.FeatureService; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; @@ -162,7 +163,7 @@ public final class HealthNodeTaskExecutor extends PersistentTasksExecutor logger.debug("Created the health node task"), e -> { if (e instanceof NodeClosedException) { logger.debug("Failed to create health node task because node is shutting down", e); diff --git a/server/src/main/java/org/elasticsearch/persistent/AllocatedPersistentTask.java b/server/src/main/java/org/elasticsearch/persistent/AllocatedPersistentTask.java index 7f00562758a8..cda73d4fa0bc 100644 --- a/server/src/main/java/org/elasticsearch/persistent/AllocatedPersistentTask.java +++ b/server/src/main/java/org/elasticsearch/persistent/AllocatedPersistentTask.java @@ -65,7 +65,13 @@ public class AllocatedPersistentTask extends CancellableTask { final PersistentTaskState state, final ActionListener> listener ) { - persistentTasksService.sendUpdateStateRequest(persistentTaskId, allocationId, state, null, listener); + persistentTasksService.sendUpdateStateRequest( + persistentTaskId, + allocationId, + state, + TimeValue.THIRTY_SECONDS /* TODO should this be longer? infinite? */, + listener + ); } public String getPersistentTaskId() { @@ -201,7 +207,7 @@ public class AllocatedPersistentTask extends CancellableTask { getAllocationId(), failure, localAbortReason, - null, + TimeValue.THIRTY_SECONDS /* TODO should this be longer? infinite? 
*/, new ActionListener<>() { @Override public void onResponse(PersistentTasksCustomMetadata.PersistentTask persistentTask) { diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index 74761144742c..be37af165b42 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -56,8 +57,8 @@ public class CompletionPersistentTaskAction { localAbortReason = in.readOptionalString(); } - public Request(String taskId, long allocationId, Exception exception, String localAbortReason) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterNodeTimeout, String taskId, long allocationId, Exception exception, String localAbortReason) { + super(masterNodeTimeout); this.taskId = taskId; this.exception = exception; this.allocationId = allocationId; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java index ff6a0b901870..16fdc82074e3 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.tasks.Task; @@ -310,7 +311,7 @@ public class PersistentTasksNodeService implements ClusterStateListener { taskInProgress.getAllocationId(), originalException, null, - null, + TimeValue.THIRTY_SECONDS /* TODO should this be longer? infinite? 
*/, new ActionListener<>() { @Override public void onResponse(PersistentTask persistentTask) { @@ -346,7 +347,7 @@ public class PersistentTasksNodeService implements ClusterStateListener { if (task.markAsCancelled()) { // Cancel the local task using the task manager String reason = "task has been removed, cancelling locally"; - persistentTasksService.sendCancelRequest(task.getId(), reason, null, new ActionListener<>() { + persistentTasksService.sendCancelRequest(task.getId(), reason, new ActionListener<>() { @Override public void onResponse(ListTasksResponse cancelTasksResponse) { logger.trace( diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java index 4e828a1280b1..b540a9160241 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java @@ -27,6 +27,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Objects; import java.util.function.Predicate; /** @@ -57,16 +58,16 @@ public class PersistentTasksService { final String taskId, final String taskName, final Params taskParams, - final @Nullable TimeValue timeout, + final TimeValue timeout, final ActionListener> listener ) { @SuppressWarnings("unchecked") final ActionListener> wrappedListener = listener.map(t -> (PersistentTask) t); - StartPersistentTaskAction.Request request = new StartPersistentTaskAction.Request(taskId, taskName, taskParams); - if (timeout != null) { - request.masterNodeTimeout(timeout); - } - execute(request, StartPersistentTaskAction.INSTANCE, wrappedListener); + execute( + new StartPersistentTaskAction.Request(Objects.requireNonNull(timeout), taskId, taskName, taskParams), + StartPersistentTaskAction.INSTANCE, + wrappedListener + ); } /** @@ -85,33 +86,27 @@ public class PersistentTasksService { final @Nullable TimeValue timeout, final ActionListener> listener ) { - CompletionPersistentTaskAction.Request request = new CompletionPersistentTaskAction.Request( - taskId, - taskAllocationId, - taskFailure, - localAbortReason + execute( + new CompletionPersistentTaskAction.Request( + Objects.requireNonNull(timeout), + taskId, + taskAllocationId, + taskFailure, + localAbortReason + ), + CompletionPersistentTaskAction.INSTANCE, + listener ); - if (timeout != null) { - request.masterNodeTimeout(timeout); - } - execute(request, CompletionPersistentTaskAction.INSTANCE, listener); } /** * Cancels a locally running task using the Task Manager API. Accepts operation timeout as optional parameter */ - void sendCancelRequest( - final long taskId, - final String reason, - final @Nullable TimeValue timeout, - final ActionListener listener - ) { + void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { CancelTasksRequest request = new CancelTasksRequest(); request.setTargetTaskId(new TaskId(clusterService.localNode().getId(), taskId)); request.setReason(reason); - if (timeout != null) { - request.setTimeout(timeout); - } + // TODO set timeout? 
try { client.admin().cluster().cancelTasks(request, listener); } catch (Exception e) { @@ -130,33 +125,25 @@ public class PersistentTasksService { final String taskId, final long taskAllocationID, final PersistentTaskState taskState, - final @Nullable TimeValue timeout, + final TimeValue timeout, final ActionListener> listener ) { - UpdatePersistentTaskStatusAction.Request request = new UpdatePersistentTaskStatusAction.Request( - taskId, - taskAllocationID, - taskState + execute( + new UpdatePersistentTaskStatusAction.Request(Objects.requireNonNull(timeout), taskId, taskAllocationID, taskState), + UpdatePersistentTaskStatusAction.INSTANCE, + listener ); - if (timeout != null) { - request.masterNodeTimeout(timeout); - } - execute(request, UpdatePersistentTaskStatusAction.INSTANCE, listener); } /** * Notifies the master node to remove a persistent task from the cluster state. Accepts operation timeout as optional parameter */ - public void sendRemoveRequest( - final String taskId, - final @Nullable TimeValue timeout, - final ActionListener> listener - ) { - RemovePersistentTaskAction.Request request = new RemovePersistentTaskAction.Request(taskId); - if (timeout != null) { - request.masterNodeTimeout(timeout); - } - execute(request, RemovePersistentTaskAction.INSTANCE, listener); + public void sendRemoveRequest(final String taskId, final TimeValue timeout, final ActionListener> listener) { + execute( + new RemovePersistentTaskAction.Request(Objects.requireNonNull(timeout), taskId), + RemovePersistentTaskAction.INSTANCE, + listener + ); } /** diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index 87d712ececd2..5fa18a070b16 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -44,8 +45,8 @@ public class RemovePersistentTaskAction { taskId = in.readString(); } - public Request(String taskId) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterNodeTimeout, String taskId) { + super(masterNodeTimeout); this.taskId = taskId; } diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index d79f271bbc32..91c2d41a4a80 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -52,8 +53,8 @@ public class 
StartPersistentTaskAction { params = in.readNamedWriteable(PersistentTaskParams.class); } - public Request(String taskId, String taskName, PersistentTaskParams params) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterNodeTimeout, String taskId, String taskName, PersistentTaskParams params) { + super(masterNodeTimeout); this.taskId = taskId; this.taskName = taskName; this.params = params; diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index 346628f6224a..b3692ecfdd55 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -49,8 +50,8 @@ public class UpdatePersistentTaskStatusAction { state = in.readOptionalNamedWriteable(PersistentTaskState.class); } - public Request(String taskId, long allocationId, PersistentTaskState state) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterNodeTimeout, String taskId, long allocationId, PersistentTaskState state) { + super(masterNodeTimeout); this.taskId = taskId; this.allocationId = allocationId; this.state = state; diff --git a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java index 3069589f9556..aee02fb288b5 100644 --- a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java @@ -42,6 +42,7 @@ import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNotNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; @@ -102,7 +103,7 @@ public class HealthNodeTaskExecutorTests extends ESTestCase { eq("health-node"), eq("health-node"), eq(new HealthNodeTaskParams()), - eq(null), + isNotNull(), any() ) ); @@ -121,7 +122,7 @@ public class HealthNodeTaskExecutorTests extends ESTestCase { eq("health-node"), eq("health-node"), eq(new HealthNodeTaskParams()), - eq(null), + isNotNull(), any() ); } diff --git a/server/src/test/java/org/elasticsearch/persistent/CancelPersistentTaskRequestTests.java b/server/src/test/java/org/elasticsearch/persistent/CancelPersistentTaskRequestTests.java index 11a4810f113f..c21a2ccd66ef 100644 --- a/server/src/test/java/org/elasticsearch/persistent/CancelPersistentTaskRequestTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/CancelPersistentTaskRequestTests.java @@ -18,7 +18,7 @@ public class CancelPersistentTaskRequestTests extends AbstractWireSerializingTes @Override protected 
Request createTestInstance() { - return new Request(randomAsciiOfLength(10)); + return new Request(randomTimeValue(), randomAsciiOfLength(10)); } @Override diff --git a/server/src/test/java/org/elasticsearch/persistent/CompletionPersistentTaskRequestTests.java b/server/src/test/java/org/elasticsearch/persistent/CompletionPersistentTaskRequestTests.java index b57f401ed56f..cb61d3b3862d 100644 --- a/server/src/test/java/org/elasticsearch/persistent/CompletionPersistentTaskRequestTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/CompletionPersistentTaskRequestTests.java @@ -17,9 +17,9 @@ public class CompletionPersistentTaskRequestTests extends AbstractWireSerializin @Override protected Request createTestInstance() { if (randomBoolean()) { - return new Request(randomAlphaOfLength(10), randomNonNegativeLong(), null, null); + return new Request(randomTimeValue(), randomAlphaOfLength(10), randomNonNegativeLong(), null, null); } else { - return new Request(randomAlphaOfLength(10), randomNonNegativeLong(), null, randomAlphaOfLength(20)); + return new Request(randomTimeValue(), randomAlphaOfLength(10), randomNonNegativeLong(), null, randomAlphaOfLength(20)); } } diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java index 7cf57325baa0..9d408a0d152d 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java @@ -260,12 +260,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { when(client.settings()).thenReturn(Settings.EMPTY); PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, client) { @Override - void sendCancelRequest( - final long taskId, - final String reason, - final TimeValue timeout, - final ActionListener listener - ) { + void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { capturedTaskId.set(taskId); capturedListener.set(listener); } @@ -356,12 +351,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { when(client.settings()).thenReturn(Settings.EMPTY); PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, client) { @Override - void sendCancelRequest( - final long taskId, - final String reason, - final TimeValue timeout, - final ActionListener listener - ) { + void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { fail("Shouldn't be called during local abort"); } diff --git a/server/src/test/java/org/elasticsearch/persistent/StartPersistentActionRequestTests.java b/server/src/test/java/org/elasticsearch/persistent/StartPersistentActionRequestTests.java index 07079f6c64df..44434b6500ca 100644 --- a/server/src/test/java/org/elasticsearch/persistent/StartPersistentActionRequestTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/StartPersistentActionRequestTests.java @@ -30,7 +30,7 @@ public class StartPersistentActionRequestTests extends AbstractWireSerializingTe if (randomBoolean()) { testParams.setExecutorNodeAttr(randomAlphaOfLengthBetween(1, 20)); } - return new Request(UUIDs.base64UUID(), randomAlphaOfLengthBetween(1, 20), testParams); + return new Request(randomTimeValue(), UUIDs.base64UUID(), randomAlphaOfLengthBetween(1, 20), testParams); } @Override diff --git 
a/server/src/test/java/org/elasticsearch/persistent/UpdatePersistentTaskRequestTests.java b/server/src/test/java/org/elasticsearch/persistent/UpdatePersistentTaskRequestTests.java index 3988b3879956..61dc7f06dcbf 100644 --- a/server/src/test/java/org/elasticsearch/persistent/UpdatePersistentTaskRequestTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/UpdatePersistentTaskRequestTests.java @@ -22,7 +22,7 @@ public class UpdatePersistentTaskRequestTests extends AbstractWireSerializingTes @Override protected Request createTestInstance() { - return new Request(UUIDs.base64UUID(), randomLong(), new State(randomAlphaOfLength(10))); + return new Request(randomTimeValue(), UUIDs.base64UUID(), randomLong(), new State(randomAlphaOfLength(10))); } @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java index 7a05a4e712fc..fd3ed852650f 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; @@ -110,6 +111,7 @@ public final class ShardFollowTaskCleaner implements ClusterStateListener { client.execute( CompletionPersistentTaskAction.INSTANCE, new CompletionPersistentTaskAction.Request( + MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, persistentTask.getId(), persistentTask.getAllocationId(), new IndexNotFoundException(followerIndex), diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 796ff4e677aa..7c26ad60fb13 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -549,7 +549,7 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc persistentTaskId, DownsampleShardTask.TASK_NAME, params, - null, + TimeValue.THIRTY_SECONDS /* TODO should this be configurable? longer by default? infinite? 
*/, ActionListener.wrap( startedTask -> persistentTasksService.waitForPersistentTaskCondition( startedTask.getId(), diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java index 2d7c17db054a..26301f1397a0 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.Task; @@ -81,7 +82,7 @@ public class ReindexDataStreamTransportAction extends HandledTransportAction listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) ); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index dfb960794537..60dc4325fee1 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -308,6 +308,7 @@ public class MlDistributedFailureIT extends BaseMlIntegTestCase { } UpdatePersistentTaskStatusAction.Request updatePersistentTaskStatusRequest = new UpdatePersistentTaskStatusAction.Request( + TEST_REQUEST_TIMEOUT, task.getId(), task.getAllocationId(), DatafeedState.STOPPING diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index f3830d5cbf68..d124d5014c7e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -322,7 +322,7 @@ public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNode job.getConfig().getId(), RollupField.TASK_NAME, job, - null, + TimeValue.THIRTY_SECONDS /* TODO should this be configurable? longer by default? infinite? 
*/, ActionListener.wrap(rollupConfigPersistentTask -> waitForRollupStarted(job, listener, persistentTasksService), e -> { if (e instanceof ResourceAlreadyExistsException) { e = new ElasticsearchStatusException( diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java index fed2439e513c..5868a762ed51 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java @@ -42,6 +42,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNotNull; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -373,10 +374,10 @@ public class PutJobStateMachineTests extends ESTestCase { requestCaptor.getValue().onFailure(new ResourceAlreadyExistsException(job.getConfig().getRollupIndex())); return null; }).when(tasksService) - .sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), eq(null), requestCaptor.capture()); + .sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), isNotNull(), requestCaptor.capture()); TransportPutRollupJobAction.startPersistentTask(job, testListener, tasksService); - verify(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), eq(null), any()); + verify(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), isNotNull(), any()); } @SuppressWarnings({ "unchecked", "rawtypes" }) @@ -401,7 +402,7 @@ public class PutJobStateMachineTests extends ESTestCase { requestCaptor.getValue().onResponse(response); return null; }).when(tasksService) - .sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), eq(null), requestCaptor.capture()); + .sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), isNotNull(), requestCaptor.capture()); ArgumentCaptor requestCaptor2 = ArgumentCaptor.forClass( PersistentTasksService.WaitForPersistentTaskListener.class @@ -413,7 +414,7 @@ public class PutJobStateMachineTests extends ESTestCase { }).when(tasksService).waitForPersistentTaskCondition(eq(job.getConfig().getId()), any(), any(), requestCaptor2.capture()); TransportPutRollupJobAction.startPersistentTask(job, testListener, tasksService); - verify(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), eq(null), any()); + verify(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), isNotNull(), any()); verify(tasksService).waitForPersistentTaskCondition(eq(job.getConfig().getId()), any(), any(), any()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index fd530a338b26..6004f8ebf95c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -63,6 +63,7 @@ import org.elasticsearch.common.util.set.Sets; 
import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.features.FeatureService; @@ -1273,7 +1274,7 @@ public class Security extends Plugin SecurityMigrationTaskParams.TASK_NAME, SecurityMigrationTaskParams.TASK_NAME, new SecurityMigrationTaskParams(migrationsVersion, securityMigrationNeeded), - null, + TimeValue.THIRTY_SECONDS /* TODO should this be configurable? longer by default? infinite? */, ActionListener.wrap((response) -> { logger.debug("Security migration task submitted"); }, (exception) -> { diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java index 46f568d286f9..784f1c1fbe23 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java @@ -183,12 +183,18 @@ public class NodeShutdownTasksIT extends ESIntegTestCase { private void startTask() { logger.info("--> sending start request"); - persistentTasksService.sendStartRequest("task_id", "task_name", new TestTaskParams(), null, ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceAlreadyExistsException == false) { - logger.error("failed to create task", e); - fail("failed to create task"); - } - })); + persistentTasksService.sendStartRequest( + "task_id", + "task_name", + new TestTaskParams(), + TEST_REQUEST_TIMEOUT, + ActionListener.wrap(r -> {}, e -> { + if (e instanceof ResourceAlreadyExistsException == false) { + logger.error("failed to create task", e); + fail("failed to create task"); + } + }) + ); } @Override diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java index 08e0982b2ab8..7c3b9f655b97 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java @@ -266,11 +266,23 @@ public class TransportStopTransformActionTests extends ESTestCase { when(client.threadPool()).thenReturn(threadPool); doAnswer(randomBoolean() ? withResponse() : withException(new ResourceNotFoundException("task not found"))).when(client) - .execute(same(RemovePersistentTaskAction.INSTANCE), eq(new RemovePersistentTaskAction.Request("task-A")), any()); + .execute( + same(RemovePersistentTaskAction.INSTANCE), + eq(new RemovePersistentTaskAction.Request(TEST_REQUEST_TIMEOUT, "task-A")), + any() + ); doAnswer(randomBoolean() ? 
withResponse() : withException(new ResourceNotFoundException("task not found"))).when(client) - .execute(same(RemovePersistentTaskAction.INSTANCE), eq(new RemovePersistentTaskAction.Request("task-B")), any()); + .execute( + same(RemovePersistentTaskAction.INSTANCE), + eq(new RemovePersistentTaskAction.Request(TEST_REQUEST_TIMEOUT, "task-B")), + any() + ); doAnswer(withException(new IllegalStateException("real issue while removing task"))).when(client) - .execute(same(RemovePersistentTaskAction.INSTANCE), eq(new RemovePersistentTaskAction.Request("task-C")), any()); + .execute( + same(RemovePersistentTaskAction.INSTANCE), + eq(new RemovePersistentTaskAction.Request(TEST_REQUEST_TIMEOUT, "task-C")), + any() + ); PersistentTasksService persistentTasksService = new PersistentTasksService(mock(ClusterService.class), threadPool, client); Set transformTasks = Set.of("task-A", "task-B", "task-C"); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java index e381659b1e01..535484ed3a19 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java @@ -80,6 +80,7 @@ import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNotNull; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -352,7 +353,7 @@ public class TransformTaskTests extends ESTestCase { eq(42L), isNull(), eq("Node is shutting down."), - isNull(), + isNotNull(), any() ); } From 928040ee765096224e4411d46a011f0bce3da9da Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 23 Jan 2025 10:56:18 +0000 Subject: [PATCH 03/29] [ML] Automatically rollover legacy ml indices (#120405) Rollover ml indices created in 7.x and create new indices that version 9 can read and write to. 
This is required for ml to continue to run during and after upgrade and
reindex of 7.x indices.
---
 docs/changelog/120405.yaml                    |   5 +
 .../org/elasticsearch/TransportVersions.java  |   1 +
 .../core/ml/annotations/AnnotationIndex.java  |   1 +
 .../xpack/core/ml/utils/MlIndexAndAlias.java  |  25 +-
 .../core/ml/utils/MlIndexAndAliasTests.java   |   9 +-
 .../xpack/ml/MachineLearning.java             |  24 +-
 .../xpack/ml/MlAutoUpdateService.java         |  25 +-
 .../xpack/ml/MlIndexRollover.java             | 176 +++++++++++
 .../datafeed/DatafeedConfigAutoUpdater.java   |   2 +-
 .../xpack/ml/MlIndexRolloverTests.java        | 283 ++++++++++++++++++
 .../DatafeedConfigAutoUpdaterTests.java       |   6 +-
 11 files changed, 536 insertions(+), 21 deletions(-)
 create mode 100644 docs/changelog/120405.yaml
 create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java
 create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java

diff --git a/docs/changelog/120405.yaml b/docs/changelog/120405.yaml
new file mode 100644
index 000000000000..9ca30a9473e7
--- /dev/null
+++ b/docs/changelog/120405.yaml
@@ -0,0 +1,5 @@
+pr: 120405
+summary: Automatically rollover legacy ml indices
+area: Machine Learning
+type: upgrade
+issues: []

diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 750b23caf215..65a745f0fe36 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -161,6 +161,7 @@ public class TransportVersions {
     public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_00_0);
     public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_00_0);
     public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0);
+    public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0);
 
     /*
      * STOP! READ THIS FIRST! No, really,

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java
index 95753f02e396..4ab096ca5858 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java
@@ -49,6 +49,7 @@ public class AnnotationIndex {
 
     // Exposed for testing, but always use the aliases in non-test code.
     public static final String LATEST_INDEX_NAME = ".ml-annotations-000001";
+    public static final String INDEX_PATTERN = ".ml-annotations-*";
     // Due to historical bugs this index may not have the correct mappings
     // in some production clusters. Therefore new annotations should be
     // written to the latest index.
If we ever switch to another new annotations diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index b630bafdbc77..e85acc159059 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -66,7 +66,6 @@ public final class MlIndexAndAlias { private static final Logger logger = LogManager.getLogger(MlIndexAndAlias.class); - // Visible for testing static final Comparator INDEX_NAME_COMPARATOR = new Comparator<>() { private final Predicate HAS_SIX_DIGIT_SUFFIX = Pattern.compile("\\d{6}").asMatchPredicate(); @@ -172,7 +171,7 @@ public final class MlIndexAndAlias { } else { if (indexPointedByCurrentWriteAlias.isEmpty()) { assert concreteIndexNames.length > 0; - String latestConcreteIndexName = Arrays.stream(concreteIndexNames).max(INDEX_NAME_COMPARATOR).get(); + String latestConcreteIndexName = latestIndex(concreteIndexNames); updateWriteAlias(client, alias, null, latestConcreteIndexName, loggingListener); return; } @@ -279,18 +278,22 @@ public final class MlIndexAndAlias { ); } - private static void updateWriteAlias( + public static void updateWriteAlias( Client client, String alias, @Nullable String currentIndex, String newIndex, ActionListener listener ) { - logger.info("About to move write alias [{}] from index [{}] to index [{}]", alias, currentIndex, newIndex); + if (currentIndex != null) { + logger.info("About to move write alias [{}] from index [{}] to index [{}]", alias, currentIndex, newIndex); + } else { + logger.info("About to create write alias [{}] for index [{}]", alias, newIndex); + } IndicesAliasesRequestBuilder requestBuilder = client.admin() .indices() .prepareAliases() - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(newIndex).alias(alias).isHidden(true)); + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(newIndex).alias(alias).isHidden(true).writeIndex(true)); if (currentIndex != null) { requestBuilder.removeAlias(currentIndex, alias); } @@ -380,4 +383,16 @@ public final class MlIndexAndAlias { public static boolean hasIndexTemplate(ClusterState state, String templateName) { return state.getMetadata().templatesV2().containsKey(templateName); } + + /** + * Returns the latest index. Latest is the index with the highest + * 6 digit suffix. + * @param concreteIndices List of index names + * @return The latest index by index name version suffix + */ + public static String latestIndex(String[] concreteIndices) { + return concreteIndices.length == 1 + ? 
concreteIndices[0] + : Arrays.stream(concreteIndices).max(MlIndexAndAlias.INDEX_NAME_COMPARATOR).get(); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index 8e20ba4bfa9b..8fc1e55ec0ac 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -298,7 +298,7 @@ public class MlIndexAndAliasTests extends ESTestCase { assertThat( indicesAliasesRequest.getAliasActions(), contains( - AliasActions.add().alias(TEST_INDEX_ALIAS).index(FIRST_CONCRETE_INDEX).isHidden(true), + AliasActions.add().alias(TEST_INDEX_ALIAS).index(FIRST_CONCRETE_INDEX).isHidden(true).writeIndex(true), AliasActions.remove().alias(TEST_INDEX_ALIAS).index(LEGACY_INDEX_WITHOUT_SUFFIX) ) ); @@ -318,7 +318,7 @@ public class MlIndexAndAliasTests extends ESTestCase { IndicesAliasesRequest indicesAliasesRequest = aliasesRequestCaptor.getValue(); assertThat( indicesAliasesRequest.getAliasActions(), - contains(AliasActions.add().alias(TEST_INDEX_ALIAS).index(expectedWriteIndexName).isHidden(true)) + contains(AliasActions.add().alias(TEST_INDEX_ALIAS).index(expectedWriteIndexName).isHidden(true).writeIndex(true)) ); } @@ -364,6 +364,11 @@ public class MlIndexAndAliasTests extends ESTestCase { assertThat(Stream.of(".a-000002", ".b-000001").max(comparator).get(), equalTo(".a-000002")); } + public void testLatestIndex() { + var names = new String[] { "index-000001", "index-000002", "index-000003" }; + assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000003")); + } + private void createIndexAndAliasIfNecessary(ClusterState clusterState) { MlIndexAndAlias.createIndexAndAliasIfNecessary( client, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 08c876dfdcc5..043a27b7cd14 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -183,6 +183,7 @@ import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelDeploymentAction import org.elasticsearch.xpack.core.ml.action.UpgradeJobModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; +import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; import org.elasticsearch.xpack.core.ml.dataframe.analyses.MlDataFrameAnalysisNamedXContentProvider; @@ -1222,7 +1223,25 @@ public class MachineLearning extends Plugin MlAutoUpdateService mlAutoUpdateService = new MlAutoUpdateService( threadPool, - List.of(new DatafeedConfigAutoUpdater(datafeedConfigProvider, indexNameExpressionResolver)) + List.of( + new DatafeedConfigAutoUpdater(datafeedConfigProvider, indexNameExpressionResolver), + new MlIndexRollover( + List.of( + new MlIndexRollover.IndexPatternAndAlias( + AnomalyDetectorsIndex.jobStateIndexPattern(), + AnomalyDetectorsIndex.jobStateIndexWriteAlias() + ), + new MlIndexRollover.IndexPatternAndAlias(MlStatsIndex.indexPattern(), MlStatsIndex.writeAlias()), + new 
MlIndexRollover.IndexPatternAndAlias(AnnotationIndex.INDEX_PATTERN, AnnotationIndex.WRITE_ALIAS_NAME) + // TODO notifications = https://github.com/elastic/elasticsearch/pull/120064 + // TODO anomaly results + // TODO .ml-inference-XXXXXX - requires alias + // TODO .ml-inference-native-XXXXXX - requires alias (index added in 8.0) + ), + indexNameExpressionResolver, + client + ) + ) ); clusterService.addListener(mlAutoUpdateService); // this object registers as a license state listener, and is never removed, so there's no need to retain another reference to it @@ -2025,6 +2044,9 @@ public class MachineLearning extends Plugin new AssociatedIndexDescriptor(MlStatsIndex.indexPattern(), "ML stats index"), new AssociatedIndexDescriptor(".ml-notifications*", "ML notifications indices"), new AssociatedIndexDescriptor(".ml-annotations*", "ML annotations indices") + // TODO should the inference indices be included here? + // new AssociatedIndexDescriptor(".ml-inference-*", "ML Data Frame Analytics") + // new AssociatedIndexDescriptor(".ml-inference-native*", "ML indices for trained models") ); @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAutoUpdateService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAutoUpdateService.java index 94800daebf29..05c4d70e013e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAutoUpdateService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAutoUpdateService.java @@ -30,7 +30,7 @@ public class MlAutoUpdateService implements ClusterStateListener { String getName(); - void runUpdate(); + void runUpdate(ClusterState latestState); } private final List updateActions; @@ -47,27 +47,34 @@ public class MlAutoUpdateService implements ClusterStateListener { @Override public void clusterChanged(ClusterChangedEvent event) { - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { - return; - } if (event.localNodeMaster() == false) { return; } + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + return; + } - TransportVersion minTransportVersion = event.state().getMinTransportVersion(); + if (completedUpdates.size() == updateActions.size()) { + return; // all work complete + } + + final var latestState = event.state(); + TransportVersion minTransportVersion = latestState.getMinTransportVersion(); final List toRun = updateActions.stream() .filter(action -> action.isMinTransportVersionSupported(minTransportVersion)) .filter(action -> completedUpdates.contains(action.getName()) == false) - .filter(action -> action.isAbleToRun(event.state())) + .filter(action -> action.isAbleToRun(latestState)) .filter(action -> currentlyUpdating.add(action.getName())) .toList(); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> toRun.forEach(this::runUpdate)); + // TODO run updates serially + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute(() -> toRun.forEach((action) -> this.runUpdate(action, latestState))); } - private void runUpdate(UpdateAction action) { + private void runUpdate(UpdateAction action, ClusterState latestState) { try { logger.debug(() -> "[" + action.getName() + "] starting executing update action"); - action.runUpdate(); + action.runUpdate(latestState); this.completedUpdates.add(action.getName()); logger.debug(() -> "[" + action.getName() + "] succeeded executing update action"); } catch (Exception ex) { diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java new file mode 100644 index 000000000000..7dbafdc2676b --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + +/** + * If any of the indices listed in {@code indicesToRollover} are legacy indices + * then call rollover to produce a new index with the current version. If the + * index does not have an alias the alias is created first. + * If none of the {@code indicesToRollover} exist or they are all non-legacy + * indices then nothing will be updated. + */ +public class MlIndexRollover implements MlAutoUpdateService.UpdateAction { + + private static final Logger logger = LogManager.getLogger(MlIndexRollover.class); + + public record IndexPatternAndAlias(String indexPattern, String alias) {} + + private final IndexNameExpressionResolver expressionResolver; + private final OriginSettingClient client; + private final List indicesToRollover; + + public MlIndexRollover(List indicesToRollover, IndexNameExpressionResolver expressionResolver, Client client) { + this.expressionResolver = expressionResolver; + this.client = new OriginSettingClient(client, ML_ORIGIN); + this.indicesToRollover = indicesToRollover; + } + + @Override + public boolean isMinTransportVersionSupported(TransportVersion minTransportVersion) { + // Wait for all nodes to be upgraded to ensure that the + // newly created index will be of the latest version. 
+        return minTransportVersion.onOrAfter(TransportVersions.ML_ROLLOVER_LEGACY_INDICES);
+    }
+
+    @Override
+    public boolean isAbleToRun(ClusterState latestState) {
+        for (var indexPatternAndAlias : indicesToRollover) {
+            String[] indices = expressionResolver.concreteIndexNames(
+                latestState,
+                IndicesOptions.lenientExpandOpenHidden(),
+                indexPatternAndAlias.indexPattern
+            );
+            if (indices.length == 0) {
+                // The index does not exist but the MlAutoUpdateService will
+                // need to run this action and mark it as done.
+                // Ignore the missing index and continue the loop
+                continue;
+            }
+
+            String latestIndex = MlIndexAndAlias.latestIndex(indices);
+            IndexRoutingTable routingTable = latestState.getRoutingTable().index(latestIndex);
+            if (routingTable == null || routingTable.allPrimaryShardsActive() == false) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    @Override
+    public String getName() {
+        return "ml_legacy_index_rollover";
+    }
+
+    @Override
+    public void runUpdate(ClusterState latestState) {
+        List<Exception> failures = new ArrayList<>();
+
+        for (var indexPatternAndAlias : indicesToRollover) {
+            PlainActionFuture<Boolean> rolloverIndices = new PlainActionFuture<>();
+            rolloverLegacyIndices(latestState, indexPatternAndAlias.indexPattern(), indexPatternAndAlias.alias(), rolloverIndices);
+            try {
+                rolloverIndices.actionGet();
+            } catch (Exception ex) {
+                logger.warn(() -> "failed rolling over legacy index [" + indexPatternAndAlias.indexPattern() + "]", ex);
+                if (ex instanceof ElasticsearchException elasticsearchException) {
+                    failures.add(
+                        new ElasticsearchStatusException("Failed rollover", elasticsearchException.status(), elasticsearchException)
+                    );
+                } else {
+                    failures.add(new ElasticsearchStatusException("Failed rollover", RestStatus.REQUEST_TIMEOUT, ex));
+                }
+
+                break;
+            }
+        }
+
+        if (failures.isEmpty()) {
+            logger.info("ML legacy indices rolled over");
+            return;
+        }
+
+        ElasticsearchException exception = new ElasticsearchException("failed to roll over ML legacy indices");
+        failures.forEach(exception::addSuppressed);
+        throw exception;
+    }
+
+    private void rolloverLegacyIndices(ClusterState clusterState, String indexPattern, String alias, ActionListener<Boolean> listener) {
+        var concreteIndices = expressionResolver.concreteIndexNames(clusterState, IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED, indexPattern);
+
+        if (concreteIndices.length == 0) {
+            // no matching indices
+            listener.onResponse(Boolean.FALSE);
+            return;
+        }
+
+        String latestIndex = MlIndexAndAlias.latestIndex(concreteIndices);
+        boolean isCompatibleIndexVersion = isCompatibleIndexVersion(clusterState.metadata().index(latestIndex).getCreationVersion());
+        boolean hasAlias = clusterState.getMetadata().hasAlias(alias);
+
+        if (isCompatibleIndexVersion && hasAlias) {
+            // v8 index with alias, no action required
+            listener.onResponse(Boolean.FALSE);
+            return;
+        }
+
+        SubscribableListener.newForked(l -> {
+            if (hasAlias == false) {
+                MlIndexAndAlias.updateWriteAlias(client, alias, null, latestIndex, l);
+            } else {
+                l.onResponse(Boolean.TRUE);
+            }
+        }).andThen((l, success) -> {
+            if (isCompatibleIndexVersion == false) {
+                logger.info("rolling over legacy index [{}] with alias [{}]", latestIndex, alias);
+                rollover(alias, l);
+            } else {
+                l.onResponse(Boolean.TRUE);
+            }
+        }).addListener(listener);
+    }
+
+    private void rollover(String alias, ActionListener<Boolean> listener) {
+        client.admin().indices().rolloverIndex(new RolloverRequest(alias, null), listener.delegateFailure((l, response) -> {
+            l.onResponse(Boolean.TRUE);
+        }));
+    }
+
+    /**
+     * True if the version is read *and* write
compatible not just read only compatible + */ + static boolean isCompatibleIndexVersion(IndexVersion version) { + return version.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java index e61ffba9b316..9fe9a5226f28 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java @@ -77,7 +77,7 @@ public class DatafeedConfigAutoUpdater implements MlAutoUpdateService.UpdateActi } @Override - public void runUpdate() { + public void runUpdate(ClusterState latestState) { PlainActionFuture> getdatafeeds = new PlainActionFuture<>(); provider.expandDatafeedConfigs("_all", true, null, getdatafeeds); List datafeedConfigBuilders = getdatafeeds.actionGet(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java new file mode 100644 index 000000000000..aa59028a4cc0 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java @@ -0,0 +1,283 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RecoverySource; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.Map; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static 
org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class MlIndexRolloverTests extends ESTestCase { + + private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); + + public void testIsAbleToRun_IndicesDoNotExist() { + RoutingTable.Builder routingTable = RoutingTable.builder(); + var rollover = new MlIndexRollover( + List.of( + new MlIndexRollover.IndexPatternAndAlias("my-index1-*", "my-index1-alias"), + new MlIndexRollover.IndexPatternAndAlias("my-index2-*", "my-index2-alias") + ), + indexNameExpressionResolver, + mock(Client.class) + ); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(routingTable.build()); + assertTrue(rollover.isAbleToRun(csBuilder.build())); + } + + public void testIsAbleToRun_IndicesHaveNoRouting() { + IndexMetadata.Builder indexMetadata = IndexMetadata.builder("my-index-000001"); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(RoutingTable.builder().build()); // no routing to index + csBuilder.metadata(metadata); + + var rollover = new MlIndexRollover( + List.of(new MlIndexRollover.IndexPatternAndAlias("my-index-*", "my-index-alias")), + indexNameExpressionResolver, + mock(Client.class) + ); + + assertFalse(rollover.isAbleToRun(csBuilder.build())); + } + + public void testIsAbleToRun_IndicesHaveNoActiveShards() { + String indexName = "my-index-000001"; + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(indexName); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + Index index = new Index(indexName, "_uuid"); + ShardId shardId = new ShardId(index, 0); + ShardRouting shardRouting = ShardRouting.newUnassigned( + shardId, + true, + RecoverySource.EmptyStoreRecoverySource.INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""), + ShardRouting.Role.DEFAULT + ); + shardRouting = shardRouting.initialize("node_id", null, 0L); + var routingTable = RoutingTable.builder() + .add(IndexRoutingTable.builder(index).addIndexShard(IndexShardRoutingTable.builder(shardId).addShard(shardRouting))) + .build(); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(routingTable); + csBuilder.metadata(metadata); + + var rollover = new MlIndexRollover( + List.of(new MlIndexRollover.IndexPatternAndAlias("my-index-*", "my-index-alias")), + indexNameExpressionResolver, + mock(Client.class) + ); + + assertFalse(rollover.isAbleToRun(csBuilder.build())); + } + + public void testRunUpdate_NoMatchingIndices() { + RoutingTable.Builder routingTable = RoutingTable.builder(); + + 
var client = mock(Client.class); + var rollover = new MlIndexRollover( + List.of( + new MlIndexRollover.IndexPatternAndAlias("my-index1-*", "my-index1-alias"), + new MlIndexRollover.IndexPatternAndAlias("my-index2-*", "my-index2-alias") + ), + indexNameExpressionResolver, + client + ); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(routingTable.build()); + rollover.runUpdate(csBuilder.build()); + verify(client).settings(); + verify(client).threadPool(); + verifyNoMoreInteractions(client); + } + + public void testRunUpdate_UpToDateIndicesWithAlias() { + String indexName = "my-index-000001"; + String indexAlias = "my-index-write"; + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(indexName); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + indexMetadata.putAlias(AliasMetadata.builder(indexAlias).build()); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mock(Client.class); + var rollover = new MlIndexRollover( + List.of(new MlIndexRollover.IndexPatternAndAlias("my-index-*", indexAlias)), + indexNameExpressionResolver, + client + ); + + rollover.runUpdate(csBuilder.build()); + // everything up to date so no action for the client + verify(client).settings(); + verify(client).threadPool(); + verifyNoMoreInteractions(client); + } + + public void testRunUpdate_LegacyIndexWithAlias() { + String indexName = "my-index-000001"; + String indexAlias = "my-index-write"; + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(indexName); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_17_0) // cannot read and write to a 7.x index + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + indexMetadata.putAlias(AliasMetadata.builder(indexAlias).build()); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mockClientWithRolloverAndAlias(); + var rollover = new MlIndexRollover( + List.of(new MlIndexRollover.IndexPatternAndAlias("my-index-*", indexAlias)), + indexNameExpressionResolver, + client + ); + + rollover.runUpdate(csBuilder.build()); + verify(client).settings(); + verify(client, times(3)).threadPool(); + verify(client).execute(same(RolloverAction.INSTANCE), any(), any()); // index rolled over + verifyNoMoreInteractions(client); + } + + public void testRunUpdate_LegacyIndexWithoutAlias() { + String indexName = "my-index-000001"; + String indexAlias = "my-index-write"; + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(indexName); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_17_0) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + // index is missing alias + + Metadata.Builder metadata = Metadata.builder(); + 
metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mockClientWithRolloverAndAlias(); + var rollover = new MlIndexRollover( + List.of(new MlIndexRollover.IndexPatternAndAlias("my-index-*", indexAlias)), + indexNameExpressionResolver, + client + ); + + rollover.runUpdate(csBuilder.build()); + verify(client).settings(); + verify(client, times(5)).threadPool(); + verify(client).execute(same(TransportIndicesAliasesAction.TYPE), any(), any()); // alias created + verify(client).execute(same(RolloverAction.INSTANCE), any(), any()); // index rolled over + verifyNoMoreInteractions(client); + } + + public void testIsCompatibleIndexVersion() { + assertTrue(MlIndexRollover.isCompatibleIndexVersion(IndexVersion.current())); + assertTrue(MlIndexRollover.isCompatibleIndexVersion(IndexVersions.MINIMUM_COMPATIBLE)); + assertFalse(MlIndexRollover.isCompatibleIndexVersion(IndexVersions.MINIMUM_READONLY_COMPATIBLE)); + } + + @SuppressWarnings("unchecked") + private Client mockClientWithRolloverAndAlias() { + var client = mock(Client.class); + + doAnswer(invocationOnMock -> { + ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; + actionListener.onResponse(new RolloverResponse("old", "new", Map.of(), false, true, true, true, true)); + return null; + }).when(client).execute(same(RolloverAction.INSTANCE), any(RolloverRequest.class), any(ActionListener.class)); + + doAnswer(invocationOnMock -> { + ActionListener actionListener = (ActionListener) invocationOnMock + .getArguments()[2]; + actionListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); + return null; + }).when(client).execute(same(TransportIndicesAliasesAction.TYPE), any(IndicesAliasesRequest.class), any(ActionListener.class)); + + var threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(client.threadPool()).thenReturn(threadPool); + + return client; + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdaterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdaterTests.java index bf6e63faeb6c..337de5ae7d7a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdaterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdaterTests.java @@ -79,7 +79,7 @@ public class DatafeedConfigAutoUpdaterTests extends ESTestCase { withDatafeed(datafeedWithRewrite2, true); DatafeedConfigAutoUpdater updater = new DatafeedConfigAutoUpdater(provider, indexNameExpressionResolver); - updater.runUpdate(); + updater.runUpdate(mock(ClusterState.class)); verify(provider, times(1)).updateDatefeedConfig( eq(datafeedWithRewrite1), @@ -120,7 +120,7 @@ public class DatafeedConfigAutoUpdaterTests extends ESTestCase { }).when(provider).updateDatefeedConfig(eq(datafeedWithRewriteFailure), any(), any(), any(), any()); DatafeedConfigAutoUpdater updater = new DatafeedConfigAutoUpdater(provider, indexNameExpressionResolver); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, updater::runUpdate); + ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> updater.runUpdate(mock(ClusterState.class))); assertThat(ex.getMessage(), equalTo("some datafeeds failed being upgraded.")); assertThat(ex.getSuppressed().length, equalTo(1)); 
assertThat(ex.getSuppressed()[0].getMessage(), equalTo("Failed to update datafeed " + datafeedWithRewriteFailure)); @@ -155,7 +155,7 @@ public class DatafeedConfigAutoUpdaterTests extends ESTestCase { withDatafeed(datafeedWithoutRewrite2, false); DatafeedConfigAutoUpdater updater = new DatafeedConfigAutoUpdater(provider, indexNameExpressionResolver); - updater.runUpdate(); + updater.runUpdate(mock(ClusterState.class)); verify(provider, times(0)).updateDatefeedConfig(any(), any(DatafeedUpdate.class), eq(Collections.emptyMap()), any(), any()); } From 2002510d4a42658002091d3ce38ad84c0f2b4c52 Mon Sep 17 00:00:00 2001 From: Alexey Ivanov Date: Thu, 23 Jan 2025 11:39:36 +0000 Subject: [PATCH 04/29] Fix Existing System Index Migration Integration Tests (ES-10527) (#120663) Re-enables system index migration tests in main. Previously, these tests were only running on the last minor version, leaving the system indices migration infrastructure largely untested for most of the time. --- .../migration/AbstractFeatureMigrationIntegTest.java | 10 +++------- .../elasticsearch/migration/FeatureMigrationIT.java | 8 ++++---- .../migration/MultiFeatureMigrationIT.java | 10 +++++----- .../TransportGetFeatureUpgradeStatusAction.java | 12 +++++------- .../org/elasticsearch/indices/SystemIndices.java | 4 +++- 5 files changed, 20 insertions(+), 24 deletions(-) diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java index ea1c8ade00ab..2a1401242f81 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java @@ -68,10 +68,11 @@ public abstract class AbstractFeatureMigrationIntegTest extends ESIntegTestCase static final String INTERNAL_MANAGED_INDEX_NAME = ".int-man-old"; static final int INDEX_DOC_COUNT = 100; // arbitrarily chosen static final int INTERNAL_MANAGED_FLAG_VALUE = 1; - public static final Version NEEDS_UPGRADE_VERSION = TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_VERSION.previousMajor(); - public static final IndexVersion NEEDS_UPGRADE_INDEX_VERSION = IndexVersionUtils.getPreviousMajorVersion( + static final String FIELD_NAME = "some_field"; + protected static final IndexVersion NEEDS_UPGRADE_INDEX_VERSION = IndexVersionUtils.getPreviousMajorVersion( TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_INDEX_VERSION ); + protected static final int UPGRADED_TO_VERSION = TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_VERSION.major + 1; static final SystemIndexDescriptor EXTERNAL_UNMANAGED = SystemIndexDescriptor.builder() .setIndexPattern(".ext-unman-*") @@ -131,11 +132,6 @@ public abstract class AbstractFeatureMigrationIntegTest extends ESIntegTestCase @Before public void setup() { - assumeTrue( - "We can only create the test indices we need if they're in the previous major version", - NEEDS_UPGRADE_VERSION.onOrAfter(Version.CURRENT.previousMajor()) - ); - internalCluster().setBootstrapMasterNodeIndex(0); masterName = internalCluster().startMasterOnlyNode(); masterAndDataNode = internalCluster().startNode(); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index a4aa0514bb47..06233b614782 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -208,7 +208,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest { assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-9", + ".int-man-old-reindexed-for-" + UPGRADED_TO_VERSION, INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -216,7 +216,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-9", + ".int-unman-old-reindexed-for-" + UPGRADED_TO_VERSION, INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -224,7 +224,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-9", + ".ext-man-old-reindexed-for-" + UPGRADED_TO_VERSION, EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -232,7 +232,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-9", + ".ext-unman-old-reindexed-for-" + UPGRADED_TO_VERSION, EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 3442e9dc4392..01a414243f39 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -218,7 +218,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest { // Finally, verify that all the indices exist and have the properties we expect. 
assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-9", + ".int-man-old-reindexed-for-" + UPGRADED_TO_VERSION, INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -226,7 +226,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-9", + ".int-unman-old-reindexed-for-" + UPGRADED_TO_VERSION, INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -234,7 +234,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-9", + ".ext-man-old-reindexed-for-" + UPGRADED_TO_VERSION, EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -242,7 +242,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-9", + ".ext-unman-old-reindexed-for-" + UPGRADED_TO_VERSION, EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, @@ -251,7 +251,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest { assertIndexHasCorrectProperties( finalMetadata, - ".second-int-man-old-reindexed-for-9", + ".second-int-man-old-reindexed-for-" + UPGRADED_TO_VERSION, SECOND_FEATURE_IDX_FLAG_VALUE, true, true, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index 7378cad0ed29..57673e5fe137 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -19,12 +19,12 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; -import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -53,13 +53,13 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA GetFeatureUpgradeStatusResponse> { /** - * Once all feature migrations for 9.x -> 10.x have been tested, we can bump this to Version.V_9_0_0 + * These versions should be set to current major and current major's index version */ - public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_8_0_0; - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0; + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) + public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_9_0_0; + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.UPGRADE_TO_LUCENE_10_0_0; private final SystemIndices systemIndices; - PersistentTasksService persistentTasksService; @Inject public TransportGetFeatureUpgradeStatusAction( @@ -68,7 +68,6 @@ public class 
TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA
         ActionFilters actionFilters,
         ClusterService clusterService,
         IndexNameExpressionResolver indexNameExpressionResolver,
-        PersistentTasksService persistentTasksService,
         SystemIndices systemIndices
     ) {
         super(
@@ -83,7 +82,6 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA
         );
 
         this.systemIndices = systemIndices;
-        this.persistentTasksService = persistentTasksService;
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java
index 42cda4da1a9e..d01763d676f3 100644
--- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java
+++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java
@@ -16,6 +16,7 @@ import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction;
 import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateResponse.ResetFeatureStateStatus;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;
@@ -110,7 +111,8 @@ import static org.elasticsearch.tasks.TaskResultsService.TASKS_FEATURE_NAME;
 public class SystemIndices {
     public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed";
     public static final String EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_external_system_index_access_origin";
-    public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-9";
+    private static final int UPGRADED_TO_VERSION = TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_VERSION.major + 1;
+    public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-" + UPGRADED_TO_VERSION;
 
     private static final Automaton EMPTY = Automata.makeEmpty();

From 45ae0718cc6f52c78c9dc96380d623be9917ba07 Mon Sep 17 00:00:00 2001
From: Luke Whiting
Date: Thu, 23 Jan 2025 12:10:26 +0000
Subject: [PATCH 05/29] Report Deprecated Indices That Are Flagged To Ignore
 Migration Reindex As A Warning (#120629)

* Add block state matching option to deprecation check predicate

* Add new deprecation checks to warn on old indices with ignore reindex flag

* Test for new deprecation checks

* Update docs/changelog/120629.yaml

* PR Changes - Remove leftover comment that's no longer true
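Both checks reuse the same predicate and differ only in the new block-state flag; a rough sketch of the two calls (names as in this diff, not the exact call sites):

```
// Old indices without a verified write block still need action -> CRITICAL issue
DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterState.metadata(), false);

// Old indices already verified read-only, i.e. flagged to ignore the migration reindex -> WARNING issue
DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterState.metadata(), true);
```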
---
 docs/changelog/120629.yaml                    |  6 ++
 .../deprecation/DeprecatedIndexPredicate.java | 34 ++++++---
 .../DataStreamDeprecationChecks.java          | 41 +++++++++--
 .../xpack/deprecation/DeprecationChecks.java  |  4 +-
 .../deprecation/IndexDeprecationChecks.java   | 20 +++++-
 .../DataStreamDeprecationChecksTests.java     | 72 +++++++++++++++++++
 .../IndexDeprecationChecksTests.java          | 20 ++++++
 ...ReindexDataStreamIndexTransportAction.java |  2 +-
 .../ReindexDataStreamTransportAction.java     |  2 +-
 ...indexDataStreamPersistentTaskExecutor.java |  6 +-
 10 files changed, 186 insertions(+), 21 deletions(-)
 create mode 100644 docs/changelog/120629.yaml

diff --git a/docs/changelog/120629.yaml b/docs/changelog/120629.yaml
new file mode 100644
index 000000000000..7862888d7fd4
--- /dev/null
+++ b/docs/changelog/120629.yaml
@@ -0,0 +1,6 @@
+pr: 120629
+summary: Report Deprecated Indices That Are Flagged To Ignore Migration Reindex As
+  A Warning
+area: Data streams
+type: enhancement
+issues: []

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java
index 4c8a63ed7386..48fb8ebdc577 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java
@@ -20,29 +20,38 @@ public class DeprecatedIndexPredicate {
 
     public static final IndexVersion MINIMUM_WRITEABLE_VERSION_AFTER_UPGRADE = IndexVersions.UPGRADE_TO_LUCENE_10_0_0;
 
-    /*
+    /**
      * This predicate allows through only indices that were created with a previous lucene version, meaning that they need to be reindexed
-     * in order to be writable in the _next_ lucene version.
+     * in order to be writable in the _next_ lucene version. It excludes searchable snapshots as they are not writable.
      *
      * It ignores searchable snapshots as they are not writable.
+     *
+     * @param metadata the cluster metadata
+     * @param filterToBlockedStatus if true, only indices that are write blocked will be returned,
+     *                              if false, only those without a block are returned
+     * @return a predicate that returns true for indices that need to be reindexed
      */
-    public static Predicate<Index> getReindexRequiredPredicate(Metadata metadata) {
+    public static Predicate<Index> getReindexRequiredPredicate(Metadata metadata, boolean filterToBlockedStatus) {
         return index -> {
             IndexMetadata indexMetadata = metadata.index(index);
-            return reindexRequired(indexMetadata);
+            return reindexRequired(indexMetadata, filterToBlockedStatus);
         };
     }
 
-    public static boolean reindexRequired(IndexMetadata indexMetadata) {
+    /**
+     * This method checks if the index was created with a previous lucene version, meaning that it needs to be reindexed
+     * in order to be writable in the _next_ lucene version. It excludes searchable snapshots as they are not writable.
+     *
+     * @param indexMetadata the index metadata
+     * @param filterToBlockedStatus if true, only write-blocked indices match,
+     *                              if false, only those without a block match
+     * @return true if the index needs to be reindexed
+     */
+    public static boolean reindexRequired(IndexMetadata indexMetadata, boolean filterToBlockedStatus) {
         return creationVersionBeforeMinimumWritableVersion(indexMetadata)
             && isNotSearchableSnapshot(indexMetadata)
             && isNotClosed(indexMetadata)
-            && isNotVerifiedReadOnly(indexMetadata);
-    }
-
-    private static boolean isNotVerifiedReadOnly(IndexMetadata indexMetadata) {
-        // no need to check blocks.
-        return MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()) == false;
+            && matchBlockedStatus(indexMetadata, filterToBlockedStatus);
     }
 
     private static boolean isNotSearchableSnapshot(IndexMetadata indexMetadata) {
@@ -57,4 +66,7 @@ public class DeprecatedIndexPredicate {
         return indexMetadata.getState().equals(IndexMetadata.State.CLOSE) == false;
     }
 
+    private static boolean matchBlockedStatus(IndexMetadata indexMetadata, boolean filterToBlockedStatus) {
+        return MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()) == filterToBlockedStatus;
+    }
 }

diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java
index 65f2659fda04..8af4868f9451 100644
--- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java
+++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java
@@ -24,10 +24,7 @@ public class DataStreamDeprecationChecks {
 
     static DeprecationIssue oldIndicesCheck(DataStream dataStream, ClusterState clusterState) {
         List<Index> backingIndices = dataStream.getIndices();
-        Set<String> indicesNeedingUpgrade = backingIndices.stream()
-            .filter(DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterState.metadata()))
-            .map(Index::getName)
-            .collect(Collectors.toUnmodifiableSet());
+        Set<String> indicesNeedingUpgrade = getReindexRequiredIndices(backingIndices, clusterState, false);
 
         if (indicesNeedingUpgrade.isEmpty() == false) {
             return new DeprecationIssue(
@@ -47,4 +44,40 @@ public class DataStreamDeprecationChecks {
 
         return null;
     }
+
+    static DeprecationIssue ignoredOldIndicesCheck(DataStream dataStream, ClusterState clusterState) {
+        List<Index> backingIndices = dataStream.getIndices();
+
+        Set<String> ignoredIndices = getReindexRequiredIndices(backingIndices, clusterState, true);
+
+        if (ignoredIndices.isEmpty() == false) {
+            return new DeprecationIssue(
+                DeprecationIssue.Level.WARNING,
+                "Old data stream with a compatibility version < 9.0 Has Been Ignored",
+                "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
+                "This data stream has read-only backing indices that were created before Elasticsearch 9.0.0 and have been marked as "
+                    + "OK to remain read-only after upgrade",
+                false,
+                ofEntries(
+                    entry("reindex_required", true),
+                    entry("total_backing_indices", backingIndices.size()),
+                    entry("ignored_indices_requiring_upgrade_count", ignoredIndices.size()),
+                    entry("ignored_indices_requiring_upgrade", ignoredIndices)
+                )
+            );
+        }
+
+        return null;
+    }
+
+    private static Set<String> getReindexRequiredIndices(
+        List<Index> backingIndices,
+        ClusterState clusterState,
+        boolean filterToBlockedStatus
+    ) {
+        return backingIndices.stream()
+            .filter(DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterState.metadata(), filterToBlockedStatus))
+            .map(Index::getName)
+            .collect(Collectors.toUnmodifiableSet());
+    }
 }

diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java
index 1bc040418bf0..f7a26597e07f 100644
--- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java
+++
b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -94,6 +94,7 @@ public class DeprecationChecks { static List> INDEX_SETTINGS_CHECKS = List.of( IndexDeprecationChecks::oldIndicesCheck, + IndexDeprecationChecks::ignoredOldIndicesCheck, IndexDeprecationChecks::translogRetentionSettingCheck, IndexDeprecationChecks::checkIndexDataPath, IndexDeprecationChecks::storeTypeSettingCheck, @@ -102,7 +103,8 @@ public class DeprecationChecks { ); static List> DATA_STREAM_CHECKS = List.of( - DataStreamDeprecationChecks::oldIndicesCheck + DataStreamDeprecationChecks::oldIndicesCheck, + DataStreamDeprecationChecks::ignoredOldIndicesCheck ); /** diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 1bef1464152d..5a9d6771e5f4 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -36,7 +36,7 @@ public class IndexDeprecationChecks { // TODO: this check needs to be revised. It's trivially true right now. IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks - if (DeprecatedIndexPredicate.reindexRequired(indexMetadata) && isNotDataStreamIndex(indexMetadata, clusterState)) { + if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, false) && isNotDataStreamIndex(indexMetadata, clusterState)) { return new DeprecationIssue( DeprecationIssue.Level.CRITICAL, "Old index with a compatibility version < 9.0", @@ -49,6 +49,24 @@ public class IndexDeprecationChecks { return null; } + static DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); + // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks + if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, true) && isNotDataStreamIndex(indexMetadata, clusterState)) { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Old index with a compatibility version < 9.0 Has Been Ignored", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This read-only index has version: " + + currentCompatibilityVersion.toReleaseVersion() + + " and will be supported as read-only in 9.0", + false, + Collections.singletonMap("reindex_required", true) + ); + } + return null; + } + private static boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) { return clusterState.metadata().findDataStreams(indexMetadata.getIndex().getName()).isEmpty(); } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java index 712807db46ec..edc7ea03823d 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java 
@@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.DataStream;
 import org.elasticsearch.cluster.metadata.DataStreamOptions;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.metadata.MetadataIndexStateService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexMode;
@@ -224,4 +225,75 @@ public class DataStreamDeprecationChecksTests extends ESTestCase {
         nameToIndexMetadata.put(indexMetadata.getIndex().getName(), indexMetadata);
         return indexMetadata.getIndex();
     }
+
+    public void testOldIndicesIgnoredWarningCheck() {
+        int oldIndexCount = randomIntBetween(1, 100);
+        int newIndexCount = randomIntBetween(1, 100);
+
+        List<Index> allIndices = new ArrayList<>();
+        Map<String, IndexMetadata> nameToIndexMetadata = new HashMap<>();
+        Set<String> expectedIndices = new HashSet<>();
+
+        for (int i = 0; i < oldIndexCount; i++) {
+            Settings.Builder settings = settings(IndexVersion.fromId(7170099));
+
+            String indexName = "old-data-stream-index-" + i;
+            settings.put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true);
+            expectedIndices.add(indexName);
+
+            Settings.Builder settingsBuilder = settings;
+            IndexMetadata oldIndexMetadata = IndexMetadata.builder(indexName)
+                .settings(settingsBuilder)
+                .numberOfShards(1)
+                .numberOfReplicas(0)
+                .build();
+            allIndices.add(oldIndexMetadata.getIndex());
+            nameToIndexMetadata.put(oldIndexMetadata.getIndex().getName(), oldIndexMetadata);
+        }
+
+        for (int i = 0; i < newIndexCount; i++) {
+            Index newIndex = createNewIndex(i, false, nameToIndexMetadata);
+            allIndices.add(newIndex);
+        }
+
+        DataStream dataStream = new DataStream(
+            randomAlphaOfLength(10),
+            allIndices,
+            randomNegativeLong(),
+            Map.of(),
+            randomBoolean(),
+            false,
+            false,
+            randomBoolean(),
+            randomFrom(IndexMode.values()),
+            null,
+            randomFrom(DataStreamOptions.EMPTY, DataStreamOptions.FAILURE_STORE_DISABLED, DataStreamOptions.FAILURE_STORE_ENABLED, null),
+            List.of(),
+            randomBoolean(),
+            null
+        );
+
+        Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build();
+        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
+
+        DeprecationIssue expected = new DeprecationIssue(
+            DeprecationIssue.Level.WARNING,
+            "Old data stream with a compatibility version < 9.0 Has Been Ignored",
+            "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
+            "This data stream has read-only backing indices that were created before Elasticsearch 9.0.0 and have been marked as "
+                + "OK to remain read-only after upgrade",
+            false,
+            ofEntries(
+                entry("reindex_required", true),
+                entry("total_backing_indices", oldIndexCount + newIndexCount),
+                entry("ignored_indices_requiring_upgrade_count", expectedIndices.size()),
+                entry("ignored_indices_requiring_upgrade", expectedIndices)
+            )
+        );
+
+        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState));
+
+        assertThat(issues, equalTo(singletonList(expected)));
+    }
+
 }

diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java
index de229c555ade..ed119634427e 100644
--- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java
+++
b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.DataStreamMetadata; import org.elasticsearch.cluster.metadata.DataStreamOptions; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexMode; @@ -132,6 +133,25 @@ public class IndexDeprecationChecksTests extends ESTestCase { assertThat(issues, empty()); } + public void testOldIndicesIgnoredWarningCheck() { + IndexVersion createdWith = IndexVersion.fromId(7170099); + Settings.Builder settings = settings(createdWith).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); + IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .build(); + DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Old index with a compatibility version < 9.0 Has Been Ignored", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This read-only index has version: " + createdWith.toReleaseVersion() + " and will be supported as read-only in 9.0", + false, + singletonMap("reindex_required", true) + ); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); + assertEquals(singletonList(expected), issues); + } + public void testTranslogRetentionSettings() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue()); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index 45c318a6ec5a..fc2ca0364e8a 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -118,7 +118,7 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio IndexMetadata sourceIndex = clusterService.state().getMetadata().index(sourceIndexName); Settings settingsBefore = sourceIndex.getSettings(); - var hasOldVersion = DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterService.state().metadata()); + var hasOldVersion = DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterService.state().metadata(), false); if (hasOldVersion.test(sourceIndex.getIndex()) == false) { logger.warn( "Migrating index [{}] with version [{}] is unnecessary as its version is not before [{}]", diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java index 26301f1397a0..a6d9adc6b4e3 100644 --- 
a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java
+++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java
@@ -69,7 +69,7 @@ public class ReindexDataStreamTransportAction extends HandledTransportAction
             List dataStreamInfos = response.getDataStreams();
             if (dataStreamInfos.size() == 1) {
                 DataStream dataStream = dataStreamInfos.getFirst().getDataStream();
-                if (getReindexRequiredPredicate(clusterService.state().metadata()).test(dataStream.getWriteIndex())) {
+                if (getReindexRequiredPredicate(clusterService.state().metadata(), false).test(dataStream.getWriteIndex())) {
                     RolloverRequest rolloverRequest = new RolloverRequest(sourceDataStream, null);
                     rolloverRequest.setParentTask(taskId);
                     reindexClient.execute(
@@ -161,7 +161,9 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec
         TaskId parentTaskId
     ) {
         List<Index> indices = dataStream.getIndices();
-        List<Index> indicesToBeReindexed = indices.stream().filter(getReindexRequiredPredicate(clusterService.state().metadata())).toList();
+        List<Index> indicesToBeReindexed = indices.stream()
+            .filter(getReindexRequiredPredicate(clusterService.state().metadata(), false))
+            .toList();
         final ReindexDataStreamPersistentTaskState updatedState;
         if (params.totalIndices() != totalIndicesInDataStream
             || params.totalIndicesToBeUpgraded() != indicesToBeReindexed.size()

From ec546e31dbf73c77039ea9eb13bb94444a05d26c Mon Sep 17 00:00:00 2001
From: Craig Taverner
Date: Thu, 23 Jan 2025 15:04:54 +0200
Subject: [PATCH 06/29] Initial support for TEXT fields in LOOKUP JOIN condition (#119473)

When the join field on the right-hand side is a TEXT field, we cannot do an exact
match. Since ES|QL treats TEXT fields as KEYWORD in all cases, ideally we would
like to do the same for JOIN. However, this is achieved on the left-hand index in
a way that is not easily achievable on the right-hand side.
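(For reference, the examples below assume the usual multi-field mapping where a TEXT field carries a KEYWORD subfield, roughly:

```
"color": {
  "type": "text",
  "fields": { "keyword": { "type": "keyword" } }
}
```

so `color` is the TEXT field and `color.keyword` is the exact-match KEYWORD subfield.)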
Comparing filtering and field extraction of left and right: * `FROM left` * FieldExtraction is done using `field.keyword` subfield if it exists, or from `_source` otherwise * Filtering is done by pushing down to Lucene `field.keyword` if it exists, or by not pushing down and filtering the value extracted from `_source` inside the compute engine itself * `LOOKUP JOIN right` * FieldExtraction is done simplistically, with no `_source` extraction * Filtering pushdown can be done with `field.keyword` if it exists, but we have no easy solution to filtering otherwise The decision taken is to disallow joining on TEXT fields, but allow explicit joining on the underlying keyword field (explicit in the query): | left type | right type | result | | --- | --- | --- | | KEYWORD | KEYWORD | :white_check_mark: Works | | TEXT | KEYWORD | :white_check_mark: Works | | KEYWORD | TEXT | :x: Type mismatch error | | TEXT | TEXT | :x: Type mismatch error | ### Examples #### KEYWORD-KEYWORD :white_check_mark: ``` FROM test | LOOKUP JOIN `test-lookup` ON color.keyword ``` #### TEXT-KEYWORD :white_check_mark: ``` FROM test | RENAME color AS x | EVAL color.keyword = x | LOOKUP JOIN `test-lookup` ON color.keyword ``` #### KEYWORD-TEXT :x: ``` FROM test | EVAL color = color.keyword | LOOKUP JOIN `test-lookup` ON color ``` #### TEXT-TEXT :x: ``` FROM test | LOOKUP JOIN `test-lookup` ON color ``` --- .../src/main/resources/lookup-join.csv-spec | 30 +++- .../xpack/esql/action/EsqlCapabilities.java | 5 + .../esql/enrich/LookupFromIndexService.java | 15 +- .../xpack/esql/plan/logical/join/Join.java | 8 +- .../esql/planner/LocalExecutionPlanner.java | 36 +++-- .../test/esql/191_lookup_join_text.yml | 133 ++++++++++++++++++ 6 files changed, 201 insertions(+), 26 deletions(-) create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index ae7593b7999a..7b2395030a53 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -520,7 +520,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; ########################################################################### -# nested filed join behavior with languages_nested_fields index +# nested field join behavior with languages_nested_fields index ########################################################################### joinOnNestedField @@ -568,6 +568,34 @@ language.id:integer | language.name:text | language.name.keyword:keyword 1 | English | English ; +joinOnNestedNestedFieldRowExplicitKeyword +required_capability: join_lookup_v11 +required_capability: lookup_join_text + +ROW language.name.keyword = "English" +| LOOKUP JOIN languages_nested_fields ON language.name.keyword +| KEEP language.id, language.name, language.name.keyword +; + +language.id:integer | language.name:text | language.name.keyword:keyword +1 | English | English +; + +joinOnNestedNestedFieldRowExplicitKeywords +required_capability: join_lookup_v11 +required_capability: lookup_join_text + +ROW language.name.keyword = ["English", "French"] +| MV_EXPAND language.name.keyword +| LOOKUP JOIN languages_nested_fields ON language.name.keyword +| KEEP language.id, language.name, language.name.keyword, language.code +; + +language.id:integer | language.name:text | 
language.name.keyword:keyword | language.code:keyword +1 | English | English | EN +2 | French | French | FR +; + ############################################### # Tests with clientips_lookup index ############################################### diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index fd132af21001..08e0f0cf473e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -690,6 +690,11 @@ public class EsqlCapabilities { */ JOIN_LOOKUP_V11(Build.current().isSnapshot()), + /** + * LOOKUP JOIN with TEXT fields on the right (right side of the join) (#119473) + */ + LOOKUP_JOIN_TEXT(Build.current().isSnapshot()), + /** * LOOKUP JOIN without MV matching (https://github.com/elastic/elasticsearch/issues/118780) */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 37d2e14d7d89..9bea212a56aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -19,12 +19,10 @@ import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -79,9 +77,7 @@ public class LookupFromIndexService extends AbstractLookupService matchFields = new ArrayList<>(join.leftFields().size()); - for (Attribute m : join.leftFields()) { - Layout.ChannelAndType t = source.layout.get(m.id()); - if (t == null) { - throw new IllegalArgumentException("can't plan [" + join + "][" + m + "]"); + if (join.leftFields().size() != join.rightFields().size()) { + throw new IllegalArgumentException("can't plan [" + join + "]: mismatching left and right field count"); + } + List matchFields = new ArrayList<>(join.leftFields().size()); + for (int i = 0; i < join.leftFields().size(); i++) { + TypedAttribute left = (TypedAttribute) join.leftFields().get(i); + FieldAttribute right = (FieldAttribute) join.rightFields().get(i); + Layout.ChannelAndType input = source.layout.get(left.id()); + if (input == null) { + throw new IllegalArgumentException("can't plan [" + join + "][" + left + "]"); } - matchFields.add(t); + matchFields.add(new MatchConfig(right, input)); } if (matchFields.size() != 1) { - throw new IllegalArgumentException("can't plan [" + join + "]"); + throw new IllegalArgumentException("can't plan [" + join + "]: multiple join predicates are not supported"); } + // TODO support multiple match fields, and support more than equality predicates + MatchConfig matchConfig = 
matchFields.getFirst(); return source.with( new LookupFromIndexOperator.Factory( sessionId, parentTask, context.queryPragmas().enrichMaxWorkers(), - matchFields.getFirst().channel(), + matchConfig.channel(), ctx -> lookupFromIndexService, - matchFields.getFirst().type(), + matchConfig.type(), indexName, - join.leftFields().getFirst().name(), + matchConfig.fieldName(), join.addedFields().stream().map(f -> (NamedExpression) f).toList(), join.source() ), @@ -600,6 +609,13 @@ public class LocalExecutionPlanner { ); } + private record MatchConfig(String fieldName, int channel, DataType type) { + private MatchConfig(FieldAttribute match, Layout.ChannelAndType input) { + // Note, this handles TEXT fields with KEYWORD subfields + this(match.exactAttribute().name(), input.channel(), input.type()); + } + } + private PhysicalOperation planLocal(LocalSourceExec localSourceExec, LocalExecutionPlannerContext context) { Layout.Builder layout = new Layout.Builder(); layout.append(localSourceExec.output()); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml new file mode 100644 index 000000000000..1b532ab80eeb --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml @@ -0,0 +1,133 @@ +--- +setup: + - requires: + test_runner_features: [capabilities, contains] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_join_text] + reason: "uses LOOKUP JOIN" + - do: + indices.create: + index: test + body: + mappings: + properties: + color: + type: text + fields: + keyword: + type: keyword + description: + type: text + fields: + keyword: + type: keyword + - do: + indices.create: + index: test-lookup + body: + settings: + index: + mode: lookup + number_of_shards: 1 + mappings: + properties: + color: + type: text + fields: + keyword: + type: keyword + description: + type: text + fields: + keyword: + type: keyword + - do: + bulk: + index: "test" + refresh: true + body: + - { "index": { } } + - { "color": "red", "description": "The color Red" } + - { "index": { } } + - { "color": "blue", "description": "The color Blue" } + - { "index": { } } + - { "color": "green", "description": "The color Green" } + - do: + bulk: + index: "test-lookup" + refresh: true + body: + - { "index": { } } + - { "color": "red", "description": "As red as a tomato" } + - { "index": { } } + - { "color": "blue", "description": "As blue as the sky" } + +--- +keyword-keyword: + - do: + esql.query: + body: + query: 'FROM test | SORT color | LOOKUP JOIN `test-lookup` ON color.keyword | LIMIT 3' + + - length: { columns: 4 } + - length: { values: 3 } + - match: {columns.0.name: "color.keyword"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "text"} + - match: {columns.2.name: "description"} + - match: {columns.2.type: "text"} + - match: {columns.3.name: "description.keyword"} + - match: {columns.3.type: "keyword"} + - match: {values.0: ["blue", "blue", "As blue as the sky", "As blue as the sky"]} + - match: {values.1: ["green", null, null, null]} + - match: {values.2: ["red", "red", "As red as a tomato", "As red as a tomato"]} + +--- +text-keyword: + - do: + esql.query: + body: + query: 'FROM test | SORT color | RENAME color AS x | EVAL color.keyword = x | LOOKUP JOIN `test-lookup` ON color.keyword | LIMIT 3' + + - length: { columns: 5 } + - length: { values: 3 } + - 
match: {columns.0.name: "x"} - match: {columns.0.type: "text"} - match: {columns.1.name: "color.keyword"} - match: {columns.1.type: "text"} - match: {columns.2.name: "color"} - match: {columns.2.type: "text"} - match: {columns.3.name: "description"} - match: {columns.3.type: "text"} - match: {columns.4.name: "description.keyword"} - match: {columns.4.type: "keyword"} - match: {values.0: ["blue", "blue", "blue", "As blue as the sky", "As blue as the sky"]} - match: {values.1: ["green", "green", null, null, null]} - match: {values.2: ["red", "red", "red", "As red as a tomato", "As red as a tomato"]}
+--- +text-text: + - do: + esql.query: + body: + query: 'FROM test | SORT color | LOOKUP JOIN `test-lookup` ON color | LIMIT 3' + catch: "bad_request" + + - match: { error.type: "verification_exception" } + - contains: { error.reason: "Found 1 problem\nline 1:55: JOIN with right field [color] of type [TEXT] is not supported" } +
+--- +keyword-text: + - do: + esql.query: + body: + query: 'FROM test | SORT color | EVAL color = color.keyword | LOOKUP JOIN `test-lookup` ON color | LIMIT 3' + catch: "bad_request" + + - match: { error.type: "verification_exception" } + - contains: { error.reason: "Found 1 problem\nline 1:84: JOIN with right field [color] of type [TEXT] is not supported" } +
From bb0d0ed6dd9dc07f5cdbc381cdb38d11ee1fe159 Mon Sep 17 00:00:00 2001
From: Liam Thompson <32779855+leemthompo@users.noreply.github.com>
Date: Thu, 23 Jan 2025 14:08:27 +0100
Subject: [PATCH 07/29] Removes outdated admonition (#120556) (#120703)

Resolves https://github.com/elastic/security-docs/issues/6430. Removes an outdated admonition.

(cherry picked from commit 63074d8e7042059ee1a10243b005534114f9fe82)

Co-authored-by: Benjamin Ironside Goldstein <91905639+benironside@users.noreply.github.com>
--- docs/reference/esql/esql-security-solution.asciidoc | 5 ----- 1 file changed, 5 deletions(-)
diff --git a/docs/reference/esql/esql-security-solution.asciidoc b/docs/reference/esql/esql-security-solution.asciidoc index 24766a5ef93f..835f2e59c46e 100644 --- a/docs/reference/esql/esql-security-solution.asciidoc +++ b/docs/reference/esql/esql-security-solution.asciidoc
@@ -34,8 +34,3 @@ more, refer to {security-guide}/rules-ui-create.html#create-esql-rule[Create an Use the Elastic AI Assistant to build {esql} queries, or answer questions about the {esql} query language. To learn more, refer to {security-guide}/security-assistant.html[AI Assistant]. - -NOTE: For AI Assistant to answer questions about {esql} and write {esql} -queries, you need to -{security-guide}/security-assistant.html#set-up-ai-assistant[enable knowledge -base].
From 77ef1d41a0e58fe6eb036a93952fc38031fe69cf Mon Sep 17 00:00:00 2001
From: Mary Gouseti
Date: Thu, 23 Jan 2025 15:24:45 +0200
Subject: [PATCH 08/29] Failure store - No selectors in snapshots (#120114)

In this PR we explore how we can include the failure store indices while disallowing the user to use selectors in the `indices` field of the create and restore snapshot requests.

**Security concerns** The create and restore snapshot APIs require cluster privileges only, so we do not have to worry about how to handle index level security (see [Create Snapshot API](https://www.elastic.co/guide/en/elasticsearch/reference/current/create-snapshot-api.html#create-snapshot-api-prereqs) & [Restore Snapshot API](https://www.elastic.co/guide/en/elasticsearch/reference/current/restore-snapshot-api.html#restore-snapshot-api-prereqs)).
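As a rough console sketch of the intended behaviour (hypothetical repository and data stream names):

```
# Rejected: index component selectors are not supported in this context
PUT _snapshot/my-repo/snap-1
{ "indices": "my-data-stream::failures" }

# Accepted: the plain data stream name now implicitly includes its failure indices
PUT _snapshot/my-repo/snap-2
{ "indices": "my-data-stream" }
```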
**Challenges** There are other APIs that do not support selectors but still include the failure indices in their response, such as `GET _data_stream/` and `GET _data_stream//stats`; the snapshot APIs, however, are a little different. The reason is that the data stream APIs first collect only the relevant data streams and then select the backing indices and/or the failure indices. The snapshot APIs, on the other hand, work with both indices and data streams, so they cannot take such a shortcut and need to use the `concreteIndices` method. We propose to add a flag that, when selectors are not allowed, determines whether we need to include the failure store or not. In the past we had something similar called the default selector, but it was more flexible and it was always used as a fallback. Our goal now is to only specify the behaviour we want when selectors are not supported. This new flag also allowed us to simplify the concrete index resolution in `GET _data_stream//stats`.

Relates to https://github.com/elastic/elasticsearch/issues/119545
--- .../datastreams/DataStreamsSnapshotsIT.java | 49 ++++++- .../DataStreamsStatsTransportAction.java | 12 -- .../org/elasticsearch/TransportVersions.java | 1 + .../create/CreateSnapshotRequest.java | 5 +- .../restore/RestoreSnapshotRequest.java | 13 +- .../datastreams/DataStreamsActionUtil.java | 59 ++------- .../datastreams/DataStreamsStatsAction.java | 1 + .../action/support/IndicesOptions.java | 121 +++++++++--------- .../metadata/IndexNameExpressionResolver.java | 27 ++-- .../snapshots/SnapshotsService.java | 3 - .../create/CreateSnapshotRequestTests.java | 1 + .../restore/RestoreSnapshotRequestTests.java | 1 + .../DataStreamsActionUtilTests.java | 13 ++ .../action/support/IndicesOptionsTests.java | 9 +- .../datafeed/DatafeedNodeSelectorTests.java | 7 +- 15 files changed, 177 insertions(+), 145 deletions(-)
diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 0ae7504bb9d7..e78d9b4f2b8c 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java
@@ -37,7 +37,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAlias; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.index.Index;
@@ -341,13 +340,13 @@ public class DataStreamsSnapshotsIT extends AbstractSnapshotIntegTestCase { assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("ds", 3))); } - public void testFailureStoreSnapshotAndRestore() throws Exception { + public void testFailureStoreSnapshotAndRestore() { String dataStreamName = "with-fs"; CreateSnapshotResponse createSnapshotResponse = client.admin() .cluster() .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) .setWaitForCompletion(true) - .setIndices(IndexNameExpressionResolver.combineSelector(dataStreamName, IndexComponentSelector.ALL_APPLICABLE)) + .setIndices(dataStreamName) .setIncludeGlobalState(false)
.get(); @@ -398,6 +397,49 @@ public class DataStreamsSnapshotsIT extends AbstractSnapshotIntegTestCase { } } + public void testSelectorsNotAllowedInSnapshotAndRestore() { + String dataStreamName = "with-fs"; + try { + client.admin() + .cluster() + .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(dataStreamName + "::" + randomFrom(IndexComponentSelector.values()).getKey()) + .setIncludeGlobalState(false) + .get(); + fail("Should have failed because selectors are not allowed in snapshot creation"); + } catch (IllegalArgumentException e) { + assertThat( + e.getMessage(), + containsString("Index component selectors are not supported in this context but found selector in expression") + ); + } + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices("ds") + .setIncludeGlobalState(false) + .get(); + + RestStatus status = createSnapshotResponse.getSnapshotInfo().status(); + assertEquals(RestStatus.OK, status); + try { + client.admin() + .cluster() + .prepareRestoreSnapshot(TEST_REQUEST_TIMEOUT, REPO, SNAPSHOT) + .setWaitForCompletion(true) + .setIndices(dataStreamName + "::" + randomFrom(IndexComponentSelector.values()).getKey()) + .get(); + fail("Should have failed because selectors are not allowed in snapshot restore"); + } catch (IllegalArgumentException e) { + assertThat( + e.getMessage(), + containsString("Index component selectors are not supported in this context but found selector in expression") + ); + } + } + public void testSnapshotAndRestoreAllIncludeSpecificDataStream() throws Exception { DocWriteResponse indexResponse = client.prepareIndex("other-ds") .setOpType(DocWriteRequest.OpType.CREATE) @@ -1241,6 +1283,7 @@ public class DataStreamsSnapshotsIT extends AbstractSnapshotIntegTestCase { SnapshotInfo retrievedSnapshot = getSnapshot(REPO, SNAPSHOT); assertThat(retrievedSnapshot.dataStreams(), contains(dataStreamName)); assertThat(retrievedSnapshot.indices(), containsInAnyOrder(fsBackingIndexName)); + assertThat(retrievedSnapshot.indices(), not(containsInAnyOrder(fsFailureIndexName))); assertAcked( safeGet(client.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, "*"))) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java index c6bee86e20fc..4d6eead07b94 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.datastreams.DataStreamsStatsAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.IndexComponentSelector; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -101,17 +100,6 @@ public class DataStreamsStatsTransportAction extends TransportBroadcastByNodeAct return 
state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, concreteIndices); } - @Override - protected String[] resolveConcreteIndexNames(ClusterState clusterState, DataStreamsStatsAction.Request request) { - return DataStreamsActionUtil.resolveConcreteIndexNamesWithSelector( - indexNameExpressionResolver, - clusterState, - request.indices(), - IndexComponentSelector.ALL_APPLICABLE, - request.indicesOptions() - ).toArray(String[]::new); - } - @Override protected ShardsIterator shards(ClusterState clusterState, DataStreamsStatsAction.Request request, String[] concreteIndices) { return clusterState.getRoutingTable().allShards(concreteIndices); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 65a745f0fe36..a50f888927d4 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -162,6 +162,7 @@ public class TransportVersions { public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_00_0); public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0); public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0); + public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index bfdf41e58f6d..5637971665a0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeRequest; -import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; @@ -69,9 +68,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest abstractionNames = indexNameExpressionResolver.dataStreams( + List resolvedDataStreamExpressions = indexNameExpressionResolver.dataStreams( clusterState, updateIndicesOptions(indicesOptions), names ); SortedMap indicesLookup = clusterState.getMetadata().getIndicesLookup(); - List results = new ArrayList<>(abstractionNames.size()); - for (ResolvedExpression abstractionName : abstractionNames) { - IndexAbstraction indexAbstraction = indicesLookup.get(abstractionName.resource()); + List results = new ArrayList<>(resolvedDataStreamExpressions.size()); + for (ResolvedExpression resolvedExpression : resolvedDataStreamExpressions) { + IndexAbstraction indexAbstraction = indicesLookup.get(resolvedExpression.resource()); assert indexAbstraction != null; if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) { - selectDataStreamIndicesNames( - (DataStream) indexAbstraction, - IndexComponentSelector.FAILURES.equals(abstractionName.selector()), - results - ); - } - } - return results; - } - - /** - * Resolves a list of expressions into data stream names and then collects the concrete indices - * that are 
applicable for those data streams based on the selector provided in the arguments. - * @param indexNameExpressionResolver resolver object - * @param clusterState state to query - * @param names data stream expressions - * @param selector which component indices of the data stream should be returned - * @param indicesOptions options for expression resolution - * @return A stream of concrete index names that belong to the components specified - * on the data streams returned from the expressions given - */ - public static List resolveConcreteIndexNamesWithSelector( - IndexNameExpressionResolver indexNameExpressionResolver, - ClusterState clusterState, - String[] names, - IndexComponentSelector selector, - IndicesOptions indicesOptions - ) { - assert indicesOptions.allowSelectors() == false : "If selectors are enabled, use resolveConcreteIndexNames instead"; - List abstractionNames = indexNameExpressionResolver.dataStreamNames( - clusterState, - updateIndicesOptions(indicesOptions), - names - ); - SortedMap indicesLookup = clusterState.getMetadata().getIndicesLookup(); - - List results = new ArrayList<>(abstractionNames.size()); - for (String abstractionName : abstractionNames) { - IndexAbstraction indexAbstraction = indicesLookup.get(abstractionName); - assert indexAbstraction != null; - if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) { - if (selector.shouldIncludeData()) { - selectDataStreamIndicesNames((DataStream) indexAbstraction, false, results); + DataStream dataStream = (DataStream) indexAbstraction; + if (IndexNameExpressionResolver.shouldIncludeRegularIndices(indicesOptions, resolvedExpression.selector())) { + selectDataStreamIndicesNames(dataStream, false, results); } - if (selector.shouldIncludeFailures()) { - selectDataStreamIndicesNames((DataStream) indexAbstraction, true, results); + if (IndexNameExpressionResolver.shouldIncludeFailureIndices(indicesOptions, resolvedExpression.selector())) { + selectDataStreamIndicesNames(dataStream, true, results); } } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java index 82afeec75237..aa89a2b04173 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsStatsAction.java @@ -57,6 +57,7 @@ public class DataStreamsStatsAction extends ActionType states = EnumSet.noneOf(WildcardStates.class); @@ -937,10 +936,15 @@ public record IndicesOptions( } else if (in.getTransportVersion().onOrAfter(TransportVersions.REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX)) { allowSelectors = options.contains(Option.ALLOW_SELECTORS); } + boolean includeFailureIndices = false; + if (in.getTransportVersion().onOrAfter(TransportVersions.ADD_INCLUDE_FAILURE_INDICES_OPTION)) { + includeFailureIndices = options.contains(Option.INCLUDE_FAILURE_INDICES); + } GatekeeperOptions gatekeeperOptions = GatekeeperOptions.builder() .allowClosedIndices(options.contains(Option.ERROR_WHEN_CLOSED_INDICES) == false) .allowAliasToMultipleIndices(options.contains(Option.ERROR_WHEN_ALIASES_TO_MULTIPLE_INDICES) == false) .allowSelectors(allowSelectors) + .includeFailureIndices(includeFailureIndices) .ignoreThrottled(options.contains(Option.IGNORE_THROTTLED)) .build(); if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { @@ -1319,6 +1323,15 @@ public record IndicesOptions( 
return STRICT_EXPAND_OPEN; } + /** + * @return indices options that requires every specified index to exist, expands wildcards only to open indices and + * allows that no indices are resolved from wildcard expressions (not returning an error). It disallows selectors + * in the expression (no :: separators). + */ + public static IndicesOptions strictExpandOpenFailureNoSelectors() { + return STRICT_EXPAND_OPEN_FAILURE_NO_SELECTOR; + } + /** * @return indices options that requires every specified index to exist, expands wildcards only to open indices, * allows that no indices are resolved from wildcard expressions (not returning an error) and forbids the @@ -1362,22 +1375,13 @@ public record IndicesOptions( return STRICT_EXPAND_OPEN_CLOSED_HIDDEN_NO_SELECTORS; } - /** - * @return indices option that expands wildcards to both open and closed indices, includes failure store - * (with data stream) and allows that indices can be missing and no indices are resolved from wildcard expressions - * (not returning an error). - */ - public static IndicesOptions lenientExpandIncludeFailureStore() { - return LENIENT_EXPAND_OPEN_CLOSED_FAILURE_STORE; - } - /** * @return indices option that requires every specified index to exist, expands wildcards to both open and closed indices, includes - * hidden indices, includes failure store (with data stream) and allows that no indices are resolved from wildcard expressions - * (not returning an error). + * hidden indices, allows that no indices are resolved from wildcard expressions (not returning an error), and disallows selectors + * in the expression (no :: separators) but includes the failure indices. */ - public static IndicesOptions strictExpandHiddenIncludeFailureStore() { - return STRICT_EXPAND_OPEN_CLOSED_HIDDEN_FAILURE_STORE; + public static IndicesOptions strictExpandHiddenFailureNoSelectors() { + return STRICT_EXPAND_OPEN_CLOSED_HIDDEN_FAILURE_NO_SELECTORS; } /** @@ -1467,6 +1471,9 @@ public record IndicesOptions( + ignoreThrottled() // Until the feature flag is removed we access the field directly from the gatekeeper options. + (DataStream.isFailureStoreFeatureFlagEnabled() ? ", allow_selectors=" + gatekeeperOptions().allowSelectors() : "") + + (DataStream.isFailureStoreFeatureFlagEnabled() + ? ", include_failure_indices=" + gatekeeperOptions().includeFailureIndices() + : "") + ']'; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 9ad00b517d51..cb074b143704 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -93,7 +93,17 @@ public class IndexNameExpressionResolver { */ public record ResolvedExpression(String resource, @Nullable IndexComponentSelector selector) { public ResolvedExpression(String indexAbstraction) { - this(indexAbstraction, null); + this(indexAbstraction, (IndexComponentSelector) null); + } + + /** + * Constructs a ResolvedExpression with the DATA selector if the selectors are allowed + * or null otherwise. + * @param indexAbstraction + * @param options + */ + public ResolvedExpression(String indexAbstraction, IndicesOptions options) { + this(indexAbstraction, options.allowSelectors() ? 
IndexComponentSelector.DATA : null); } public String combined() { @@ -599,19 +609,19 @@ public class IndexNameExpressionResolver { } } - private static boolean shouldIncludeRegularIndices(IndicesOptions indicesOptions, IndexComponentSelector expressionSelector) { + public static boolean shouldIncludeRegularIndices(IndicesOptions indicesOptions, IndexComponentSelector expressionSelector) { if (indicesOptions.allowSelectors()) { return expressionSelector == null || expressionSelector.shouldIncludeData(); } return true; } - private static boolean shouldIncludeFailureIndices(IndicesOptions indicesOptions, IndexComponentSelector expressionSelector) { + public static boolean shouldIncludeFailureIndices(IndicesOptions indicesOptions, IndexComponentSelector expressionSelector) { // We return failure indices regardless of whether the data stream actually has the `failureStoreEnabled` flag set to true. if (indicesOptions.allowSelectors()) { return expressionSelector != null && expressionSelector.shouldIncludeFailures(); } - return false; + return indicesOptions.includeFailureIndices(); } private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstraction, Context context, ResolvedExpression expression) { @@ -1699,12 +1709,7 @@ public class IndexNameExpressionResolver { Index index = indexAbstraction.getIndices().get(i); IndexMetadata indexMetadata = context.state.metadata().index(index); if (indexMetadata.getState() != excludeState) { - resources.add( - new ResolvedExpression( - index.getName(), - context.options.allowSelectors() ? IndexComponentSelector.DATA : null - ) - ); + resources.add(new ResolvedExpression(index.getName(), context.getOptions())); } } } @@ -1715,7 +1720,7 @@ public class IndexNameExpressionResolver { Index index = failureIndices.get(i); IndexMetadata indexMetadata = context.state.metadata().index(index); if (indexMetadata.getState() != excludeState) { - resources.add(new ResolvedExpression(index.getName(), IndexComponentSelector.DATA)); + resources.add(new ResolvedExpression(index.getName(), context.getOptions())); } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index d8763c47ecc4..008c75ed1347 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -4136,9 +4136,6 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement request.partial(), indexIds, CollectionUtils.concatLists( - // It's ok to just get the data stream names here because we have already resolved every concrete index that will be - // in the snapshot, and thus already resolved any selectors that might be present. We now only care about which data - // streams we're packing up in the resulting snapshot, not what their contents are. 
indexNameExpressionResolver.dataStreamNames(currentState, request.indicesOptions(), request.indices()), systemDataStreamNames ), diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java index d0f734474f7c..6d620acca63b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java @@ -89,6 +89,7 @@ public class CreateSnapshotRequestTests extends ESTestCase { randomBoolean() ) ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).includeFailureIndices(true).build()) .build() ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java index a1c4466280a4..2be8989216d7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java @@ -89,6 +89,7 @@ public class RestoreSnapshotRequestTests extends AbstractWireSerializingTestCase randomBoolean() ) ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).includeFailureIndices(true).build()) .build() ); } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java index 445a5ad067b7..2f6d8ffc9e15 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java @@ -104,6 +104,19 @@ public class DataStreamsActionUtilTests extends ESTestCase { assertThat(resolved, containsInAnyOrder(".ds-foo1", ".ds-foo2", ".ds-baz1")); + // Including the failure indices + resolved = DataStreamsActionUtil.resolveConcreteIndexNames( + indexNameExpressionResolver, + clusterState, + query, + IndicesOptions.builder() + .wildcardOptions(IndicesOptions.WildcardOptions.builder().includeHidden(true)) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).includeFailureIndices(true)) + .build() + ); + + assertThat(resolved, containsInAnyOrder(".ds-foo1", ".ds-foo2", ".ds-baz1", ".fs-foo1")); + when(indexNameExpressionResolver.dataStreams(any(), any(), eq(query))).thenReturn( List.of( new ResolvedExpression("fooDs", IndexComponentSelector.DATA), diff --git a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index e33185d64e68..ebcd7698356e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -55,6 +55,7 @@ public class IndicesOptionsTests extends ESTestCase { .ignoreThrottled(randomBoolean()) .allowAliasToMultipleIndices(randomBoolean()) .allowClosedIndices(randomBoolean()) + .allowSelectors(randomBoolean()) ) .build(); @@ -340,7 +341,13 @@ public class IndicesOptionsTests extends 
ESTestCase { randomBoolean(), randomBoolean() ); - GatekeeperOptions gatekeeperOptions = new GatekeeperOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); + GatekeeperOptions gatekeeperOptions = new GatekeeperOptions( + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean() + ); IndicesOptions indicesOptions = new IndicesOptions(concreteTargetOptions, wildcardOptions, gatekeeperOptions); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index f6cc282dde4d..50c260eff3d8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -349,7 +349,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase { + "indices_options [IndicesOptions[ignore_unavailable=false, allow_no_indices=true, expand_wildcards_open=true, " + "expand_wildcards_closed=false, expand_wildcards_hidden=false, allow_aliases_to_multiple_indices=true, " + "forbid_closed_indices=true, ignore_aliases=false, ignore_throttled=true, " - + "allow_selectors=true]] with exception [no such index [not_foo]]" + + "allow_selectors=true, include_failure_indices=false]] with exception [no such index [not_foo]]" ) ) ); @@ -383,7 +383,8 @@ public class DatafeedNodeSelectorTests extends ESTestCase { + "indices given [not_foo] and indices_options [IndicesOptions[ignore_unavailable=false, allow_no_indices=true, " + "expand_wildcards_open=true, expand_wildcards_closed=false, expand_wildcards_hidden=false, " + "allow_aliases_to_multiple_indices=true, forbid_closed_indices=true, ignore_aliases=false, " - + "ignore_throttled=true, allow_selectors=true]] with exception [no such index [not_foo]]]" + + "ignore_throttled=true, allow_selectors=true, include_failure_indices=false]] with exception " + + "[no such index [not_foo]]]" ) ) ); @@ -560,7 +561,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase { + "indices_options [IndicesOptions[ignore_unavailable=false, allow_no_indices=true, expand_wildcards_open=true, " + "expand_wildcards_closed=false, expand_wildcards_hidden=false, allow_aliases_to_multiple_indices=true, " + "forbid_closed_indices=true, ignore_aliases=false, ignore_throttled=true, " - + "allow_selectors=true]] with exception [no such index [not_foo]]]" + + "allow_selectors=true, include_failure_indices=false]] with exception [no such index [not_foo]]]" ) ) ); From 0e5fe752508e95357400d4312691907ba76e8bde Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 23 Jan 2025 14:37:00 +0100 Subject: [PATCH 09/29] Rename test-plugin to entitlement-test-plugin (#120696) --- libs/entitlement/qa/build.gradle | 4 ++-- .../{test-plugin => entitlement-test-plugin}/build.gradle | 2 +- .../src/main/java/module-info.java | 0 .../entitlement/qa/test/DummyImplementations.java | 0 .../entitlement/qa/test/EntitlementTestPlugin.java | 0 .../qa/test/LoadNativeLibrariesCheckActions.java | 0 .../entitlement/qa/test/NetworkAccessCheckActions.java | 0 .../entitlement/qa/test/RestEntitlementsCheckAction.java | 0 .../entitlement/qa/test/VersionSpecificNetworkChecks.java | 0 .../entitlement/qa/test/WritePropertiesCheckActions.java | 0 .../entitlement/qa/test/VersionSpecificNetworkChecks.java | 0 .../entitlement/qa/test/VersionSpecificNetworkChecks.java | 0 
.../elasticsearch/entitlement/qa/EntitlementsAllowedIT.java | 2 +- .../entitlement/qa/EntitlementsAllowedNonModularIT.java | 2 +- .../elasticsearch/entitlement/qa/EntitlementsDeniedIT.java | 2 +- .../entitlement/qa/EntitlementsDeniedNonModularIT.java | 2 +- muted-tests.yml | 6 ------ 17 files changed, 7 insertions(+), 13 deletions(-) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/build.gradle (96%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/module-info.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTestPlugin.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java (100%) rename libs/entitlement/qa/{test-plugin => entitlement-test-plugin}/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java (100%) diff --git a/libs/entitlement/qa/build.gradle b/libs/entitlement/qa/build.gradle index fea746eb3bfa..b3b1c830a1b8 100644 --- a/libs/entitlement/qa/build.gradle +++ b/libs/entitlement/qa/build.gradle @@ -12,6 +12,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-test-artifact' dependencies { - javaRestTestImplementation project(':libs:entitlement:qa:test-plugin') - clusterModules project(':libs:entitlement:qa:test-plugin') + javaRestTestImplementation project(':libs:entitlement:qa:entitlement-test-plugin') + clusterModules project(':libs:entitlement:qa:entitlement-test-plugin') } diff --git a/libs/entitlement/qa/test-plugin/build.gradle b/libs/entitlement/qa/entitlement-test-plugin/build.gradle similarity index 96% rename from libs/entitlement/qa/test-plugin/build.gradle rename to libs/entitlement/qa/entitlement-test-plugin/build.gradle index 74409d1f4e07..f23a8e979e36 100644 --- a/libs/entitlement/qa/test-plugin/build.gradle +++ b/libs/entitlement/qa/entitlement-test-plugin/build.gradle @@ -14,7 +14,7 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.mrjar' esplugin { - name = 'test-plugin' + name = 'entitlement-test-plugin' description = 'A test plugin that invokes methods checked by entitlements' classname = 'org.elasticsearch.entitlement.qa.test.EntitlementTestPlugin' } diff --git a/libs/entitlement/qa/test-plugin/src/main/java/module-info.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java similarity index 100% rename from 
libs/entitlement/qa/test-plugin/src/main/java/module-info.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTestPlugin.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTestPlugin.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTestPlugin.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTestPlugin.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/LoadNativeLibrariesCheckActions.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java diff --git a/libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java 
b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java diff --git a/libs/entitlement/qa/test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java diff --git a/libs/entitlement/qa/test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java similarity index 100% rename from libs/entitlement/qa/test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java index c9eb4ea665aa..54628fc674d7 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java @@ -28,7 +28,7 @@ public class EntitlementsAllowedIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, true, ALLOWED_ENTITLEMENTS)) + .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, true, ALLOWED_ENTITLEMENTS)) .systemProperty("es.entitlements.enabled", "true") .setting("xpack.security.enabled", "false") .build(); diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java index 5c56774ca906..8390f0e5fd11 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java @@ -28,7 +28,7 @@ public class EntitlementsAllowedNonModularIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, false, ALLOWED_ENTITLEMENTS)) + .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, false, ALLOWED_ENTITLEMENTS)) .systemProperty("es.entitlements.enabled", "true") .setting("xpack.security.enabled", "false") .build(); diff --git 
a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java index fac70e3167f4..3405e41897cc 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java @@ -26,7 +26,7 @@ public class EntitlementsDeniedIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, true, null)) + .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, true, null)) .systemProperty("es.entitlements.enabled", "true") .setting("xpack.security.enabled", "false") // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsDeniedIT.xml diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java index d48e23e6332e..a2a4773bf752 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java @@ -26,7 +26,7 @@ public class EntitlementsDeniedNonModularIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, false, null)) + .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, false, null)) .systemProperty("es.entitlements.enabled", "true") .setting("xpack.security.enabled", "false") // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsDeniedIT.xml diff --git a/muted-tests.yml b/muted-tests.yml index a603e7fc2e39..c591e19c373d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -233,12 +233,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testMultipleInferencesTriggeringDownloadAndDeploy issue: https://github.com/elastic/elasticsearch/issues/120668 -- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedIT - issue: https://github.com/elastic/elasticsearch/issues/120674 -- class: org.elasticsearch.entitlement.qa.EntitlementsDeniedNonModularIT - issue: https://github.com/elastic/elasticsearch/issues/120675 -- class: org.elasticsearch.entitlement.qa.EntitlementsDeniedIT - issue: https://github.com/elastic/elasticsearch/issues/120676 # Examples: # From 443f0f3ded3ef76b85fc3f91f0c3f75357f47c63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 23 Jan 2025 14:41:12 +0100 Subject: [PATCH 10/29] [DOCS] Adds note about differences between chat completion and stream API (#120636) --- docs/reference/inference/chat-completion-inference.asciidoc | 6 +++++- docs/reference/inference/stream-inference.asciidoc | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/reference/inference/chat-completion-inference.asciidoc b/docs/reference/inference/chat-completion-inference.asciidoc index 83a8f94634f2..1d7d05b0f7d8 100644 --- 
a/docs/reference/inference/chat-completion-inference.asciidoc +++ b/docs/reference/inference/chat-completion-inference.asciidoc @@ -34,9 +34,13 @@ However, if you do not plan to use the {infer} APIs to use these models or if yo The chat completion {infer} API enables real-time responses for chat completion tasks by delivering answers incrementally, reducing response times during computation. It only works with the `chat_completion` task type for `openai` and `elastic` {infer} services. + [NOTE] ==== -The `chat_completion` task type is only available within the _unified API and only supports streaming. +* The `chat_completion` task type is only available within the _unified API and only supports streaming. +* The Chat completion {infer} API and the Stream {infer} API differ in their response structure and capabilities. +The Chat completion {infer} API provides more comprehensive customization options through more fields and function calling support. +If you use the `openai` service or the `elastic` service, use the Chat completion {infer} API. ==== [discrete] diff --git a/docs/reference/inference/stream-inference.asciidoc b/docs/reference/inference/stream-inference.asciidoc index 4a3ce3190971..bfcead654258 100644 --- a/docs/reference/inference/stream-inference.asciidoc +++ b/docs/reference/inference/stream-inference.asciidoc @@ -40,6 +40,10 @@ However, if you do not plan to use the {infer} APIs to use these models or if yo The stream {infer} API enables real-time responses for completion tasks by delivering answers incrementally, reducing response times during computation. It only works with the `completion` and `chat_completion` task types. +The Chat completion {infer} API and the Stream {infer} API differ in their response structure and capabilities. +The Chat completion {infer} API provides more comprehensive customization options through more fields and function calling support. +If you use the `openai` service or the `elastic` service, use the Chat completion {infer} API. + [NOTE] ==== include::inference-shared.asciidoc[tag=chat-completion-docs] From dd5e467f8558cc70c619f7eaf1501adba1d5528e Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 23 Jan 2025 08:41:45 -0500 Subject: [PATCH 11/29] ESQL: More tests for LOOKUP (#120656) Expand the integration tests for LOOKUP a little. 
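For context, the lookups these tests drive correspond roughly to this kind of query (a sketch only; the test wires up the lookup operator directly rather than going through the query layer, using a `source` index, a `lookup` index in lookup mode, a join key field `key`, and a loaded field `l`):

```
FROM source
| LOOKUP JOIN lookup ON key
| KEEP key, l
```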
--- .../xpack/esql/action/LookupFromIndexIT.java | 65 ++++++++++--------- 1 file changed, 36 insertions(+), 29 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 13abadd66769..15bbc06836de 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -20,10 +20,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.lucene.DataPartitioning; @@ -34,6 +32,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.DriverRunner; import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.test.BlockTestUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; @@ -53,6 +52,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.plugin.TransportEsqlQueryAction; @@ -68,21 +68,22 @@ import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { - // TODO should we remove this now that this is integrated into ESQL proper? - /** - * Quick and dirty test for looking up data from a lookup index. - */ - public void testLookupIndex() throws IOException { - runLookup(new UsingSingleLookupTable(new String[] { "aa", "bb", "cc", "dd" })); + public void testKeywordKey() throws IOException { + runLookup(DataType.KEYWORD, new UsingSingleLookupTable(new String[] { "aa", "bb", "cc", "dd" })); + } + + public void testLongKey() throws IOException { + runLookup(DataType.LONG, new UsingSingleLookupTable(new Long[] { 12L, 33L, 1L })); } /** - * Tests when multiple results match. + * LOOKUP multiple results match. 
*/ public void testLookupIndexMultiResults() throws IOException { - runLookup(new UsingSingleLookupTable(new String[] { "aa", "bb", "bb", "dd" })); + runLookup(DataType.KEYWORD, new UsingSingleLookupTable(new String[] { "aa", "bb", "bb", "dd" })); } interface PopulateIndices { @@ -90,10 +91,10 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { } class UsingSingleLookupTable implements PopulateIndices { - private final Map> matches = new HashMap<>(); - private final String[] lookupData; + private final Map> matches = new HashMap<>(); + private final Object[] lookupData; - UsingSingleLookupTable(String[] lookupData) { + UsingSingleLookupTable(Object[] lookupData) { this.lookupData = lookupData; for (int i = 0; i < lookupData.length; i++) { matches.computeIfAbsent(lookupData[i], k -> new ArrayList<>()).add(i); @@ -104,26 +105,26 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { public void populate(int docCount, List expected) { List docs = new ArrayList<>(); for (int i = 0; i < docCount; i++) { - String data = lookupData[i % lookupData.length]; - docs.add(client().prepareIndex("source").setSource(Map.of("data", data))); - for (Integer match : matches.get(data)) { - expected.add(data + ":" + match); + Object key = lookupData[i % lookupData.length]; + docs.add(client().prepareIndex("source").setSource(Map.of("key", key))); + for (Integer match : matches.get(key)) { + expected.add(key + ":" + match); } } for (int i = 0; i < lookupData.length; i++) { - docs.add(client().prepareIndex("lookup").setSource(Map.of("data", lookupData[i], "l", i))); + docs.add(client().prepareIndex("lookup").setSource(Map.of("key", lookupData[i], "l", i))); } Collections.sort(expected); indexRandom(true, true, docs); } } - private void runLookup(PopulateIndices populateIndices) throws IOException { + private void runLookup(DataType keyType, PopulateIndices populateIndices) throws IOException { client().admin() .indices() .prepareCreate("source") .setSettings(Settings.builder().put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)) - .setMapping("data", "type=keyword") + .setMapping("key", "type=" + keyType.esType()) .get(); client().admin() .indices() @@ -134,7 +135,7 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { // TODO lookup index mode doesn't seem to force a single shard. That'll break the lookup command. 
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) ) - .setMapping("data", "type=keyword", "l", "type=long") + .setMapping("key", "type=" + keyType.esType(), "l", "type=long") .get(); client().admin().cluster().prepareHealth(TEST_REQUEST_TIMEOUT).setWaitForGreenStatus().get(); @@ -189,9 +190,9 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { ValuesSourceReaderOperator.Factory reader = new ValuesSourceReaderOperator.Factory( List.of( new ValuesSourceReaderOperator.FieldInfo( - "data", - ElementType.BYTES_REF, - shard -> searchContext.getSearchExecutionContext().getFieldType("data").blockLoader(null) + "key", + PlannerUtils.toElementType(keyType), + shard -> searchContext.getSearchExecutionContext().getFieldType("key").blockLoader(null) ) ), List.of(new ValuesSourceReaderOperator.ShardContext(searchContext.getSearchExecutionContext().getIndexReader(), () -> { @@ -217,9 +218,9 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { QueryPragmas.ENRICH_MAX_WORKERS.get(Settings.EMPTY), 1, ctx -> internalCluster().getInstance(TransportEsqlQueryAction.class, finalNodeWithShard).getLookupFromIndexService(), - DataType.KEYWORD, + keyType, "lookup", - "data", + "key", List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))), Source.EMPTY ); @@ -231,10 +232,16 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase { List.of(reader.get(driverContext), lookup.get(driverContext)), new PageConsumerOperator(page -> { try { - BytesRefVector dataBlock = page.getBlock(1).asVector(); + Block keyBlock = page.getBlock(1); LongVector loadedBlock = page.getBlock(2).asVector(); for (int p = 0; p < page.getPositionCount(); p++) { - results.add(dataBlock.getBytesRef(p, new BytesRef()).utf8ToString() + ":" + loadedBlock.getLong(p)); + List key = BlockTestUtils.valuesAtPositions(keyBlock, p, p + 1).get(0); + assertThat(key, hasSize(1)); + Object keyValue = key.get(0); + if (keyValue instanceof BytesRef b) { + keyValue = b.utf8ToString(); + } + results.add(keyValue + ":" + loadedBlock.getLong(p)); } } finally { page.releaseBlocks(); From 1e069c369d508973e887cc812ff08e6b79199466 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 23 Jan 2025 07:46:11 -0600 Subject: [PATCH 12/29] Using OriginSettingClient for reindex data streams (#120661) --- .../xpack/core/ClientHelper.java | 1 + .../core/security/user/InternalUsers.java | 48 +++++++ .../core/security/user/UsernamesField.java | 2 + .../security/user/InternalUsersTests.java | 49 +++++++ .../xpack/migrate/MigratePlugin.java | 9 +- .../task/ExecuteWithHeadersClient.java | 40 ------ ...indexDataStreamPersistentTaskExecutor.java | 31 ++--- .../security/authz/AuthorizationUtils.java | 4 + .../upgrades/DataStreamsUpgradeIT.java | 123 +++++++++++------- 9 files changed, 200 insertions(+), 107 deletions(-) delete mode 100644 x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ExecuteWithHeadersClient.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 8c02462375e1..9a0d1a58a30a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -195,6 +195,7 @@ public final class ClientHelper { public static final String INFERENCE_ORIGIN = "inference"; public static final String APM_ORIGIN = "apm"; public static 
final String OTEL_ORIGIN = "otel"; + public static final String REINDEX_DATA_STREAM_ORIGIN = "reindex_data_stream"; private ClientHelper() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 23431e184422..52f077b658d0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -7,7 +7,10 @@ package org.elasticsearch.xpack.core.security.user; +import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; @@ -15,7 +18,14 @@ import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; +import org.elasticsearch.action.bulk.TransportBulkAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.downsample.DownsampleAction; +import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.support.MetadataUtils; @@ -180,6 +190,43 @@ public class InternalUsers { ) ); + public static final InternalUser REINDEX_DATA_STREAM_USER = new InternalUser( + UsernamesField.REINDEX_DATA_STREAM_NAME, + new RoleDescriptor( + UsernamesField.REINDEX_DATA_STREAM_ROLE, + new String[] {}, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices("*") + .privileges( + GetDataStreamAction.NAME, + RolloverAction.NAME, + IndicesStatsAction.NAME, + TransportDeleteIndexAction.TYPE.name(), + "indices:admin/data_stream/index/reindex", + "indices:admin/index/create_from_source", + TransportAddIndexBlockAction.TYPE.name(), + TransportCreateIndexAction.TYPE.name(), + TransportClusterSearchShardsAction.TYPE.name(), + TransportUpdateSettingsAction.TYPE.name(), + RefreshAction.NAME, + ReindexAction.NAME, + TransportSearchAction.NAME, + TransportBulkAction.NAME, + TransportIndexAction.NAME, + TransportSearchScrollAction.TYPE.name(), + ModifyDataStreamsAction.NAME + ) + .allowRestrictedIndices(false) + .build() }, + null, + null, + new String[] {}, + MetadataUtils.DEFAULT_RESERVED_METADATA, + Map.of() + ) + ); + /** * Internal user that can rollover an index/data stream. 
*/ @@ -234,6 +281,7 @@ public class InternalUsers { ASYNC_SEARCH_USER, STORAGE_USER, DATA_STREAM_LIFECYCLE_USER, + REINDEX_DATA_STREAM_USER, SYNONYMS_USER, LAZY_ROLLOVER_USER ).collect(Collectors.toUnmodifiableMap(InternalUser::principal, Function.identity())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java index 22e3c2df22ec..cff27e268ef6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java @@ -40,6 +40,8 @@ public final class UsernamesField { public static final String REMOTE_MONITORING_INDEXING_ROLE = "remote_monitoring_agent"; public static final String LAZY_ROLLOVER_NAME = "_lazy_rollover"; public static final String LAZY_ROLLOVER_ROLE = "_lazy_rollover"; + public static final String REINDEX_DATA_STREAM_NAME = "_reindex_data_stream"; + public static final String REINDEX_DATA_STREAM_ROLE = "_reindex_data_stream"; private UsernamesField() {} } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java index 3878977df935..62dae84f30fc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; +import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; @@ -26,13 +27,18 @@ import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettin import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.bulk.TransportBulkAction; +import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.get.TransportGetAction; +import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.XPackPlugin; @@ -56,6 +62,7 @@ import static 
org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.I import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_TOKENS_INDEX_7; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.SECURITY_MAIN_ALIAS; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.SECURITY_TOKENS_ALIAS; +import static org.elasticsearch.xpack.core.security.user.UsernamesField.REINDEX_DATA_STREAM_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -286,6 +293,48 @@ public class InternalUsersTests extends ESTestCase { checkIndexAccess(role, randomFrom(sampleSystemDataStreamActions), randomFrom(TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES), false); } + public void testReindexDataStreamUser() { + assertThat(InternalUsers.getUser(REINDEX_DATA_STREAM_NAME), is(InternalUsers.REINDEX_DATA_STREAM_USER)); + assertThat( + InternalUsers.REINDEX_DATA_STREAM_USER.getLocalClusterRoleDescriptor().get().getMetadata(), + equalTo(MetadataUtils.DEFAULT_RESERVED_METADATA) + ); + + final SimpleRole role = getLocalClusterRole(InternalUsers.REINDEX_DATA_STREAM_USER); + + assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); + assertThat(role.application(), is(ApplicationPermission.NONE)); + assertThat(role.remoteIndices(), is(RemoteIndicesPermission.NONE)); + + final List sampleIndexActions = List.of( + TransportDeleteIndexAction.TYPE.name(), + "indices:admin/data_stream/index/reindex", + "indices:admin/index/create_from_source", + TransportAddIndexBlockAction.TYPE.name(), + TransportCreateIndexAction.TYPE.name(), + TransportClusterSearchShardsAction.TYPE.name(), + TransportUpdateSettingsAction.TYPE.name(), + RefreshAction.NAME, + ReindexAction.NAME, + TransportSearchAction.NAME, + TransportBulkAction.NAME, + TransportIndexAction.NAME, + TransportSearchScrollAction.TYPE.name(), + ModifyDataStreamsAction.NAME + ); + + final String dataStream = randomAlphaOfLengthBetween(3, 12); + checkIndexAccess(role, randomFrom(sampleIndexActions), dataStream, true); + // Also check backing index access + checkIndexAccess( + role, + randomFrom(sampleIndexActions), + DataStream.BACKING_INDEX_PREFIX + dataStream + randomAlphaOfLengthBetween(4, 8), + true + ); + } + public void testRegularUser() { var username = randomAlphaOfLengthBetween(4, 12); expectThrows(IllegalStateException.class, () -> InternalUsers.getUser(username)); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index 93b90e551e72..10cf498c85bf 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.migrate; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -58,6 +59,7 @@ import java.util.List; import java.util.function.Predicate; import java.util.function.Supplier; +import static 
org.elasticsearch.xpack.core.ClientHelper.REINDEX_DATA_STREAM_ORIGIN; import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamIndexTransportAction.REINDEX_MAX_REQUESTS_PER_SECOND_SETTING; import static org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor.MAX_CONCURRENT_INDICES_REINDEXED_PER_DATA_STREAM_SETTING; @@ -150,7 +152,12 @@ public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTas ) { if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { return List.of( - new ReindexDataStreamPersistentTaskExecutor(client, clusterService, ReindexDataStreamTask.TASK_NAME, threadPool) + new ReindexDataStreamPersistentTaskExecutor( + new OriginSettingClient(client, REINDEX_DATA_STREAM_ORIGIN), + clusterService, + ReindexDataStreamTask.TASK_NAME, + threadPool + ) ); } else { return List.of(); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ExecuteWithHeadersClient.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ExecuteWithHeadersClient.java deleted file mode 100644 index a8962f56468b..000000000000 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ExecuteWithHeadersClient.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.migrate.task; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.support.AbstractClient; -import org.elasticsearch.xpack.core.ClientHelper; - -import java.util.Map; - -public class ExecuteWithHeadersClient extends AbstractClient { - - private final Client client; - private final Map headers; - - public ExecuteWithHeadersClient(Client client, Map headers) { - super(client.settings(), client.threadPool()); - this.client = client; - this.headers = headers; - } - - @Override - protected void doExecute( - ActionType action, - Request request, - ActionListener listener - ) { - ClientHelper.executeWithHeadersAsync(headers, null, client, action, request, listener); - } - -} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 8c490466f62c..1f6a87138835 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -107,15 +107,14 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec request.setParentTask(taskId); assert task instanceof ReindexDataStreamTask; final ReindexDataStreamTask reindexDataStreamTask = (ReindexDataStreamTask) task; - ExecuteWithHeadersClient reindexClient = new ExecuteWithHeadersClient(client, params.headers()); - reindexClient.execute(GetDataStreamAction.INSTANCE, request, 
ActionListener.wrap(response -> { + client.execute(GetDataStreamAction.INSTANCE, request, ActionListener.wrap(response -> { List dataStreamInfos = response.getDataStreams(); if (dataStreamInfos.size() == 1) { DataStream dataStream = dataStreamInfos.getFirst().getDataStream(); if (getReindexRequiredPredicate(clusterService.state().metadata(), false).test(dataStream.getWriteIndex())) { RolloverRequest rolloverRequest = new RolloverRequest(sourceDataStream, null); rolloverRequest.setParentTask(taskId); - reindexClient.execute( + client.execute( RolloverAction.INSTANCE, rolloverRequest, ActionListener.wrap( @@ -125,7 +124,6 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec reindexDataStreamTask, params, state, - reindexClient, sourceDataStream, taskId ), @@ -139,7 +137,6 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec reindexDataStreamTask, params, state, - reindexClient, sourceDataStream, taskId ); @@ -156,7 +153,6 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec ReindexDataStreamTask reindexDataStreamTask, ReindexDataStreamTaskParams params, ReindexDataStreamPersistentTaskState state, - ExecuteWithHeadersClient reindexClient, String sourceDataStream, TaskId parentTaskId ) { @@ -190,7 +186,7 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec List indicesRemaining = Collections.synchronizedList(new ArrayList<>(indicesToBeReindexed)); logger.debug("Reindexing {} indices, with up to {} handled concurrently", indicesRemaining.size(), maxConcurrentIndices); for (int i = 0; i < maxConcurrentIndices; i++) { - maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, reindexClient, sourceDataStream, listener, parentTaskId); + maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, sourceDataStream, listener, parentTaskId); } // This takes care of the additional latch count referenced above: listener.onResponse(null); @@ -199,7 +195,6 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec private void maybeProcessNextIndex( List indicesRemaining, ReindexDataStreamTask reindexDataStreamTask, - ExecuteWithHeadersClient reindexClient, String sourceDataStream, CountDownActionListener listener, TaskId parentTaskId @@ -218,16 +213,16 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec reindexDataStreamIndexRequest.setParentTask(parentTaskId); SubscribableListener.newForked( - l -> reindexClient.execute(ReindexDataStreamIndexAction.INSTANCE, reindexDataStreamIndexRequest, l) + l -> client.execute(ReindexDataStreamIndexAction.INSTANCE, reindexDataStreamIndexRequest, l) ) .andThen( - (l, result) -> updateDataStream(sourceDataStream, index.getName(), result.getDestIndex(), l, reindexClient, parentTaskId) + (l, result) -> updateDataStream(sourceDataStream, index.getName(), result.getDestIndex(), l, parentTaskId) ) - .andThen(l -> deleteIndex(index.getName(), reindexClient, parentTaskId, l)) + .andThen(l -> deleteIndex(index.getName(), parentTaskId, l)) .addListener(ActionListener.wrap(unused -> { reindexDataStreamTask.reindexSucceeded(index.getName()); listener.onResponse(null); - maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, reindexClient, sourceDataStream, listener, parentTaskId); + maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, sourceDataStream, listener, parentTaskId); }, e -> { reindexDataStreamTask.reindexFailed(index.getName(), e); listener.onResponse(null); @@ 
-239,7 +234,6 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec String oldIndex, String newIndex, ActionListener listener, - ExecuteWithHeadersClient reindexClient, TaskId parentTaskId ) { ModifyDataStreamsAction.Request modifyDataStreamRequest = new ModifyDataStreamsAction.Request( @@ -248,18 +242,13 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec List.of(DataStreamAction.removeBackingIndex(dataStream, oldIndex), DataStreamAction.addBackingIndex(dataStream, newIndex)) ); modifyDataStreamRequest.setParentTask(parentTaskId); - reindexClient.execute(ModifyDataStreamsAction.INSTANCE, modifyDataStreamRequest, listener); + client.execute(ModifyDataStreamsAction.INSTANCE, modifyDataStreamRequest, listener); } - private void deleteIndex( - String indexName, - ExecuteWithHeadersClient reindexClient, - TaskId parentTaskId, - ActionListener listener - ) { + private void deleteIndex(String indexName, TaskId parentTaskId, ActionListener listener) { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName); deleteIndexRequest.setParentTask(parentTaskId); - reindexClient.execute(TransportDeleteIndexAction.TYPE, deleteIndexRequest, listener); + client.execute(TransportDeleteIndexAction.TYPE, deleteIndexRequest, listener); } private void completeSuccessfulPersistentTask( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 4173f3db4540..e7457d144fa9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -41,6 +41,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.OTEL_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.PROFILING_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.REINDEX_DATA_STREAM_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ROLLUP_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.SEARCHABLE_SNAPSHOTS_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; @@ -136,6 +137,9 @@ public final class AuthorizationUtils { case DATA_STREAM_LIFECYCLE_ORIGIN: securityContext.executeAsInternalUser(InternalUsers.DATA_STREAM_LIFECYCLE_USER, version, consumer); break; + case REINDEX_DATA_STREAM_ORIGIN: + securityContext.executeAsInternalUser(InternalUsers.REINDEX_DATA_STREAM_USER, version, consumer); + break; case LAZY_ROLLOVER_ORIGIN: securityContext.executeAsInternalUser(InternalUsers.LAZY_ROLLOVER_USER, version, consumer); break; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 83fd0c8d3ead..746c8c926086 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -9,12 +9,18 @@ package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; import org.elasticsearch.Version; +import 
org.elasticsearch.client.Node; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Strings; @@ -283,54 +289,61 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { "index": "%s" } }""", dataStreamName)); - Response reindexResponse = client().performRequest(reindexRequest); - assertOK(reindexResponse); - assertBusy(() -> { - Request statusRequest = new Request("GET", "_migration/reindex/" + dataStreamName + "/_status"); - Response statusResponse = client().performRequest(statusRequest); - Map statusResponseMap = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - statusResponse.getEntity().getContent(), - false - ); - assertOK(statusResponse); - assertThat(statusResponseMap.get("complete"), equalTo(true)); - final int originalWriteIndex = 1; - if (isOriginalClusterSameMajorVersionAsCurrent()) { - assertThat( - statusResponseMap.get("total_indices_in_data_stream"), - equalTo(originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount) + + String upgradeUser = "upgrade_user"; + String upgradeUserPassword = "x-pack-test-password"; + createRole("upgrade_role", dataStreamName); + createUser(upgradeUser, upgradeUserPassword, "upgrade_role"); + try (RestClient upgradeUserClient = getClient(upgradeUser, upgradeUserPassword)) { + Response reindexResponse = upgradeUserClient.performRequest(reindexRequest); + assertOK(reindexResponse); + assertBusy(() -> { + Request statusRequest = new Request("GET", "_migration/reindex/" + dataStreamName + "/_status"); + Response statusResponse = upgradeUserClient.performRequest(statusRequest); + Map statusResponseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + statusResponse.getEntity().getContent(), + false ); - // If the original cluster was the same as this one, we don't want any indices reindexed: - assertThat(statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0)); - assertThat(statusResponseMap.get("successes"), equalTo(0)); - } else { - // The number of rollovers that will have happened when we call reindex: - final int rolloversPerformedByReindex = explicitRolloverOnNewClusterCount == 0 ? 1 : 0; - final int expectedTotalIndicesInDataStream = originalWriteIndex + numRolloversOnOldCluster - + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex; - assertThat(statusResponseMap.get("total_indices_in_data_stream"), equalTo(expectedTotalIndicesInDataStream)); - /* - * total_indices_requiring_upgrade is made up of: (the original write index) + numRolloversOnOldCluster. The number of - * rollovers on the upgraded cluster is irrelevant since those will not be reindexed. 
- */ - assertThat( - statusResponseMap.get("total_indices_requiring_upgrade"), - equalTo(originalWriteIndex + numRolloversOnOldCluster - closedOldIndices.size()) - ); - assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1 - closedOldIndices.size())); - // We expect all the original indices to have been deleted - for (String oldIndex : indicesNeedingUpgrade) { - if (closedOldIndices.contains(oldIndex) == false) { - assertThat(indexExists(oldIndex), equalTo(false)); + assertOK(statusResponse); + assertThat(statusResponseMap.get("complete"), equalTo(true)); + final int originalWriteIndex = 1; + if (isOriginalClusterSameMajorVersionAsCurrent()) { + assertThat( + statusResponseMap.get("total_indices_in_data_stream"), + equalTo(originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount) + ); + // If the original cluster was the same as this one, we don't want any indices reindexed: + assertThat(statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0)); + assertThat(statusResponseMap.get("successes"), equalTo(0)); + } else { + // The number of rollovers that will have happened when we call reindex: + final int rolloversPerformedByReindex = explicitRolloverOnNewClusterCount == 0 ? 1 : 0; + final int expectedTotalIndicesInDataStream = originalWriteIndex + numRolloversOnOldCluster + + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex; + assertThat(statusResponseMap.get("total_indices_in_data_stream"), equalTo(expectedTotalIndicesInDataStream)); + /* + * total_indices_requiring_upgrade is made up of: (the original write index) + numRolloversOnOldCluster. The number of + * rollovers on the upgraded cluster is irrelevant since those will not be reindexed. + */ + assertThat( + statusResponseMap.get("total_indices_requiring_upgrade"), + equalTo(originalWriteIndex + numRolloversOnOldCluster - closedOldIndices.size()) + ); + assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1 - closedOldIndices.size())); + // We expect all the original indices to have been deleted + for (String oldIndex : indicesNeedingUpgrade) { + if (closedOldIndices.contains(oldIndex) == false) { + assertThat(indexExists(oldIndex), equalTo(false)); + } } + assertThat(getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream)); } - assertThat(getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream)); - } - }, 60, TimeUnit.SECONDS); - Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel"); - Response cancelResponse = client().performRequest(cancelRequest); - assertOK(cancelResponse); + }, 60, TimeUnit.SECONDS); + Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel"); + Response cancelResponse = upgradeUserClient.performRequest(cancelRequest); + assertOK(cancelResponse); + } } @SuppressWarnings("unchecked") @@ -414,4 +427,24 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { String oldIndexName = (String) entityAsMap(rolloverResponse).get("old_index"); return oldIndexName; } + + private void createUser(String name, String password, String role) throws IOException { + Request request = new Request("PUT", "/_security/user/" + name); + request.setJsonEntity("{ \"password\": \"" + password + "\", \"roles\": [ \"" + role + "\"] }"); + assertOK(adminClient().performRequest(request)); + } + + private void createRole(String name, String dataStream) throws IOException { + 
Request request = new Request("PUT", "/_security/role/" + name); + request.setJsonEntity("{ \"indices\": [ { \"names\" : [ \"" + dataStream + "\"], \"privileges\": [ \"manage\" ] } ] }"); + assertOK(adminClient().performRequest(request)); + } + + private RestClient getClient(String user, String passwd) throws IOException { + RestClientBuilder builder = RestClient.builder(adminClient().getNodes().toArray(new Node[0])); + String token = basicAuthHeaderValue(user, new SecureString(passwd.toCharArray())); + configureClient(builder, Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build()); + builder.setStrictDeprecationMode(true); + return builder.build(); + } } From 3d43797688da58df98c36c46085acf61dcdb30f7 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 23 Jan 2025 15:13:24 +0100 Subject: [PATCH 13/29] [Inference API] Export inference.telemetry to mitigate warning in BaseTransportInferenceAction (#119847) --- x-pack/plugin/inference/src/main/java/module-info.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 1c2240e8c521..78f30e7da067 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -42,6 +42,7 @@ module org.elasticsearch.inference { exports org.elasticsearch.xpack.inference.services; exports org.elasticsearch.xpack.inference; exports org.elasticsearch.xpack.inference.action.task; + exports org.elasticsearch.xpack.inference.telemetry; provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.inference.InferenceFeatures; } From 74bc4424866eac58a218d4b689bdc4831b7d373f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 24 Jan 2025 01:21:18 +1100 Subject: [PATCH 14/29] Mute org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests #119882 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c591e19c373d..ae61311fdb4d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -233,6 +233,8 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testMultipleInferencesTriggeringDownloadAndDeploy issue: https://github.com/elastic/elasticsearch/issues/120668 +- class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests + issue: https://github.com/elastic/elasticsearch/issues/119882 # Examples: # From 609705a744583eac828930c57b4034dce96a3e80 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 24 Jan 2025 01:23:18 +1100 Subject: [PATCH 15/29] Mute org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT #120691 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ae61311fdb4d..c0f4a4430a9b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -235,6 +235,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120668 - class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/119882 +- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT + issue: 
https://github.com/elastic/elasticsearch/issues/120691 # Examples: # From 1484f783d8d7131c82a2377647c2421f57f28e39 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 23 Jan 2025 15:38:56 +0100 Subject: [PATCH 16/29] Breakup release tests in PR (#120692) * Breakup release tests in PR * fix test release task dependencies --- .../pipelines/pull-request/release-tests.yml | 27 +++++++--- .buildkite/scripts/release-tests.sh | 2 +- build.gradle | 49 ++++++++++++++++--- 3 files changed, 62 insertions(+), 16 deletions(-) diff --git a/.buildkite/pipelines/pull-request/release-tests.yml b/.buildkite/pipelines/pull-request/release-tests.yml index 7d7a5c77d332..101f13e56951 100644 --- a/.buildkite/pipelines/pull-request/release-tests.yml +++ b/.buildkite/pipelines/pull-request/release-tests.yml @@ -1,11 +1,22 @@ config: allow-labels: test-release steps: - - label: release-tests - command: .buildkite/scripts/release-tests.sh - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - diskSizeGb: 350 - machineType: custom-32-98304 + - group: release-tests + steps: + - label: "{{matrix.CHECK_TASK}} / release-tests" + key: "packaging-tests-unix" + command: .buildkite/scripts/release-tests.sh {{matrix.CHECK_TASK}} + timeout_in_minutes: 120 + matrix: + setup: + CHECK_TASK: + - checkPart1 + - checkPart2 + - checkPart3 + - checkPart4 + - checkPart5 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + diskSizeGb: 350 + machineType: custom-32-98304 diff --git a/.buildkite/scripts/release-tests.sh b/.buildkite/scripts/release-tests.sh index e4185c642f24..700704bca012 100755 --- a/.buildkite/scripts/release-tests.sh +++ b/.buildkite/scripts/release-tests.sh @@ -20,4 +20,4 @@ curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/m curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT.zip .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false -Dbuild.ml_cpp.repo=file://${ML_IVY_REPO} \ - -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef assemble functionalTests + -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef ${@:-functionalTests} diff --git a/build.gradle b/build.gradle index e6fc1f4eba28..440032675213 100644 --- a/build.gradle +++ b/build.gradle @@ -293,22 +293,57 @@ allprojects { } } + ext.withReleaseBuild = { Closure config -> + if(buildParams.snapshotBuild == false) { + config.call() + } + } + plugins.withId('lifecycle-base') { if (project.path.startsWith(":x-pack:")) { if (project.path.contains("security") || project.path.contains(":ml")) { - tasks.register('checkPart4') { dependsOn 'check' } + tasks.register('checkPart4') { + dependsOn 'check' + withReleaseBuild { + dependsOn 'assemble' + } + } } else if (project.path == ":x-pack:plugin" || project.path.contains("ql") || project.path.contains("smoke-test")) { - tasks.register('checkPart3') { dependsOn 'check' } + tasks.register('checkPart3') { + dependsOn 'check' + withReleaseBuild { + dependsOn 'assemble' + } + } } else if (project.path.contains("multi-node")) { - tasks.register('checkPart5') { dependsOn 'check' } + tasks.register('checkPart5') { + dependsOn 'check' + withReleaseBuild { + dependsOn 'assemble' + 
} + } } else { - tasks.register('checkPart2') { dependsOn 'check' } + tasks.register('checkPart2') { + dependsOn 'check' + withReleaseBuild { + dependsOn 'assemble' + } + } } } else { - tasks.register('checkPart1') { dependsOn 'check' } + tasks.register('checkPart1') { + dependsOn 'check' + withReleaseBuild { + dependsOn 'assemble' + } + } + } + tasks.register('functionalTests') { + dependsOn 'check' + withReleaseBuild { + dependsOn 'assemble' + } } - - tasks.register('functionalTests') { dependsOn 'check' } } /* From aa70dc86fee12fd1349c34a3978822689ae8770d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 23 Jan 2025 16:43:02 +0100 Subject: [PATCH 17/29] Revert "Mute org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT #120691" (#120724) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …odularIT org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT #120691" This reverts commit 609705a744583eac828930c57b4034dce96a3e80. --- muted-tests.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index c0f4a4430a9b..ae61311fdb4d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -235,8 +235,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120668 - class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/119882 -- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT - issue: https://github.com/elastic/elasticsearch/issues/120691 # Examples: # From cdff3defdedc1625cf6b7f01af3e51694862ae5b Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 23 Jan 2025 07:51:58 -0800 Subject: [PATCH 18/29] Fix typo in synthetic source docs (#120685) --- docs/reference/mapping/fields/synthetic-source.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/mapping/fields/synthetic-source.asciidoc b/docs/reference/mapping/fields/synthetic-source.asciidoc index ddbefb73f452..1678441b13bf 100644 --- a/docs/reference/mapping/fields/synthetic-source.asciidoc +++ b/docs/reference/mapping/fields/synthetic-source.asciidoc @@ -290,7 +290,7 @@ with the number and sizes of arrays present in source of each document, naturall [[synthetic-source-fields-native-list]] ===== Field types that support synthetic source with no storage overhead The following field types support synthetic source using data from <> or ->, and require no additional storage space to construct the `_source` field. +<>, and require no additional storage space to construct the `_source` field. NOTE: If you enable the <> or <> settings, then additional storage is required to store ignored field values for these types. From eae93a2097b3ff85803056be209a3f4ca1c5ec38 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 23 Jan 2025 10:57:53 -0500 Subject: [PATCH 19/29] ESQL: Signatures for `NOT IN` et al (#120673) * ESQL: Signatures for `NOT IN` et al This generates signatures for `NOT IN`, `NOT LIKE`, and `NOT RLIKE` using a small hack on top of the process used to generate the signatures for `IN`, `LIKE`, and `RLIKE`. This is a very perl-worth hack, replacing `LIKE` with `NOT LIKE` in the description. But it's useful for our kibana friends and if we need to make it nicer we can do so later. 
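
Concretely, the hack is just string surgery on the docs we already generate
for the positive operator, along these lines (a hypothetical sketch with
made-up names, not the actual code in AbstractFunctionTestCase):

    import java.util.Locale;

    // Sketch: derive the NOT-variant docs from the positive operator's docs.
    final class NegatedOperatorDocs {
        // e.g. "like" -> "not_like", used for the generated file names.
        static String negatedName(String name) {
            return "not_" + name.toLowerCase(Locale.ROOT);
        }

        // e.g. "Use `LIKE` to filter..." -> "Use `NOT LIKE` to filter...".
        static String negatedDescription(String operator, String description) {
            return description.replace(operator, "NOT " + operator);
        }
    }

Crude, but it keeps the NOT docs in lockstep with the positive ones.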
* Zap --- .../functions/kibana/definition/not_in.json | 263 ++++++++++++++++++ .../functions/kibana/definition/not_like.json | 47 ++++ .../kibana/definition/not_rlike.json | 47 ++++ .../esql/functions/kibana/docs/not_in.md | 7 + .../esql/functions/kibana/docs/not_like.md | 15 + .../esql/functions/kibana/docs/not_rlike.md | 10 + .../esql/functions/types/not_in.asciidoc | 22 ++ .../esql/functions/types/not_like.asciidoc | 10 + .../esql/functions/types/not_rlike.asciidoc | 10 + .../function/AbstractFunctionTestCase.java | 79 +++--- .../function/scalar/string/RLikeTests.java | 7 + .../scalar/string/WildcardLikeTests.java | 72 +++++ .../operator/comparison/InTests.java | 13 + 13 files changed, 568 insertions(+), 34 deletions(-) create mode 100644 docs/reference/esql/functions/kibana/definition/not_in.json create mode 100644 docs/reference/esql/functions/kibana/definition/not_like.json create mode 100644 docs/reference/esql/functions/kibana/definition/not_rlike.json create mode 100644 docs/reference/esql/functions/kibana/docs/not_in.md create mode 100644 docs/reference/esql/functions/kibana/docs/not_like.md create mode 100644 docs/reference/esql/functions/kibana/docs/not_rlike.md create mode 100644 docs/reference/esql/functions/types/not_in.asciidoc create mode 100644 docs/reference/esql/functions/types/not_like.asciidoc create mode 100644 docs/reference/esql/functions/types/not_rlike.asciidoc diff --git a/docs/reference/esql/functions/kibana/definition/not_in.json b/docs/reference/esql/functions/kibana/definition/not_in.json new file mode 100644 index 000000000000..3fa25d793b50 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/not_in.json @@ -0,0 +1,263 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "operator" : "NOT IN", + "name" : "not_in", + "description" : "The `NOT IN` operator allows testing whether a field or expression does *not* equal any element in a list of literals, fields or expressions.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "boolean", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "cartesian_point", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "cartesian_shape", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "double", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "geo_point", + "optional" : false, + "description" : "A list of items." 
+ } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "geo_shape", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "integer", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "ip", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "keyword", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "text", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "long", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "keyword", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "text", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "version", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/not_like.json b/docs/reference/esql/functions/kibana/definition/not_like.json new file mode 100644 index 000000000000..bba70d14d7cb --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/not_like.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "operator" : "NOT LIKE", + "name" : "not_like", + "description" : "Use `NOT LIKE` to filter data based on string patterns using wildcards. `NOT LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. 
The right-hand side of the operator\nrepresents the pattern.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "A literal expression." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "Pattern." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "A literal expression." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "Pattern." + } + ], + "variadic" : true, + "returnType" : "boolean" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/not_rlike.json b/docs/reference/esql/functions/kibana/definition/not_rlike.json new file mode 100644 index 000000000000..09abd5ab567e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/not_rlike.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "operator" : "NOT RLIKE", + "name" : "not_rlike", + "description" : "Use `NOT RLIKE` to filter data based on string patterns using using\n<>. `NOT RLIKE` usually acts on a field placed on\nthe left-hand side of the operator, but it can also act on a constant (literal)\nexpression. The right-hand side of the operator represents the pattern.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "A literal value." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "A regular expression." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "A literal value." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "A regular expression." + } + ], + "variadic" : true, + "returnType" : "boolean" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/docs/not_in.md b/docs/reference/esql/functions/kibana/docs/not_in.md new file mode 100644 index 000000000000..e9e5a7b384d1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/not_in.md @@ -0,0 +1,7 @@ + + +### NOT_IN +The `NOT IN` operator allows testing whether a field or expression does *not* equal any element in a list of literals, fields or expressions. + diff --git a/docs/reference/esql/functions/kibana/docs/not_like.md b/docs/reference/esql/functions/kibana/docs/not_like.md new file mode 100644 index 000000000000..fd1cf7a68630 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/not_like.md @@ -0,0 +1,15 @@ + + +### NOT_LIKE +Use `NOT LIKE` to filter data based on string patterns using wildcards. `NOT LIKE` +usually acts on a field placed on the left-hand side of the operator, but it can +also act on a constant (literal) expression. The right-hand side of the operator +represents the pattern. + +The following wildcard characters are supported: + +* `*` matches zero or more characters. +* `?` matches one character. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/not_rlike.md b/docs/reference/esql/functions/kibana/docs/not_rlike.md new file mode 100644 index 000000000000..dac23438c161 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/not_rlike.md @@ -0,0 +1,10 @@ + + +### NOT_RLIKE +Use `NOT RLIKE` to filter data based on string patterns using using +<>. `NOT RLIKE` usually acts on a field placed on +the left-hand side of the operator, but it can also act on a constant (literal) +expression. The right-hand side of the operator represents the pattern. + diff --git a/docs/reference/esql/functions/types/not_in.asciidoc b/docs/reference/esql/functions/types/not_in.asciidoc new file mode 100644 index 000000000000..6ed2c250ef0a --- /dev/null +++ b/docs/reference/esql/functions/types/not_in.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | inlist | result +boolean | boolean | boolean +cartesian_point | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +double | double | boolean +geo_point | geo_point | boolean +geo_shape | geo_shape | boolean +integer | integer | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +version | version | boolean +|=== diff --git a/docs/reference/esql/functions/types/not_like.asciidoc b/docs/reference/esql/functions/types/not_like.asciidoc new file mode 100644 index 000000000000..fffa6dc0b837 --- /dev/null +++ b/docs/reference/esql/functions/types/not_like.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | pattern | result +keyword | keyword | boolean +text | keyword | boolean +|=== diff --git a/docs/reference/esql/functions/types/not_rlike.asciidoc b/docs/reference/esql/functions/types/not_rlike.asciidoc new file mode 100644 index 000000000000..fffa6dc0b837 --- /dev/null +++ b/docs/reference/esql/functions/types/not_rlike.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | pattern | result +keyword | keyword | boolean +text | keyword | boolean +|=== diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 67dec69b5139..30ac9fc69ed9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -823,12 +823,13 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { if (System.getProperty("generateDocs") == null) { return; } - String rendered = buildSignatureSvg(functionName()); + String name = functionName(); + String rendered = buildSignatureSvg(name); if (rendered == null) { LogManager.getLogger(getTestClass()).info("Skipping rendering signature because the function isn't registered"); } else { LogManager.getLogger(getTestClass()).info("Writing function signature"); - writeToTempDir("signature", rendered, "svg"); + writeToTempDir("signature", name, "svg", rendered); } } @@ -890,10 +891,13 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { @AfterClass public static void renderDocs() throws IOException { + renderDocs(functionName()); + } + + protected static void renderDocs(String name) throws IOException { if (System.getProperty("generateDocs") == null) { return; } - String name = functionName(); if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) { renderDocsForOperators(name); return; @@ -922,12 +926,12 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { description.isAggregation() ); } - renderTypes(description.args()); - renderParametersList(description.argNames(), description.argDescriptions()); + renderTypes(name, description.args()); + renderParametersList(name, description.argNames(), description.argDescriptions()); FunctionInfo info = EsqlFunctionRegistry.functionInfo(definition); - renderDescription(description.description(), info.detailedDescription(), info.note()); - boolean hasExamples = renderExamples(info); - boolean hasAppendix = renderAppendix(info.appendix()); + renderDescription(name, description.description(), info.detailedDescription(), info.note()); + boolean hasExamples = renderExamples(name, info); + boolean hasAppendix = renderAppendix(name, info.appendix()); renderFullLayout(name, info.preview(), hasExamples, hasAppendix); renderKibanaInlineDocs(name, info); renderKibanaFunctionDefinition(name, info, description.args(), description.variadic()); @@ -944,7 +948,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { + "may be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview " + "are not subject to the support SLA of official GA features.\"]\n"; - private static void renderTypes(List args) throws IOException { + private static void renderTypes(String name, List args) throws IOException { StringBuilder header = new StringBuilder(); List argNames = args.stream().map(EsqlFunctionRegistry.ArgSignature::name).toList(); for (String arg : argNames) { @@ -984,11 +988,11 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { [%header.monospaced.styled,format=dsv,separator=|] |=== """ + header + "\n" + table.stream().collect(Collectors.joining("\n")) + "\n|===\n"; - LogManager.getLogger(getTestClass()).info("Writing function types for [{}]:\n{}", functionName(), rendered); - writeToTempDir("types", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing function types for [{}]:\n{}", name, rendered); + writeToTempDir("types", name, "asciidoc", rendered); } - private static void renderParametersList(List argNames, List argDescriptions) throws IOException { + private static void renderParametersList(String name, List argNames, List argDescriptions) throws IOException { StringBuilder builder = new StringBuilder(); builder.append(DOCS_WARNING); builder.append("*Parameters*\n"); @@ -996,11 +1000,11 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { builder.append("\n`").append(argNames.get(a)).append("`::\n").append(argDescriptions.get(a)).append('\n'); } String rendered = builder.toString(); - LogManager.getLogger(getTestClass()).info("Writing parameters for [{}]:\n{}", functionName(), rendered); - writeToTempDir("parameters", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing parameters for [{}]:\n{}", name, rendered); + writeToTempDir("parameters", name, "asciidoc", rendered); } - private static void renderDescription(String description, String detailedDescription, String note) throws IOException { + private static void renderDescription(String name, String description, String detailedDescription, String note) throws IOException { String rendered = DOCS_WARNING + """ *Description* @@ -1013,11 +1017,11 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { if (Strings.isNullOrEmpty(note) == false) { rendered += "\nNOTE: " + note + "\n"; } - LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", functionName(), rendered); - writeToTempDir("description", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", name, rendered); + writeToTempDir("description", name, "asciidoc", rendered); } - private static boolean renderExamples(FunctionInfo info) throws IOException { + private static boolean renderExamples(String name, FunctionInfo info) throws IOException { if (info == null || info.examples().length == 0) { return false; } @@ -1051,20 +1055,20 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { } builder.append('\n'); String rendered = builder.toString(); - LogManager.getLogger(getTestClass()).info("Writing examples for [{}]:\n{}", functionName(), rendered); - writeToTempDir("examples", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing examples for [{}]:\n{}", name, rendered); + writeToTempDir("examples", name, "asciidoc", rendered); return true; } - private static boolean renderAppendix(String appendix) throws IOException { + private static boolean renderAppendix(String name, String appendix) 
throws IOException { if (appendix.isEmpty()) { return false; } String rendered = DOCS_WARNING + appendix + "\n"; - LogManager.getLogger(getTestClass()).info("Writing appendix for [{}]:\n{}", functionName(), rendered); - writeToTempDir("appendix", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing appendix for [{}]:\n{}", name, rendered); + writeToTempDir("appendix", name, "asciidoc", rendered); return true; } @@ -1091,11 +1095,11 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { if (hasAppendix) { rendered += "include::../appendix/" + name + ".asciidoc[]\n"; } - LogManager.getLogger(getTestClass()).info("Writing layout for [{}]:\n{}", functionName(), rendered); - writeToTempDir("layout", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing layout for [{}]:\n{}", name, rendered); + writeToTempDir("layout", name, "asciidoc", rendered); } - private static Constructor constructorWithFunctionInfo(Class clazz) { + protected static Constructor constructorWithFunctionInfo(Class clazz) { for (Constructor ctor : clazz.getConstructors()) { FunctionInfo functionInfo = ctor.getAnnotation(FunctionInfo.class); if (functionInfo != null) { @@ -1110,6 +1114,10 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { assert ctor != null; FunctionInfo functionInfo = ctor.getAnnotation(FunctionInfo.class); assert functionInfo != null; + renderDocsForOperators(name, ctor, functionInfo); + } + + protected static void renderDocsForOperators(String name, Constructor ctor, FunctionInfo functionInfo) throws IOException { renderKibanaInlineDocs(name, functionInfo); var params = ctor.getParameters(); @@ -1127,7 +1135,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { } } renderKibanaFunctionDefinition(name, functionInfo, args, likeOrInOperator(name)); - renderTypes(args); + renderTypes(name, args); } private static void renderKibanaInlineDocs(String name, FunctionInfo info) throws IOException { @@ -1151,8 +1159,8 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { builder.append("Note: ").append(removeAsciidocLinks(info.note())).append("\n"); } String rendered = builder.toString(); - LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", functionName(), rendered); - writeToTempDir("kibana/docs", rendered, "md"); + LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", name, rendered); + writeToTempDir("kibana/docs", name, "md", rendered); } private static void renderKibanaFunctionDefinition( @@ -1244,8 +1252,8 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { builder.field("snapshot_only", EsqlFunctionRegistry.isSnapshotOnly(name)); String rendered = Strings.toString(builder.endObject()); - LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", functionName(), rendered); - writeToTempDir("kibana/definition", rendered, "json"); + LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", name, rendered); + writeToTempDir("kibana/definition", name, "json", rendered); } private static String removeAsciidocLinks(String asciidoc) { @@ -1340,7 +1348,10 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { * If this tests is for a like or rlike operator return true, otherwise return {@code null}. 
*/ private static boolean likeOrInOperator(String name) { - return name.equalsIgnoreCase("rlike") || name.equalsIgnoreCase("like") || name.equalsIgnoreCase("in"); + return switch (name.toLowerCase(Locale.ENGLISH)) { + case "rlike", "like", "in", "not_rlike", "not_like", "not_in" -> true; + default -> false; + }; } /** @@ -1350,11 +1361,11 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { * don't have write permission to the docs. *

*/ - private static void writeToTempDir(String subdir, String str, String extension) throws IOException { + private static void writeToTempDir(String subdir, String name, String extension, String str) throws IOException { // We have to write to a tempdir because it's all test are allowed to write to. Gradle can move them. Path dir = PathUtils.get(System.getProperty("java.io.tmpdir")).resolve("esql").resolve("functions").resolve(subdir); Files.createDirectories(dir); - Path file = dir.resolve(functionName() + "." + extension); + Path file = dir.resolve(name + "." + extension); Files.writeString(file, str); LogManager.getLogger(getTestClass()).info("Wrote to file: {}", file); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 589477a8bebd..26340be22408 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -20,7 +20,9 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.junit.AfterClass; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -150,4 +152,9 @@ public class RLikeTests extends AbstractScalarFunctionTestCase { ? new RLike(source, expression, new RLikePattern(patternString), true) : new RLike(source, expression, new RLikePattern(patternString)); } + + @AfterClass + public static void renderNotRLike() throws IOException { + WildcardLikeTests.renderNot(constructorWithFunctionInfo(RLike.class), "RLIKE", d -> d); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index e60c5f77ab42..7f04f076ed15 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.unboundid.util.NotNull; import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -18,11 +19,19 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPatt import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.junit.AfterClass; +import 
java.io.IOException; +import java.lang.annotation.Annotation; +import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.function.Function; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -87,4 +96,67 @@ public class WildcardLikeTests extends AbstractScalarFunctionTestCase { } return new WildcardLike(source, expression, new WildcardPattern(((BytesRef) pattern.fold(FoldContext.small())).utf8ToString())); } + + @AfterClass + public static void renderNotLike() throws IOException { + renderNot(constructorWithFunctionInfo(WildcardLike.class), "LIKE", d -> d); + } + + public static void renderNot(@NotNull Constructor ctor, String name, Function description) throws IOException { + FunctionInfo orig = ctor.getAnnotation(FunctionInfo.class); + assert orig != null; + FunctionInfo functionInfo = new FunctionInfo() { + @Override + public Class annotationType() { + return orig.annotationType(); + } + + @Override + public String operator() { + return "NOT " + name; + } + + @Override + public String[] returnType() { + return orig.returnType(); + } + + @Override + public boolean preview() { + return orig.preview(); + } + + @Override + public String description() { + return description.apply(orig.description().replace(name, "NOT " + name)); + } + + @Override + public String detailedDescription() { + return ""; + } + + @Override + public String note() { + return orig.note().replace(name, "NOT " + name); + } + + @Override + public String appendix() { + return orig.appendix().replace(name, "NOT " + name); + } + + @Override + public boolean isAggregation() { + return orig.isAggregation(); + } + + @Override + public Example[] examples() { + // throw away examples + return new Example[] {}; + } + }; + renderDocsForOperators("not_" + name.toLowerCase(Locale.ENGLISH), ctor, functionInfo); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java index 80f67ec8e5e3..03a4b063d629 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java @@ -19,7 +19,10 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLikeTests; +import org.junit.AfterClass; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -333,4 +336,14 @@ public class InTests extends AbstractFunctionTestCase { protected Expression build(Source source, List args) { return new In(source, args.get(args.size() - 1), args.subList(0, args.size() - 1)); } + + @AfterClass + public static void renderNotIn() throws IOException { + WildcardLikeTests.renderNot( + constructorWithFunctionInfo(In.class), + "IN", + d -> "The `NOT IN` operator allows testing whether a field or expression does *not* equal any element " + + "in a list of literals, fields or expressions." 
+ ); + } } From 787a16d0d5f01502b1d73f649d7d9f15de9d8c69 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 23 Jan 2025 15:59:19 +0000 Subject: [PATCH 20/29] Update the index version compatible test to only check the minimum (#120406) --- .../test/index/IndexVersionUtilsTests.java | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java index 53758c165a3c..05e8a93ad99e 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.test.index; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; @@ -19,26 +18,19 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import static org.hamcrest.Matchers.equalTo; + public class IndexVersionUtilsTests extends ESTestCase { /** * Tests that {@link IndexVersions#MINIMUM_COMPATIBLE} and {@link IndexVersionUtils#allReleasedVersions()} - * agree with the list of index compatible versions we build in gradle. + * agree on the minimum version that should be tested. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98054") - public void testGradleVersionsMatchVersionUtils() { + public void testIndexCompatibleVersionMatches() { VersionsFromProperty indexCompatible = new VersionsFromProperty("tests.gradle_index_compat_versions"); - List released = IndexVersionUtils.allReleasedVersions() - .stream() /* Java lists all versions from the 5.x series onwards, but we only want to consider - * ones that we're supposed to be compatible with. */ - .filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)) - .toList(); - List releasedIndexCompatible = released.stream() - .filter(v -> IndexVersion.current().equals(v) == false) - .map(Object::toString) - .toList(); - assertEquals(releasedIndexCompatible, indexCompatible.released); + String minIndexVersion = IndexVersions.MINIMUM_COMPATIBLE.toReleaseVersion(); + String lowestCompatibleVersion = indexCompatible.released.get(0); + assertThat(lowestCompatibleVersion, equalTo(minIndexVersion)); } /** From 9a9bc698838a6e1fee8b079fa240a18f95859107 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 23 Jan 2025 17:28:52 +0100 Subject: [PATCH 21/29] Stop caching source map on SearchHit#getSourceMap (#119888) This call has the side effect that if you are iterating a number of hits calling this method, you will increase memory usage by a non-trivial amount, which in most cases is unwanted. Therefore this commit removes this caching altogether and adds an assertion so that the method is called only once during the lifetime of the object.
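For illustration only, a sketch of the new calling convention rather than code from this change (the `hits` variable, field names and `process(...)` helper are hypothetical): callers read the source map once per hit and reuse the local reference, which is the pattern the refactored tests below follow.

    // Read the source once per hit; a second getSourceAsMap() call on the same
    // hit now trips an assertion, and each call would re-parse the source bytes.
    for (SearchHit hit : hits) {
        Map<String, Object> source = hit.getSourceAsMap(); // may be null if the hit has no source
        process(source.get("field1"), source.get("field2")); // process(...) is a placeholder
    }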
--- .../join/query/ChildQuerySearchIT.java | 15 +- .../aggregations/metrics/TopHitsIT.java | 10 +- .../search/fetch/subphase/InnerHitsIT.java | 42 ++-- .../search/searchafter/SearchAfterIT.java | 21 +- .../search/sort/FieldSortIT.java | 6 +- .../search/source/SourceFetchingIT.java | 12 +- .../suggest/CompletionSuggestSearchIT.java | 5 +- .../org/elasticsearch/search/SearchHit.java | 22 +- .../aggregations/metrics/InternalTopHits.java | 2 + .../elasticsearch/search/SearchHitTests.java | 2 - .../search/SearchResponseUtils.java | 1 - .../xpack/enrich/EnrichCache.java | 5 +- .../EnrichShardMultiSearchActionTests.java | 9 +- .../rules/QueryRulesIndexService.java | 6 +- .../registry/ModelRegistryTests.java | 1 - .../extractor/scroll/ScrollDataExtractor.java | 8 +- .../scroll/SearchHitToJsonProcessor.java | 5 +- .../scroll/TimeBasedExtractedFields.java | 5 +- .../extractor/DataFrameDataExtractor.java | 33 +-- .../dataframe/inference/InferenceRunner.java | 17 +- .../process/DataFrameRowsJoiner.java | 10 +- .../xpack/ml/extractor/DocValueField.java | 2 +- .../xpack/ml/extractor/ExtractedField.java | 6 +- .../xpack/ml/extractor/ExtractedFields.java | 4 +- .../xpack/ml/extractor/GeoPointField.java | 4 +- .../xpack/ml/extractor/GeoShapeField.java | 4 +- .../xpack/ml/extractor/MultiField.java | 4 +- .../xpack/ml/extractor/ProcessedField.java | 4 +- .../xpack/ml/extractor/ScriptField.java | 2 +- .../xpack/ml/extractor/SourceField.java | 13 +- .../xpack/ml/extractor/SourceSupplier.java | 34 +++ .../xpack/ml/extractor/TimeField.java | 2 +- .../scroll/SearchHitToJsonProcessorTests.java | 3 +- .../scroll/TimeBasedExtractedFieldsTests.java | 11 +- .../DataFrameDataExtractorTests.java | 2 +- .../ExtractedFieldsDetectorTests.java | 8 +- .../process/DataFrameRowsJoinerTests.java | 1 + .../ml/extractor/DocValueFieldTests.java | 8 +- .../ml/extractor/ExtractedFieldsTests.java | 12 +- .../ml/extractor/GeoPointFieldTests.java | 6 +- .../ml/extractor/GeoShapeFieldTests.java | 8 +- .../xpack/ml/extractor/MultiFieldTests.java | 4 +- .../ml/extractor/ProcessedFieldTests.java | 8 +- .../xpack/ml/extractor/ScriptFieldTests.java | 8 +- .../xpack/ml/extractor/SourceFieldTests.java | 10 +- .../xpack/ml/extractor/TimeFieldTests.java | 8 +- .../monitoring/integration/MonitoringIT.java | 25 ++- .../LocalExporterResourceIntegTests.java | 8 +- .../DocumentAndFieldLevelSecurityTests.java | 52 +++-- .../DocumentLevelSecurityTests.java | 66 +++--- .../integration/FieldLevelSecurityTests.java | 203 ++++++++++-------- .../xpack/security/authc/TokenService.java | 6 +- .../transform/transforms/latest/Latest.java | 8 +- .../integration/RejectedExecutionTests.java | 14 +- .../transform/TransformIntegrationTests.java | 20 +- 55 files changed, 461 insertions(+), 354 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index cce0ef06cbf6..96802ac76295 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -184,8 +184,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { 
assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); - assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(extractValue("join_field.name", source), equalTo("child")); + assertThat(extractValue("join_field.parent", source), equalTo("p1")); }); @@ -197,11 +198,13 @@ public class ChildQuerySearchIT extends ParentChildTestCase { response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); - assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); + Map source0 = response.getHits().getAt(0).getSourceAsMap(); + assertThat(extractValue("join_field.name", source0), equalTo("child")); + assertThat(extractValue("join_field.parent", source0), equalTo("p1")); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2"))); - assertThat(extractValue("join_field.name", response.getHits().getAt(1).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", response.getHits().getAt(1).getSourceAsMap()), equalTo("p1")); + Map source1 = response.getHits().getAt(1).getSourceAsMap(); + assertThat(extractValue("join_field.name", source1), equalTo("child")); + assertThat(extractValue("join_field.parent", source1), equalTo("p1")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index affa371d92aa..c246b7cc2f5c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -655,8 +655,9 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(hit.field("field2").getValue(), equalTo(2.71f)); assertThat(hit.field("script").getValue().toString(), equalTo("5")); - assertThat(hit.getSourceAsMap().size(), equalTo(1)); - assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain")); + Map source = hit.getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("text").toString(), equalTo("some text to entertain")); assertEquals("some text to entertain", hit.getFields().get("text").getValue()); assertEquals("some text to entertain", hit.getFields().get("text_stored_lookup").getValue()); } @@ -927,8 +928,9 @@ public class TopHitsIT extends ESIntegTestCase { field = searchHit.field("script"); assertThat(field.getValue().toString(), equalTo("5")); - assertThat(searchHit.getSourceAsMap().size(), equalTo(1)); - assertThat(extractValue("message", searchHit.getSourceAsMap()), equalTo("some comment")); + Map source = searchHit.getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(extractValue("message", source), equalTo("some comment")); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index e39f8df9bad3..edc0c65bc773 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -490,8 +490,9 @@ public class InnerHitsIT extends ESIntegTestCase { response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertNotNull(innerHits.getAt(0).getSourceAsMap()); - assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + Map source = innerHits.getAt(0).getSourceAsMap(); + assertNotNull(source); + assertFalse(source.isEmpty()); } ); assertNoFailuresAndResponse( @@ -507,8 +508,9 @@ public class InnerHitsIT extends ESIntegTestCase { response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertNotNull(innerHits.getAt(0).getSourceAsMap()); - assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + Map source = innerHits.getAt(0).getSourceAsMap(); + assertNotNull(source); + assertFalse(source.isEmpty()); } ); } @@ -845,16 +847,12 @@ public class InnerHitsIT extends ESIntegTestCase { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), - equalTo("fox eat quick") - ); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(1)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"), - equalTo("fox ate rabbit x y z") - ); + Map source0 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap(); + assertThat(source0.size(), equalTo(1)); + assertThat(source0.get("message"), equalTo("fox eat quick")); + Map source1 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap(); + assertThat(source1.size(), equalTo(1)); + assertThat(source1.get("message"), equalTo("fox ate rabbit x y z")); } ); @@ -866,16 +864,12 @@ public class InnerHitsIT extends ESIntegTestCase { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), - equalTo("fox eat quick") - ); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"), - equalTo("fox ate rabbit x y z") - ); + Map source0 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap(); + assertThat(source0.size(), equalTo(2)); + assertThat(source0.get("message"), equalTo("fox eat quick")); + Map source1 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap(); + 
assertThat(source1.size(), equalTo(2)); + assertThat(source1.get("message"), equalTo("fox ate rabbit x y z")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 353858e9d697..29f56eeb5ecb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -47,6 +47,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -152,8 +153,9 @@ public class SearchAfterIT extends ESIntegTestCase { searchResponse -> { assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + Map source = searchResponse.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.get("field1"), Matchers.equalTo(100)); + assertThat(source.get("field2"), Matchers.equalTo("toto")); } ); } @@ -438,8 +440,9 @@ public class SearchAfterIT extends ESIntegTestCase { int foundHits = 0; do { for (SearchHit hit : resp.getHits().getHits()) { - assertNotNull(hit.getSourceAsMap()); - final Object timestamp = hit.getSourceAsMap().get("timestamp"); + Map source = hit.getSourceAsMap(); + assertNotNull(source); + final Object timestamp = source.get("timestamp"); assertNotNull(timestamp); assertThat(((Number) timestamp).longValue(), equalTo(timestamps.get(foundHits))); foundHits++; @@ -469,8 +472,9 @@ public class SearchAfterIT extends ESIntegTestCase { do { Object[] after = null; for (SearchHit hit : resp.getHits().getHits()) { - assertNotNull(hit.getSourceAsMap()); - final Object timestamp = hit.getSourceAsMap().get("timestamp"); + Map source = hit.getSourceAsMap(); + assertNotNull(source); + final Object timestamp = source.get("timestamp"); assertNotNull(timestamp); assertThat(((Number) timestamp).longValue(), equalTo(timestamps.get(foundHits))); after = hit.getSortValues(); @@ -505,8 +509,9 @@ public class SearchAfterIT extends ESIntegTestCase { do { Object[] after = null; for (SearchHit hit : resp.getHits().getHits()) { - assertNotNull(hit.getSourceAsMap()); - final Object timestamp = hit.getSourceAsMap().get("timestamp"); + Map source = hit.getSourceAsMap(); + assertNotNull(source); + final Object timestamp = source.get("timestamp"); assertNotNull(timestamp); foundSeqNos.add(((Number) timestamp).longValue()); after = hit.getSortValues(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index f407c14c48c5..7fd31b056779 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -130,10 +130,11 @@ public class FieldSortIT extends ESIntegTestCase { .setSize(10), response -> { logClusterState(); + Number previous = (Number) 
response.getHits().getHits()[0].getSourceAsMap().get("entry"); for (int j = 1; j < response.getHits().getHits().length; j++) { Number current = (Number) response.getHits().getHits()[j].getSourceAsMap().get("entry"); - Number previous = (Number) response.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); assertThat(response.toString(), current.intValue(), lessThan(previous.intValue())); + previous = current; } } ); @@ -144,10 +145,11 @@ public class FieldSortIT extends ESIntegTestCase { .setSize(10), response -> { logClusterState(); + Number previous = (Number) response.getHits().getHits()[0].getSourceAsMap().get("entry"); for (int j = 1; j < response.getHits().getHits().length; j++) { Number current = (Number) response.getHits().getHits()[j].getSourceAsMap().get("entry"); - Number previous = (Number) response.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); assertThat(response.toString(), current.intValue(), greaterThan(previous.intValue())); + previous = current; } } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index 0e7f8b604a8d..0b1d665f4f3e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -11,6 +11,8 @@ package org.elasticsearch.search.source; import org.elasticsearch.test.ESIntegTestCase; +import java.util.Map; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.notNullValue; @@ -57,8 +59,9 @@ public class SourceFetchingIT extends ESIntegTestCase { assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field1"), equalTo("value")); }, prepareSearch("test").setFetchSource("field1", null), prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }) @@ -84,8 +87,9 @@ public class SourceFetchingIT extends ESIntegTestCase { assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field"), equalTo("value")); }, prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 8b21bb54361b..cb0e91ce57c7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -356,8 +356,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertThat(option.getText().toString(), equalTo("suggestion" + id)); assertThat(option.getHit(), hasId("" + id)); assertThat(option.getHit(), hasScore((id))); - assertNotNull(option.getHit().getSourceAsMap()); - Set sourceFields = option.getHit().getSourceAsMap().keySet(); + Map source = option.getHit().getSourceAsMap(); + assertNotNull(source); + Set sourceFields = source.keySet(); assertThat(sourceFields, contains("a")); assertThat(sourceFields, not(contains("b"))); id--; diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 701d451ac2c1..8a70f8a7f41a 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -104,7 +104,8 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted private transient String index; private transient String clusterAlias; - private Map sourceAsMap; + // For asserting that the method #getSourceAsMap is called just once in the lifetime of this object + private boolean sourceAsMapCalled = false; private Map innerHits; @@ -142,7 +143,6 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted null, null, null, - null, new HashMap<>(), new HashMap<>(), refCounted @@ -166,7 +166,6 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted SearchShardTarget shard, String index, String clusterAlias, - Map sourceAsMap, Map innerHits, Map documentFields, Map metaFields, @@ -188,7 +187,6 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted this.shard = shard; this.index = index; this.clusterAlias = clusterAlias; - this.sourceAsMap = sourceAsMap; this.innerHits = innerHits; this.documentFields = documentFields; this.metaFields = metaFields; @@ -279,7 +277,6 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted shardTarget, index, clusterAlias, - null, innerHits, documentFields, metaFields, @@ -447,7 +444,6 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted */ public SearchHit sourceRef(BytesReference source) { this.source = source; - this.sourceAsMap = null; return this; } @@ -476,19 +472,18 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted } /** - * The source of the document as a map (can be {@code null}). + * The source of the document as a map (can be {@code null}). This method is expected + * to be called at most once during the lifetime of the object as the generated map + * is expensive to generate and it does not get cached. */ public Map getSourceAsMap() { assert hasReferences(); + assert sourceAsMapCalled == false : "getSourceAsMap() called twice"; + sourceAsMapCalled = true; if (source == null) { return null; } - if (sourceAsMap != null) { - return sourceAsMap; - } - - sourceAsMap = Source.fromBytes(source).source(); - return sourceAsMap; + return Source.fromBytes(source).source(); } /** @@ -758,7 +753,6 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted shard, index, clusterAlias, - sourceAsMap, innerHits == null ?
null : innerHits.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().asUnpooled())), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 8ff381cbbc84..7e7e10c48ea1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -235,6 +235,8 @@ public class InternalTopHits extends InternalAggregation implements TopHits { } else if (tokens[0].equals(SCORE)) { return topHit.getScore(); } else if (tokens[0].equals(SOURCE)) { + // Caching the map might help here, but memory usage is a concern for this class + // This is dead code; pipeline aggregations do not support _source.field. Map sourceAsMap = topHit.getSourceAsMap(); if (sourceAsMap != null) { Object property = sourceAsMap.get(tokens[1]); diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 2b082f2f8b02..25a71d04b321 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -317,9 +317,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); - assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceAsString(), nullValue()); - assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); assertThat(searchHit.getSourceAsString(), nullValue()); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index b0edbb829df2..330058b16a81 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -889,7 +889,6 @@ public enum SearchResponseUtils { shardTarget, index, clusterAlias, - null, get(SearchHit.Fields.INNER_HITS, values, null), get(SearchHit.DOCUMENT_FIELDS, values, Collections.emptyMap()), get(SearchHit.METADATA_FIELDS, values, Collections.emptyMap()), diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java index 400d9f0cc84b..a2899813aa42 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java @@ -173,8 +173,11 @@ public final class EnrichCache { List> result = new ArrayList<>(response.getHits().getHits().length); long size = 0; for (SearchHit hit : response.getHits()) { - result.add(deepCopy(hit.getSourceAsMap(), true)); + // There is a cost of decompressing source here plus caching it. + // We do it first so we don't decompress it twice. size += hit.getSourceRef() != null ? hit.getSourceRef().ramBytesUsed() : 0; + // Do we need a deep copy here? We are creating a modifiable map already.
+ result.add(deepCopy(hit.getSourceAsMap(), true)); } return new CacheValue(Collections.unmodifiableList(result), size); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 8dbc9b0f4f43..0ae84b62bafd 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.enrich.LocalStateEnrich; import java.util.Collection; import java.util.List; +import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; @@ -65,11 +66,9 @@ public class EnrichShardMultiSearchActionTests extends ESSingleNodeTestCase { for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L)); - assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); - assertThat( - response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), - equalTo("value1") - ); + Map sourceAsMap = response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap(); + assertThat(sourceAsMap.size(), equalTo(1)); + assertThat(sourceAsMap.get("key1"), equalTo("value1")); } } ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 9b264a2cc41c..d80a4af3dbac 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -33,7 +33,6 @@ import org.elasticsearch.indices.ExecutorNames; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; @@ -433,13 +432,12 @@ public class QueryRulesIndexService { private static QueryRulesetResult mapSearchResponseToQueryRulesetList(SearchResponse response) { final List rulesetResults = Arrays.stream(response.getHits().getHits()) - .map(QueryRulesIndexService::hitToQueryRulesetListItem) + .map(searchHit -> QueryRulesIndexService.hitToQueryRulesetListItem(searchHit.getSourceAsMap())) .toList(); return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value()); } - private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { - final Map sourceMap = searchHit.getSourceAsMap(); + private static QueryRulesetListItem hitToQueryRulesetListItem(final Map sourceMap) { final String rulesetId = (String) sourceMap.get(QueryRuleset.ID_FIELD.getPreferredName()); @SuppressWarnings("unchecked") final List> rules = ((List>) sourceMap.get(QueryRuleset.RULES_FIELD.getPreferredName())); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 409d62426949..6f033fdfd2f2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -187,7 +187,6 @@ public class ModelRegistryTests extends ESTestCase { var listener = new PlainActionFuture(); registry.getModel("1", listener); - registry.getModel("1", listener); var modelConfig = listener.actionGet(TIMEOUT); assertEquals("1", modelConfig.inferenceEntityId()); assertEquals("foo", modelConfig.service()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 52ffe3893f33..e043fb60e92f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorUtils; import org.elasticsearch.xpack.ml.extractor.ExtractedField; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.io.IOException; import java.io.InputStream; @@ -203,11 +204,12 @@ class ScrollDataExtractor implements DataExtractor { BytesStreamOutput outputStream = new BytesStreamOutput(); SearchHit lastHit = hits.getAt(hits.getHits().length - 1); - lastTimestamp = context.extractedFields.timeFieldValue(lastHit); + lastTimestamp = context.extractedFields.timeFieldValue(lastHit, new SourceSupplier(lastHit)); try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(context.extractedFields, outputStream)) { for (SearchHit hit : hits) { + SourceSupplier sourceSupplier = new SourceSupplier(hit); if (isCancelled) { - Long timestamp = context.extractedFields.timeFieldValue(hit); + Long timestamp = context.extractedFields.timeFieldValue(hit, sourceSupplier); if (timestamp != null) { if (timestampOnCancel == null) { timestampOnCancel = timestamp; @@ -218,7 +220,7 @@ class ScrollDataExtractor implements DataExtractor { } } } - hitProcessor.process(hit); + hitProcessor.process(hit, sourceSupplier); } } return outputStream.bytes().streamInput(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java index c2353d71a71d..cc2c5028039e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java @@ -12,6 +12,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; 
import java.io.IOException; import java.io.OutputStream; @@ -27,10 +28,10 @@ class SearchHitToJsonProcessor implements Releasable { this.jsonBuilder = new XContentBuilder(JsonXContent.jsonXContent, outputStream); } - public void process(SearchHit hit) throws IOException { + public void process(SearchHit hit, SourceSupplier sourceSupplier) throws IOException { jsonBuilder.startObject(); for (ExtractedField field : fields.getAllFields()) { - writeKeyValue(field.getName(), field.value(hit)); + writeKeyValue(field.getName(), field.value(hit, sourceSupplier)); } jsonBuilder.endObject(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java index 7e78e5b9fd24..74bde7b57bd9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.util.ArrayList; import java.util.Arrays; @@ -40,8 +41,8 @@ public class TimeBasedExtractedFields extends ExtractedFields { return timeField.getName(); } - public Long timeFieldValue(SearchHit hit) { - Object[] value = timeField.value(hit); + public Long timeFieldValue(SearchHit hit, SourceSupplier source) { + Object[] value = timeField.value(hit, source); if (value.length != 1) { throw new RuntimeException( "Time field [" + timeField.getName() + "] expected a single value; actual was: " + Arrays.toString(value) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index ff96c73bc002..3a4cdbfee062 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.ml.dataframe.traintestsplit.TrainTestSplitter; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; import org.elasticsearch.xpack.ml.extractor.ProcessedField; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.io.IOException; import java.util.ArrayList; @@ -156,7 +157,7 @@ public class DataFrameDataExtractor { List rows = new ArrayList<>(searchResponse.getHits().getHits().length); for (SearchHit hit : searchResponse.getHits().getHits()) { - String[] extractedValues = extractValues(hit); + String[] extractedValues = extractValues(hit, new SourceSupplier(hit)); rows.add(extractedValues); } delegate.onResponse(rows); @@ -255,9 +256,9 @@ public class DataFrameDataExtractor { return searchResponse.getHits().asUnpooled().getHits(); } - private String extractNonProcessedValues(SearchHit hit, String organicFeature) { + private String extractNonProcessedValues(SearchHit hit, SourceSupplier sourceSupplier, String organicFeature) { ExtractedField field = 
extractedFieldsByName.get(organicFeature); - Object[] values = field.value(hit); + Object[] values = field.value(hit, sourceSupplier); if (values.length == 1 && isValidValue(values[0])) { return Objects.toString(values[0]); } @@ -270,8 +271,8 @@ public class DataFrameDataExtractor { return null; } - private String[] extractProcessedValue(ProcessedField processedField, SearchHit hit) { - Object[] values = processedField.value(hit, extractedFieldsByName::get); + private String[] extractProcessedValue(ProcessedField processedField, SearchHit hit, SourceSupplier sourceSupplier) { + Object[] values = processedField.value(hit, sourceSupplier, extractedFieldsByName::get); if (values.length == 0 && context.supportsRowsWithMissingValues == false) { return null; } @@ -309,12 +310,13 @@ public class DataFrameDataExtractor { } public Row createRow(SearchHit hit) { - String[] extractedValues = extractValues(hit); + SourceSupplier sourceSupplier = new SourceSupplier(hit); + String[] extractedValues = extractValues(hit, sourceSupplier); if (extractedValues == null) { - return new Row(null, hit, true); + return new Row(null, hit, sourceSupplier, true); } boolean isTraining = trainTestSplitter.get().isTraining(extractedValues); - Row row = new Row(extractedValues, hit, isTraining); + Row row = new Row(extractedValues, hit, sourceSupplier, isTraining); LOGGER.trace( () -> format( "[%s] Extracted row: sort key = [%s], is_training = [%s], values = %s", @@ -327,18 +329,18 @@ public class DataFrameDataExtractor { return row; } - private String[] extractValues(SearchHit hit) { + private String[] extractValues(SearchHit hit, SourceSupplier sourceSupplier) { String[] extractedValues = new String[organicFeatures.length + processedFeatures.length]; int i = 0; for (String organicFeature : organicFeatures) { - String extractedValue = extractNonProcessedValues(hit, organicFeature); + String extractedValue = extractNonProcessedValues(hit, sourceSupplier, organicFeature); if (extractedValue == null) { return null; } extractedValues[i++] = extractedValue; } for (ProcessedField processedField : context.extractedFields.getProcessedFields()) { - String[] processedValues = extractProcessedValue(processedField, hit); + String[] processedValues = extractProcessedValue(processedField, hit, sourceSupplier); if (processedValues == null) { return null; } @@ -445,9 +447,12 @@ public class DataFrameDataExtractor { private final boolean isTraining; - private Row(String[] values, SearchHit hit, boolean isTraining) { + private final SourceSupplier sourceSupplier; + + private Row(String[] values, SearchHit hit, SourceSupplier sourceSupplier, boolean isTraining) { this.values = values; this.hit = hit; + this.sourceSupplier = sourceSupplier; this.isTraining = isTraining; } @@ -475,5 +480,9 @@ public class DataFrameDataExtractor { public long getSortKey() { return (long) hit.getSortValues()[0]; } + + public Map getSource() { + return sourceSupplier.get(); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index 64cf493028ad..1d80bfbc0729 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.ml.dataframe.stats.DataCountsTracker; import 
org.elasticsearch.xpack.ml.dataframe.stats.ProgressTracker; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; @@ -210,8 +211,11 @@ public class InferenceRunner { for (SearchHit doc : batch) { dataCountsTracker.incrementTestDocsCount(); - InferenceResults inferenceResults = model.inferNoStats(featuresFromDoc(doc)); - bulkIndexer.addAndExecuteIfNeeded(createIndexRequest(doc, inferenceResults, config.getDest().getResultsField())); + SourceSupplier sourceSupplier = new SourceSupplier(doc); + InferenceResults inferenceResults = model.inferNoStats(featuresFromDoc(doc, sourceSupplier)); + bulkIndexer.addAndExecuteIfNeeded( + createIndexRequest(doc, sourceSupplier, inferenceResults, config.getDest().getResultsField()) + ); processedDocCount++; int progressPercent = Math.min((int) (processedDocCount * 100.0 / totalDocCount), MAX_PROGRESS_BEFORE_COMPLETION); @@ -225,10 +229,10 @@ public class InferenceRunner { } } - private Map featuresFromDoc(SearchHit doc) { + private Map featuresFromDoc(SearchHit doc, SourceSupplier sourceSupplier) { Map features = new HashMap<>(); for (ExtractedField extractedField : extractedFields.getAllFields()) { - Object[] values = extractedField.value(doc); + Object[] values = extractedField.value(doc, sourceSupplier); if (values.length == 1) { features.put(extractedField.getName(), values[0]); } @@ -236,11 +240,10 @@ public class InferenceRunner { return features; } - private IndexRequest createIndexRequest(SearchHit hit, InferenceResults results, String resultField) { + private IndexRequest createIndexRequest(SearchHit hit, SourceSupplier sourceSupplier, InferenceResults results, String resultField) { Map resultsMap = new LinkedHashMap<>(results.asMap()); resultsMap.put(DestinationIndex.IS_TRAINING, false); - - Map source = new LinkedHashMap<>(hit.getSourceAsMap()); + Map source = new LinkedHashMap<>(sourceSupplier.get()); source.put(resultField, resultsMap); IndexRequest indexRequest = new IndexRequest(hit.getIndex()); indexRequest.id(hit.getId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java index 3e1968ca19ce..08eb78e8274a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java @@ -102,7 +102,7 @@ class DataFrameRowsJoiner implements AutoCloseable { RowResults result = currentResults.pop(); DataFrameDataExtractor.Row row = dataFrameRowsIterator.next(); checkChecksumsMatch(row, result); - bulkIndexer.addAndExecuteIfNeeded(createIndexRequest(result, row.getHit())); + bulkIndexer.addAndExecuteIfNeeded(createIndexRequest(result, row)); } } @@ -130,11 +130,11 @@ class DataFrameRowsJoiner implements AutoCloseable { } } - private IndexRequest createIndexRequest(RowResults result, SearchHit hit) { - Map source = new LinkedHashMap<>(hit.getSourceAsMap()); + private IndexRequest createIndexRequest(RowResults result, DataFrameDataExtractor.Row row) { + Map source = new LinkedHashMap<>(row.getSource()); 
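// Copying into a fresh LinkedHashMap before merging the results matters here:
// row.getSource() returns the map memoized by the row's SourceSupplier, so
// mutating it in place would corrupt the cached source for any other reader,
// and the copy also preserves the original field order of the document.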
source.putAll(result.getResults()); - IndexRequest indexRequest = new IndexRequest(hit.getIndex()); - indexRequest.id(hit.getId()); + IndexRequest indexRequest = new IndexRequest(row.getHit().getIndex()); + indexRequest.id(row.getHit().getId()); indexRequest.source(source); indexRequest.opType(DocWriteRequest.OpType.INDEX); indexRequest.setParentTask(parentTaskId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java index b2a6b887ce31..0ec3cc6b577e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java @@ -23,7 +23,7 @@ public class DocValueField extends AbstractField { } @Override - public Object[] value(SearchHit hit) { + public Object[] value(SearchHit hit, SourceSupplier source) { return getFieldValue(hit); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java index 988263745e41..83a56f388a30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java @@ -48,10 +48,12 @@ public interface ExtractedField { /** * Extracts the value from a {@link SearchHit} - * @param hit the search hit + * + * @param hit the search hit + * @param source the source supplier * @return the extracted value */ - Object[] value(SearchHit hit); + Object[] value(SearchHit hit, SourceSupplier source); /** * @return Whether the field can be fetched from source instead diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java index ffe30516f676..69602c2c37ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java @@ -263,8 +263,8 @@ public class ExtractedFields { } @Override - public Object[] value(SearchHit hit) { - Object[] value = field.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + Object[] value = field.value(hit, source); if (value != null) { return Arrays.stream(value).map(v -> { boolean asBoolean; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java index 9edc72ca38f7..be63ce30f5f9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java @@ -23,8 +23,8 @@ public class GeoPointField extends DocValueField { } @Override - public Object[] value(SearchHit hit) { - Object[] value = super.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + Object[] value = super.value(hit, source); if (value.length == 0) { return value; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java index d7b8827add05..c4ee723b4ae8 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java @@ -33,8 +33,8 @@ public class GeoShapeField extends SourceField { } @Override - public Object[] value(SearchHit hit) { - Object[] value = super.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + Object[] value = super.value(hit, source); if (value.length == 0) { return value; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java index 8bbfc714e35d..b86da90736c6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java @@ -50,8 +50,8 @@ public class MultiField implements ExtractedField { } @Override - public Object[] value(SearchHit hit) { - return field.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + return field.value(hit, source); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java index 62ee8a3ffd20..ebf43e15391d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java @@ -38,7 +38,7 @@ public class ProcessedField { return Collections.singleton(preProcessor.getOutputFieldType(outputField)); } - public Object[] value(SearchHit hit, Function fieldExtractor) { + public Object[] value(SearchHit hit, SourceSupplier sourceSupplier, Function fieldExtractor) { List inputFields = getInputFieldNames(); Map inputs = Maps.newMapWithExpectedSize(inputFields.size()); for (String field : inputFields) { @@ -46,7 +46,7 @@ public class ProcessedField { if (extractedField == null) { return new Object[0]; } - Object[] values = extractedField.value(hit); + Object[] values = extractedField.value(hit, sourceSupplier); if (values == null || values.length == 0) { continue; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java index add0bdc2fb1e..d064a75d73a9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java @@ -22,7 +22,7 @@ public class ScriptField extends AbstractField { } @Override - public Object[] value(SearchHit hit) { + public Object[] value(SearchHit hit, SourceSupplier source) { return getFieldValue(hit); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java index dc29533881cb..57abe104af2c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java @@ -27,15 +27,16 @@ public class SourceField extends AbstractField { } @Override - public Object[] value(SearchHit hit) { - Map source = hit.getSourceAsMap(); + public Object[] value(SearchHit hit, SourceSupplier source) { + // This is the only one that might 
actually need the parsed _source, so it pulls the map from the supplier (parsed at most once per hit) + Map<String, Object> sourceMap = source.get(); int level = 0; - while (source != null && level < path.length - 1) { - source = getNextLevel(source, path[level]); + while (sourceMap != null && level < path.length - 1) { + sourceMap = getNextLevel(sourceMap, path[level]); level++; } - if (source != null) { - Object values = source.get(path[level]); + if (sourceMap != null) { + Object values = sourceMap.get(path[level]); if (values != null) { if (values instanceof List) { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java new file mode 100644 index 000000000000..192eaf1c6a65 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.extractor; + +import org.elasticsearch.search.SearchHit; + +import java.util.Map; +import java.util.function.Supplier; + +/** + * A supplier for the source of a search hit with caching capabilities. + * The source is parsed lazily on the first call to {@code get()} and the + * parsed map is reused on later calls; instances are not thread-safe. + */ +public final class SourceSupplier implements Supplier<Map<String, Object>> { + + private final SearchHit searchHit; + private Map<String, Object> sourceMap; + + public SourceSupplier(SearchHit searchHit) { + this.searchHit = searchHit; + } + + @Override + public Map<String, Object> get() { + if (sourceMap == null) { + sourceMap = searchHit.getSourceAsMap(); + } + return sourceMap; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java index 7bea64e5a9a4..a1b40f1600a0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java @@ -38,7 +38,7 @@ public class TimeField extends AbstractField { } @Override - public Object[] value(SearchHit hit) { + public Object[] value(SearchHit hit, SourceSupplier source) { Object[] value = getFieldValue(hit); if (value.length != 1) { return value; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java index ecb49ef9fa71..a19634e22761 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ml.extractor.DocValueField; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.extractor.TimeField; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; @@ -75,7 +76,7 @@ public class SearchHitToJsonProcessorTests extends ESTestCase { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); try (SearchHitToJsonProcessor hitProcessor = new
SearchHitToJsonProcessor(fields, outputStream)) { for (int i = 0; i < searchHits.length; i++) { - hitProcessor.process(searchHits[i]); + hitProcessor.process(searchHits[i], new SourceSupplier(searchHits[i])); } } return outputStream.toString(StandardCharsets.UTF_8.name()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java index a2264a4cf7a6..707fb1d5be20 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; import org.elasticsearch.xpack.ml.extractor.ScriptField; import org.elasticsearch.xpack.ml.extractor.SourceField; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.extractor.TimeField; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; @@ -79,7 +80,7 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase { long millis = randomLong(); SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField)); - assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); + assertThat(extractedFields.timeFieldValue(hit, new SourceSupplier(hit)), equalTo(millis)); } public void testPre6xTimeFieldValue() { @@ -87,7 +88,7 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase { long millis = randomLong(); SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField)); - assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); + assertThat(extractedFields.timeFieldValue(hit, new SourceSupplier(hit)), equalTo(millis)); } public void testTimeFieldValueGivenEmptyArray() { @@ -95,7 +96,7 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase { TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField)); - expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); + expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit, new SourceSupplier(hit))); } public void testTimeFieldValueGivenValueHasTwoElements() { @@ -103,7 +104,7 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase { TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField)); - expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); + expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit, new SourceSupplier(hit))); } public void testTimeFieldValueGivenValueIsString() { @@ -111,7 +112,7 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase { TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField)); - expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); + expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit, new 
SourceSupplier(hit))); } public void testBuildGivenMixtureOfTypes() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 2ba9146533b7..1ca74d4a43a8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -592,7 +592,7 @@ public class DataFrameDataExtractorTests extends ESTestCase { public void testExtractionWithProcessedFieldThrows() { ProcessedField processedField = mock(ProcessedField.class); - doThrow(new RuntimeException("process field error")).when(processedField).value(any(), any()); + doThrow(new RuntimeException("process field error")).when(processedField).value(any(), any(), any()); extractedFields = new ExtractedFields( Arrays.asList( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java index fdece811e702..ee7bdcb51d60 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; import java.util.ArrayList; @@ -814,13 +815,14 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { ); SearchHit hit = new SearchHitBuilder(42).addField("some_boolean", true).build(); - assertThat(booleanField.value(hit), arrayContaining(1)); + SourceSupplier sourceSupplier = new SourceSupplier(hit); + assertThat(booleanField.value(hit, sourceSupplier), arrayContaining(1)); hit = new SearchHitBuilder(42).addField("some_boolean", false).build(); - assertThat(booleanField.value(hit), arrayContaining(0)); + assertThat(booleanField.value(hit, sourceSupplier), arrayContaining(0)); hit = new SearchHitBuilder(42).addField("some_boolean", Arrays.asList(false, true, false)).build(); - assertThat(booleanField.value(hit), arrayContaining(0, 1, 0)); + assertThat(booleanField.value(hit, sourceSupplier), arrayContaining(0, 1, 0)); } public void testDetect_GivenBooleanField_OutlierDetection() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index cb02b8294b11..a87f3f88190c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -326,6 +326,7 @@ public class DataFrameRowsJoinerTests extends ESTestCase { private static DataFrameDataExtractor.Row newRow(SearchHit hit, String[] values, boolean isTraining, int checksum) { 
DataFrameDataExtractor.Row row = mock(DataFrameDataExtractor.Row.class); when(row.getHit()).thenReturn(hit); + when(row.getSource()).thenReturn(hit.getSourceAsMap()); when(row.getValues()).thenReturn(values); when(row.isTraining()).thenReturn(isTraining); when(row.getChecksum()).thenReturn(checksum); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java index 9fdae0c517ef..5239a71045b4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java @@ -25,7 +25,7 @@ public class DocValueFieldTests extends ESTestCase { ExtractedField field = new DocValueField("a_keyword", Collections.singleton("keyword")); - assertThat(field.value(hit), equalTo(new String[] { "bar" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); assertThat(field.getName(), equalTo("a_keyword")); assertThat(field.getSearchField(), equalTo("a_keyword")); assertThat(field.getTypes(), contains("keyword")); @@ -41,7 +41,7 @@ public class DocValueFieldTests extends ESTestCase { ExtractedField field = new DocValueField("array", Collections.singleton("keyword")); - assertThat(field.value(hit), equalTo(new String[] { "a", "b" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "a", "b" })); assertThat(field.getName(), equalTo("array")); assertThat(field.getSearchField(), equalTo("array")); assertThat(field.getTypes(), contains("keyword")); @@ -52,7 +52,7 @@ public class DocValueFieldTests extends ESTestCase { expectThrows(UnsupportedOperationException.class, () -> field.getParentField()); ExtractedField missing = new DocValueField("missing", Collections.singleton("keyword")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testMissing() { @@ -60,7 +60,7 @@ public class DocValueFieldTests extends ESTestCase { ExtractedField missing = new DocValueField("missing", Collections.singleton("keyword")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testNewFromSource() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java index 50da104bd2e5..23420e518309 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java @@ -127,8 +127,8 @@ public class ExtractedFieldsTests extends ESTestCase { SearchHit hitTrue = new SearchHitBuilder(42).addField("a_bool", true).build(); SearchHit hitFalse = new SearchHitBuilder(42).addField("a_bool", false).build(); - assertThat(mapped.value(hitTrue), equalTo(new Integer[] { 1 })); - assertThat(mapped.value(hitFalse), equalTo(new Integer[] { 0 })); + assertThat(mapped.value(hitTrue, new SourceSupplier(hitTrue)), equalTo(new Integer[] { 1 })); + assertThat(mapped.value(hitFalse, new SourceSupplier(hitFalse)), equalTo(new Integer[] { 0 })); assertThat(mapped.getName(), equalTo(aBool.getName())); assertThat(mapped.getMethod(), equalTo(aBool.getMethod())); 
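Stepping back from the individual hunks: every call site above follows the same pattern, one SourceSupplier per SearchHit, shared across all fields extracted from that hit, so the hit's _source is deserialized at most once however many fields read it. A minimal sketch of that call pattern, assuming only the classes shown in this patch plus the usual java.util imports (the helper method itself is illustrative, not part of the change):

    // One supplier per hit: doc-value and script fields ignore it, while
    // source-backed fields call get(), which parses _source once and caches it.
    static List<Object[]> extractAll(ExtractedFields fields, SearchHit hit) {
        SourceSupplier sourceSupplier = new SourceSupplier(hit);
        List<Object[]> values = new ArrayList<>();
        for (ExtractedField field : fields.getAllFields()) {
            values.add(field.value(hit, sourceSupplier));
        }
        return values;
    }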
@@ -145,10 +145,10 @@ public class ExtractedFieldsTests extends ESTestCase { SearchHit hitTrueArray = new SearchHitBuilder(42).setSource("{\"a_bool\": [\"true\", true]}").build(); SearchHit hitFalseArray = new SearchHitBuilder(42).setSource("{\"a_bool\": [\"false\", false]}").build(); - assertThat(mapped.value(hitTrue), equalTo(new Integer[] { 1 })); - assertThat(mapped.value(hitFalse), equalTo(new Integer[] { 0 })); - assertThat(mapped.value(hitTrueArray), equalTo(new Integer[] { 1, 1 })); - assertThat(mapped.value(hitFalseArray), equalTo(new Integer[] { 0, 0 })); + assertThat(mapped.value(hitTrue, new SourceSupplier(hitTrue)), equalTo(new Integer[] { 1 })); + assertThat(mapped.value(hitFalse, new SourceSupplier(hitFalse)), equalTo(new Integer[] { 0 })); + assertThat(mapped.value(hitTrueArray, new SourceSupplier(hitTrueArray)), equalTo(new Integer[] { 1, 1 })); + assertThat(mapped.value(hitFalseArray, new SourceSupplier(hitFalseArray)), equalTo(new Integer[] { 0, 0 })); assertThat(mapped.getName(), equalTo(aBool.getName())); assertThat(mapped.getMethod(), equalTo(aBool.getMethod())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java index 4b48b44bcd9d..ad1aaebad8bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java @@ -28,7 +28,7 @@ public class GeoPointFieldTests extends ESTestCase { // doc_value field ExtractedField geo = new GeoPointField("geo"); - assertThat(geo.value(hit), equalTo(expected)); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected)); assertThat(geo.getName(), equalTo("geo")); assertThat(geo.getSearchField(), equalTo("geo")); assertThat(geo.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE)); @@ -45,7 +45,7 @@ public class GeoPointFieldTests extends ESTestCase { ExtractedField geo = new GeoPointField("missing"); - assertThat(geo.value(hit), equalTo(new Object[0])); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testArray() { @@ -53,7 +53,7 @@ public class GeoPointFieldTests extends ESTestCase { ExtractedField geo = new GeoPointField("geo"); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit, new SourceSupplier(hit))); assertThat(e.getMessage(), equalTo("Unexpected values for a geo_point field: [1, 2]")); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java index efedf918d479..5b84d61a6997 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java @@ -27,7 +27,7 @@ public class GeoShapeFieldTests extends ESTestCase { ExtractedField geo = new GeoShapeField("geo"); - assertThat(geo.value(hit), equalTo(expected)); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected)); assertThat(geo.getName(), equalTo("geo")); assertThat(geo.getSearchField(), equalTo("geo")); assertThat(geo.getTypes(), contains("geo_shape")); @@ -48,7 +48,7 @@ public class GeoShapeFieldTests extends ESTestCase 
{ ExtractedField geo = new GeoShapeField("geo"); - assertThat(geo.value(hit), equalTo(expected)); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected)); assertThat(geo.getName(), equalTo("geo")); assertThat(geo.getSearchField(), equalTo("geo")); assertThat(geo.getTypes(), contains("geo_shape")); @@ -65,7 +65,7 @@ public class GeoShapeFieldTests extends ESTestCase { ExtractedField geo = new GeoShapeField("missing"); - assertThat(geo.value(hit), equalTo(new Object[0])); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testArray() { @@ -73,7 +73,7 @@ public class GeoShapeFieldTests extends ESTestCase { ExtractedField geo = new GeoShapeField("geo"); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit, new SourceSupplier(hit))); assertThat(e.getMessage(), equalTo("Unexpected values for a geo_shape field: [1, 2]")); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java index 8c7fb8223871..daa190c03321 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java @@ -23,7 +23,7 @@ public class MultiFieldTests extends ESTestCase { ExtractedField wrapped = new DocValueField("a.b", Collections.singleton("integer")); ExtractedField field = new MultiField("a", wrapped); - assertThat(field.value(hit), equalTo(new Integer[] { 2 })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new Integer[] { 2 })); assertThat(field.getName(), equalTo("a.b")); assertThat(field.getSearchField(), equalTo("a.b")); assertThat(field.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE)); @@ -39,7 +39,7 @@ public class MultiFieldTests extends ESTestCase { ExtractedField wrapped = new DocValueField("a", Collections.singleton("integer")); ExtractedField field = new MultiField("a.b", "a", "a", wrapped); - assertThat(field.value(hit), equalTo(new Integer[] { 1 })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new Integer[] { 1 })); assertThat(field.getName(), equalTo("a.b")); assertThat(field.getSearchField(), equalTo("a")); assertThat(field.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java index 907908ef8f71..489a2aa01dfc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java @@ -44,13 +44,13 @@ public class ProcessedFieldTests extends ESTestCase { public void testMissingExtractor() { ProcessedField processedField = new ProcessedField(makeOneHotPreProcessor(randomAlphaOfLength(10), "bar", "baz")); - assertThat(processedField.value(makeHit(), (s) -> null), emptyArray()); + assertThat(processedField.value(makeHit(), null, (s) -> null), emptyArray()); } public void testMissingInputValues() { ExtractedField extractedField = makeExtractedField(new Object[0]); ProcessedField processedField = new ProcessedField(makeOneHotPreProcessor(randomAlphaOfLength(10), "bar", "baz")); - 
assertThat(processedField.value(makeHit(), (s) -> extractedField), arrayContaining(is(nullValue()), is(nullValue()))); + assertThat(processedField.value(makeHit(), null, (s) -> extractedField), arrayContaining(is(nullValue()), is(nullValue()))); } public void testProcessedFieldFrequencyEncoding() { @@ -101,7 +101,7 @@ public class ProcessedFieldTests extends ESTestCase { assert inputs.length == expectedOutputs.length; for (int i = 0; i < inputs.length; i++) { Object input = inputs[i]; - Object[] result = processedField.value(makeHit(input), (s) -> makeExtractedField(new Object[] { input })); + Object[] result = processedField.value(makeHit(input), null, (s) -> makeExtractedField(new Object[] { input })); assertThat( "Input [" + input + "] Expected " + Arrays.toString(expectedOutputs[i]) + " but received " + Arrays.toString(result), result, @@ -120,7 +120,7 @@ public class ProcessedFieldTests extends ESTestCase { private static ExtractedField makeExtractedField(Object[] value) { ExtractedField extractedField = mock(ExtractedField.class); - when(extractedField.value(any())).thenReturn(value); + when(extractedField.value(any(), any())).thenReturn(value); return extractedField; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java index 1b60d878c573..88aa9c2fe5f7 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java @@ -23,7 +23,7 @@ public class ScriptFieldTests extends ESTestCase { ExtractedField field = new ScriptField("a_keyword"); - assertThat(field.value(hit), equalTo(new String[] { "bar" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); assertThat(field.getName(), equalTo("a_keyword")); assertThat(field.getSearchField(), equalTo("a_keyword")); assertThat(field.getTypes().isEmpty(), is(true)); @@ -40,7 +40,7 @@ public class ScriptFieldTests extends ESTestCase { ExtractedField field = new ScriptField("array"); - assertThat(field.value(hit), equalTo(new String[] { "a", "b" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "a", "b" })); assertThat(field.getName(), equalTo("array")); assertThat(field.getSearchField(), equalTo("array")); assertThat(field.getTypes().isEmpty(), is(true)); @@ -52,7 +52,7 @@ public class ScriptFieldTests extends ESTestCase { expectThrows(UnsupportedOperationException.class, () -> field.newFromSource()); ExtractedField missing = new DocValueField("missing", Collections.singleton("keyword")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testMissing() { @@ -60,6 +60,6 @@ public class ScriptFieldTests extends ESTestCase { ExtractedField missing = new ScriptField("missing"); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java index 891033e76dfa..22fbc779ddb0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java @@ -24,7 +24,7 @@ public class SourceFieldTests extends ESTestCase { ExtractedField field = new SourceField("single", Collections.singleton("text")); - assertThat(field.value(hit), equalTo(new String[] { "bar" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); assertThat(field.getName(), equalTo("single")); assertThat(field.getSearchField(), equalTo("single")); assertThat(field.getTypes(), contains("text")); @@ -42,7 +42,7 @@ public class SourceFieldTests extends ESTestCase { ExtractedField field = new SourceField("array", Collections.singleton("text")); - assertThat(field.value(hit), equalTo(new String[] { "a", "b" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "a", "b" })); assertThat(field.getName(), equalTo("array")); assertThat(field.getSearchField(), equalTo("array")); assertThat(field.getTypes(), contains("text")); @@ -60,7 +60,7 @@ public class SourceFieldTests extends ESTestCase { ExtractedField missing = new SourceField("missing", Collections.singleton("text")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testValueGivenNested() { @@ -69,7 +69,7 @@ public class SourceFieldTests extends ESTestCase { ExtractedField nested = new SourceField("level_1.level_2.foo", Collections.singleton("text")); - assertThat(nested.value(hit), equalTo(new String[] { "bar" })); + assertThat(nested.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); } public void testValueGivenNestedArray() { @@ -78,6 +78,6 @@ public class SourceFieldTests extends ESTestCase { ExtractedField nested = new SourceField("level_1.level_2.foo", Collections.singleton("text")); - assertThat(nested.value(hit), equalTo(new String[] { "bar" })); + assertThat(nested.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java index 79cf90498cd8..987756b7e73c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java @@ -29,7 +29,7 @@ public class TimeFieldTests extends ESTestCase { ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE); - assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis })); assertThat(timeField.getName(), equalTo("time")); assertThat(timeField.getSearchField(), equalTo("time")); assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos")); @@ -51,7 +51,7 @@ public class TimeFieldTests extends ESTestCase { ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE); - assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis })); assertThat(timeField.getName(), equalTo("time")); assertThat(timeField.getSearchField(), equalTo("time")); assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos")); @@ -69,7 +69,7 @@ public class TimeFieldTests extends ESTestCase { ExtractedField timeField = new 
TimeField("time", ExtractedField.Method.SCRIPT_FIELD); - assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis })); assertThat(timeField.getName(), equalTo("time")); assertThat(timeField.getSearchField(), equalTo("time")); assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos")); @@ -87,7 +87,7 @@ public class TimeFieldTests extends ESTestCase { final ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE); assertThat( - expectThrows(IllegalStateException.class, () -> timeField.value(hit)).getMessage(), + expectThrows(IllegalStateException.class, () -> timeField.value(hit, new SourceSupplier(hit))).getMessage(), startsWith("Unexpected value for a time field") ); } diff --git a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index 7ddaa53a5991..a0679c90e0f9 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -166,19 +166,18 @@ public class MonitoringIT extends ESSingleNodeTestCase { final SearchHits hits = response.getHits(); assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat( - "Monitoring documents must have the same timestamp", - Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), - equalTo(1L) - ); - assertThat( - "Monitoring documents must have the same source_node timestamp", - Arrays.stream(hits.getHits()) - .map(hit -> extractValue("source_node.timestamp", hit.getSourceAsMap())) - .distinct() - .count(), - equalTo(1L) - ); + Map sourceHit = hits.getHits()[0].getSourceAsMap(); + Object ts = extractValue("timestamp", sourceHit); + Object sn_ts = extractValue("source_node.timestamp", sourceHit); + for (int i = 1; i < hits.getHits().length; i++) { + sourceHit = hits.getHits()[i].getSourceAsMap(); + assertThat("Monitoring documents must have the same timestamp", extractValue("timestamp", sourceHit), equalTo(ts)); + assertThat( + "Monitoring documents must have the same source_node timestamp", + extractValue("source_node.timestamp", sourceHit), + equalTo(sn_ts) + ); + } for (final SearchHit hit : hits.getHits()) { assertMonitoringDoc(toMap(hit), system, interval); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index f8ac5f9032fe..d080355495a8 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -31,6 +31,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; @@ -267,15 +268,16 @@ public class LocalExporterResourceIntegTests extends LocalExporterIntegTestCase Set watchIds = 
new HashSet<>(Arrays.asList(ClusterAlertsUtil.WATCH_IDS)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { for (SearchHit hit : response.getHits().getHits()) { - String watchId = ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap()); + Map source = hit.getSourceAsMap(); + String watchId = ObjectPath.eval("metadata.xpack.watch", source); assertNotNull("Missing watch ID", watchId); assertTrue("found unexpected watch id", watchIds.contains(watchId)); - String version = ObjectPath.eval("metadata.xpack.version_created", hit.getSourceAsMap()); + String version = ObjectPath.eval("metadata.xpack.version_created", source); assertNotNull("Missing version from returned watch [" + watchId + "]", version); assertTrue(Version.fromId(Integer.parseInt(version)).onOrAfter(Version.fromId(ClusterAlertsUtil.LAST_UPDATED_VERSION))); - String uuid = ObjectPath.eval("metadata.xpack.cluster_uuid", hit.getSourceAsMap()); + String uuid = ObjectPath.eval("metadata.xpack.cluster_uuid", source); assertNotNull("Missing cluster uuid", uuid); assertEquals(clusterUUID, uuid); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java index f051289d6d7c..234aeeeb6e82 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java @@ -119,9 +119,10 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 1); assertSearchHits(response, "1"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("id").toString(), equalTo("1")); } ); @@ -131,9 +132,10 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 1); assertSearchHits(response, "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("id").toString(), equalTo("2")); } ); @@ -197,8 +199,9 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 1); assertSearchHits(response, "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field1").toString(), equalTo("value2")); 
} ); @@ -228,9 +231,10 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1"), equalTo("value1")); + assertThat(source.get("id"), equalTo("1")); } ); assertResponse( @@ -239,9 +243,10 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field2"), equalTo("value2")); + assertThat(source.get("id"), equalTo("2")); } ); @@ -254,8 +259,9 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("id"), equalTo("2")); } ); @@ -267,13 +273,15 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); + Map source0 = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), equalTo(2)); + assertThat(source0.get("field1"), equalTo("value1")); + assertThat(source0.get("id"), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("field2"), equalTo("value2")); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("id"), equalTo("2")); + Map source1 = response.getHits().getAt(1).getSourceAsMap(); + assertThat(source1.size(), equalTo(2)); + assertThat(source1.get("field2"), equalTo("value2")); + assertThat(source1.get("id"), equalTo("2")); } ); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index 12b75c787d6e..77594e3ae0b1 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -475,15 +475,17 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { response -> { assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("id"), is(1)); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[1].isFailure()); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("id"), is(1)); } ); } @@ -496,15 +498,17 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { response -> { assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("id"), is(2)); } ); } @@ -523,21 +527,25 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { response -> { assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("id"), is(1)); + source0 = response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("id"), is(1)); + source1 = response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap(); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("id"), is(2)); } ); } @@ -1266,8 +1274,9 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { do { assertNoFailures(response); assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); if (response.getScrollId() == null) { break; @@ -1326,8 +1335,9 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); } } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(response.pointInTimeId())).actionGet(); diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 6c7ba15b773b..fadabb4e8fcb 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -955,13 +955,15 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source0.size(), is(1)); + assertThat(source0.get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source1.size(), is(1)); + assertThat(source1.get("field1"), is("value1")); } ); } @@ -974,13 +976,15 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source0.size(), is(1)); + assertThat(source0.get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source1.size(), is(1)); + assertThat(source1.get("field2"), is("value2")); } ); } @@ -992,15 +996,17 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); 
assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); } ); } @@ -1016,7 +1022,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); + assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); } @@ -1028,17 +1034,19 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source0.size(), is(3)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("field3"), is("value3")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); -
assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source1.size(), is(3)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("field3"), is("value3")); } ); } @@ -1050,17 +1058,19 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source0.size(), is(3)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("field3"), is("value3")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source1.size(), is(3)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("field3"), is("value3")); } ); } @@ -1072,17 +1082,19 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source0.size(), is(3)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("field3"), is("value3")); 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source1.size(), is(3)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("field3"), is("value3")); } ); } @@ -1094,15 +1106,17 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); } ); } @@ -1134,8 +1148,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { do { assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); if (response.getScrollId() == null) { break; @@ -1191,10 +1206,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + 
assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); } ); } @@ -1221,9 +1237,10 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .prepareSearch("test") .setQuery(constantScoreQuery(termQuery("field1", "value1"))), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); } ); assertHitCountAndNoFailures( @@ -1238,11 +1255,12 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(multipleFieldsUser, USERS_PASSWD)) ).prepareSearch("test").setQuery(constantScoreQuery(termQuery("field1", "value1"))), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source.size(), is(3)); + assertThat(source.get("field1"), is("value1")); + assertThat(source.get("field2"), is("value2")); + assertThat(source.get("field3"), is("value3")); } ); } @@ -1311,8 +1329,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .get(); assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = user1SearchResponse.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); scrolledDocsUser1++; } else { user1SearchResponse.decRef(); @@ -1322,8 +1341,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = user1SearchResponse.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); scrolledDocsUser1++; } else { assertThat(user1SearchResponse.getHits().getHits().length, is(0)); @@ -1555,8 +1575,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field1").toString(), equalTo("value1")); } ); @@ -1565,8 
+1586,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); @@ -1575,9 +1597,10 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); @@ -1593,10 +1616,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("field3").toString(), equalTo("value3")); } ); @@ -1605,10 +1629,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("field3").toString(), equalTo("value3")); } ); @@ -1617,10 +1642,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) .prepareSearch("test"), response -> { - 
assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("field3").toString(), equalTo("value3")); } ); @@ -1629,9 +1655,10 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); } @@ -2127,9 +2154,10 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(matchQuery("field1", "value1")), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); @@ -2138,10 +2166,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .prepareSearch("test") .setQuery(matchQuery("field2", "value2")), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 4f7ba7808b82..eabd2e2556f8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -1769,7 +1769,7 @@ public class TokenService { client, request, new ContextPreservingActionListener<>(supplier, listener), - (SearchHit hit) -> filterAndParseHit(hit, filter) + (SearchHit hit) -> 
filterAndParseHit(hit.getSourceAsMap(), filter) ); }, listener::onFailure)); } @@ -1913,9 +1913,8 @@ public class TokenService { }; } - private static Tuple filterAndParseHit(SearchHit hit, @Nullable Predicate> filter) + private static Tuple filterAndParseHit(Map source, @Nullable Predicate> filter) throws IllegalStateException, DateTimeException { - final Map source = hit.getSourceAsMap(); if (source == null) { throw new IllegalStateException("token document did not have source but source should have been fetched"); } @@ -2737,7 +2736,6 @@ public class TokenService { } record Doc(String id, Map sourceAsMap, long seqNo, long primaryTerm) { - Doc(SearchHit searchHit) { this(searchHit.getId(), searchHit.getSourceAsMap(), searchHit.getSeqNo(), searchHit.getPrimaryTerm()); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java index 8a66ceeed039..2b244fef515d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java @@ -10,8 +10,6 @@ package org.elasticsearch.xpack.transform.transforms.latest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -93,11 +91,7 @@ public class Latest extends AbstractCompositeAggFunction { ); } - // We don't use #getSourceAsMap here because we don't want to cache the object as we - // only need it here. More over we are modifying the map of maps so we will be holding - // the wrong map. 
- BytesReference bytes = topHits.getHits().getHits()[0].getSourceRef(); - Map document = XContentHelper.convertToMap(bytes, true).v2(); + Map document = topHits.getHits().getHits()[0].getSourceAsMap(); // generator to create unique but deterministic document ids, so we // - do not create duplicates if we re-run after failure diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java index f3648580691c..9844b1eac6d4 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import java.util.Arrays; +import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; @@ -56,14 +57,15 @@ public class RejectedExecutionTests extends AbstractWatcherIntegrationTestCase { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(2L)); + assertThat( "Did not find watcher history for rejected watch", - Arrays.stream(searchResponse.getHits().getHits()) - .anyMatch( - hit -> hit.getSourceAsMap() != null - && hit.getSourceAsMap().get("messages") != null - && hit.getSourceAsMap().get("messages").toString().contains("due to thread pool capacity") - ), + Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> { + Map source = hit.getSourceAsMap(); + return source != null + && source.get("messages") != null + && source.get("messages").toString().contains("due to thread pool capacity"); + }), equalTo(true) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 2ec6541275d0..19d5bfa8ca67 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -140,14 +140,16 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas assertNoFailuresAndResponse(prepareSearch("output1"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - 
assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key3").toString(), equalTo("20")); }); } @@ -224,14 +226,16 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas assertNoFailuresAndResponse(prepareSearch("output1"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key4").toString(), equalTo("30")); }); } From f27f74666f1486dd2802f0d3ceaf3b208659f261 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Thu, 23 Jan 2025 10:21:52 -0700 Subject: [PATCH 22/29] ES|QL async queries: Partial result on demand (#118122) Add capability to stop async query on demand The theory: - User initiates async search request - User sends the stop request (POST _query/async/<id>/stop) - If the async query is finished by that time, it's like a regular async get - If it's not finished, the sinks are closed and the request is forcefully finished
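For example (an illustrative flow; the index, query, and abbreviated query ID below are made up): POST /_query/async with {"query": "FROM logs-* | STATS COUNT(*) BY host"} returns {"id": "FmNJ...", "is_running": true}; a later POST /_query/async/FmNJ.../stop returns the final response with "is_partial": true and whatever columns and values had been computed by that point.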
--- docs/changelog/118122.yaml | 5 + .../esql/esql-across-clusters.asciidoc | 9 +- docs/reference/esql/esql-apis.asciidoc | 3 + .../esql/esql-async-query-api.asciidoc | 7 + .../esql/esql-async-query-stop-api.asciidoc | 49 ++ docs/reference/esql/esql-rest.asciidoc | 1 + .../esql/multivalued-fields.asciidoc | 7 + .../api/esql.async_query_stop.json | 31 ++ .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/heap_attack/HeapAttackIT.java | 25 +- .../test/rest/ESRestTestCase.java | 47 ++ .../xpack/core/async/AsyncStopRequest.java | 67 +++ .../xpack/core/esql/EsqlAsyncActionNames.java | 1 + .../operator/exchange/ExchangeService.java | 27 + .../xpack/esql/EsqlAsyncSecurityIT.java | 6 +- .../xpack/esql/EsqlSecurityIT.java | 42 +- .../xpack/esql/ccq/MultiClustersIT.java | 112 +---- .../xpack/esql/qa/single_node/RestEsqlIT.java | 69 +-- .../esql/qa/rest/FieldExtractorTestCase.java | 311 ++++-------- .../rest/RequestIndexFilteringTestCase.java | 84 ++-- .../esql/qa/rest/RestEnrichTestCase.java | 17 +- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 73 +-- ...AbstractCrossClustersUsageTelemetryIT.java | 2 +- .../esql/action/AbstractPauseFieldPlugin.java | 6 +- .../action/CrossClusterAsyncEnrichStopIT.java | 156 ++++++ .../esql/action/CrossClusterAsyncQueryIT.java | 468 +++++++++++++----- .../action/CrossClustersUsageTelemetryIT.java | 96 ++++ .../xpack/esql/action/EsqlActionTaskIT.java | 3 +- .../xpack/esql/action/EsqlAsyncTestUtils.java | 129 +++++ .../esql/action/FailingPauseFieldPlugin.java | 42 ++ .../esql/action/EsqlAsyncStopAction.java | 22 + .../xpack/esql/action/EsqlExecutionInfo.java | 33 +- .../xpack/esql/action/EsqlQueryResponse.java | 1 + .../esql/action/RestEsqlStopAsyncAction.java | 46 ++ .../xpack/esql/plugin/ComputeService.java | 68 +-- .../xpack/esql/plugin/EsqlPlugin.java | 6 +- .../TransportEsqlAsyncGetResultsAction.java | 2 +- .../plugin/TransportEsqlAsyncStopAction.java | 139 ++++++ .../esql/plugin/TransportEsqlQueryAction.java | 36 +- .../esql/action/EsqlQueryResponseTests.java | 48 +- .../CrossClusterEsqlRCS1MissingIndicesIT.java | 1 + ...ssClusterEsqlRCS1UnavailableRemotesIT.java | 1 + ...ssClusterEsqlRCS2UnavailableRemotesIT.java | 1 + .../RemoteClusterSecurityEsqlIT.java | 141 +++++- .../xpack/security/operator/Constants.java | 1 + .../xpack/security/authz/RBACEngine.java | 1 + 46 files changed, 1781 insertions(+), 662 deletions(-) create mode 100644 docs/changelog/118122.yaml create mode 100644 docs/reference/esql/esql-async-query-stop-api.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java diff --git a/docs/changelog/118122.yaml b/docs/changelog/118122.yaml new file mode 100644 index 000000000000..ca27cc94a7cb --- /dev/null +++ b/docs/changelog/118122.yaml @@ -0,0 +1,5 @@ +pr: 118122 +summary: "ES|QL: Partial result on demand for async queries" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc index 6decc351bc1c..c12865bad616 100644 --- a/docs/reference/esql/esql-across-clusters.asciidoc +++ b/docs/reference/esql/esql-across-clusters.asciidoc @@ -210,6 +210,7 @@ Which returns: { "is_running": false, "took": 42, <1> + "is_partial": false, <7> "columns" : [ { "name" : "COUNT(http.response.status_code)", @@ -275,8 +276,9 @@ Which returns: <2> This section of counters shows all possible cluster search states and how many cluster searches are currently in that state. The clusters can have one of the following statuses: *running*, *successful* (searches on all shards were successful), *skipped* (the search -failed on a cluster marked with `skip_unavailable`=`true`) or *failed* (the search -failed on a cluster marked with `skip_unavailable`=`false`). +failed on a cluster marked with `skip_unavailable`=`true`), *failed* (the search +failed on a cluster marked with `skip_unavailable`=`false`) or *partial* (the search was +<<esql-async-query-stop-api,stopped>> before finishing). <3> The `_clusters/details` section shows metadata about the search on each cluster. <4> If you included indices from the local cluster you sent the request to in your {ccs}, it is identified as "(local)".
@@ -285,6 +287,8 @@ which clusters have slower response times than others. <6> The shard details for the search on that cluster, including a count of shards that were skipped due to the can-match phase results. Shards are skipped when they cannot have any matching data and therefore are not included in the full ES|QL query. +<7> The `is_partial` field is set to `true` if the search has partial results for any reason, +for example if it was interrupted before finishing using the <<esql-async-query-stop-api,async query stop API>>. The cross-cluster metadata can be used to determine whether any data came back from a cluster. @@ -314,6 +318,7 @@ Which returns: { "is_running": false, "took": 55, + "is_partial": false, "columns": [ ... // not shown ], diff --git a/docs/reference/esql/esql-apis.asciidoc b/docs/reference/esql/esql-apis.asciidoc index 157f4e4357e7..633a202c9dc3 100644 --- a/docs/reference/esql/esql-apis.asciidoc +++ b/docs/reference/esql/esql-apis.asciidoc @@ -17,6 +17,7 @@ overview of {esql} and related tutorials, see <<esql>>. * <<esql-async-query-api>> * <<esql-async-query-get-api>> * <<esql-async-query-delete-api>> +* <<esql-async-query-stop-api>> include::esql-query-api.asciidoc[] @@ -26,3 +27,5 @@ include::esql-async-query-api.asciidoc[] include::esql-async-query-get-api.asciidoc[] include::esql-async-query-delete-api.asciidoc[] + +include::esql-async-query-stop-api.asciidoc[] diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 8cb974cf6773..c194818eb0cc 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -170,3 +170,10 @@ API>> to get the current status and available results for the query. (Boolean) If `true`, the query request is still executing. -- + +`is_partial`:: ++ +-- +(Boolean) +If `true`, the query has partial results - for example, as a result of using the <<esql-async-query-stop-api,async query stop API>>. +-- diff --git a/docs/reference/esql/esql-async-query-stop-api.asciidoc b/docs/reference/esql/esql-async-query-stop-api.asciidoc new file mode 100644 index 000000000000..dba5282d224e --- /dev/null +++ b/docs/reference/esql/esql-async-query-stop-api.asciidoc @@ -0,0 +1,49 @@ +[[esql-async-query-stop-api]] +=== {esql} async query stop API +++++ +{esql} async query stop API +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + +The <<esql,{esql}>> async query stop API is used to manually stop an async query. Once the stop command is issued, +the query stops processing new data and returns the results that have been already processed. Note that due to the pipelined +nature of {esql} queries, the stop operation is not immediate and may take time to return results. + +The results are returned in the same format as the +<<esql-async-query-get-api,{esql} async query get API>>. +If the query has been finished by the time the stop command is issued, the results are returned immediately. + +If the query processing has not finished by the time the stop command is issued, the response will have the `is_partial` +field set to `true`. + +[source,console] +---- +POST /_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=/stop +---- +// TEST[skip: no access to query ID]
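+ +For example, stopping a query that was still running could return a response like this (the column, value, and timing data below are illustrative only): + +[source,console-result] +---- +{ + "took": 1845, + "is_partial": true, + "is_running": false, + "columns": [ + {"name": "COUNT(*)", "type": "long"} + ], + "values": [[4521]] +} +---- +// TESTRESPONSE[skip: illustrative response]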
+ +[[esql-async-query-stop-api-request]] +==== {api-request-title} + +`POST /_query/async/<id>/stop` + +[[esql-async-query-stop-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, only the authenticated user that submitted the original query request +can stop the query. + +[[esql-async-query-stop-api-path-params]] +==== {api-path-parms-title} + +`<id>`:: +(Required, string) +Identifier for the query to stop. ++ +A query ID is provided in the <<esql-async-query-api,{esql} async query API>>'s +response for a query that does not complete in the awaited time. diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index c353185e2895..ccdd3227df9e 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -193,6 +193,7 @@ Which returns: ---- { "took": 28, + "is_partial": false, "columns": [ {"name": "author", "type": "text"}, {"name": "name", "type": "text"}, diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 562ea2a2e6b4..00d9df04a0bc 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -27,6 +27,7 @@ Multivalued fields come back as a JSON array: ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"} @@ -78,6 +79,7 @@ And {esql} sees that removal: ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "keyword"} @@ -122,6 +124,7 @@ And {esql} also sees that: ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"} @@ -165,6 +168,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "keyword"} @@ -198,6 +202,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, ], @@ -241,6 +246,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"}, @@ -278,6 +284,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"}, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json new file mode 100644 index 000000000000..6fbdefef8b68 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json @@ -0,0 +1,31 @@ +{ + "esql.async_query_stop": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-stop-api.html", + "description": "Stops a previously submitted async query request given its ID and collects the results." + }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_query/async/{id}/stop", + "methods": [ + "POST" + ], + "parts": { + "id": { + "type": "string", + "description": "The async query ID" + } + } + } + ] + } + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index a50f888927d4..6fb4703f5153 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,6 +163,7 @@ public class TransportVersions { public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0); public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0); public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0); + public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0); /* * STOP! READ THIS FIRST!
No, really, diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 4af8681bb939..2e68c094492f 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -86,7 +86,7 @@ public class HeapAttackIT extends ESRestTestCase { public void testSortByManyLongsSuccess() throws IOException { initManyLongs(); Response response = sortByManyLongs(500); - Map map = responseAsMap(response); + Map map = responseAsMap(response); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "b").entry("type", "long")); ListMatcher values = matchesList(); @@ -95,8 +95,7 @@ public class HeapAttackIT extends ESRestTestCase { values = values.item(List.of(0, b)); } } - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } /** @@ -236,11 +235,10 @@ public class HeapAttackIT extends ESRestTestCase { public void testGroupOnSomeLongs() throws IOException { initManyLongs(); Response resp = groupOnManyLongs(200); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(a)").entry("type", "long")); ListMatcher values = matchesList().item(List.of(9)); - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } /** @@ -249,11 +247,10 @@ public class HeapAttackIT extends ESRestTestCase { public void testGroupOnManyLongs() throws IOException { initManyLongs(); Response resp = groupOnManyLongs(5000); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(a)").entry("type", "long")); ListMatcher values = matchesList().item(List.of(9)); - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } private Response groupOnManyLongs(int count) throws IOException { @@ -279,12 +276,11 @@ public class HeapAttackIT extends ESRestTestCase { public void testSmallConcat() throws IOException { initSingleDocIndex(); Response resp = concat(2); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "str").entry("type", "keyword")); ListMatcher values = matchesList().item(List.of(1, "1".repeat(100))); - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } public void testHugeConcat() throws IOException { @@ -465,7 +461,7 @@ public class HeapAttackIT extends ESRestTestCase { public void testManyEval() throws IOException { initManyLongs(); Response resp = manyEval(1); - Map map = responseAsMap(resp); + Map map = 
responseAsMap(resp); ListMatcher columns = matchesList(); columns = columns.item(matchesMap().entry("name", "a").entry("type", "long")); columns = columns.item(matchesMap().entry("name", "b").entry("type", "long")); @@ -475,8 +471,7 @@ public class HeapAttackIT extends ESRestTestCase { for (int i = 0; i < 20; i++) { columns = columns.item(matchesMap().entry("name", "i0" + i).entry("type", "long")); } - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", hasSize(10_000)).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, hasSize(10_000)); } public void testTooManyEval() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index b23ad1e9c548..5f1907b07a3a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -76,6 +76,7 @@ import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -84,6 +85,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.After; import org.junit.AfterClass; @@ -133,12 +135,15 @@ import static java.util.Collections.unmodifiableList; import static org.elasticsearch.client.RestClient.IGNORE_RESPONSE_CODES_PARAM; import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.test.rest.TestFeatureService.ALL_FEATURES; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.notNullValue; @@ -2570,4 +2575,46 @@ public abstract class ESRestTestCase extends ESTestCase { addXContentBody(request, body); return request; } + + protected static MapMatcher getResultMatcher(boolean includeMetadata, boolean includePartial) { + MapMatcher mapMatcher = matchesMap(); + if (includeMetadata) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + // Older version may not have is_partial + if (includePartial) { + mapMatcher = mapMatcher.entry("is_partial", false); + } + return mapMatcher; + } + + /** + * Create empty result matcher from result, taking into account all metadata items. 
+ */ + protected static MapMatcher getResultMatcher(Map result) { + return getResultMatcher(result.containsKey("took"), result.containsKey("is_partial")); + } + + /** + * Match result columns and values, with default matchers for metadata. + */ + protected static void assertResultMap(Map result, Matcher columnMatcher, Matcher valuesMatcher) { + assertMap(result, getResultMatcher(result).entry("columns", columnMatcher).entry("values", valuesMatcher)); + } + + protected static void assertResultMap(Map result, Object columnMatcher, Object valuesMatcher) { + assertMap(result, getResultMatcher(result).entry("columns", columnMatcher).entry("values", valuesMatcher)); + } + + /** + * Match result columns and values, with default matchers for metadata. + */ + protected static void assertResultMap( + Map result, + MapMatcher mapMatcher, + Matcher columnMatcher, + Matcher valuesMatcher + ) { + assertMap(result, mapMatcher.entry("columns", columnMatcher).entry("values", valuesMatcher)); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java new file mode 100644 index 000000000000..7113cbca279d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.core.async; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request for TransportEsqlAsyncStopAction action. + */ +public class AsyncStopRequest extends ActionRequest { + private final String id; + + /** + * Creates a new request + * + * @param id The id of the search progress request. + */ + public AsyncStopRequest(String id) { + this.id = id; + } + + public AsyncStopRequest(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + /** + * Returns the id of the async search. 
+ */ + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AsyncStopRequest request = (AsyncStopRequest) o; + return Objects.equals(id, request.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java index 81ab54fc2db5..7555db8fc85e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java @@ -12,4 +12,5 @@ package org.elasticsearch.xpack.core.esql; */ public class EsqlAsyncActionNames { public static final String ESQL_ASYNC_GET_RESULT_ACTION_NAME = "indices:data/read/esql/async/get"; + public static final String ESQL_ASYNC_STOP_ACTION_NAME = "indices:data/read/esql/async/stop"; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 62cc4daf5fde..d1a5d1757bc9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -76,6 +76,7 @@ public final class ExchangeService extends AbstractLifecycleComponent { private final BlockFactory blockFactory; private final Map sinks = ConcurrentCollections.newConcurrentMap(); + private final Map exchangeSources = ConcurrentCollections.newConcurrentMap(); public ExchangeService(Settings settings, ThreadPool threadPool, String executorName, BlockFactory blockFactory) { this.threadPool = threadPool; @@ -172,6 +173,32 @@ public final class ExchangeService extends AbstractLifecycleComponent { ); } + /** + * Remember the exchange source handler for the given session ID. + * This can be used for async/stop requests. + */ + public void addExchangeSourceHandler(String sessionId, ExchangeSourceHandler sourceHandler) { + exchangeSources.put(sessionId, sourceHandler); + } + + public ExchangeSourceHandler removeExchangeSourceHandler(String sessionId) { + return exchangeSources.remove(sessionId); + } + + /** + * Finishes the session early, i.e., before all sources are finished. + * It is called by async/stop API and should be called on the node that coordinates the async request. + * It will close all sources and return the results - unlike cancel, this does not discard the results. 
+ */ + public void finishSessionEarly(String sessionId, ActionListener listener) { + ExchangeSourceHandler exchangeSource = removeExchangeSourceHandler(sessionId); + if (exchangeSource != null) { + exchangeSource.finishEarly(false, listener); + } else { + listener.onResponse(null); + } + } + private static class OpenExchangeRequest extends TransportRequest { private final String sessionId; private final int exchangeBuffer; diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java index b45ef4591498..0a6f73ee648d 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.Locale; +import java.util.Map; import static org.elasticsearch.core.TimeValue.timeValueNanos; import static org.hamcrest.Matchers.allOf; @@ -50,8 +51,9 @@ public class EsqlAsyncSecurityIT extends EsqlSecurityIT { } @Override - protected MapMatcher responseMatcher() { - return super.responseMatcher().entry("is_running", equalTo(false)).entry("id", allOf(notNullValue(), instanceOf(String.class))); + protected MapMatcher responseMatcher(Map result) { + return super.responseMatcher(result).entry("is_running", equalTo(false)) + .entry("id", allOf(notNullValue(), instanceOf(String.class))); } @Override diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index d8e3b0cccf39..7d96c400cb65 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -165,8 +165,8 @@ public class EsqlSecurityIT extends ESRestTestCase { } } - protected MapMatcher responseMatcher() { - return matchesMap(); + protected MapMatcher responseMatcher(Map result) { + return getResultMatcher(result); } public void testAllowedIndices() throws Exception { @@ -182,10 +182,7 @@ public class EsqlSecurityIT extends ESRestTestCase { Response resp = runESQLCommand(user, "from index-user1 | stats sum=sum(value)"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher mapMatcher = responseMatcher(); - if (responseMap.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) responseMap.get("took")).intValue()); - } + MapMatcher mapMatcher = responseMatcher(responseMap); MapMatcher matcher = mapMatcher.entry("columns", List.of(Map.of("name", "sum", "type", "double"))) .entry("values", List.of(List.of(43.0d))); assertMap(responseMap, matcher); @@ -195,10 +192,7 @@ public class EsqlSecurityIT extends ESRestTestCase { Response resp = runESQLCommand(user, "from index-user2 | stats sum=sum(value)"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher mapMatcher = responseMatcher(); - if (responseMap.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) responseMap.get("took")).intValue()); - } + MapMatcher mapMatcher = responseMatcher(responseMap); MapMatcher matcher = mapMatcher.entry("columns", List.of(Map.of("name", "sum", "type", 
"double"))) .entry("values", List.of(List.of(72.0d))); assertMap(responseMap, matcher); @@ -208,10 +202,7 @@ public class EsqlSecurityIT extends ESRestTestCase { Response resp = runESQLCommand("metadata1_read2", "from " + index + " | stats sum=sum(value)"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher mapMatcher = responseMatcher(); - if (responseMap.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) responseMap.get("took")).intValue()); - } + MapMatcher mapMatcher = responseMatcher(responseMap); MapMatcher matcher = mapMatcher.entry("columns", List.of(Map.of("name", "sum", "type", "double"))) .entry("values", List.of(List.of(72.0d))); assertMap(responseMap, matcher); @@ -226,9 +217,10 @@ public class EsqlSecurityIT extends ESRestTestCase { ); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher matcher = responseMatcher().entry("took", ((Integer) responseMap.get("took")).intValue()) - .entry("columns", List.of(Map.of("name", "sum", "type", "double"), Map.of("name", "index", "type", "keyword"))) - .entry("values", List.of(List.of(72.0d, "index-user2"))); + MapMatcher matcher = responseMatcher(responseMap).entry( + "columns", + List.of(Map.of("name", "sum", "type", "double"), Map.of("name", "index", "type", "keyword")) + ).entry("values", List.of(List.of(72.0d, "index-user2"))); assertMap(responseMap, matcher); } } @@ -238,16 +230,14 @@ public class EsqlSecurityIT extends ESRestTestCase { Response resp = runESQLCommand("alias_user1", "from " + index + " METADATA _index" + "| KEEP _index, org, value | LIMIT 10"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher matcher = responseMatcher().entry("took", ((Integer) responseMap.get("took")).intValue()) - .entry( - "columns", - List.of( - Map.of("name", "_index", "type", "keyword"), - Map.of("name", "org", "type", "keyword"), - Map.of("name", "value", "type", "double") - ) + MapMatcher matcher = responseMatcher(responseMap).entry( + "columns", + List.of( + Map.of("name", "_index", "type", "keyword"), + Map.of("name", "org", "type", "keyword"), + Map.of("name", "value", "type", "double") ) - .entry("values", List.of(List.of("index-user1", "sales", 31.0d))); + ).entry("values", List.of(List.of("index-user1", "sales", 31.0d))); assertMap(responseMap, matcher); } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 6e43d40a3005..b838d8ae284a 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -37,12 +37,8 @@ import java.util.stream.IntStream; import java.util.stream.Stream; import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.ccq.Clusters.REMOTE_CLUSTER_NAME; -import static org.hamcrest.Matchers.any; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.*; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class MultiClustersIT extends ESRestTestCase { @@ -159,6 +155,17 @@ public class MultiClustersIT extends 
ESRestTestCase { } } + private void assertResultMap(boolean includeCCSMetadata, Map result, C columns, V values, boolean remoteOnly) { + MapMatcher mapMatcher = getResultMatcher(ccsMetadataAvailable(), result.containsKey("is_partial")); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, remoteOnly); + } + } + public void testCount() throws Exception { { boolean includeCCSMetadata = includeCCSMetadata(); @@ -166,17 +173,7 @@ public class MultiClustersIT extends ESRestTestCase { var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(localDocs.size() + remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, false); - } + assertResultMap(includeCCSMetadata, result, columns, values, false); } { boolean includeCCSMetadata = includeCCSMetadata(); @@ -184,17 +181,7 @@ public class MultiClustersIT extends ESRestTestCase { var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, true); - } + assertResultMap(includeCCSMetadata, result, columns, values, true); } } @@ -207,17 +194,7 @@ public class MultiClustersIT extends ESRestTestCase { var values = List.of(List.of(Math.toIntExact(sum))); // check all sections of map except _cluster/details - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, false); - } + assertResultMap(includeCCSMetadata, result, columns, values, false); } { boolean includeCCSMetadata = includeCCSMetadata(); @@ -226,17 +203,7 @@ public class MultiClustersIT extends ESRestTestCase { long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); var values = List.of(List.of(Math.toIntExact(sum))); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, true); - } + assertResultMap(includeCCSMetadata, result, columns, values, true); } { assumeTrue("requires ccs metadata", ccsMetadataAvailable()); @@ -245,15 +212,7 @@ public class MultiClustersIT extends ESRestTestCase { long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); var values = 
List.of(List.of(Math.toIntExact(sum))); - MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, true); + assertResultMap(true, result, columns, values, true); } } @@ -325,17 +284,7 @@ public class MultiClustersIT extends ESRestTestCase { .map(e -> List.of(Math.toIntExact(e.getValue()), e.getKey())) .toList(); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, false); - } + assertResultMap(includeCCSMetadata, result, columns, values, false); } { boolean includeCCSMetadata = includeCCSMetadata(); @@ -353,17 +302,7 @@ public class MultiClustersIT extends ESRestTestCase { .toList(); // check all sections of map except _clusters/details - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, true); - } + assertResultMap(includeCCSMetadata, result, columns, values, true); } } @@ -378,11 +317,8 @@ public class MultiClustersIT extends ESRestTestCase { Map result = run("FROM " + indexPattern + " | STATS c = COUNT(*)", false); var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(localDocs.size() + remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + + assertResultMap(false, result, columns, values, false); } { String indexPattern = randomFrom("*:test-remote-index", "*:test-remote-*", "*:test-*"); @@ -390,11 +326,7 @@ public class MultiClustersIT extends ESRestTestCase { var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + assertResultMap(false, result, columns, values, false); } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index cae9e1ba8eb6..601ce819224b 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static 
org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -80,12 +79,10 @@ public class RestEsqlIT extends RestEsqlTestCase { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } Map result = runEsql(builder); - assertEquals(3, result.size()); + Map colA = Map.of("name", "avg(value)", "type", "double"); - assertEquals(List.of(colA), result.get("columns")); - assertEquals(List.of(List.of(499.5d)), result.get("values")); + assertResultMap(result, List.of(colA), List.of(List.of(499.5d))); assertTrue(result.containsKey("took")); - assertThat(((Number) result.get("took")).longValue(), greaterThanOrEqualTo(0L)); } public void testInvalidPragma() throws IOException { @@ -118,11 +115,8 @@ public class RestEsqlIT extends RestEsqlTestCase { setLoggingLevel("INFO"); RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_NOT_LOG_ME = 1"); Map result = runEsql(builder); - assertEquals(3, result.size()); - assertThat(((Integer) result.get("took")).intValue(), greaterThanOrEqualTo(0)); Map colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer"); - assertEquals(List.of(colA), result.get("columns")); - assertEquals(List.of(List.of(1)), result.get("values")); + assertResultMap(result, List.of(colA), List.of(List.of(1))); for (int i = 0; i < cluster.getNumNodes(); i++) { try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { Streams.readAllLines(log, line -> assertThat(line, not(containsString("DO_NOT_LOG_ME")))); @@ -138,11 +132,8 @@ public class RestEsqlIT extends RestEsqlTestCase { setLoggingLevel("DEBUG"); RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_LOG_ME = 1"); Map result = runEsql(builder); - assertEquals(3, result.size()); - assertThat(((Integer) result.get("took")).intValue(), greaterThanOrEqualTo(0)); Map colA = Map.of("name", "DO_LOG_ME", "type", "integer"); - assertEquals(List.of(colA), result.get("columns")); - assertEquals(List.of(List.of(1)), result.get("values")); + assertResultMap(result, List.of(colA), List.of(List.of(1))); boolean[] found = new boolean[] { false }; for (int i = 0; i < cluster.getNumNodes(); i++) { try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { @@ -289,13 +280,11 @@ public class RestEsqlIT extends RestEsqlTestCase { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } Map result = runEsql(builder); - MapMatcher mapMatcher = matchesMap(); - assertMap( + assertResultMap( result, - mapMatcher.entry("columns", matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double"))) - .entry("values", List.of(List.of(499.5d))) - .entry("profile", matchesMap().entry("drivers", instanceOf(List.class))) - .entry("took", greaterThanOrEqualTo(0)) + getResultMatcher(result).entry("profile", matchesMap().entry("drivers", instanceOf(List.class))), + matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double")), + equalTo(List.of(List.of(499.5d))) ); List> signatures = new ArrayList<>(); @@ -373,24 +362,19 @@ public class RestEsqlIT extends RestEsqlTestCase { } Map result = runEsql(builder); - MapMatcher mapMatcher = matchesMap(); ListMatcher values = matchesList(); for (int i = 0; i < 1000; i++) { values = values.item(matchesList().item("2020-12-12T00:00:00.000Z").item("value" + i).item("value" + i).item(i).item(499.5)); } - assertMap( + assertResultMap( result, - mapMatcher.entry( - "columns", - 
matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) - .item(matchesMap().entry("name", "test").entry("type", "text")) - .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword")) - .item(matchesMap().entry("name", "value").entry("type", "long")) - .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")) - ) - .entry("values", values) - .entry("profile", matchesMap().entry("drivers", instanceOf(List.class))) - .entry("took", greaterThanOrEqualTo(0)) + getResultMatcher(result).entry("profile", matchesMap().entry("drivers", instanceOf(List.class))), + matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) + .item(matchesMap().entry("name", "test").entry("type", "text")) + .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword")) + .item(matchesMap().entry("name", "value").entry("type", "long")) + .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")), + values ); List> signatures = new ArrayList<>(); @@ -484,20 +468,15 @@ public class RestEsqlIT extends RestEsqlTestCase { for (int group2 = 0; group2 < 10; group2++) { expectedValues.add(List.of(1.0, 1, 1, 0, group2)); } - MapMatcher mapMatcher = matchesMap(); - assertMap( + assertResultMap( result, - mapMatcher.entry( - "columns", - matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double")) - .item(matchesMap().entry("name", "MAX(value)").entry("type", "long")) - .item(matchesMap().entry("name", "MIN(value)").entry("type", "long")) - .item(matchesMap().entry("name", "group1").entry("type", "long")) - .item(matchesMap().entry("name", "group2").entry("type", "long")) - ) - .entry("values", expectedValues) - .entry("profile", matchesMap().entry("drivers", instanceOf(List.class))) - .entry("took", greaterThanOrEqualTo(0)) + getResultMatcher(result).entry("profile", matchesMap().entry("drivers", instanceOf(List.class))), + matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double")) + .item(matchesMap().entry("name", "MAX(value)").entry("type", "long")) + .item(matchesMap().entry("name", "MIN(value)").entry("type", "long")) + .item(matchesMap().entry("name", "group1").entry("type", "long")) + .item(matchesMap().entry("name", "group2").entry("type", "long")), + equalTo(expectedValues) ); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index 813354db697e..a320cbfa459d 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -23,7 +23,6 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ListMatcher; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -51,7 +50,6 @@ import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.entityToMap; import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.runEsqlSync; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; -import static 
org.hamcrest.Matchers.greaterThanOrEqualTo; /** * Creates indices with many different mappings and fetches values from them to make sure @@ -304,11 +302,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { {"flattened": {"a": "foo"}}"""); Map result = runEsql("FROM test* | LIMIT 2"); - assertMap( - result, - matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("flattened", "unsupported"))) - .entry("values", List.of(matchesList().item(null))) - ); + assertResultMap(result, List.of(columnInfo("flattened", "unsupported")), List.of(matchesList().item(null))); } public void testEmptyMapping() throws IOException { @@ -322,7 +316,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { // TODO this is broken in main too // Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); - // assertMap( + // assertResultMap( // result, // matchesMap().entry("columns", List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))) // .entry("values", List.of(matchesList().item(null).item(null))) @@ -345,13 +339,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { public void testTextFieldWithKeywordSubfield() throws IOException { String value = randomAlphaOfLength(20); Map result = new Test("text").storeAndDocValues(randomBoolean(), null).sub("raw", keywordTest()).roundTrip(value); - - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.raw", "keyword")) - ).entry("values", List.of(matchesList().item(value).item(value))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.raw", "keyword")), + List.of(matchesList().item(value).item(value)) ); } @@ -372,12 +363,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { int value = randomInt(); Map result = textTest().sub("int", intTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")) - ).entry("values", List.of(matchesList().item(Integer.toString(value)).item(value))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")), + List.of(matchesList().item(Integer.toString(value)).item(value)) ); } @@ -398,12 +387,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { String value = randomAlphaOfLength(5); Map result = textTest().sourceMode(SourceMode.DEFAULT).sub("int", intTest().ignoreMalformed(true)).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")) - ).entry("values", List.of(matchesList().item(value).item(null))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")), + List.of(matchesList().item(value).item(null)) ); } @@ -424,12 +411,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { String value = NetworkAddress.format(randomIp(randomBoolean())); Map result = textTest().sub("ip", ipTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")) - ).entry("values", 
List.of(matchesList().item(value).item(value))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")), + List.of(matchesList().item(value).item(value)) ); } @@ -450,12 +435,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { String value = randomAlphaOfLength(10); Map result = textTest().sourceMode(SourceMode.DEFAULT).sub("ip", ipTest().ignoreMalformed(true)).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")) - ).entry("values", List.of(matchesList().item(value).item(null))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")), + List.of(matchesList().item(value).item(null)) ); } @@ -477,12 +460,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { boolean text = randomBoolean(); Map result = intTest().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(value).item(Integer.toString(value)))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")), + List.of(matchesList().item(value).item(Integer.toString(value))) ); } @@ -504,12 +485,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { boolean text = randomBoolean(); Map result = intTest().forceIgnoreMalformed().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(null).item(value))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")), + List.of(matchesList().item(null).item(value)) ); } @@ -531,12 +510,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { boolean text = randomBoolean(); Map result = ipTest().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(value).item(value))) + List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? "text" : "keyword")), + List.of(matchesList().item(value).item(value)) ); } @@ -558,12 +535,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { boolean text = randomBoolean(); Map result = ipTest().forceIgnoreMalformed().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(null).item(value))) + List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? 
"text" : "keyword")), + List.of(matchesList().item(null).item(value)) ); } @@ -585,12 +560,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { byte value = randomByte(); Map result = intTest().sub("byte", byteTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")) - ).entry("values", List.of(matchesList().item((int) value).item((int) value))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")), + List.of(matchesList().item((int) value).item((int) value)) ); } @@ -614,12 +587,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { .sub("byte", byteTest().ignoreMalformed(true)) .roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")) - ).entry("values", List.of(matchesList().item(value).item(null))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")), + List.of(matchesList().item(value).item(null)) ); } @@ -641,23 +612,13 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { byte value = randomByte(); Map result = byteTest().sub("int", intTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer")) - ).entry("values", List.of(matchesList().item((int) value).item((int) value))) + List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer")), + List.of(matchesList().item((int) value).item((int) value)) ); } - static MapMatcher matchesMapWithOptionalTook(Object tookTimeValue) { - MapMatcher mapMatcher = matchesMap(); - if (tookTimeValue instanceof Number) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - return mapMatcher; - } - /** *
      * "byte_field": {
@@ -676,12 +637,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
         int value = randomValueOtherThanMany((Integer v) -> (Byte.MIN_VALUE <= v) && (v <= Byte.MAX_VALUE), ESTestCase::randomInt);
         Map<String, Object> result = byteTest().forceIgnoreMalformed().sub("int", intTest()).roundTrip(value);
 
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer"))
-            ).entry("values", List.of(matchesList().item(null).item(value)))
+            List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer")),
+            List.of(matchesList().item(null).item(value))
         );
     }
 
@@ -708,11 +667,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             {"f": 1}""");
 
         Map result = runEsql("FROM test*");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("f", "unsupported")))
-                .entry("values", List.of(matchesList().item(null), matchesList().item(null)))
-        );
+        assertResultMap(result, List.of(columnInfo("f", "unsupported")), List.of(matchesList().item(null), matchesList().item(null)));
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT f | LIMIT 3"));
         String err = EntityUtils.toString(e.getResponse().getEntity());
         assertThat(
@@ -746,12 +701,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             {"other": "o2"}""");
 
         Map result = runEsql("FROM test* | SORT file, other");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("file", "keyword"), columnInfo("other", "keyword"))
-            ).entry("values", List.of(matchesList().item("f1").item(null), matchesList().item(null).item("o2")))
+            List.of(columnInfo("file", "keyword"), columnInfo("other", "keyword")),
+            List.of(matchesList().item("f1").item(null), matchesList().item(null).item("o2"))
         );
     }
 
@@ -812,12 +765,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
         );
 
         Map result = runEsql("FROM test* | SORT file.raw | LIMIT 2");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("file", "unsupported"), columnInfo("file.raw", "keyword"))
-            ).entry("values", List.of(matchesList().item(null).item("o2"), matchesList().item(null).item(null)))
+            List.of(columnInfo("file", "unsupported"), columnInfo("file.raw", "keyword")),
+            List.of(matchesList().item(null).item("o2"), matchesList().item(null).item(null))
         );
     }
 
@@ -859,12 +810,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
         assertThat(err, containsString("Cannot use field [f.raw] with unsupported type [ip_range]"));
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))
-            ).entry("values", List.of(matchesList().item(null).item(null)))
+            List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported")),
+            List.of(matchesList().item(null).item(null))
         );
     }
 
@@ -924,12 +873,10 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
         assertThat(err, containsString("Cannot use field [f.raw] with unsupported type [ip_range]"));
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))
-            ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item(null).item(null)))
+            List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported")),
+            List.of(matchesList().item(null).item(null), matchesList().item(null).item(null))
         );
     }
 
@@ -961,11 +908,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             {"emp_no": 2}""");
 
         Map result = runEsql("FROM test* | SORT emp_no | LIMIT 2");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("emp_no", "integer")))
-                .entry("values", List.of(matchesList().item(1), matchesList().item(2)))
-        );
+        assertResultMap(result, List.of(columnInfo("emp_no", "integer")), List.of(matchesList().item(1), matchesList().item(2)));
     }
 
     /**
@@ -1007,11 +950,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
         );
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("emp_no", "unsupported")))
-                .entry("values", List.of(matchesList().item(null), matchesList().item(null)))
-        );
+        assertResultMap(result, List.of(columnInfo("emp_no", "unsupported")), List.of(matchesList().item(null), matchesList().item(null)));
     }
 
     /**
@@ -1053,11 +992,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
         );
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("emp_no", "unsupported")))
-                .entry("values", List.of(matchesList().item(null), matchesList().item(null)))
-        );
+        assertResultMap(result, List.of(columnInfo("emp_no", "unsupported")), List.of(matchesList().item(null), matchesList().item(null)));
     }
 
     /**
@@ -1095,7 +1030,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             {"foo": {"emp_no": "cat"}}""");
 
         Map result = runEsql("FROM test* | LIMIT 3");
-        assertMap(result, matchesMap().entry("columns", List.of(columnInfo("foo.emp_no", "unsupported"))).extraOk());
+        assertMap(result, getResultMatcher(result).entry("columns", List.of(columnInfo("foo.emp_no", "unsupported"))).extraOk());
 
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT foo.emp_no | LIMIT 3"));
         String err = EntityUtils.toString(e.getResponse().getEntity());
@@ -1147,35 +1082,29 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             """);
 
         Map result = runEsql("FROM test");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", Collections.EMPTY_LIST)
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            Collections.EMPTY_LIST
         );
 
         index("test", """
             {"Responses.process.pid": 123,"process.parent.command_line":"run.bat"}""");
 
         result = runEsql("FROM test");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item("run.bat").item("run.bat"))
         );
 
         result = runEsql("""
             FROM test | where process.parent.command_line == "run.bat"
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item("run.bat").item("run.bat"))
         );
 
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test | SORT Responses.process.pid"));
@@ -1235,23 +1164,19 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             {"process.parent.command_line":"run.bat"}""");
 
         Map result = runEsql("FROM test* | SORT process.parent.command_line ASC NULLS FIRST");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat"))
         );
 
         result = runEsql("""
             FROM test* | where process.parent.command_line == "run.bat"
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item("run.bat").item("run.bat"))
         );
 
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT Responses.process.pid"));
@@ -1339,61 +1264,47 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             {"Responses.process": 222,"process.parent.command_line":"run2.bat"}""");
 
         Map result = runEsql("FROM test* | SORT process.parent.command_line");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of(
+                matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
+                matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
             )
-                .entry(
-                    "values",
-                    List.of(
-                        matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
-                        matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
-                    )
-                )
         );
 
         result = runEsql("""
             FROM test* | where Responses.process.pid == 111
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
-            ).entry("values", List.of())
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of()
         );
 
         result = runEsql("FROM test* | SORT process.parent.command_line");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of(
+                matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
+                matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
             )
-                .entry(
-                    "values",
-                    List.of(
-                        matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
-                        matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
-                    )
-                )
         );
 
         result = runEsql("""
@@ -1401,17 +1312,15 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
             | SORT process.parent.command_line
             | WHERE Responses.process IS NULL
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
-            ).entry("values", List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat")))
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat"))
         );
     }
 
@@ -1671,7 +1580,7 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase {
                 values = values.item(expectedValue);
             }
 
-            assertMap(result, matchesMapWithOptionalTook(result.get("took")).entry("columns", columns).entry("values", List.of(values)));
+            assertResultMap(result, columns, List.of(values));
         }
 
         void createIndex(String name, String fieldName) throws IOException {
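Note: the `assertResultMap(result, columns, values)` calls introduced above delegate to a shared helper that is not defined in these hunks. As a hedged sketch of the intent, assuming it sits next to `getResultMatcher` in the common test infrastructure (hypothetical code, not a hunk from this series), the three-argument form amounts to:

    // Sketch only: fold the optional response metadata (e.g. "took") into the matcher,
    // replacing the per-class matchesMapWithOptionalTook helper removed above.
    static <C, V> void assertResultMap(Map<String, Object> result, C columns, V values) {
        assertMap(result, getResultMatcher(result).entry("columns", columns).entry("values", values));
    }
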
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java
index 5e0aeb5b3535..ba057cbe276b 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java
@@ -26,14 +26,12 @@ import java.util.Locale;
 import java.util.Map;
 
 import static org.elasticsearch.test.ListMatcher.matchesList;
-import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.entityToMap;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.requestObjectBuilder;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.nullValue;
@@ -63,42 +61,35 @@ public abstract class RequestIndexFilteringTestCase extends ESRestTestCase {
 
         // filter includes both indices in the result (all columns, all rows)
         RestEsqlTestCase.RequestObjectBuilder builder = timestampFilter("gte", "2023-01-01").query(from("test*"));
-        Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "id2").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1 + docsTest2))).entry("took", greaterThanOrEqualTo(0))
+        assertResultMap(
+            runEsql(builder),
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "id2").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(docsTest1 + docsTest2))
         );
 
         // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)!
         builder = timestampFilter("gte", "2024-01-01").query(from("test*"));
-        assertMap(
+        assertResultMap(
             runEsql(builder),
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
 
         // filter excludes both indices (no rows); the first analysis step fails because there are no columns, a second attempt succeeds
         // after eliminating the index filter. All columns are returned.
         builder = timestampFilter("gte", "2025-01-01").query(from("test*"));
-        assertMap(
+        assertResultMap(
             runEsql(builder),
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "id2").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(0))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "id2").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(0))
         );
     }
 
@@ -110,27 +101,22 @@ public abstract class RequestIndexFilteringTestCase extends ESRestTestCase {
 
         // filter includes only test1. Columns and rows of test2 are filtered out
         RestEsqlTestCase.RequestObjectBuilder builder = existsFilter("id1").query(from("test*"));
-        Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+        assertResultMap(
+            runEsql(builder),
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
 
         // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)!
         builder = existsFilter("id1").query(from("test*") + " METADATA _index | KEEP _index, id*");
-        result = runEsql(builder);
-        assertMap(
+        Map<String, Object> result = runEsql(builder);
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
         @SuppressWarnings("unchecked")
         var values = (List<List<Object>>) result.get("values");
@@ -151,14 +137,12 @@ public abstract class RequestIndexFilteringTestCase extends ESRestTestCase {
             from("test*") + " METADATA _index | SORT id2 | KEEP _index, id*"
         );
         Map<String, Object> result = runEsql(builder);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "id2").entry("type", "integer"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "id2").entry("type", "integer")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
         @SuppressWarnings("unchecked")
         var values = (List<List<Object>>) result.get("values");
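Note: the optional `took` matching that the removed `matchesMapWithOptionalTook` used to provide per class is now centralized in `getResultMatcher`. A minimal sketch of that behaviour, mirroring the removed helper (hypothetical; the shared implementation may also cover entries such as `is_partial` or `_clusters`):

    // Only match "took" when the response actually carries it.
    static MapMatcher getResultMatcher(Map<String, Object> result) {
        MapMatcher matcher = matchesMap();
        if (result.get("took") instanceof Number) {
            matcher = matcher.entry("took", greaterThanOrEqualTo(0));
        }
        return matcher;
    }
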
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
index bf4a4400e13c..69bbf7420c72 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
@@ -23,10 +23,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.test.MapMatcher.assertMap;
-import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 
 public abstract class RestEnrichTestCase extends ESRestTestCase {
 
@@ -194,14 +191,14 @@ public abstract class RestEnrichTestCase extends ESRestTestCase {
         Map result = runEsql("from test1 | enrich countries | keep number | sort number");
         var columns = List.of(Map.of("name", "number", "type", "long"));
         var values = List.of(List.of(1000), List.of(1000), List.of(5000));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     public void testMatchField_ImplicitFieldsList_WithStats() throws IOException {
         Map result = runEsql("from test1 | enrich countries | stats s = sum(number) by country_name");
         var columns = List.of(Map.of("name", "s", "type", "long"), Map.of("name", "country_name", "type", "keyword"));
         var values = List.of(List.of(2000, "United States of America"), List.of(5000, "China"));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     public void testSimpleIndexFilteringWithEnrich() throws IOException {
@@ -226,7 +223,7 @@ public abstract class RestEnrichTestCase extends ESRestTestCase {
             Arrays.asList(null, 1000, "US", "test1"),
             Arrays.asList(3, null, "US", "test2")
         );
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
 
         // filter something that won't affect the columns
         result = runEsql("""
@@ -235,7 +232,7 @@ public abstract class RestEnrichTestCase extends ESRestTestCase {
                 | keep *number, geo.dest, _index
                 | sort geo.dest, _index
             """, b -> b.startObject("exists").field("field", "foobar").endObject());
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", List.of()).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, List.of());
     }
 
     public void testIndexFilteringWithEnrich_RemoveOneIndex() throws IOException {
@@ -259,7 +256,7 @@ public abstract class RestEnrichTestCase extends ESRestTestCase {
             Arrays.asList(null, 1000, "US", "test1")
         );
 
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
 
         // filter out test2 and use a wildcarded field name in the "keep" command
         result = runEsql("""
@@ -275,7 +272,7 @@ public abstract class RestEnrichTestCase extends ESRestTestCase {
             Map.of("name", "_index", "type", "keyword")
         );
         values = List.of(Arrays.asList(5000, "CN", "test1"), Arrays.asList(1000, "US", "test1"), Arrays.asList(1000, "US", "test1"));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     public void testIndexFilteringWithEnrich_ExpectException() throws IOException {
@@ -315,7 +312,7 @@ public abstract class RestEnrichTestCase extends ESRestTestCase {
             Map.of("name", "_index", "type", "keyword")
         );
         var values = List.of(Arrays.asList(2, "IN", "test2"), Arrays.asList(2, "IN", "test2"), Arrays.asList(3, "US", "test2"));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     private Map<String, Object> runEsql(String query) throws IOException {
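Note: across all of these test classes the rewrite follows the same mechanical pattern; an illustrative before/after (not a hunk from this series):

    // Before: each test spelled out the full matcher, including the optional "took" entry.
    assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));

    // After: one shared assertion owns the common response shape.
    assertResultMap(result, columns, values);
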
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
index 86f8a8c5363f..66333421eeb7 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
@@ -59,7 +59,6 @@ import static java.util.Collections.emptySet;
 import static java.util.Map.entry;
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 import static org.elasticsearch.test.ListMatcher.matchesList;
-import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.as;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.ASYNC;
@@ -257,7 +256,7 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
 
     public void testGetAnswer() throws IOException {
         Map<String, Object> answer = runEsql(requestObjectBuilder().query("row a = 1, b = 2"));
-        assertEquals(3, answer.size());
+        assertEquals(4, answer.size());
         assertThat(((Integer) answer.get("took")).intValue(), greaterThanOrEqualTo(0));
         Map colA = Map.of("name", "a", "type", "integer");
         Map colB = Map.of("name", "b", "type", "integer");
@@ -296,21 +295,13 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
         assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo("{\"errors\":false}"));
 
         RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats min(value)");
-        Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry("values", List.of(List.of(1)))
-                .entry("columns", List.of(Map.of("name", "min(value)", "type", "long")))
-                .entry("took", greaterThanOrEqualTo(0))
-        );
+        assertResultMap(runEsql(builder), List.of(Map.of("name", "min(value)", "type", "long")), List.of(List.of(1)));
 
         builder = requestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`");
-        result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry("values", List.of(List.of(2, 0), List.of(1, 1)))
-                .entry("columns", List.of(Map.of("name", "min(value)", "type", "long"), Map.of("name", "group", "type", "long")))
-                .entry("took", greaterThanOrEqualTo(0))
+        assertResultMap(
+            runEsql(builder),
+            List.of(Map.of("name", "min(value)", "type", "long"), Map.of("name", "group", "type", "long")),
+            List.of(List.of(2, 0), List.of(1, 1))
         );
     }
 
@@ -569,7 +560,7 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
         );
         var values = List.of(List.of(3, testIndexName() + "-2", 1, "id-2"), List.of(2, testIndexName() + "-1", 2, "id-1"));
 
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
 
         assertThat(deleteIndex(testIndexName() + "-1").isAcknowledged(), is(true)); // clean up
         assertThat(deleteIndex(testIndexName() + "-2").isAcknowledged(), is(true)); // clean up
@@ -867,17 +858,15 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
                     .item(499.5)
             );
         }
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "test").entry("type", "text"))
-                    .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-                    .item(matchesMap().entry("name", "now").entry("type", "date"))
-                    .item(matchesMap().entry("name", "AVG(value)").entry("type", "double"))
-            ).entry("values", values).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "test").entry("type", "text"))
+                .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "value").entry("type", "long"))
+                .item(matchesMap().entry("name", "now").entry("type", "date"))
+                .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")),
+            values
         );
     }
 
@@ -893,11 +882,10 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
         }).query(fromIndex() + " | STATS SUM(value)");
 
         Map<String, Object> result = runEsql(builder);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry("columns", matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")))
-                .entry("values", List.of(List.of(499500)))
-                .entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")),
+            List.of(List.of(499500))
         );
     }
 
@@ -912,12 +900,7 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
             b.endObject();
         }).query(fromIndex() + " | WHERE value == 12 | STATS SUM(value)");
         Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry("columns", matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")))
-                .entry("values", List.of(List.of(12)))
-                .entry("took", greaterThanOrEqualTo(0))
-        );
+        assertResultMap(result, matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")), List.of(List.of(12)));
     }
 
     public void testTopLevelFilterBoolMerged() throws IOException {
@@ -946,11 +929,10 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
                 b.endObject();
             }).query(fromIndex() + " | WHERE @timestamp > \"2010-01-01\" | STATS SUM(value)");
             Map<String, Object> result = runEsql(builder);
-            assertMap(
+            assertResultMap(
                 result,
-                matchesMap().entry("columns", matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")))
-                    .entry("values", List.of(List.of(12)))
-                    .entry("took", greaterThanOrEqualTo(0))
+                matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")),
+                List.of(List.of(12))
             );
         }
     }
@@ -1132,13 +1114,12 @@ public abstract class RestEsqlTestCase extends ESRestTestCase {
         for (int i = 0; i < count; i++) {
             values = values.item(matchesList().item("keyword" + i).item(i));
         }
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "keyword").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "integer").entry("type", "integer"))
-            ).entry("values", values).entry("took", greaterThanOrEqualTo(0)).entry("id", id).entry("is_running", false)
+            getResultMatcher(result).entry("id", id).entry("is_running", false),
+            matchesList().item(matchesMap().entry("name", "keyword").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "integer").entry("type", "integer")),
+            values
         );
 
     }
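
Aside: the assertResultMap helper that replaces the removed assertMap(...) chains above is, in effect, a one-call wrapper around the repeated matcher pattern. A minimal sketch of what such a helper plausibly looks like (the real one lives in the shared test infrastructure and may differ in signature and checks):

    // Sketch only: asserts columns, values, and a non-negative "took", matching the
    // matcher chains deleted in the hunks above. Assumes assertMap/matchesMap from
    // org.elasticsearch.test.MapMatcher and greaterThanOrEqualTo from Hamcrest.
    private static void assertResultMap(Map<String, Object> result, Object columns, Object values) {
        assertMap(
            result,
            matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))
        );
    }

The four-argument call site above (passing getResultMatcher(result).entry("id", id).entry("is_running", false)) suggests an overload that also accepts a base matcher, so async-specific keys can be asserted alongside columns and values.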
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
index ffbddd52b255..7df40da0344a 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
@@ -127,7 +127,7 @@ public class AbstractCrossClustersUsageTelemetryIT extends AbstractMultiClusters
         return getTelemetrySnapshot(queryNode);
     }
 
-    private CCSTelemetrySnapshot getTelemetrySnapshot(String nodeName) {
+    protected CCSTelemetrySnapshot getTelemetrySnapshot(String nodeName) {
         var usage = cluster(LOCAL_CLUSTER).getInstance(UsageService.class, nodeName);
         return usage.getEsqlUsageHolder().getCCSTelemetrySnapshot();
     }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java
index 5554f7e571df..492947304d89 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java
@@ -34,12 +34,16 @@ public abstract class AbstractPauseFieldPlugin extends Plugin implements ScriptP
     // Called when the engine needs to wait for further execution to be allowed.
     protected abstract boolean onWait() throws InterruptedException;
 
+    protected String scriptTypeName() {
+        return "pause";
+    }
+
     @Override
     public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
         return new ScriptEngine() {
             @Override
             public String getType() {
-                return "pause";
+                return scriptTypeName();
             }
 
             @Override
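
The scriptTypeName() hook extracted here lets a subclass register the same pause engine under a different script language name. A hypothetical subclass needs only that one override (CountingPauseFieldPlugin later in this patch does exactly this for "pause_count"):

    // Hypothetical example: identical pause behaviour, registered as "pause_custom".
    public static class CustomPauseFieldPlugin extends SimplePauseFieldPlugin {
        @Override
        protected String scriptTypeName() {
            return "pause_custom";
        }
    }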
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
new file mode 100644
index 000000000000..99a81c60a9ad
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
+import org.elasticsearch.xpack.esql.plan.logical.Enrich;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster;
+import static org.hamcrest.Matchers.equalTo;
+
+// Tests that enrich after stop works correctly
+public class CrossClusterAsyncEnrichStopIT extends AbstractEnrichBasedCrossClusterTestCase {
+
+    @Override
+    protected boolean reuseClusters() {
+        return false;
+    }
+
+    @Override
+    protected boolean tolerateErrorsWhenWipingEnrichPolicies() {
+        // attempt to wipe will fail since some clusters are already closed
+        return true;
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
+        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
+        plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
+        plugins.add(SimplePauseFieldPlugin.class);
+        return plugins;
+    }
+
+    @Before
+    public void resetPlugin() {
+        SimplePauseFieldPlugin.resetPlugin();
+    }
+
+    /**
+     * This tests that enrich and aggs work after stop. It works like this:
+     * 1. We launch the async request
+     * 2. c2 index has the pause field which will pause the query until we allow it to proceed
+     * 3. We wait until c1 is done and then stop the async request
+     * 4. We allow the query to proceed
+     * 5. The result should contain the data from local and c1, and coordinator-side enrichments should happen
+     */
+    public void testEnrichAfterStop() throws Exception {
+        setupEventsIndexWithPause("c2");
+        String query = String.format(Locale.ROOT, """
+            FROM *:events,events
+            | eval ip= TO_STR(host)
+            | %s
+            | %s
+            | eval const = coalesce(const, 1)
+            | stats c = sum(const) by vendor
+            | sort vendor
+            """, enrichHosts(Enrich.Mode.COORDINATOR), enrichVendors(Enrich.Mode.COORDINATOR));
+
+        // Start the async query
+        final String asyncExecutionId = startAsyncQuery(client(), query, randomBoolean());
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until c1 and the local cluster are done
+        waitForCluster(client(), "c1", asyncExecutionId);
+        waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId);
+
+        // Run the stop request
+        var stopRequest = new AsyncStopRequest(asyncExecutionId);
+        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // Allow the processing to proceed
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+
+        try (EsqlQueryResponse resp = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            // Compare this to CrossClustersEnrichIT.testEnrichTwiceThenAggs - the results from c2 will be absent
+            // because we stopped it before processing the data
+            assertThat(
+                getValuesList(resp),
+                equalTo(
+                    List.of(
+                        List.of(5L, "Apple"),
+                        List.of(6L, "Microsoft"),
+                        List.of(3L, "Redhat"),
+                        List.of(2L, "Samsung"),
+                        Arrays.asList(2L, (String) null)
+                    )
+                )
+            );
+            EsqlExecutionInfo executionInfo = resp.getExecutionInfo();
+            assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2")));
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
+        }
+    }
+
+    private void setupEventsIndexWithPause(String clusterAlias) throws IOException {
+        record Event(long timestamp, String user, String host) {}
+        List<Event> events = List.of(
+            new Event(1, "park", "192.168.1.25"),
+            new Event(2, "akio", "192.168.1.5"),
+            new Event(3, "park", "192.168.1.2"),
+            new Event(4, "kevin", "192.168.1.3")
+        );
+        // Regular mapping
+        var stdMapping = PutMappingRequest.simpleMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip");
+        Map<String, Object> mappingMap = XContentHelper.convertToMap(BytesReference.bytes(stdMapping), false, stdMapping.contentType())
+            .v2();
+        // Pause field mapping
+        var mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("const");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", "pause").endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        mapping.endObject();
+        Map<String, Object> mappingMap2 = XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2();
+        // Merge the two mappings
+        mappingMap.putAll(mappingMap2);
+
+        var client = client(clusterAlias);
+        assertAcked(client.admin().indices().prepareDelete("events"));
+        assertAcked(client.admin().indices().prepareCreate("events").setMapping(mappingMap));
+        for (var e : events) {
+            client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host, "const", "1").get();
+        }
+        client.admin().indices().prepareRefresh("events").get();
+    }
+}
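
The stop-then-collect flow this new test exercises reduces to the sketch below (same EsqlAsyncStopAction API introduced in this series; the 30-second timeout is illustrative):

    // Issue the stop first, then release the pause so the blocked driver can wind down.
    var stopFuture = client().execute(EsqlAsyncStopAction.INSTANCE, new AsyncStopRequest(asyncExecutionId));
    SimplePauseFieldPlugin.allowEmitting.countDown();
    try (EsqlQueryResponse resp = stopFuture.actionGet(30, TimeUnit.SECONDS)) {
        // resp holds whatever results were already complete when the stop took effect
    } finally {
        assertAcked(deleteAsyncId(client(), asyncExecutionId));
    }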
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
index 79ac8816a003..8a163d7336b0 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
@@ -7,43 +7,53 @@
 
 package org.elasticsearch.xpack.esql.action;
 
-import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.compute.operator.DriverTaskRunner;
 import org.elasticsearch.compute.operator.exchange.ExchangeService;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.tasks.TaskInfo;
 import org.elasticsearch.test.AbstractMultiClustersTestCase;
 import org.elasticsearch.test.XContentTestUtils;
 import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.json.JsonXContent;
-import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
-import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
-import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction;
+import org.elasticsearch.xpack.core.async.AsyncExecutionId;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
 import org.junit.Before;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
-import static org.elasticsearch.core.TimeValue.timeValueMillis;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomIncludeCCSMetadata;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.getAsyncResponse;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.runAsyncQuery;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQueryWithPragmas;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.is;
@@ -57,6 +67,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
     private static String LOCAL_INDEX = "logs-1";
     private static String REMOTE_INDEX = "logs-2";
     private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
+    private static final String INDEX_WITH_FAIL_MAPPING = "failing";
 
     @Override
     protected List<String> remoteClusterAlias() {
@@ -65,7 +76,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
 
     @Override
     protected Map<String, Boolean> skipUnavailableForRemoteClusters() {
-        return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean());
+        return Map.of(REMOTE_CLUSTER_1, false, REMOTE_CLUSTER_2, randomBoolean());
     }
 
     @Override
@@ -75,6 +86,8 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
         plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
         plugins.add(InternalExchangePlugin.class);
         plugins.add(SimplePauseFieldPlugin.class);
+        plugins.add(FailingPauseFieldPlugin.class);
+        plugins.add(CountingPauseFieldPlugin.class);
         return plugins;
     }
 
@@ -94,6 +107,8 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
     @Before
     public void resetPlugin() {
         SimplePauseFieldPlugin.resetPlugin();
+        FailingPauseFieldPlugin.resetPlugin();
+        CountingPauseFieldPlugin.resetPlugin();
     }
 
     /**
@@ -103,42 +118,28 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
         Map<String, Object> testClusterInfo = setupClusters(3);
         int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
         int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
-        int remote2NumShards = (Integer) testClusterInfo.get("remote2.blocking_index.num_shards");
+        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause", INDEX_WITH_RUNTIME_MAPPING);
 
         Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
-        Boolean requestIncludeMeta = includeCCSMetadata.v1();
         boolean responseExpectMeta = includeCCSMetadata.v2();
 
-        AtomicReference<String> asyncExecutionId = new AtomicReference<>();
-
-        String q = "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(const) | LIMIT 10";
-        try (EsqlQueryResponse resp = runAsyncQuery(q, requestIncludeMeta, null, TimeValue.timeValueMillis(100))) {
-            assertTrue(resp.isRunning());
-            assertNotNull("async execution id is null", resp.asyncExecutionId());
-            asyncExecutionId.set(resp.asyncExecutionId().get());
-            // executionInfo may or may not be set on the initial response when there is a relatively low wait_for_completion_timeout
-            // so we do not check for it here
-        }
-
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(const) | LIMIT 10",
+            includeCCSMetadata.v1()
+        );
         // wait until we know that the query against 'remote-b:blocking' has started
         SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
 
         // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it)
-        assertBusy(() -> {
-            try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
-                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
-                assertNotNull(executionInfo);
-                EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster("cluster-a");
-                assertThat(clusterA.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
-            }
-        });
+        waitForCluster(client(), "cluster-a", asyncExecutionId);
 
         /* at this point:
          *  the query against cluster-a should be finished
          *  the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown)
          *  the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b
          */
-        try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+        try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
             EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
             assertThat(asyncResponse.isRunning(), is(true));
             assertThat(
@@ -149,13 +150,8 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
             assertThat(executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL), equalTo(1));
 
             EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1);
-            assertThat(clusterA.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(clusterA.getTotalShards(), greaterThanOrEqualTo(1));
-            assertThat(clusterA.getSuccessfulShards(), equalTo(clusterA.getTotalShards()));
-            assertThat(clusterA.getSkippedShards(), equalTo(0));
-            assertThat(clusterA.getFailedShards(), equalTo(0));
-            assertThat(clusterA.getFailures().size(), equalTo(0));
-            assertThat(clusterA.getTook().millis(), greaterThanOrEqualTo(0L));
+            // Should be done and successful
+            assertClusterInfoSuccess(clusterA, clusterA.getTotalShards());
 
             EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
             // should still be RUNNING since the local cluster has to do a STATS on the coordinator, waiting on remoteB
@@ -175,7 +171,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
 
         // wait until both remoteB and local queries have finished
         assertBusy(() -> {
-            try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
                 EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
                 assertNotNull(executionInfo);
                 EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
@@ -186,40 +182,30 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
             }
         });
 
-        try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+        try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
             EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
             assertNotNull(executionInfo);
             assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(1L));
+            assertThat(executionInfo.isPartial(), equalTo(false));
 
             EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1);
-            assertThat(clusterA.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(clusterA.getTook().millis(), greaterThanOrEqualTo(0L));
-            assertThat(clusterA.getTotalShards(), equalTo(remote1NumShards));
-            assertThat(clusterA.getSuccessfulShards(), equalTo(remote1NumShards));
-            assertThat(clusterA.getSkippedShards(), equalTo(0));
-            assertThat(clusterA.getFailedShards(), equalTo(0));
-            assertThat(clusterA.getFailures().size(), equalTo(0));
+            assertClusterInfoSuccess(clusterA, remote1NumShards);
 
             EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
-            assertThat(remoteB.getTook().millis(), greaterThanOrEqualTo(0L));
-            assertThat(remoteB.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(remoteB.getTotalShards(), equalTo(remote2NumShards));
-            assertThat(remoteB.getSuccessfulShards(), equalTo(remote2NumShards));
-            assertThat(remoteB.getSkippedShards(), equalTo(0));
-            assertThat(remoteB.getFailedShards(), equalTo(0));
-            assertThat(remoteB.getFailures().size(), equalTo(0));
+            assertClusterInfoSuccess(remoteB, 1);
 
             EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
-            assertThat(local.getTook().millis(), greaterThanOrEqualTo(0L));
-            assertThat(local.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(local.getTotalShards(), equalTo(localNumShards));
-            assertThat(local.getSuccessfulShards(), equalTo(localNumShards));
-            assertThat(local.getSkippedShards(), equalTo(0));
-            assertThat(local.getFailedShards(), equalTo(0));
-            assertThat(local.getFailures().size(), equalTo(0));
+            assertClusterInfoSuccess(local, localNumShards);
+
+            // Check that stop produces the same result
+            try (
+                EsqlQueryResponse stopResponse = client().execute(EsqlAsyncStopAction.INSTANCE, new AsyncStopRequest(asyncExecutionId))
+                    .get()
+            ) {
+                assertThat(stopResponse, equalTo(asyncResponse));
+            }
         } finally {
-            AcknowledgedResponse acknowledgedResponse = deleteAsyncId(asyncExecutionId.get());
-            assertThat(acknowledgedResponse.isAcknowledged(), is(true));
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
@@ -231,7 +217,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
 
         final TimeValue waitForCompletion = TimeValue.timeValueNanos(randomFrom(1L, Long.MAX_VALUE));
         String asyncExecutionId = null;
-        try (EsqlQueryResponse resp = runAsyncQuery("FROM logs*,*:logs* | LIMIT 0", requestIncludeMeta, null, waitForCompletion)) {
+        try (EsqlQueryResponse resp = runAsyncQuery(client(), "FROM logs*,*:logs* | LIMIT 0", requestIncludeMeta, waitForCompletion)) {
             EsqlExecutionInfo executionInfo = resp.getExecutionInfo();
             if (resp.isRunning()) {
                 asyncExecutionId = resp.asyncExecutionId().get();
@@ -252,90 +238,296 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
                 assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
                 assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta));
                 assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+                assertThat(executionInfo.isPartial(), equalTo(false));
 
                 EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
-                assertThat(remoteCluster.getIndexExpression(), equalTo("logs*"));
-                assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-                assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L));
                 assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
-                assertThat(remoteCluster.getTotalShards(), equalTo(0));
-                assertThat(remoteCluster.getSuccessfulShards(), equalTo(0));
-                assertThat(remoteCluster.getSkippedShards(), equalTo(0));
-                assertThat(remoteCluster.getFailedShards(), equalTo(0));
+                assertThat(remoteCluster.getIndexExpression(), equalTo("logs*"));
+                assertClusterInfoSuccess(remoteCluster, 0);
 
-                EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+                EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
+                assertClusterInfoSuccess(remote2Cluster, 0);
                 assertThat(remote2Cluster.getIndexExpression(), equalTo("logs*"));
-                assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-                assertThat(remote2Cluster.getTook().millis(), greaterThanOrEqualTo(0L));
                 assertThat(remote2Cluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
-                assertThat(remote2Cluster.getTotalShards(), equalTo(0));
-                assertThat(remote2Cluster.getSuccessfulShards(), equalTo(0));
-                assertThat(remote2Cluster.getSkippedShards(), equalTo(0));
-                assertThat(remote2Cluster.getFailedShards(), equalTo(0));
 
                 EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+                assertClusterInfoSuccess(localCluster, 0);
                 assertThat(localCluster.getIndexExpression(), equalTo("logs*"));
-                assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-                assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L));
                 assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
-                assertThat(remote2Cluster.getTotalShards(), equalTo(0));
-                assertThat(remote2Cluster.getSuccessfulShards(), equalTo(0));
-                assertThat(remote2Cluster.getSkippedShards(), equalTo(0));
-                assertThat(remote2Cluster.getFailedShards(), equalTo(0));
 
                 assertClusterMetadataInResponse(resp, responseExpectMeta, 3);
             }
         } finally {
             if (asyncExecutionId != null) {
-                AcknowledgedResponse acknowledgedResponse = deleteAsyncId(asyncExecutionId);
-                assertThat(acknowledgedResponse.isAcknowledged(), is(true));
+                assertAcked(deleteAsyncId(client(), asyncExecutionId));
             }
         }
     }
 
-    protected EsqlQueryResponse runAsyncQuery(String query, Boolean ccsMetadata, QueryBuilder filter, TimeValue waitCompletionTime) {
-        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
-        request.query(query);
-        request.pragmas(AbstractEsqlIntegTestCase.randomPragmas());
-        request.profile(randomInt(5) == 2);
-        request.columnar(randomBoolean());
-        if (ccsMetadata != null) {
-            request.includeCCSMetadata(ccsMetadata);
-        }
-        request.waitForCompletionTimeout(waitCompletionTime);
-        request.keepOnCompletion(false);
-        if (filter != null) {
-            request.filter(filter);
-        }
-        return runAsyncQuery(request);
-    }
+    public void testStopQuery() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(3);
+        int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
+        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
+        // Create a large index so we can be sure we're stopping before the end
+        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause_count", INDEX_WITH_RUNTIME_MAPPING);
 
-    protected EsqlQueryResponse runAsyncQuery(EsqlQueryRequest request) {
-        try {
-            return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
-        } catch (ElasticsearchTimeoutException e) {
-            throw new AssertionError("timeout waiting for query response", e);
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final String asyncExecutionId = startAsyncQueryWithPragmas(
+            client(),
+            "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(coalesce(const,v)) | LIMIT 1",
+            includeCCSMetadata.v1(),
+            Map.of("page_size", 1, "data_partitioning", "shard", "task_concurrency", 1)
+        );
+
+        // wait until we know that the query against 'remote-b:blocking' has started
+        CountingPauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it)
+        waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId);
+        waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId);
+
+        /* at this point:
+         *  the query against cluster-a should be finished
+         *  the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown)
+         *  the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b
+         */
+
+        // run the stop query
+        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId);
+        ActionFuture<EsqlQueryResponse> stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        assertBusy(() -> {
+            List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2));
+            List<TaskInfo> reduceTasks = tasks.stream().filter(t -> t.description().contains("_LuceneSourceOperator") == false).toList();
+            assertThat(reduceTasks, empty());
+        });
+        // allow remoteB query to proceed
+        CountingPauseFieldPlugin.allowEmitting.countDown();
+
+        // Since part of the query has not been stopped, we expect some result to emerge here
+        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            // Check that we did not process all the fields on remote-b
+            // Should not be getting more than one page here, and we set page size to 1
+            assertThat(CountingPauseFieldPlugin.count.get(), lessThanOrEqualTo(1L));
+            assertThat(asyncResponse.isRunning(), is(false));
+            assertThat(asyncResponse.columns().size(), equalTo(1));
+            assertThat(asyncResponse.values().hasNext(), is(true));
+            Iterator<Object> row = asyncResponse.values().next();
+            // sum of 0-9 is 45 and sum of 0-9 squared is 285, so the combined total is 330
+            assertThat(row.next(), equalTo(330L));
+
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.isCrossClusterSearch(), is(true));
+            long overallTookMillis = executionInfo.overallTook().millis();
+            assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
+            assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+            assertThat(executionInfo.isPartial(), equalTo(true));
+
+            EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+            assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(remoteCluster, remote1NumShards);
+
+            EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
+            assertThat(remote2Cluster.getIndexExpression(), equalTo("blocking"));
+            assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
+
+            EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+            assertThat(localCluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(localCluster, localNumShards);
+
+            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
-    AcknowledgedResponse deleteAsyncId(String id) {
-        try {
-            DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id);
-            return client().execute(TransportDeleteAsyncResultAction.TYPE, request).actionGet(30, TimeUnit.SECONDS);
-        } catch (ElasticsearchTimeoutException e) {
-            throw new AssertionError("timeout waiting for DELETE response", e);
+    public void testStopQueryLocal() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(3);
+        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
+        int remote2NumShards = (Integer) testClusterInfo.get("remote2.num_shards");
+        populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_RUNTIME_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM blocking,*:logs-* | STATS total=sum(coalesce(const,v)) | LIMIT 1",
+            includeCCSMetadata.v1()
+        );
+
+        // wait until we know that the query against the local 'blocking' index has started
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until the remotes are done
+        waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId);
+        waitForCluster(client(), REMOTE_CLUSTER_2, asyncExecutionId);
+
+        /* at this point:
+         *  the query against remotes should be finished
+         *  the query against the local cluster should be running because it's blocked
+         */
+
+        // run the stop query
+        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId);
+        ActionFuture<EsqlQueryResponse> stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // ensure stop operation is running
+        assertBusy(() -> {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
+                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+                assertNotNull(executionInfo);
+                assertThat(executionInfo.isPartial(), is(true));
+            }
+        });
+        // allow local query to proceed
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+
+        // Since part of the query has not been stopped, we expect some result to emerge here
+        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            assertThat(asyncResponse.isRunning(), is(false));
+            assertThat(asyncResponse.columns().size(), equalTo(1));
+            assertThat(asyncResponse.values().hasNext(), is(true));
+            Iterator<Object> row = asyncResponse.values().next();
+            // sum of 0-9 squared is 285, from two remotes it's 570
+            assertThat(row.next(), equalTo(570L));
+
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.isCrossClusterSearch(), is(true));
+            long overallTookMillis = executionInfo.overallTook().millis();
+            assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
+            assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+            assertThat(executionInfo.isPartial(), equalTo(true));
+
+            EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+            assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(remoteCluster, remote1NumShards);
+
+            EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
+            assertThat(remote2Cluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(remote2Cluster, remote2NumShards);
+
+            EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+            assertThat(localCluster.getIndexExpression(), equalTo("blocking"));
+            assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
+
+            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
-    EsqlQueryResponse getAsyncResponse(String id) {
-        try {
-            var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueMillis(1));
-            return client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
-        } catch (ElasticsearchTimeoutException e) {
-            throw new AssertionError("timeout waiting for GET async result", e);
+    public void testStopQueryLocalNoRemotes() throws Exception {
+        setupClusters(3);
+        populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_RUNTIME_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM blocking | STATS total=count(const) | LIMIT 1",
+            includeCCSMetadata.v1()
+        );
+
+        // wait until we know that the query against the local 'blocking' index has started
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        /* at this point:
+         *  the query against the local cluster should be running because it's blocked
+         */
+
+        // run the stop query
+        var stopRequest = new AsyncStopRequest(asyncExecutionId);
+        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // allow local query to proceed
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+
+        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            assertThat(asyncResponse.isRunning(), is(false));
+            assertThat(asyncResponse.columns().size(), equalTo(1));
+            assertThat(asyncResponse.values().hasNext(), is(true));
+            Iterator<Object> row = asyncResponse.values().next();
+            assertThat((long) row.next(), greaterThanOrEqualTo(0L));
+
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.isCrossClusterSearch(), is(false));
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
+    public void testAsyncFailure() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(2);
+        populateRuntimeIndex(REMOTE_CLUSTER_1, "pause_fail", INDEX_WITH_FAIL_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM logs-*,cluster-a:failing | STATS total=sum(const) | LIMIT 1",
+            includeCCSMetadata.v1()
+        );
+        // wait until we know that the query against remote has started
+        FailingPauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+        // Allow to proceed
+        FailingPauseFieldPlugin.allowEmitting.countDown();
+
+        // wait until the failure has propagated to the async result
+        try {
+            assertBusy(() -> assertThrows(Exception.class, () -> getAsyncResponse(client(), asyncExecutionId)));
+            // Ensure stop query fails too when get fails
+            assertThrows(
+                ElasticsearchException.class,
+                () -> client().execute(EsqlAsyncStopAction.INSTANCE, new AsyncStopRequest(asyncExecutionId)).actionGet()
+            );
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
+        }
+    }
+
+    private String randomAsyncId() {
+        return AsyncExecutionId.encode(randomAlphaOfLength(10), new TaskId(randomAlphaOfLength(10), randomLong()));
+    }
+
+    public void testBadAsyncId() throws Exception {
+        setupClusters(3);
+        final AtomicReference<String> asyncId = new AtomicReference<>();
+        try (
+            EsqlQueryResponse resp = runAsyncQuery(
+                client(),
+                "FROM logs-*,*:logs-* | STATS total=sum(const) | LIMIT 1",
+                randomBoolean(),
+                TimeValue.timeValueMillis(0)
+            )
+        ) {
+            assertTrue(resp.isRunning());
+            asyncId.set(resp.asyncExecutionId().get());
+        }
+        assertBusy(() -> {
+            try (EsqlQueryResponse resp = getAsyncResponse(client(), asyncId.get())) {
+                assertFalse(resp.isRunning());
+            }
+        });
+
+        String bogusAsyncId = randomAsyncId();
+        var stopRequest = new AsyncStopRequest(bogusAsyncId);
+        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        assertThrows(ResourceNotFoundException.class, () -> stopAction.actionGet(1000, TimeUnit.SECONDS));
+    }
+
+    private void assertClusterInfoSuccess(EsqlExecutionInfo.Cluster cluster, int numShards) {
+        assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L));
+        assertThat(cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+        assertThat(cluster.getTotalShards(), equalTo(numShards));
+        assertThat(cluster.getSuccessfulShards(), equalTo(numShards));
+        assertThat(cluster.getSkippedShards(), equalTo(0));
+        assertThat(cluster.getFailedShards(), equalTo(0));
+        assertThat(cluster.getFailures().size(), equalTo(0));
+    }
+
     private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta, int numClusters) {
         try {
             final Map<String, Object> esqlResponseAsMap = XContentTestUtils.convertToMap(resp);
@@ -373,11 +565,8 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
         if (numClusters == 3) {
             int numShardsRemote2 = randomIntBetween(1, 5);
             populateRemoteIndices(REMOTE_CLUSTER_2, REMOTE_INDEX, numShardsRemote2);
-            populateRemoteIndicesWithRuntimeMapping(REMOTE_CLUSTER_2);
             clusterInfo.put("remote2.index", REMOTE_INDEX);
             clusterInfo.put("remote2.num_shards", numShardsRemote2);
-            clusterInfo.put("remote2.blocking_index", INDEX_WITH_RUNTIME_MAPPING);
-            clusterInfo.put("remote2.blocking_index.num_shards", 1);
         }
 
         String skipUnavailableKey = Strings.format("cluster.remote.%s.skip_unavailable", REMOTE_CLUSTER_1);
@@ -405,23 +594,26 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
         localClient.admin().indices().prepareRefresh(indexName).get();
     }
 
-    void populateRemoteIndicesWithRuntimeMapping(String clusterAlias) throws IOException {
+    void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
+        populateRuntimeIndex(clusterAlias, langName, indexName, 10);
+    }
+
+    void populateRuntimeIndex(String clusterAlias, String langName, String indexName, int count) throws IOException {
         XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
         mapping.startObject("runtime");
         {
             mapping.startObject("const");
             {
                 mapping.field("type", "long");
-                mapping.startObject("script").field("source", "").field("lang", "pause").endObject();
+                mapping.startObject("script").field("source", "").field("lang", langName).endObject();
             }
             mapping.endObject();
         }
         mapping.endObject();
         mapping.endObject();
-        client(clusterAlias).admin().indices().prepareCreate(INDEX_WITH_RUNTIME_MAPPING).setMapping(mapping).get();
-        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(INDEX_WITH_RUNTIME_MAPPING)
-            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-        for (int i = 0; i < 10; i++) {
+        client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
+        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+        for (int i = 0; i < count; i++) {
             bulk.add(new IndexRequest().source("foo", i));
         }
         bulk.get();
@@ -441,4 +633,26 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
         }
         remoteClient.admin().indices().prepareRefresh(indexName).get();
     }
+
+    public static class CountingPauseFieldPlugin extends SimplePauseFieldPlugin {
+        public static AtomicLong count = new AtomicLong(0);
+
+        protected String scriptTypeName() {
+            return "pause_count";
+        }
+
+        public static void resetPlugin() {
+            count.set(0);
+        }
+
+        @Override
+        public boolean onWait() throws InterruptedException {
+            count.incrementAndGet();
+            return allowEmitting.await(30, TimeUnit.SECONDS);
+        }
+    }
+
+    private static List<TaskInfo> getDriverTasks(Client client) {
+        return client.admin().cluster().prepareListTasks().setActions(DriverTaskRunner.ACTION_NAME).setDetailed(true).get().getTasks();
+    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java
index cd30ab02676f..89f7fdca7913 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java
@@ -7,29 +7,51 @@
 
 package org.elasticsearch.xpack.esql.action;
 
+import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.admin.cluster.stats.CCSTelemetrySnapshot;
 import org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.SkipUnavailableRule;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
+import org.junit.Before;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry.ASYNC_FEATURE;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
 import static org.hamcrest.Matchers.equalTo;
 
 public class CrossClustersUsageTelemetryIT extends AbstractCrossClustersUsageTelemetryIT {
+    private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
 
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
         List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
         plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
         plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class);
+        plugins.add(SimplePauseFieldPlugin.class);
+        plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
         return plugins;
     }
 
+    @Before
+    public void resetPlugin() {
+        SimplePauseFieldPlugin.resetPlugin();
+    }
+
     public void assertPerClusterCount(CCSTelemetrySnapshot.PerClusterCCSTelemetry perCluster, long count) {
         assertThat(perCluster.getCount(), equalTo(count));
         assertThat(perCluster.getSkippedCount(), equalTo(0L));
@@ -202,6 +224,59 @@ public class CrossClustersUsageTelemetryIT extends AbstractCrossClustersUsageTel
         assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 2L);
     }
 
+    public void testAsyncStop() throws Exception {
+        setupClusters();
+        populateRuntimeIndex(REMOTE1, "pause", INDEX_WITH_RUNTIME_MAPPING);
+        populateRuntimeIndex(REMOTE2, "pause", INDEX_WITH_RUNTIME_MAPPING);
+
+        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
+        request.query("from logs-*,c*:logs-*,c*:blocking | eval v1=coalesce(const, v) | stats sum (v1)");
+        request.pragmas(AbstractEsqlIntegTestCase.randomPragmas());
+        request.columnar(randomBoolean());
+        request.includeCCSMetadata(randomBoolean());
+
+        AtomicReference<String> asyncExecutionId = new AtomicReference<>();
+        assertResponse(cluster(LOCAL_CLUSTER).client(queryNode).execute(EsqlQueryAction.INSTANCE, request), resp -> {
+            if (resp.isRunning()) {
+                assertNotNull("async execution id is null", resp.asyncExecutionId());
+                asyncExecutionId.set(resp.asyncExecutionId().get());
+            }
+        });
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId.get());
+        ActionFuture<EsqlQueryResponse> actionFuture = cluster(LOCAL_CLUSTER).client(queryNode)
+            .execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // Release the pause
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+        try (EsqlQueryResponse resp = actionFuture.actionGet(30, TimeUnit.SECONDS)) {
+            assertTrue(resp.getExecutionInfo().isPartial());
+
+            CCSTelemetrySnapshot telemetry = getTelemetrySnapshot(queryNode);
+
+            assertThat(telemetry.getTotalCount(), equalTo(1L));
+            assertThat(telemetry.getSuccessCount(), equalTo(1L));
+            assertThat(telemetry.getFailureReasons().size(), equalTo(0));
+            assertThat(telemetry.getTook().count(), equalTo(1L));
+            assertThat(telemetry.getTookMrtFalse().count(), equalTo(0L));
+            assertThat(telemetry.getTookMrtTrue().count(), equalTo(0L));
+            assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
+            assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
+            assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(0L));
+            assertThat(telemetry.getClientCounts().size(), equalTo(0));
+            assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(1L));
+
+            var perCluster = telemetry.getByRemoteCluster();
+            assertThat(perCluster.size(), equalTo(3));
+            for (String clusterAlias : remoteClusterAlias()) {
+                assertPerClusterCount(perCluster.get(clusterAlias), 1L);
+            }
+            assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 1L);
+        } finally {
+            // Clean up
+            assertAcked(deleteAsyncId(client(), asyncExecutionId.get()));
+        }
+    }
+
     public void testNoSuchCluster() throws Exception {
         setupClusters();
         // This is not recognized as a cross-cluster search
@@ -225,4 +300,25 @@ public class CrossClustersUsageTelemetryIT extends AbstractCrossClustersUsageTel
         assertThat(telemetry.getFailureReasons(), equalTo(expectedFailure));
     }
 
+    void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
+        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("const");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", langName).endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        mapping.endObject();
+        client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
+        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+        for (int i = 0; i < 10; i++) {
+            bulk.add(new IndexRequest().source("foo", i));
+        }
+        bulk.get();
+    }
+
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index 8e27cfceb28e..2d0a15436bf8 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -362,7 +362,8 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
                     "task cancelled",
                     "request cancelled test cancel",
                     "parent task was cancelled [test cancel]",
-                    "cancelled on failure"
+                    "cancelled on failure",
+                    "task cancelled [cancelled on failure]"
                 )
             )
         );
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java
new file mode 100644
index 000000000000..d7117fb5e075
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.transport.RemoteClusterAware;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction;
+import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.core.TimeValue.timeValueMillis;
+import static org.elasticsearch.test.ESTestCase.assertBusy;
+import static org.elasticsearch.test.ESTestCase.assertThat;
+import static org.elasticsearch.test.ESTestCase.randomBoolean;
+import static org.elasticsearch.test.ESTestCase.randomInt;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public final class EsqlAsyncTestUtils {
+    public static String startAsyncQuery(Client client, String q, Boolean includeCCSMetadata) {
+        return startAsyncQueryWithPragmas(client, q, includeCCSMetadata, null);
+    }
+
+    public static String startAsyncQueryWithPragmas(Client client, String q, Boolean includeCCSMetadata, Map<String, Object> pragmas) {
+        try (EsqlQueryResponse resp = runAsyncQuery(client, q, includeCCSMetadata, TimeValue.timeValueMillis(100), pragmas)) {
+            assertTrue(resp.isRunning());
+            assertNotNull("async execution id is null", resp.asyncExecutionId());
+            // executionInfo may or may not be set on the initial response when there is a relatively low wait_for_completion_timeout
+            // so we do not check for it here
+            return resp.asyncExecutionId().get();
+        }
+    }
+
+    public static EsqlQueryResponse runAsyncQuery(Client client, String query, Boolean ccsMetadata, TimeValue waitCompletionTime) {
+        return runAsyncQuery(client, query, ccsMetadata, waitCompletionTime, null);
+    }
+
+    private static QueryPragmas randomPragmasWithOverride(@Nullable Map<String, Object> pragmas) {
+        if (pragmas == null || pragmas.isEmpty()) {
+            return AbstractEsqlIntegTestCase.randomPragmas();
+        }
+        Settings.Builder settings = Settings.builder();
+        settings.put(AbstractEsqlIntegTestCase.randomPragmas().getSettings());
+        settings.loadFromMap(pragmas);
+        return new QueryPragmas(settings.build());
+    }
+
+    public static EsqlQueryResponse runAsyncQuery(
+        Client client,
+        String query,
+        Boolean ccsMetadata,
+        TimeValue waitCompletionTime,
+        @Nullable Map<String, Object> pragmas
+    ) {
+        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
+        request.query(query);
+        request.pragmas(randomPragmasWithOverride(pragmas));
+        request.profile(randomInt(5) == 2);
+        request.columnar(randomBoolean());
+        if (ccsMetadata != null) {
+            request.includeCCSMetadata(ccsMetadata);
+        }
+        request.waitForCompletionTimeout(waitCompletionTime);
+        request.keepOnCompletion(true);
+        return runAsyncQuery(client, request);
+    }
+
+    /**
+     * Wait for the cluster to finish running the query.
+     */
+    public static void waitForCluster(Client client, String clusterName, String asyncExecutionId) throws Exception {
+        assertBusy(() -> {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client, asyncExecutionId)) {
+                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+                assertNotNull(executionInfo);
+                EsqlExecutionInfo.Cluster clusterInfo = executionInfo.getCluster(clusterName);
+                // the status of the local cluster won't move to SUCCESS until the reduction pipeline is done
+                if (RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY.equals(clusterName)
+                    && Objects.requireNonNullElse(clusterInfo.getTotalShards(), 0) > 0) {
+                    return;
+                }
+                assertThat(clusterInfo.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
+            }
+        });
+    }
+
+    public static EsqlQueryResponse runAsyncQuery(Client client, EsqlQueryRequest request) {
+        try {
+            return client.execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for query response", e);
+        }
+    }
+
+    public static AcknowledgedResponse deleteAsyncId(Client client, String id) {
+        try {
+            DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id);
+            return client.execute(TransportDeleteAsyncResultAction.TYPE, request).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for DELETE response", e);
+        }
+    }
+
+    public static EsqlQueryResponse getAsyncResponse(Client client, String id) {
+        try {
+            var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueMillis(1));
+            return client.execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for GET async result", e);
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java
new file mode 100644
index 000000000000..010931432e2e
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.ElasticsearchException;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A plugin that provides a script language "pause_fail" that can be used to make queries fail in a predictable way.
+ */
+public class FailingPauseFieldPlugin extends AbstractPauseFieldPlugin {
+    public static CountDownLatch startEmitting = new CountDownLatch(1);
+    public static CountDownLatch allowEmitting = new CountDownLatch(1);
+
+    @Override
+    protected String scriptTypeName() {
+        return "pause_fail";
+    }
+
+    public static void resetPlugin() {
+        allowEmitting = new CountDownLatch(1);
+        startEmitting = new CountDownLatch(1);
+    }
+
+    @Override
+    public void onStartExecute() {
+        startEmitting.countDown();
+    }
+
+    @Override
+    public boolean onWait() throws InterruptedException {
+        allowEmitting.await(30, TimeUnit.SECONDS);
+        throw new ElasticsearchException("Failing query");
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java
new file mode 100644
index 000000000000..2178d0bf1e9f
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.action.ActionType;
+import org.elasticsearch.xpack.core.esql.EsqlAsyncActionNames;
+
+public class EsqlAsyncStopAction extends ActionType<EsqlQueryResponse> {
+
+    public static final EsqlAsyncStopAction INSTANCE = new EsqlAsyncStopAction();
+
+    public static final String NAME = EsqlAsyncActionNames.ESQL_ASYNC_STOP_ACTION_NAME;
+
+    private EsqlAsyncStopAction() {
+        super(NAME);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java
index 61c7135cef42..c1e43a74c227 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java
@@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.Predicates;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.action.RestActions;
+import org.elasticsearch.transport.NoSuchRemoteClusterException;
 import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.transport.RemoteClusterService;
 import org.elasticsearch.xcontent.ParseField;
@@ -57,6 +58,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
     public static final ParseField FAILED_FIELD = new ParseField("failed");
     public static final ParseField DETAILS_FIELD = new ParseField("details");
     public static final ParseField TOOK = new ParseField("took");
+    public static final ParseField IS_PARTIAL_FIELD = new ParseField("is_partial");
 
     // Map key is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query
     // The Map itself is immutable after construction - all Clusters will be accounted for at the start of the search.
@@ -71,6 +73,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
     private final transient Predicate<String> skipUnavailablePredicate;
     private final transient Long relativeStartNanos;  // start time for an ESQL query for calculating took times
     private transient TimeValue planningTookTime;  // time elapsed since start of query to calling ComputeService.execute
+    private volatile boolean isPartial; // Does this request have partial results?
 
     public EsqlExecutionInfo(boolean includeCCSMetadata) {
         this(Predicates.always(), includeCCSMetadata);  // default all clusters to skip_unavailable=true
@@ -113,6 +116,13 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
         } else {
             this.includeCCSMetadata = false;
         }
+
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_RESPONSE_PARTIAL)) {
+            this.isPartial = in.readBoolean();
+        } else {
+            this.isPartial = false;
+        }
+
         this.skipUnavailablePredicate = Predicates.always();
         this.relativeStartNanos = null;
     }
@@ -128,6 +138,9 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
         if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
             out.writeBoolean(includeCCSMetadata);
         }
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_RESPONSE_PARTIAL)) {
+            out.writeBoolean(isPartial);
+        }
     }
 
     public boolean includeCCSMetadata() {
@@ -188,7 +201,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
     /**
      * @param clusterAlias to lookup skip_unavailable from
      * @return skip_unavailable setting (true/false)
-     * @throws org.elasticsearch.transport.NoSuchRemoteClusterException if clusterAlias is unknown to this node's RemoteClusterService
+     * @throws NoSuchRemoteClusterException if clusterAlias is unknown to this node's RemoteClusterService
      */
     public boolean isSkipUnavailable(String clusterAlias) {
         if (RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias)) {
@@ -279,6 +292,24 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable {
         return Objects.hash(clusterInfo, overallTook);
     }
 
+    public boolean isPartial() {
+        return isPartial;
+    }
+
+    /**
+     * Mark the query as having partial results.
+     */
+    public void markAsPartial() {
+        isPartial = true;
+    }
+
+    /**
+     * Mark this cluster as having partial results.
+     */
+    public void markClusterAsPartial(String clusterAlias) {
+        swapCluster(clusterAlias, (k, v) -> new Cluster.Builder(v).setStatus(Cluster.Status.PARTIAL).build());
+    }
+
     /**
      * Represents the search metadata about a particular cluster involved in a cross-cluster search.
      * The Cluster object can represent either the local cluster or a remote cluster.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
index 8530d9b48da0..26b532958942 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
@@ -208,6 +208,7 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action.
         if (executionInfo != null && executionInfo.overallTook() != null) {
             tookTime = ChunkedToXContentHelper.chunk((builder, p) -> {
                 builder.field("took", executionInfo.overallTook().millis());
+                builder.field(EsqlExecutionInfo.IS_PARTIAL_FIELD.getPreferredName(), executionInfo.isPartial());
                 return builder;
             });
         } else {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java
new file mode 100644
index 000000000000..c7477f738e95
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.Scope;
+import org.elasticsearch.rest.ServerlessScope;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
+
+import java.util.List;
+import java.util.Set;
+
+import static org.elasticsearch.rest.RestRequest.Method.POST;
+import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION;
+import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER;
+
+@ServerlessScope(Scope.PUBLIC)
+public class RestEsqlStopAsyncAction extends BaseRestHandler {
+    @Override
+    public List<Route> routes() {
+        return List.of(new Route(POST, "/_query/async/{id}/stop"));
+    }
+
+    @Override
+    public String getName() {
+        return "esql_async_stop";
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
+        AsyncStopRequest stopReq = new AsyncStopRequest(request.param("id"));
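+        // Results are rendered through EsqlResponseListener, the same listener the other ES|QL REST endpoints use.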
+        return channel -> client.execute(EsqlAsyncStopAction.INSTANCE, stopReq, new EsqlResponseListener(channel, request));
+    }
+
+    @Override
+    protected Set<String> responseParams() {
+        return Set.of(URL_PARAM_DELIMITER, DROP_NULL_COLUMNS_OPTION);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index 7d99cf598859..75619958c522 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -55,9 +55,9 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Function;
 import java.util.function.Supplier;
 
 import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME;
@@ -83,6 +83,7 @@ public class ComputeService {
     private final AtomicLong childSessionIdGenerator = new AtomicLong();
     private final DataNodeComputeHandler dataNodeComputeHandler;
     private final ClusterComputeHandler clusterComputeHandler;
+    private final ExchangeService exchangeService;
 
     @SuppressWarnings("this-escape")
     public ComputeService(
@@ -113,6 +114,7 @@ public class ComputeService {
             esqlExecutor,
             dataNodeComputeHandler
         );
+        this.exchangeService = exchangeService;
     }
 
     public void execute(
@@ -195,11 +197,12 @@ public class ComputeService {
             var exchangeSource = new ExchangeSourceHandler(
                 queryPragmas.exchangeBufferSize(),
                 transportService.getThreadPool().executor(ThreadPool.Names.SEARCH),
-                computeListener.acquireAvoid()
+                ActionListener.runBefore(computeListener.acquireAvoid(), () -> exchangeService.removeExchangeSourceHandler(sessionId))
             );
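+            // Register the exchange source so the async stop action can look up this session and finish it early.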
+            exchangeService.addExchangeSourceHandler(sessionId, exchangeSource);
             try (Releasable ignored = exchangeSource.addEmptySink()) {
                 // run compute on the coordinator
-                final AtomicReference<ComputeResponse> localResponse = new AtomicReference<>(new ComputeResponse(List.of()));
+                final AtomicBoolean localClusterWasInterrupted = new AtomicBoolean();
                 try (
                     var localListener = new ComputeListener(
                         transportService.getThreadPool(),
@@ -207,16 +210,13 @@ public class ComputeService {
                         computeListener.acquireCompute().delegateFailure((l, profiles) -> {
                             if (execInfo.isCrossClusterSearch() && execInfo.clusterAliases().contains(LOCAL_CLUSTER)) {
                                 var tookTime = TimeValue.timeValueNanos(System.nanoTime() - execInfo.getRelativeStartNanos());
-                                var r = localResponse.get();
-                                var merged = new ComputeResponse(
-                                    profiles,
-                                    tookTime,
-                                    r.totalShards,
-                                    r.successfulShards,
-                                    r.skippedShards,
-                                    r.failedShards
+                                var status = localClusterWasInterrupted.get()
+                                    ? EsqlExecutionInfo.Cluster.Status.PARTIAL
+                                    : EsqlExecutionInfo.Cluster.Status.SUCCESSFUL;
+                                execInfo.swapCluster(
+                                    LOCAL_CLUSTER,
+                                    (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(status).setTook(tookTime).build()
                                 );
-                                updateExecutionInfo(execInfo, LOCAL_CLUSTER, merged);
                             }
                             l.onResponse(profiles);
                         })
@@ -241,7 +241,17 @@ public class ComputeService {
                             exchangeSource,
                             cancelQueryOnFailure,
                             localListener.acquireCompute().map(r -> {
-                                localResponse.set(r);
+                                localClusterWasInterrupted.set(execInfo.isPartial());
+                                if (execInfo.isCrossClusterSearch() && execInfo.clusterAliases().contains(LOCAL_CLUSTER)) {
+                                    execInfo.swapCluster(
+                                        LOCAL_CLUSTER,
+                                        (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(r.getTotalShards())
+                                            .setSuccessfulShards(r.getSuccessfulShards())
+                                            .setSkippedShards(r.getSkippedShards())
+                                            .setFailedShards(r.getFailedShards())
+                                            .build()
+                                    );
+                                }
                                 return r.getProfiles();
                             })
                         );
@@ -269,22 +279,19 @@ public class ComputeService {
     }
 
     private void updateExecutionInfo(EsqlExecutionInfo executionInfo, String clusterAlias, ComputeResponse resp) {
-        TimeValue tookOnCluster;
-        if (resp.getTook() != null) {
-            TimeValue remoteExecutionTime = resp.getTook();
-            final long planningTime;
-            if (clusterAlias.equals(LOCAL_CLUSTER)) {
-                planningTime = 0L;
+        Function<EsqlExecutionInfo.Cluster.Status, EsqlExecutionInfo.Cluster.Status> runningToSuccess = status -> {
+            if (status == EsqlExecutionInfo.Cluster.Status.RUNNING) {
+                return executionInfo.isPartial() ? EsqlExecutionInfo.Cluster.Status.PARTIAL : EsqlExecutionInfo.Cluster.Status.SUCCESSFUL;
             } else {
-                planningTime = executionInfo.planningTookTime().nanos();
+                return status;
             }
-            tookOnCluster = new TimeValue(planningTime + remoteExecutionTime.nanos(), TimeUnit.NANOSECONDS);
+        };
+        if (resp.getTook() != null) {
+            var tookTime = TimeValue.timeValueNanos(executionInfo.planningTookTime().nanos() + resp.getTook().nanos());
             executionInfo.swapCluster(
                 clusterAlias,
-                (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v)
-                    // for now ESQL doesn't return partial results, so set status to SUCCESSFUL
-                    .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)
-                    .setTook(tookOnCluster)
+                (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(runningToSuccess.apply(v.getStatus()))
+                    .setTook(tookTime)
                     .setTotalShards(resp.getTotalShards())
                     .setSuccessfulShards(resp.getSuccessfulShards())
                     .setSkippedShards(resp.getSkippedShards())
@@ -294,14 +301,11 @@ public class ComputeService {
         } else {
             // if the cluster is an older version and does not send back took time, then calculate it here on the coordinator
             // and leave shard info unset, so it is not shown in the CCS metadata section of the JSON response
-            long remoteTook = System.nanoTime() - executionInfo.getRelativeStartNanos();
-            tookOnCluster = new TimeValue(remoteTook, TimeUnit.NANOSECONDS);
+            var tookTime = TimeValue.timeValueNanos(System.nanoTime() - executionInfo.getRelativeStartNanos());
             executionInfo.swapCluster(
                 clusterAlias,
-                (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v)
-                    // for now ESQL doesn't return partial results, so set status to SUCCESSFUL
-                    .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)
-                    .setTook(tookOnCluster)
+                (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(runningToSuccess.apply(v.getStatus()))
+                    .setTook(tookTime)
                     .build()
             );
         }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
index b79dda900f39..4379e2e8041a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
@@ -52,6 +52,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
 import org.elasticsearch.xpack.esql.EsqlInfoTransportAction;
 import org.elasticsearch.xpack.esql.EsqlUsageTransportAction;
 import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction;
+import org.elasticsearch.xpack.esql.action.EsqlAsyncStopAction;
 import org.elasticsearch.xpack.esql.action.EsqlQueryAction;
 import org.elasticsearch.xpack.esql.action.EsqlQueryRequestBuilder;
 import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction;
@@ -60,6 +61,7 @@ import org.elasticsearch.xpack.esql.action.RestEsqlAsyncQueryAction;
 import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction;
 import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction;
 import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction;
+import org.elasticsearch.xpack.esql.action.RestEsqlStopAsyncAction;
 import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator;
 import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator;
 import org.elasticsearch.xpack.esql.execution.PlanExecutor;
@@ -151,7 +153,8 @@ public class EsqlPlugin extends Plugin implements ActionPlugin {
             new ActionHandler<>(XPackUsageFeatureAction.ESQL, EsqlUsageTransportAction.class),
             new ActionHandler<>(XPackInfoFeatureAction.ESQL, EsqlInfoTransportAction.class),
             new ActionHandler<>(EsqlResolveFieldsAction.TYPE, EsqlResolveFieldsAction.class),
-            new ActionHandler<>(EsqlSearchShardsAction.TYPE, EsqlSearchShardsAction.class)
+            new ActionHandler<>(EsqlSearchShardsAction.TYPE, EsqlSearchShardsAction.class),
+            new ActionHandler<>(EsqlAsyncStopAction.INSTANCE, TransportEsqlAsyncStopAction.class)
         );
     }
 
@@ -171,6 +174,7 @@ public class EsqlPlugin extends Plugin implements ActionPlugin {
             new RestEsqlQueryAction(),
             new RestEsqlAsyncQueryAction(),
             new RestEsqlGetAsyncResultAction(),
+            new RestEsqlStopAsyncAction(),
             new RestEsqlDeleteAsyncResultAction()
         );
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
index 4bcebcfe64cb..5658db059918 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
@@ -75,7 +75,7 @@ public class TransportEsqlAsyncGetResultsAction extends AbstractTransportQlAsync
 
     /**
      * Unwraps the exception in the case of failure. This keeps the exception types
-     * the same as the sync API, namely ParsingException and ParsingException.
+     * the same as the sync API, namely ParsingException and VerificationException.
      */
     static <R> ActionListener<R> unwrapListener(ActionListener<R> listener) {
         return new ActionListener<>() {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java
new file mode 100644
index 000000000000..a4007a520ed3
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plugin;
+
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionListenerResponseHandler;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.compute.EsqlRefCountingListener;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.operator.exchange.ExchangeService;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.injection.guice.Inject;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.XPackPlugin;
+import org.elasticsearch.xpack.core.async.AsyncExecutionId;
+import org.elasticsearch.xpack.core.async.AsyncSearchSecurity;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
+import org.elasticsearch.xpack.core.async.AsyncTaskIndexService;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.core.security.SecurityContext;
+import org.elasticsearch.xpack.esql.action.EsqlAsyncStopAction;
+import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
+import org.elasticsearch.xpack.esql.action.EsqlQueryTask;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN;
+
+/**
+ * This action will stop a running async request and collect the results.
+ * If the request is already finished, it will do the same thing as the regular async get.
+ */
+public class TransportEsqlAsyncStopAction extends HandledTransportAction<AsyncStopRequest, EsqlQueryResponse> {
+
+    private final TransportEsqlQueryAction queryAction;
+    private final TransportEsqlAsyncGetResultsAction getResultsAction;
+    private final ExchangeService exchangeService;
+    private final BlockFactory blockFactory;
+    private final ClusterService clusterService;
+    private final TransportService transportService;
+    private final AsyncSearchSecurity security;
+
+    @Inject
+    public TransportEsqlAsyncStopAction(
+        TransportService transportService,
+        ClusterService clusterService,
+        ActionFilters actionFilters,
+        TransportEsqlQueryAction queryAction,
+        TransportEsqlAsyncGetResultsAction getResultsAction,
+        Client client,
+        ExchangeService exchangeService,
+        BlockFactory blockFactory
+    ) {
+        super(EsqlAsyncStopAction.NAME, transportService, actionFilters, AsyncStopRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
+        this.queryAction = queryAction;
+        this.getResultsAction = getResultsAction;
+        this.exchangeService = exchangeService;
+        this.blockFactory = blockFactory;
+        this.transportService = transportService;
+        this.clusterService = clusterService;
+        this.security = new AsyncSearchSecurity(
+            XPackPlugin.ASYNC_RESULTS_INDEX,
+            new SecurityContext(clusterService.getSettings(), client.threadPool().getThreadContext()),
+            client,
+            ASYNC_SEARCH_ORIGIN
+        );
+    }
+
+    @Override
+    protected void doExecute(Task task, AsyncStopRequest request, ActionListener<EsqlQueryResponse> listener) {
+        AsyncExecutionId searchId = AsyncExecutionId.decode(request.getId());
+        DiscoveryNode node = clusterService.state().nodes().get(searchId.getTaskId().getNodeId());
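+        // Handle the stop locally if this node owns the async task (or the owning node has left); otherwise forward it.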
+        if (clusterService.localNode().getId().equals(searchId.getTaskId().getNodeId()) || node == null) {
+            stopQueryAndReturnResult(task, searchId, listener);
+        } else {
+            transportService.sendRequest(
+                node,
+                EsqlAsyncStopAction.NAME,
+                request,
+                new ActionListenerResponseHandler<>(listener, EsqlQueryResponse.reader(blockFactory), EsExecutors.DIRECT_EXECUTOR_SERVICE)
+            );
+        }
+    }
+
+    /**
+     * Returns the ID for the stored compute session. See {@link TransportEsqlQueryAction#sessionID(Task)}.
+     */
+    private String sessionID(AsyncExecutionId asyncId) {
+        return new TaskId(clusterService.localNode().getId(), asyncId.getTaskId().getId()).toString();
+    }
+
+    private void stopQueryAndReturnResult(Task task, AsyncExecutionId asyncId, ActionListener<EsqlQueryResponse> listener) {
+        String asyncIdStr = asyncId.getEncoded();
+        TransportEsqlQueryAction.EsqlQueryListener asyncListener = queryAction.getAsyncListener(asyncIdStr);
+        if (asyncListener == null) {
+            // This should mean one of two things: either the request ID is bad, or the query has already finished.
+            // In both cases, let the regular async get deal with it.
+            var getAsyncResultRequest = new GetAsyncResultRequest(asyncIdStr);
+            // TODO: this should not be happening, but if the listener is not registered and the query is not finished,
+            // we give it some time to finish
+            getAsyncResultRequest.setWaitForCompletionTimeout(new TimeValue(1, TimeUnit.SECONDS));
+            getResultsAction.execute(task, getAsyncResultRequest, listener);
+            return;
+        }
+        try {
+            EsqlQueryTask asyncTask = AsyncTaskIndexService.getTask(taskManager, asyncId, EsqlQueryTask.class);
+            if (false == security.currentUserHasAccessToTask(asyncTask)) {
+                throw new ResourceNotFoundException(asyncId + " not found");
+            }
+        } catch (IOException e) {
+            throw new ResourceNotFoundException(asyncId + " not found", e);
+        }
+        // Here we will wait for both the response to become available and for the finish operation to complete
+        var responseHolder = new AtomicReference<EsqlQueryResponse>();
+        try (var refs = new EsqlRefCountingListener(listener.map(unused -> responseHolder.get()))) {
+            asyncListener.addListener(refs.acquire().map(r -> {
+                responseHolder.set(r);
+                return null;
+            }));
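+            // Mark the results as partial before finishing the session early, so the final response reports is_partial=true.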
+            asyncListener.markAsPartial();
+            exchangeService.finishSessionEarly(sessionID(asyncId), refs.acquire());
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
index d83239545c38..a32b4591943f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
@@ -13,12 +13,14 @@ import org.elasticsearch.action.admin.cluster.stats.CCSUsage;
 import org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.operator.exchange.ExchangeService;
@@ -81,6 +83,8 @@ public class TransportEsqlQueryAction extends HandledTransportAction<EsqlQueryRequest, EsqlQueryResponse> {
+    private final Map<String, EsqlQueryListener> asyncListeners = ConcurrentCollections.newConcurrentMap();
 
     @Inject
     @SuppressWarnings("this-escape")
@@ -179,11 +183,41 @@ public class TransportEsqlQueryAction extends HandledTransportAction<EsqlQueryRequest, EsqlQueryResponse> {
+    public static class EsqlQueryListener extends SubscribableListener<EsqlQueryResponse> {
+        private EsqlExecutionInfo executionInfo;
+
+        public EsqlQueryListener(EsqlExecutionInfo executionInfo) {
+            this.executionInfo = executionInfo;
+        }
+
+        public EsqlExecutionInfo getExecutionInfo() {
+            return executionInfo;
+        }
+
+        public void markAsPartial() {
+            if (executionInfo != null) {
+                executionInfo.markAsPartial();
+            }
+        }
+    }
+
     @Override
     public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener<EsqlQueryResponse> listener) {
         // set EsqlExecutionInfo on async-search task so that it is accessible to GET _query/async while the query is still running
         task.setExecutionInfo(createEsqlExecutionInfo(request));
-        ActionListener.run(listener, l -> innerExecute(task, request, l));
+        // Since the request is async, we wrap the listener in a SubscribableListener so that other endpoints,
+        // such as _query/async/stop, can also subscribe to the results
+        EsqlQueryListener subListener = new EsqlQueryListener(task.executionInfo());
+        String asyncExecutionId = task.getExecutionId().getEncoded();
+        subListener.addListener(ActionListener.runAfter(listener, () -> asyncListeners.remove(asyncExecutionId)));
+        asyncListeners.put(asyncExecutionId, subListener);
+        ActionListener.run(subListener, l -> innerExecute(task, request, l));
+    }
+
+    public EsqlQueryListener getAsyncListener(String executionId) {
+        return asyncListeners.get(executionId);
     }
 
     private void innerExecute(Task task, EsqlQueryRequest request, ActionListener<EsqlQueryResponse> listener) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
index f4c68f141460..5743c7c6ec57 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
@@ -518,30 +518,66 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase<EsqlQueryResponse> {
     public void testChunkResponseSizeColumnar() {
-        int sizeClusterDetails = 14;
         try (EsqlQueryResponse resp = randomResponse(true, null)) {
             int columnCount = resp.pages().get(0).getBlockCount();
             int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2;
-            assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize);
+            assertChunkCount(resp, r -> 5 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize);
         }
 
         try (EsqlQueryResponse resp = randomResponseAsync(true, null, true)) {
             int columnCount = resp.pages().get(0).getBlockCount();
             int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2;
-            assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize); // is_running
+            assertChunkCount(resp, r -> 6 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize); // is_running
         }
     }
 
     public void testChunkResponseSizeRows() {
-        int sizeClusterDetails = 14;
         try (EsqlQueryResponse resp = randomResponse(false, null)) {
             int bodySize = resp.pages().stream().mapToInt(Page::getPositionCount).sum();
-            assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize);
+            assertChunkCount(resp, r -> 5 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize);
         }
         try (EsqlQueryResponse resp = randomResponseAsync(false, null, true)) {
             int bodySize = resp.pages().stream().mapToInt(Page::getPositionCount).sum();
-            assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize);
+            assertChunkCount(resp, r -> 6 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize);
         }
     }
 
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java
index 8d8629db96fc..23f33b2351c2 100644
--- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java
+++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java
@@ -78,6 +78,7 @@ public class CrossClusterEsqlRCS1MissingIndicesIT extends AbstractRemoteClusterS
     void assertExpectedClustersForMissingIndicesTests(Map<String, Object> responseMap, List<ExpectedCluster> expected) {
         Map<String, Object> clusters = (Map<String, Object>) responseMap.get("_clusters");
         assertThat((int) responseMap.get("took"), greaterThan(0));
+        assertThat((boolean) responseMap.get("is_partial"), is(false));
 
         Map<String, Object> detailsMap = (Map<String, Object>) clusters.get("details");
         assertThat(detailsMap.size(), is(expected.size()));
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java
index b6fc43e2a6e4..c7623779ee21 100644
--- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java
+++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java
@@ -94,6 +94,7 @@ public class CrossClusterEsqlRCS1UnavailableRemotesIT extends AbstractRemoteClus
         assertThat((int) map.get("took"), greaterThan(0));
         assertThat(columns.size(), is(4));
         assertThat(values.size(), is(9));
+        assertThat((boolean) map.get("is_partial"), is(false));
 
         assertThat((int) clusters.get("total"), is(2));
         assertThat((int) clusters.get("successful"), is(2));
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java
index 52cd0655fbfd..b62d82c47f75 100644
--- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java
+++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java
@@ -112,6 +112,7 @@ public class CrossClusterEsqlRCS2UnavailableRemotesIT extends AbstractRemoteClus
         assertThat((int) map.get("took"), greaterThan(0));
         assertThat(columns.size(), is(4));
         assertThat(values.size(), is(9));
+        assertThat((boolean) map.get("is_partial"), is(false));
 
         assertThat((int) clusters.get("total"), is(2));
         assertThat((int) clusters.get("successful"), is(2));
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java
index 41f2eab6a00e..42d03838ed8d 100644
--- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java
+++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java
@@ -18,6 +18,7 @@ import org.elasticsearch.common.CheckedBiConsumer;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.CheckedConsumer;
+import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
@@ -330,6 +331,19 @@ public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe
         assertOK(adminClient().performRequest(putUserRequest));
     }
 
+    private static String populateOtherUser() throws IOException {
+        String otherUser = REMOTE_SEARCH_USER + "_other";
+
+        final var putUserRequest = new Request("PUT", "/_security/user/" + otherUser);
+        putUserRequest.setJsonEntity("""
+            {
+              "password": "x-pack-test-password",
+              "roles" : ["remote_search"]
+            }""");
+        assertOK(adminClient().performRequest(putUserRequest));
+        return otherUser;
+    }
+
     @After
     public void wipeData() throws Exception {
         CheckedConsumer wipe = client -> {
@@ -1198,7 +1212,116 @@ public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe
         }
     }
 
+    public void testCrossClusterAsyncQuery() throws Exception {
+        assumeTrue("delay() is only available in snapshot builds", Build.current().isSnapshot());
+        configureRemoteCluster();
+        populateData();
+        String otherUser = populateOtherUser();
+
+        // Adding a delay here so that the async query does not complete before we check its status
+        Request request = esqlRequestAsync("""
+            FROM employees, *:employees
+            | SORT emp_id ASC
+            | LIMIT 10
+            | WHERE delay(10ms)
+            | KEEP emp_id, department""");
+        Response response = performRequestWithRemoteSearchUser(request);
+        assertOK(response);
+        Map<String, Object> responseAsMap = entityAsMap(response);
+        assumeTrue("Query finished too fast, can not test", (boolean) responseAsMap.get("is_running"));
+
+        String asyncId = (String) responseAsMap.get("id");
+        response = performRequestWithRemoteSearchUser(esqlAsyncGetRequest(asyncId));
+        assertOK(response);
+        responseAsMap = entityAsMap(response);
+        assertThat(responseAsMap.get("is_running"), equalTo(true));
+
+        // Other user can't see the async query
+        ResponseException error = expectThrows(
+            ResponseException.class,
+            () -> performRequestWithUser(esqlAsyncGetRequest(asyncId), otherUser)
+        );
+        assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404));
+        assertThat(error.getMessage(), containsString("resource_not_found_exception"));
+
+        // Clean up
+        response = performRequestWithRemoteSearchUser(esqlAsyncDeleteRequest(asyncId));
+        assertOK(response);
+    }
+
+    public void testCrossClusterAsyncQueryStop() throws Exception {
+        assumeTrue("delay() is only available in snapshot builds", Build.current().isSnapshot());
+        configureRemoteCluster();
+        populateData();
+        String otherUser = populateOtherUser();
+
+        // query both the local and the remote clusters
+        Request request = esqlRequestAsync("""
+            FROM employees, *:employees
+            | SORT emp_id ASC
+            | LIMIT 10
+            | WHERE delay(10ms)
+            | KEEP emp_id, department""");
+        Response response = performRequestWithRemoteSearchUser(request);
+        assertOK(response);
+        Map<String, Object> responseAsMap = entityAsMap(response);
+        assertThat(responseAsMap.get("is_running"), equalTo(true));
+        String asyncId = (String) responseAsMap.get("id");
+
+        response = performRequestWithRemoteSearchUser(esqlAsyncGetRequest(asyncId));
+        assertOK(response);
+        responseAsMap = entityAsMap(response);
+        assertThat(responseAsMap.get("is_running"), equalTo(true));
+
+        // Other user can't see the async query
+        ResponseException error = expectThrows(
+            ResponseException.class,
+            () -> performRequestWithUser(esqlAsyncStopRequest(asyncId), otherUser)
+        );
+        assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404));
+        assertThat(error.getMessage(), containsString("resource_not_found_exception"));
+
+        response = performRequestWithRemoteSearchUser(esqlAsyncStopRequest(asyncId));
+        assertOK(response);
+        responseAsMap = entityAsMap(response);
+        assertThat(responseAsMap.get("is_running"), equalTo(false));
+
+        // Clean up
+        response = performRequestWithRemoteSearchUser(esqlAsyncDeleteRequest(asyncId));
+        assertOK(response);
+    }
+
     protected Request esqlRequest(String command) throws IOException {
+        XContentBuilder body = getBody(command, null);
+        Request request = new Request("POST", "_query");
+        request.setJsonEntity(org.elasticsearch.common.Strings.toString(body));
+        return request;
+    }
+
+    protected Request esqlRequestAsync(String command) throws IOException {
+        XContentBuilder body = getBody(command, Map.of("wait_for_completion_timeout", "1ms"));
+        Request request = new Request("POST", "_query/async");
+        request.setJsonEntity(org.elasticsearch.common.Strings.toString(body));
+        return request;
+    }
+
+    protected Request esqlAsyncGetRequest(String asyncID) {
+        Request request = new Request("GET", "_query/async/" + asyncID);
+        request.addParameter("wait_for_completion_timeout", "1ms");
+        return request;
+    }
+
+    protected Request esqlAsyncStopRequest(String asyncID) {
+        Request request = new Request("POST", "_query/async/" + asyncID + "/stop");
+        return request;
+    }
+
+    protected Request esqlAsyncDeleteRequest(String asyncID) {
+        Request request = new Request("DELETE", "_query/async/" + asyncID);
+        return request;
+    }
+
+    private static XContentBuilder getBody(String command, @Nullable Map<String, Object> extraParams) throws IOException {
         XContentBuilder body = JsonXContent.contentBuilder();
         body.startObject();
         body.field("query", command);
@@ -1224,10 +1347,17 @@ public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe
                 body.endObject();
             }
         }
+        if (extraParams != null) {
+            extraParams.forEach((name, value) -> {
+                try {
+                    body.field(name, value);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            });
+        }
         body.endObject();
-        Request request = new Request("POST", "_query");
-        request.setJsonEntity(org.elasticsearch.common.Strings.toString(body));
-        return request;
+        return body;
     }
 
     private Response performRequestWithRemoteSearchUser(final Request request) throws IOException {
@@ -1237,6 +1367,11 @@ public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe
         return client().performRequest(request);
     }
 
+    private Response performRequestWithUser(final Request request, String user) throws IOException {
+        request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(user, PASS)));
+        return client().performRequest(request);
+    }
+
     private Response performRequestWithRemoteSearchUserViaAPIKey(Request request, String encodedApiKey) throws IOException {
         request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encodedApiKey));
         return client().performRequest(request);
diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
index 417d66019aae..5a2d24e1aa3c 100644
--- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
+++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
@@ -567,6 +567,7 @@ public class Constants {
         "indices:data/read/eql/async/get",
         "indices:data/read/esql",
         "indices:data/read/esql/async/get",
+        "indices:data/read/esql/async/stop",
         "indices:data/read/esql/resolve_fields",
         "indices:data/read/esql/search_shards",
         "indices:data/read/explain",
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java
index 614401770cfb..b9270b603568 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java
@@ -1030,6 +1030,7 @@ public class RBACEngine implements AuthorizationEngine {
             || action.equals(TransportDeleteAsyncResultAction.TYPE.name())
             || action.equals(EqlAsyncActionNames.EQL_ASYNC_GET_RESULT_ACTION_NAME)
             || action.equals(EsqlAsyncActionNames.ESQL_ASYNC_GET_RESULT_ACTION_NAME)
+            || action.equals(EsqlAsyncActionNames.ESQL_ASYNC_STOP_ACTION_NAME)
             || action.equals(SqlAsyncActionNames.SQL_ASYNC_GET_RESULT_ACTION_NAME);
     }
 

From dc4fa2617451d57bc0a72a09990ae0855bfce95f Mon Sep 17 00:00:00 2001
From: Nik Everett 
Date: Thu, 23 Jan 2025 12:40:09 -0500
Subject: [PATCH 23/29] Speed up COALESCE significantly (#120139)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

```
                      before              after
     (operation)   Score   Error       Score   Error  Units
 coalesce_2_noop  75.949 ± 3.961  ->   0.010 ±  0.001 ns/op  99.9%
coalesce_2_eager  99.299 ± 6.959  ->   4.292 ±  0.227 ns/op  95.7%
 coalesce_2_lazy 113.118 ± 5.747  ->  26.746 ±  0.954 ns/op  76.4%
```

We tend to advise folks that "COALESCE is faster than CASE" but, as of
8.16.0/https://github.com/elastic/elasticsearch/pull/112295, that was no longer true. I was working with someone a few
days ago to port a scripted_metric aggregation to ESQL and we saw
COALESCE taking ~60% of the time. That won't do.

The trouble is that CASE and COALESCE have to be *lazy*, meaning that
operations like:
```
COALESCE(a, 1 / b)
```
should never emit a warning if `a` is not `null`, even if `b` is `0`. In
8.16/https://github.com/elastic/elasticsearch/pull/112295 CASE grew an optimization where it could operate non-lazily
if it was flagged as "safe". This brings a similar optimization to
COALESCE, shown above as `coalesce_2_eager`, a 95.7% improvement.
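
For reference, here's the shape of that dispatch, condensed from the
generated evaluators added later in this patch (the anonymous factory
classes and their `toString`s are elided for brevity):

```
static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
    List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
    if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
        // Every child is safe to evaluate eagerly: no warnings or failures possible.
        return context -> new CoalesceBooleanEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList());
    }
    // Some child could warn or fail, so fall back to per-position laziness.
    return context -> new CoalesceBooleanLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList());
}
```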

It also brings an arguably more important optimization - entire-block
execution for COALESCE. The short version is that, if the first
parameter of COALESCE returns no nulls we can return it without doing
anything lazily. There are a few more cases, but the upshot is that
COALESCE is pretty much *free* in cases where long strings of results
are `null` or not `null`. That's the `coalesce_2_noop` line.
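
That entire-block loop, as generated (Boolean variant shown):

```
private BooleanBlock entireBlock(Page page) {
    int lastFullBlockIdx = 0;
    while (true) {
        BooleanBlock lastFullBlock = (BooleanBlock) evaluators.get(lastFullBlockIdx++).eval(page);
        if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) {
            // Last evaluator, or a block with no nulls at all: COALESCE is done.
            return lastFullBlock;
        }
        if (lastFullBlock.areAllValuesNull()) {
            // All nulls and not the last result, so we don't need any of it.
            lastFullBlock.close();
            continue;
        }
        // Mixed nulls and non-nulls: fill in the gaps per position.
        return perPosition(page, lastFullBlock, lastFullBlockIdx);
    }
}
```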

Finally, when there are mixed null and non-null values we were using a
single builder with some fairly inefficient paths. This specializes them
per type and skips some slow null-checking where possible. That's the
`coalesce_2_lazy` result, a more modest 76.4% improvement.
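
The specialization hangs off a new single-position `copyFrom` on each
per-type block builder (Long variant shown; the rest are generated from
the same template):

```
@Override
public LongBlockBuilder copyFrom(LongBlock block, int position) {
    if (block.isNull(position)) {
        appendNull();
        return this;
    }
    int count = block.getValueCount(position);
    int i = block.getFirstValueIndex(position);
    if (count == 1) {
        // Single value: skip the position-entry bookkeeping entirely.
        appendLong(block.getLong(i++));
        return this;
    }
    beginPositionEntry();
    for (int v = 0; v < count; v++) {
        appendLong(block.getLong(i++));
    }
    endPositionEntry();
    return this;
}
```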

NOTE: These percentages are improvements to COALESCE itself, or to COALESCE with some low-overhead operators like `+`. If COALESCE isn't taking a *ton* of time in your query, don't get particularly excited about this. It's fun though.

Closes #119953
---
 .gitattributes                                |   1 +
 benchmarks/README.md                          |   5 +-
 .../compute/operator/EvalBenchmark.java       |  83 ++++++-
 x-pack/plugin/esql/build.gradle               |  27 ++
 .../compute/data/BooleanBlock.java            |   8 +
 .../compute/data/BooleanBlockBuilder.java     |  52 ++--
 .../compute/data/BytesRefBlock.java           |  10 +
 .../compute/data/BytesRefBlockBuilder.java    |  53 ++--
 .../compute/data/DoubleBlock.java             |   8 +
 .../compute/data/DoubleBlockBuilder.java      |  52 ++--
 .../compute/data/FloatBlock.java              |   8 +
 .../compute/data/FloatBlockBuilder.java       |  52 ++--
 .../elasticsearch/compute/data/IntBlock.java  |   8 +
 .../compute/data/IntBlockBuilder.java         |  52 ++--
 .../elasticsearch/compute/data/LongBlock.java |   8 +
 .../compute/data/LongBlockBuilder.java        |  52 ++--
 .../org/elasticsearch/compute/data/Block.java |   5 +
 .../compute/data/X-Block.java.st              |  12 +
 .../compute/data/X-BlockBuilder.java.st       |  60 +++--
 .../compute/operator/EvalOperator.java        |   8 +-
 .../data/BlockBuilderCopyFromTests.java       |  12 +-
 .../nulls/CoalesceBooleanEvaluator.java       | 225 +++++++++++++++++
 .../nulls/CoalesceBytesRefEvaluator.java      | 228 +++++++++++++++++
 .../scalar/nulls/CoalesceDoubleEvaluator.java | 225 +++++++++++++++++
 .../scalar/nulls/CoalesceIntEvaluator.java    | 225 +++++++++++++++++
 .../scalar/nulls/CoalesceLongEvaluator.java   | 225 +++++++++++++++++
 .../function/scalar/conditional/Case.java     |   3 +
 .../function/scalar/nulls/Coalesce.java       |  97 ++------
 .../scalar/nulls/X-CoalesceEvaluator.java.st  | 234 ++++++++++++++++++
 .../function/scalar/nulls/CoalesceTests.java  | 100 +++++++-
 30 files changed, 1943 insertions(+), 195 deletions(-)
 create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java
 create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java
 create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java
 create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java
 create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java
 create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st

diff --git a/.gitattributes b/.gitattributes
index 04881c92ede0..a0f434f16b32 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -11,6 +11,7 @@ x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/*.interp li
 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer*.java linguist-generated=true
 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser*.java linguist-generated=true
 x-pack/plugin/esql/src/main/generated/** linguist-generated=true
+x-pack/plugin/esql/src/main/generated-src/** linguist-generated=true
 
 # ESQL functions docs are autogenerated. More information at `docs/reference/esql/functions/README.md`
 docs/reference/esql/functions/*/** linguist-generated=true
diff --git a/benchmarks/README.md b/benchmarks/README.md
index d7b324acfef8..0cf95a2e81b9 100644
--- a/benchmarks/README.md
+++ b/benchmarks/README.md
@@ -126,9 +126,12 @@ exit
 Grab the async profiler from https://github.com/jvm-profiling-tools/async-profiler
 and run `prof async` like so:
 ```
-gradlew -p benchmarks/ run --args 'LongKeyedBucketOrdsBenchmark.multiBucket -prof "async:libPath=/home/nik9000/Downloads/tmp/async-profiler-1.8.3-linux-x64/build/libasyncProfiler.so;dir=/tmp/prof;output=flamegraph"'
+gradlew -p benchmarks/ run --args 'LongKeyedBucketOrdsBenchmark.multiBucket -prof "async:libPath=/home/nik9000/Downloads/async-profiler-3.0-29ee888-linux-x64/lib/libasyncProfiler.so;dir=/tmp/prof;output=flamegraph"'
 ```
 
+Note: As of January 2025 the latest release of async profiler doesn't work
+      with our JDK but the nightly is fine.
+
 If you are on Mac, this'll warn you that you downloaded the shared library from
 the internet. You'll need to go to settings and allow it to run.
 
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java
index d3259b960471..19d72a1f84f2 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java
@@ -38,6 +38,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case;
 import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc;
 import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs;
 import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin;
+import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;
@@ -96,6 +97,9 @@ public class EvalBenchmark {
             "add_double",
             "case_1_eager",
             "case_1_lazy",
+            "coalesce_2_noop",
+            "coalesce_2_eager",
+            "coalesce_2_lazy",
             "date_trunc",
             "equal_to_const",
             "long_equal_to_long",
@@ -142,8 +146,34 @@ public class EvalBenchmark {
                     lhs = new Add(Source.EMPTY, lhs, new Literal(Source.EMPTY, 1L, DataType.LONG));
                     rhs = new Add(Source.EMPTY, rhs, new Literal(Source.EMPTY, 1L, DataType.LONG));
                 }
-                yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Case(Source.EMPTY, condition, List.of(lhs, rhs)), layout(f1, f2))
-                    .get(driverContext);
+                EvalOperator.ExpressionEvaluator evaluator = EvalMapper.toEvaluator(
+                    FOLD_CONTEXT,
+                    new Case(Source.EMPTY, condition, List.of(lhs, rhs)),
+                    layout(f1, f2)
+                ).get(driverContext);
+                String desc = operation.endsWith("lazy") ? "CaseLazyEvaluator" : "CaseEagerEvaluator";
+                if (evaluator.toString().contains(desc) == false) {
+                    throw new IllegalArgumentException("Evaluator was [" + evaluator + "] but expected one containing [" + desc + "]");
+                }
+                yield evaluator;
+            }
+            case "coalesce_2_noop", "coalesce_2_eager", "coalesce_2_lazy" -> {
+                FieldAttribute f1 = longField();
+                FieldAttribute f2 = longField();
+                Expression lhs = f1;
+                if (operation.endsWith("lazy")) {
+                    lhs = new Add(Source.EMPTY, lhs, new Literal(Source.EMPTY, 1L, DataType.LONG));
+                }
+                EvalOperator.ExpressionEvaluator evaluator = EvalMapper.toEvaluator(
+                    FOLD_CONTEXT,
+                    new Coalesce(Source.EMPTY, lhs, List.of(f2)),
+                    layout(f1, f2)
+                ).get(driverContext);
+                String desc = operation.endsWith("lazy") ? "CoalesceLazyEvaluator" : "CoalesceEagerEvaluator";
+                if (evaluator.toString().contains(desc) == false) {
+                    throw new IllegalArgumentException("Evaluator was [" + evaluator + "] but expected one containing [" + desc + "]");
+                }
+                yield evaluator;
             }
             case "date_trunc" -> {
                 FieldAttribute timestamp = new FieldAttribute(
@@ -260,6 +290,38 @@ public class EvalBenchmark {
                     }
                 }
             }
+            case "coalesce_2_noop" -> {
+                LongVector f1 = actual.getBlock(0).asVector();
+                LongVector result = actual.getBlock(2).asVector();
+                for (int i = 0; i < BLOCK_LENGTH; i++) {
+                    long expected = f1.getLong(i);
+                    if (result.getLong(i) != expected) {
+                        throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + result.getLong(i) + "]");
+                    }
+                }
+            }
+            case "coalesce_2_eager" -> {
+                LongBlock f1 = actual.getBlock(0);
+                LongVector f2 = actual.getBlock(1).asVector();
+                LongVector result = actual.getBlock(2).asVector();
+                for (int i = 0; i < BLOCK_LENGTH; i++) {
+                    long expected = i % 5 == 0 ? f2.getLong(i) : f1.getLong(f1.getFirstValueIndex(i));
+                    if (result.getLong(i) != expected) {
+                        throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + result.getLong(i) + "]");
+                    }
+                }
+            }
+            case "coalesce_2_lazy" -> {
+                LongBlock f1 = actual.getBlock(0);
+                LongVector f2 = actual.getBlock(1).asVector();
+                LongVector result = actual.getBlock(2).asVector();
+                for (int i = 0; i < BLOCK_LENGTH; i++) {
+                    long expected = i % 5 == 0 ? f2.getLong(i) : f1.getLong(f1.getFirstValueIndex(i)) + 1;
+                    if (result.getLong(i) != expected) {
+                        throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + result.getLong(i) + "]");
+                    }
+                }
+            }
             case "date_trunc" -> {
                 LongVector v = actual.getBlock(1).asVector();
                 long oneDay = TimeValue.timeValueHours(24).millis();
@@ -304,7 +366,7 @@ public class EvalBenchmark {
                     }
                 }
             }
-            default -> throw new UnsupportedOperationException();
+            default -> throw new UnsupportedOperationException(operation);
         }
     }
 
@@ -324,7 +386,7 @@ public class EvalBenchmark {
                 }
                 yield new Page(builder.build());
             }
-            case "case_1_eager", "case_1_lazy" -> {
+            case "case_1_eager", "case_1_lazy", "coalesce_2_noop" -> {
                 var f1 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
                 var f2 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
                 for (int i = 0; i < BLOCK_LENGTH; i++) {
@@ -333,6 +395,19 @@ public class EvalBenchmark {
                 }
                 yield new Page(f1.build(), f2.build());
             }
+            case "coalesce_2_eager", "coalesce_2_lazy" -> {
+                var f1 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
+                var f2 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
+                for (int i = 0; i < BLOCK_LENGTH; i++) {
+                    if (i % 5 == 0) {
+                        f1.appendNull();
+                    } else {
+                        f1.appendLong(i);
+                    }
+                    f2.appendLong(-i);
+                }
+                yield new Page(f1.build(), f2.build());
+            }
             case "long_equal_to_long" -> {
                 var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
                 var rhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle
index 8d2050fb4304..2498b621b73e 100644
--- a/x-pack/plugin/esql/build.gradle
+++ b/x-pack/plugin/esql/build.gradle
@@ -348,4 +348,31 @@ tasks.named('stringTemplates').configure {
     it.inputFile =  inInputFile
     it.outputFile = "org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBytesRefEvaluator.java"
   }
+
+  File coalesceInputFile = file("src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st")
+  template {
+    it.properties = booleanProperties
+    it.inputFile =  coalesceInputFile
+    it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java"
+  }
+  template {
+    it.properties = intProperties
+    it.inputFile =  coalesceInputFile
+    it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java"
+  }
+  template {
+    it.properties = longProperties
+    it.inputFile =  coalesceInputFile
+    it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java"
+  }
+  template {
+    it.properties = doubleProperties
+    it.inputFile =  coalesceInputFile
+    it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java"
+  }
+  template {
+    it.properties = bytesRefProperties
+    it.inputFile =  coalesceInputFile
+    it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java"
+  }
 }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java
index 5d2d6c97a11f..b08b80acc697 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java
@@ -223,6 +223,14 @@ public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, Bo
          */
         Builder copyFrom(BooleanBlock block, int beginInclusive, int endExclusive);
 
+        /**
+         * Copy the values in {@code block} at {@code position}. If this position
+         * has a single value, this'll copy a single value. If this positions has
+         * many values, it'll copy all of them. If this is {@code null}, then it'll
+         * copy the {@code null}.
+         */
+        Builder copyFrom(BooleanBlock block, int position);
+
         @Override
         Builder appendNull();
 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java
index 32627a0e0d36..7f4705ddecb2 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.compute.data;
 
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
 import org.elasticsearch.common.util.BitArray;
@@ -85,7 +86,11 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB
     /**
      * Copy the values in {@code block} from {@code beginInclusive} to
      * {@code endExclusive} into this builder.
+     * <p>
+     *     For single-position copies see {@link #copyFrom(BooleanBlock, int)}.
+     * </p>
      */
+    @Override
     public BooleanBlockBuilder copyFrom(BooleanBlock block, int beginInclusive, int endExclusive) {
         if (endExclusive > block.getPositionCount()) {
             throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]");
@@ -101,21 +106,7 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB
 
     private void copyFromBlock(BooleanBlock block, int beginInclusive, int endExclusive) {
         for (int p = beginInclusive; p < endExclusive; p++) {
-            if (block.isNull(p)) {
-                appendNull();
-                continue;
-            }
-            int count = block.getValueCount(p);
-            if (count > 1) {
-                beginPositionEntry();
-            }
-            int i = block.getFirstValueIndex(p);
-            for (int v = 0; v < count; v++) {
-                appendBoolean(block.getBoolean(i++));
-            }
-            if (count > 1) {
-                endPositionEntry();
-            }
+            copyFrom(block, p);
         }
     }
 
@@ -125,6 +116,37 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB
         }
     }
 
+    /**
+     * Copy the values in {@code block} at {@code position}. If this position
+     * has a single value, this'll copy a single value. If this positions has
+     * many values, it'll copy all of them. If this is {@code null}, then it'll
+     * copy the {@code null}.
+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>
+ */ + @Override + public BooleanBlockBuilder copyFrom(BooleanBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendBoolean(block.getBoolean(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendBoolean(block.getBoolean(i++)); + } + endPositionEntry(); + return this; + } + @Override public BooleanBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 6fe45f33a7df..666189572272 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -228,6 +228,16 @@ public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, */ Builder copyFrom(BytesRefBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. + */ + Builder copyFrom(BytesRefBlock block, int position, BytesRef scratch); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 6232cbdd2717..0a2b35078040 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -88,7 +88,11 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single-position copies see {@link #copyFrom(BytesRefBlock, int, BytesRef scratch)}.
+     * </p>

*/ + @Override public BytesRefBlockBuilder copyFrom(BytesRefBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -105,21 +109,7 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe private void copyFromBlock(BytesRefBlock block, int beginInclusive, int endExclusive) { BytesRef scratch = new BytesRef(); for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBytesRef(block.getBytesRef(i++, scratch)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p, scratch); } } @@ -130,6 +120,39 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. + *

+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>

+ */ + @Override + public BytesRefBlockBuilder copyFrom(BytesRefBlock block, int position, BytesRef scratch) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendBytesRef(block.getBytesRef(i++, scratch)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendBytesRef(block.getBytesRef(i++, scratch)); + } + endPositionEntry(); + return this; + } + @Override public BytesRefBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 395ccd412fab..04df6253662a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -217,6 +217,14 @@ public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, Doub */ Builder copyFrom(DoubleBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + */ + Builder copyFrom(DoubleBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 5921c2daa9f9..8ecc9b91e0ff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.DoubleArray; @@ -85,7 +86,11 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single-position copies see {@link #copyFrom(DoubleBlock, int)}.
+     * </p>

*/ + @Override public DoubleBlockBuilder copyFrom(DoubleBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo private void copyFromBlock(DoubleBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendDouble(block.getDouble(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>

+ */ + @Override + public DoubleBlockBuilder copyFrom(DoubleBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendDouble(block.getDouble(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendDouble(block.getDouble(i++)); + } + endPositionEntry(); + return this; + } + @Override public DoubleBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java index 633c9f309901..0679e38b6321 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java @@ -216,6 +216,14 @@ public sealed interface FloatBlock extends Block permits FloatArrayBlock, FloatV */ Builder copyFrom(FloatBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + */ + Builder copyFrom(FloatBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java index 9c1e7aba49a2..8504912adc05 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.FloatArray; @@ -85,7 +86,11 @@ final class FloatBlockBuilder extends AbstractBlockBuilder implements FloatBlock /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single-position copies see {@link #copyFrom(FloatBlock, int)}.
+     * </p>

*/ + @Override public FloatBlockBuilder copyFrom(FloatBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ final class FloatBlockBuilder extends AbstractBlockBuilder implements FloatBlock private void copyFromBlock(FloatBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendFloat(block.getFloat(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ final class FloatBlockBuilder extends AbstractBlockBuilder implements FloatBlock } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>

+ */ + @Override + public FloatBlockBuilder copyFrom(FloatBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendFloat(block.getFloat(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendFloat(block.getFloat(i++)); + } + endPositionEntry(); + return this; + } + @Override public FloatBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 7c77d9965391..6af61695929d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -216,6 +216,14 @@ public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorB */ Builder copyFrom(IntBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + */ + Builder copyFrom(IntBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 85f943004de2..31449b6f1cd7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.IntArray; @@ -85,7 +86,11 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single-position copies see {@link #copyFrom(IntBlock, int)}.
+     * </p>

*/ + @Override public IntBlockBuilder copyFrom(IntBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui private void copyFromBlock(IntBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendInt(block.getInt(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>

+ */ + @Override + public IntBlockBuilder copyFrom(IntBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendInt(block.getInt(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendInt(block.getInt(i++)); + } + endPositionEntry(); + return this; + } + @Override public IntBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 6c88da8860ca..090efd9a3157 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -217,6 +217,14 @@ public sealed interface LongBlock extends Block permits LongArrayBlock, LongVect */ Builder copyFrom(LongBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + */ + Builder copyFrom(LongBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index d24ae214da63..bf25347edd98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.LongArray; @@ -85,7 +86,11 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single-position copies see {@link #copyFrom(LongBlock, int)}.
+     * </p>

*/ + @Override public LongBlockBuilder copyFrom(LongBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B private void copyFromBlock(LongBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendLong(block.getLong(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>

+ */ + @Override + public LongBlockBuilder copyFrom(LongBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendLong(block.getLong(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendLong(block.getLong(i++)); + } + endPositionEntry(); + return this; + } + @Override public LongBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index edf54a829deb..de87c08f7ceb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -280,6 +280,11 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single position copies use the faster
+     *     {@link IntBlockBuilder#copyFrom(IntBlock, int)},
+     *     {@link LongBlockBuilder#copyFrom(LongBlock, int)}, etc.
+     * </p>

*/ Builder copyFrom(Block block, int beginInclusive, int endExclusive); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 67e4ac4bb334..6c1616c37072 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -288,6 +288,18 @@ $endif$ */ Builder copyFrom($Type$Block block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. +$if(BytesRef)$ + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. +$endif$ + */ + Builder copyFrom($Type$Block block, int position$if(BytesRef)$, BytesRef scratch$endif$); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 8397a0f5274f..d60e1de179d2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.core.Releasables; $else$ +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.$Array$; @@ -123,7 +124,11 @@ $endif$ /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+     * <p>
+     *     For single-position copies see {@link #copyFrom($Type$Block, int$if(BytesRef)$, BytesRef scratch$endif$)}.
+     * </p>

*/ + @Override public $Type$BlockBuilder copyFrom($Type$Block block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -142,25 +147,7 @@ $if(BytesRef)$ BytesRef scratch = new BytesRef(); $endif$ for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { -$if(BytesRef)$ - appendBytesRef(block.getBytesRef(i++, scratch)); -$else$ - append$Type$(block.get$Type$(i++)); -$endif$ - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p$if(BytesRef)$, scratch$endif$); } } @@ -177,6 +164,41 @@ $endif$ } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. +$if(BytesRef)$ + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. +$endif$ + *

+     * <p>
+     *     Note that there isn't a version of this method on {@link Block.Builder} that takes
+     *     {@link Block}. That'd be quite slow, running position by position. And it's important
+     *     to know if you are copying {@link BytesRef}s so you can have the scratch.
+     * </p>

+ */ + @Override + public $Type$BlockBuilder copyFrom($Type$Block block, int position$if(BytesRef)$, BytesRef scratch$endif$) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + append$Type$(block.get$Type$(i++$if(BytesRef)$, scratch$endif$)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + append$Type$(block.get$Type$(i++$if(BytesRef)$, scratch$endif$)); + } + endPositionEntry(); + return this; + } + @Override public $Type$BlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 349ce7b00ff1..2573baf78b16 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -96,12 +96,18 @@ public class EvalOperator extends AbstractPageMappingOperator { public void close() { } + + @Override + public String toString() { + return CONSTANT_NULL_NAME; + } }; } @Override public String toString() { - return "ConstantNull"; + return CONSTANT_NULL_NAME; } }; + private static final String CONSTANT_NULL_NAME = "ConstantNull"; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java index 679e3441fb45..1d3c8df914bc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; @@ -92,7 +93,16 @@ public class BlockBuilderCopyFromTests extends ESTestCase { Block.Builder builder = elementType.newBlockBuilder(block.getPositionCount() / 2, blockFactory); List> expected = new ArrayList<>(); for (int i = 0; i < block.getPositionCount(); i += 2) { - builder.copyFrom(block, i, i + 1); + switch (elementType) { + case BOOLEAN -> ((BooleanBlockBuilder) builder).copyFrom((BooleanBlock) block, i); + case BYTES_REF -> ((BytesRefBlockBuilder) builder).copyFrom((BytesRefBlock) block, i, new BytesRef()); + case DOUBLE -> ((DoubleBlockBuilder) builder).copyFrom((DoubleBlock) block, i); + case FLOAT -> ((FloatBlockBuilder) builder).copyFrom((FloatBlock) block, i); + case INT -> ((IntBlockBuilder) builder).copyFrom((IntBlock) block, i); + case LONG -> ((LongBlockBuilder) builder).copyFrom((LongBlock) block, i); + default -> throw new IllegalArgumentException("unsupported type: " + elementType); + } + expected.add(valuesAtPositions(block, i, i + 1).get(0)); } assertBlockValues(builder.build(), expected); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java 
b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java new file mode 100644 index 000000000000..97b4cba0d993 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + */ +abstract sealed class CoalesceBooleanEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceBooleanEvaluator.CoalesceBooleanEagerEvaluator, // + CoalesceBooleanEvaluator.CoalesceBooleanLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceBooleanEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceBooleanEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceBooleanLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceBooleanLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceBooleanEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final BooleanBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *

+     * <p>
+     * Entire Block evaluation is the "normal" way to run the compute engine,
+     * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try
+     * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and:
+     * </p>
+     * <ul>
+     *     <li>If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.</li>
+     *     <li>If the {@linkplain Block} is only nulls we skip it and try the next evaluator.</li>
+     *     <li>If this is the last evaluator we just return it. COALESCE done.</li>
+     *     <li>
+     *         Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop
+     *         into a per position evaluator.
+     *     </li>
+     * </ul>
+ */ + private BooleanBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + BooleanBlock lastFullBlock = (BooleanBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+     * <p>
+     * This must not return warnings caused by
+     * evaluating positions for which a previous evaluator returned
+     * non-null. These are positions that, at least from the perspective
+     * of a compute engine user, don't have to be
+     * evaluated. Put another way, this must function as though
+     * {@code COALESCE} were per-position lazy. It can manage that
+     * any way it likes.
+     * </p>

+ */ + protected abstract BooleanBlock perPosition(Page page, BooleanBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceBooleanLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceBooleanEagerEvaluator extends CoalesceBooleanEvaluator { + CoalesceBooleanEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BooleanBlock perPosition(Page page, BooleanBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + BooleanBlock[] flatten = new BooleanBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (BooleanBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (BooleanBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
+     * <ul>
+     *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+     *     <li>
+     *         Evaluate the remaining evaluators one at a time, keeping
+     *         the first non-null value.
+     *     </li>
+     * </ul>
+ */ + static final class CoalesceBooleanLazyEvaluator extends CoalesceBooleanEvaluator { + CoalesceBooleanLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BooleanBlock perPosition(Page page, BooleanBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (BooleanBlock block = (BooleanBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java new file mode 100644 index 000000000000..7d6834e765a9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java @@ -0,0 +1,228 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
+ */ +abstract sealed class CoalesceBytesRefEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceBytesRefEvaluator.CoalesceBytesRefEagerEvaluator, // + CoalesceBytesRefEvaluator.CoalesceBytesRefLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceBytesRefEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceBytesRefEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceBytesRefLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceBytesRefLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceBytesRefEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final BytesRefBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *
<p>
+ *     Entire Block evaluation is the "normal" way to run the compute engine,
+ *     just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try
+ *     that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and:
+ * </p>
+ * <ol>
+ *     <li>If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.</li>
+ *     <li>If the {@linkplain Block} is only nulls we skip it and try the next evaluator.</li>
+ *     <li>If this is the last evaluator we just return it. COALESCE done.</li>
+ *     <li>
+ *         Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop
+ *         into a per position evaluator.
+ *     </li>
+ * </ol>
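+ * <p>
+ *     For intuition, a sketch of the three outcomes, with hypothetical values:
+ * </p>
+ * <pre>{@code
+ * // COALESCE(a, b, c) over four positions:
+ * // eval(a) -> [3, 1, 7, 2]             // no nulls: return it, b and c never run
+ * // eval(a) -> [null, null, null, null] // all nulls: close it, move on to b
+ * // eval(a) -> [null, 1, null, 2]       // mixed: perPosition(page, aBlock, 1) fills the nulls
+ * }</pre>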
+ */ + private BytesRefBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + BytesRefBlock lastFullBlock = (BytesRefBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *
<p>
+ *     This must not return warnings caused by
+ *     evaluating positions for which a previous evaluator returned
+ *     non-null. These are positions that, at least from the perspective
+ *     of a compute engine user, don't have to be
+ *     evaluated. Put another way, this must function as though
+ *     {@code COALESCE} were per-position lazy. It can manage that
+ *     any way it likes.
+ * </p>
+ */ + protected abstract BytesRefBlock perPosition(Page page, BytesRefBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceBytesRefLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceBytesRefEagerEvaluator extends CoalesceBytesRefEvaluator { + CoalesceBytesRefEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BytesRefBlock perPosition(Page page, BytesRefBlock lastFullBlock, int firstToEvaluate) { + BytesRef scratch = new BytesRef(); + int positionCount = page.getPositionCount(); + BytesRefBlock[] flatten = new BytesRefBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (BytesRefBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (BytesRefBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p, scratch); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
<ul>
+ *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+ *     <li>
+ *         Evaluate the remaining evaluators one at a time, keeping
+ *         the first non-null value.
+ *     </li>
+ * </ul>
+ */ + static final class CoalesceBytesRefLazyEvaluator extends CoalesceBytesRefEvaluator { + CoalesceBytesRefLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BytesRefBlock perPosition(Page page, BytesRefBlock lastFullBlock, int firstToEvaluate) { + BytesRef scratch = new BytesRef(); + int positionCount = page.getPositionCount(); + try (BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (BytesRefBlock block = (BytesRefBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0, scratch); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java new file mode 100644 index 000000000000..4c01a961ecbe --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
+ */ +abstract sealed class CoalesceDoubleEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceDoubleEvaluator.CoalesceDoubleEagerEvaluator, // + CoalesceDoubleEvaluator.CoalesceDoubleLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceDoubleEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceDoubleEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceDoubleLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceDoubleLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceDoubleEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final DoubleBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *
<p>
+ *     Entire Block evaluation is the "normal" way to run the compute engine,
+ *     just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try
+ *     that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and:
+ * </p>
+ * <ol>
+ *     <li>If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.</li>
+ *     <li>If the {@linkplain Block} is only nulls we skip it and try the next evaluator.</li>
+ *     <li>If this is the last evaluator we just return it. COALESCE done.</li>
+ *     <li>
+ *         Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop
+ *         into a per position evaluator.
+ *     </li>
+ * </ol>
+ */ + private DoubleBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + DoubleBlock lastFullBlock = (DoubleBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *
<p>
+ *     This must not return warnings caused by
+ *     evaluating positions for which a previous evaluator returned
+ *     non-null. These are positions that, at least from the perspective
+ *     of a compute engine user, don't have to be
+ *     evaluated. Put another way, this must function as though
+ *     {@code COALESCE} were per-position lazy. It can manage that
+ *     any way it likes.
+ * </p>
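+ * <p>
+ *     A hypothetical example of the contract (illustrative only, not from this patch):
+ * </p>
+ * <pre>{@code
+ * // COALESCE(a, 1 / b) -- suppose evaluating 1 / b warns when b == 0.
+ * // At a position where a is non-null, no such warning may escape, because
+ * // from the user's perspective 1 / b was never needed there.
+ * }</pre>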
+ */ + protected abstract DoubleBlock perPosition(Page page, DoubleBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceDoubleLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceDoubleEagerEvaluator extends CoalesceDoubleEvaluator { + CoalesceDoubleEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected DoubleBlock perPosition(Page page, DoubleBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + DoubleBlock[] flatten = new DoubleBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (DoubleBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (DoubleBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
<ul>
+ *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+ *     <li>
+ *         Evaluate the remaining evaluators one at a time, keeping
+ *         the first non-null value.
+ *     </li>
+ * </ul>
+ */ + static final class CoalesceDoubleLazyEvaluator extends CoalesceDoubleEvaluator { + CoalesceDoubleLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected DoubleBlock perPosition(Page page, DoubleBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (DoubleBlock block = (DoubleBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java new file mode 100644 index 000000000000..e90bd4b8e5e3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
+ */ +abstract sealed class CoalesceIntEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceIntEvaluator.CoalesceIntEagerEvaluator, // + CoalesceIntEvaluator.CoalesceIntLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceIntEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceIntEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceIntLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceIntLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceIntEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final IntBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *
<p>
+ *     Entire Block evaluation is the "normal" way to run the compute engine,
+ *     just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try
+ *     that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and:
+ * </p>
+ * <ol>
+ *     <li>If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.</li>
+ *     <li>If the {@linkplain Block} is only nulls we skip it and try the next evaluator.</li>
+ *     <li>If this is the last evaluator we just return it. COALESCE done.</li>
+ *     <li>
+ *         Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop
+ *         into a per position evaluator.
+ *     </li>
+ * </ol>
+ */ + private IntBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + IntBlock lastFullBlock = (IntBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *
<p>
+ *     This must not return warnings caused by
+ *     evaluating positions for which a previous evaluator returned
+ *     non-null. These are positions that, at least from the perspective
+ *     of a compute engine user, don't have to be
+ *     evaluated. Put another way, this must function as though
+ *     {@code COALESCE} were per-position lazy. It can manage that
+ *     any way it likes.
+ * </p>
+ */ + protected abstract IntBlock perPosition(Page page, IntBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceIntLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceIntEagerEvaluator extends CoalesceIntEvaluator { + CoalesceIntEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected IntBlock perPosition(Page page, IntBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + IntBlock[] flatten = new IntBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (IntBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (IntBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
<ul>
+ *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+ *     <li>
+ *         Evaluate the remaining evaluators one at a time, keeping
+ *         the first non-null value.
+ *     </li>
+ * </ul>
+ */ + static final class CoalesceIntLazyEvaluator extends CoalesceIntEvaluator { + CoalesceIntLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected IntBlock perPosition(Page page, IntBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (IntBlock block = (IntBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java new file mode 100644 index 000000000000..53a21ad1198f --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
+ */ +abstract sealed class CoalesceLongEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceLongEvaluator.CoalesceLongEagerEvaluator, // + CoalesceLongEvaluator.CoalesceLongLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceLongEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceLongEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceLongLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceLongLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceLongEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final LongBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *
<p>
+ *     Entire Block evaluation is the "normal" way to run the compute engine,
+ *     just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try
+ *     that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and:
+ * </p>
+ * <ol>
+ *     <li>If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.</li>
+ *     <li>If the {@linkplain Block} is only nulls we skip it and try the next evaluator.</li>
+ *     <li>If this is the last evaluator we just return it. COALESCE done.</li>
+ *     <li>
+ *         Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop
+ *         into a per position evaluator.
+ *     </li>
+ * </ol>
+ */ + private LongBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + LongBlock lastFullBlock = (LongBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *
<p>
+ *     This must not return warnings caused by
+ *     evaluating positions for which a previous evaluator returned
+ *     non-null. These are positions that, at least from the perspective
+ *     of a compute engine user, don't have to be
+ *     evaluated. Put another way, this must function as though
+ *     {@code COALESCE} were per-position lazy. It can manage that
+ *     any way it likes.
+ * </p>
+ */ + protected abstract LongBlock perPosition(Page page, LongBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceLongLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceLongEagerEvaluator extends CoalesceLongEvaluator { + CoalesceLongEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected LongBlock perPosition(Page page, LongBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + LongBlock[] flatten = new LongBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (LongBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (LongBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
<ul>
+ *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+ *     <li>
+ *         Evaluate the remaining evaluators one at a time, keeping
+ *         the first non-null value.
+ *     </li>
+ * </ul>
+ */ + static final class CoalesceLongLazyEvaluator extends CoalesceLongEvaluator { + CoalesceLongLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected LongBlock perPosition(Page page, LongBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (LongBlock block = (LongBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 236e625f7abe..04da04e1b392 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -524,6 +524,9 @@ public final class Case extends EsqlScalarFunction { ) { for (int p = 0; p < lhs.getPositionCount(); p++) { if (lhsOrRhs.mask().getBoolean(p)) { + // TODO Copy the per-type specialization that COALESCE has. + // There's also a slowdown because copying from a block checks to see if there are any nulls and that's slow. + // Vectors do not, so this still shows as fairly fast. But not as fast as the per-type unrolling. 
builder.copyFrom(lhs, p, p + 1); } else { builder.copyFrom(rhs, p, p + 1); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 52686430ca5b..611c7a456864 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -11,13 +11,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -31,17 +26,29 @@ import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.io.IOException; import java.util.List; -import java.util.stream.IntStream; import java.util.stream.Stream; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; /** - * Function returning the first non-null value. + * Function returning the first non-null value. {@code COALESCE} runs as though + * it were lazily evaluating each position in each incoming {@link Block}. 
*/ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Coalesce", Coalesce::new); @@ -194,70 +201,16 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - List childEvaluators = children().stream().map(toEvaluator::apply).toList(); - return new ExpressionEvaluator.Factory() { - @Override - public ExpressionEvaluator get(DriverContext context) { - return new CoalesceEvaluator( - context, - PlannerUtils.toElementType(dataType()), - childEvaluators.stream().map(x -> x.get(context)).toList() - ); - } - - @Override - public String toString() { - return "CoalesceEvaluator[values=" + childEvaluators + ']'; - } + return switch (dataType()) { + case BOOLEAN -> CoalesceBooleanEvaluator.toEvaluator(toEvaluator, children()); + case DOUBLE, COUNTER_DOUBLE -> CoalesceDoubleEvaluator.toEvaluator(toEvaluator, children()); + case INTEGER, COUNTER_INTEGER -> CoalesceIntEvaluator.toEvaluator(toEvaluator, children()); + case LONG, DATE_NANOS, DATETIME, COUNTER_LONG, UNSIGNED_LONG -> CoalesceLongEvaluator.toEvaluator(toEvaluator, children()); + case KEYWORD, TEXT, SEMANTIC_TEXT, CARTESIAN_POINT, CARTESIAN_SHAPE, GEO_POINT, GEO_SHAPE, IP, VERSION -> + CoalesceBytesRefEvaluator.toEvaluator(toEvaluator, children()); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + case UNSUPPORTED, SHORT, BYTE, DATE_PERIOD, OBJECT, DOC_DATA_TYPE, SOURCE, TIME_DURATION, FLOAT, HALF_FLOAT, TSID_DATA_TYPE, + SCALED_FLOAT, PARTIAL_AGG -> throw new UnsupportedOperationException(dataType() + " can't be coalesced"); }; } - - private record CoalesceEvaluator(DriverContext driverContext, ElementType resultType, List evaluators) - implements - EvalOperator.ExpressionEvaluator { - @Override - public Block eval(Page page) { - /* - * We have to evaluate lazily so any errors or warnings that would be - * produced by the right hand side are avoided. And so if anything - * on the right hand side is slow we skip it. - * - * And it'd be good if that lazy evaluation were fast. But this - * implementation isn't. It's fairly simple - running position at - * a time - but it's not at all fast. 
- */ - int positionCount = page.getPositionCount(); - try (Block.Builder result = resultType.newBlockBuilder(positionCount, driverContext.blockFactory())) { - position: for (int p = 0; p < positionCount; p++) { - int[] positions = new int[] { p }; - Page limited = new Page( - 1, - IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) - ); - try (Releasable ignored = limited::releaseBlocks) { - for (EvalOperator.ExpressionEvaluator eval : evaluators) { - try (Block block = eval.eval(limited)) { - if (false == block.isNull(0)) { - result.copyFrom(block, 0, 1); - continue position; - } - } - } - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "CoalesceEvaluator[values=" + evaluators + ']'; - } - - @Override - public void close() { - Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st new file mode 100644 index 000000000000..33841f03f780 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st @@ -0,0 +1,234 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +$endif$ +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
+ */ +abstract sealed class Coalesce$Type$Evaluator implements EvalOperator.ExpressionEvaluator permits + Coalesce$Type$Evaluator.Coalesce$Type$EagerEvaluator, // + Coalesce$Type$Evaluator.Coalesce$Type$LazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Coalesce$Type$EagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "Coalesce$Type$EagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Coalesce$Type$LazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "Coalesce$Type$LazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected Coalesce$Type$Evaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final $Type$Block eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *
<p>
+ *     Entire Block evaluation is the "normal" way to run the compute engine,
+ *     just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try
+ *     that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and:
+ * </p>
+ * <ol>
+ *     <li>If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.</li>
+ *     <li>If the {@linkplain Block} is only nulls we skip it and try the next evaluator.</li>
+ *     <li>If this is the last evaluator we just return it. COALESCE done.</li>
+ *     <li>
+ *         Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop
+ *         into a per position evaluator.
+ *     </li>
+ * </ol>
+ */ + private $Type$Block entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + $Type$Block lastFullBlock = ($Type$Block) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *
<p>
+ *     This must not return warnings caused by
+ *     evaluating positions for which a previous evaluator returned
+ *     non-null. These are positions that, at least from the perspective
+ *     of a compute engine user, don't have to be
+ *     evaluated. Put another way, this must function as though
+ *     {@code COALESCE} were per-position lazy. It can manage that
+ *     any way it likes.
+ * </p>
+ */ + protected abstract $Type$Block perPosition(Page page, $Type$Block lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link Coalesce$Type$LazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class Coalesce$Type$EagerEvaluator extends Coalesce$Type$Evaluator { + Coalesce$Type$EagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected $Type$Block perPosition(Page page, $Type$Block lastFullBlock, int firstToEvaluate) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int positionCount = page.getPositionCount(); + $Type$Block[] flatten = new $Type$Block[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = ($Type$Block) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try ($Type$Block.Builder result = driverContext.blockFactory().new$Type$BlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for ($Type$Block f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p$if(BytesRef)$, scratch$endif$); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
<ul>
+ *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+ *     <li>
+ *         Evaluate the remaining evaluators one at a time, keeping
+ *         the first non-null value.
+ *     </li>
+ * </ul>
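+ * <p>
+ *     As a concrete illustration of the templating in this file, the
+ *     {@code $if(BytesRef)$...$endif$} guards expand like so:
+ * </p>
+ * <pre>
+ * Template:    result.copyFrom(block, 0$if(BytesRef)$, scratch$endif$);
+ * BytesRef:    result.copyFrom(block, 0, scratch);
+ * Other types: result.copyFrom(block, 0);
+ * </pre>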
+ */ + static final class Coalesce$Type$LazyEvaluator extends Coalesce$Type$Evaluator { + Coalesce$Type$LazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected $Type$Block perPosition(Page page, $Type$Block lastFullBlock, int firstToEvaluate) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int positionCount = page.getPositionCount(); + try ($Type$Block.Builder result = driverContext.blockFactory().new$Type$BlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try ($Type$Block block = ($Type$Block) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0$if(BytesRef)$, scratch$endif$); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index 688341ebaa2b..1235a175294a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -12,8 +12,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; @@ -29,6 +34,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunctionTestCase; import org.elasticsearch.xpack.esql.planner.Layout; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.time.ZonedDateTime; @@ -40,6 +46,9 @@ import java.util.function.Supplier; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; public class CoalesceTests extends AbstractScalarFunctionTestCase { public 
CoalesceTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -49,7 +58,7 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { @ParametersFactory public static Iterable parameters() { List noNullsSuppliers = new ArrayList<>(); - VaragsTestCaseBuilder builder = new VaragsTestCaseBuilder(type -> "Coalesce"); + VaragsTestCaseBuilder builder = new VaragsTestCaseBuilder(type -> "Coalesce" + type + "Eager"); builder.expectString(strings -> strings.filter(v -> v != null).findFirst()); builder.expectLong(longs -> longs.filter(v -> v != null).findFirst()); builder.expectInt(ints -> ints.filter(v -> v != null).findFirst()); @@ -64,7 +73,7 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(first, DataType.IP, "first"), new TestCaseSupplier.TypedData(second, DataType.IP, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceBytesRefEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.IP, equalTo(first == null ? second : first) ); @@ -79,7 +88,7 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(first, DataType.VERSION, "first"), new TestCaseSupplier.TypedData(second, DataType.VERSION, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceBytesRefEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.VERSION, equalTo(first == null ? second : first) ); @@ -92,7 +101,7 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(firstDate, DataType.DATETIME, "first"), new TestCaseSupplier.TypedData(secondDate, DataType.DATETIME, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceLongEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.DATETIME, equalTo(firstDate == null ? secondDate : firstDate) ); @@ -105,7 +114,7 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(firstDate, DataType.DATE_NANOS, "first"), new TestCaseSupplier.TypedData(secondDate, DataType.DATE_NANOS, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceLongEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.DATE_NANOS, equalTo(firstDate == null ? 
secondDate : firstDate) ); @@ -129,6 +138,20 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { suppliers.add(new TestCaseSupplier(nullCaseName(s, nullUpTo, true), types, () -> nullCase(s.get(), finalNullUpTo, true))); } } + suppliers.add( + new TestCaseSupplier( + List.of(DataType.NULL, DataType.NULL), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(null, DataType.NULL, "first"), + new TestCaseSupplier.TypedData(null, DataType.NULL, "second") + ), + "ConstantNull", + DataType.NULL, + nullValue() + ) + ) + ); return parameterSuppliersFromTypedData(suppliers); } @@ -167,7 +190,7 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.testCaseSupplier( leftDataSupplier, rightDataSupplier, - (l, r) -> equalTo("CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]"), + (l, r) -> equalTo("CoalesceBytesRefEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]"), dataType, (l, r) -> l ) @@ -235,6 +258,69 @@ public class CoalesceTests extends AbstractScalarFunctionTestCase { sub.add(between(0, sub.size()), randomLiteral(sub.get(sub.size() - 1).dataType())); Coalesce exp = build(Source.EMPTY, sub); // Known not to be nullable because it contains a non-null literal - assertThat(exp.nullable(), equalTo(Nullability.FALSE)); + if (testCase.expectedType() == DataType.NULL) { + assertThat(exp.nullable(), equalTo(Nullability.UNKNOWN)); + } else { + assertThat(exp.nullable(), equalTo(Nullability.FALSE)); + } + } + + /** + * Inserts random non-null garbage around the expected data and runs COALESCE. + *
<p>
+ *     This is important for catching the case where your value is null, but the rest of the block
+ *     isn't null. An off-by-one error in the evaluators can break this in a way that the standard
+ *     tests weren't catching and this does.
+ * </p>
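+ * <p>
+ *     For example, with hypothetical values {@code positions = 5} and
+ *     {@code realPosition = 2}, each input block is laid out as
+ *     {@code [garbage, garbage, real, garbage, garbage]} and only position 2 is asserted.
+ * </p>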
+ */ + public void testEvaluateWithGarbage() { + DriverContext context = driverContext(); + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + int positions = between(2, 1024); + List data = testCase.getData(); + Page onePositionPage = row(testCase.getDataValues()); + Block[] blocks = new Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; + int realPosition = between(0, positions - 1); + try { + int blocksIndex = 0; + for (TestCaseSupplier.TypedData d : data) { + blocks[blocksIndex] = blockWithRandomGarbage( + context.blockFactory(), + d.type(), + onePositionPage.getBlock(blocksIndex), + positions, + realPosition + ); + blocksIndex++; + } + try ( + EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(context); + Block block = eval.eval(new Page(positions, blocks)) + ) { + assertThat(block.getPositionCount(), is(positions)); + assertThat(toJavaObjectUnsignedLongAware(block, realPosition), testCase.getMatcher()); + assertThat("evaluates to tracked block", block.blockFactory(), sameInstance(context.blockFactory())); + } + } finally { + Releasables.close(onePositionPage::releaseBlocks, Releasables.wrap(blocks)); + } + } + + private Block blockWithRandomGarbage( + BlockFactory blockFactory, + DataType type, + Block singlePositionBlock, + int totalPositions, + int insertLocation + ) { + try (Block.Builder builder = PlannerUtils.toElementType(type).newBlockBuilder(totalPositions, blockFactory)) { + for (int p = 0; p < totalPositions; p++) { + Block copyFrom = p == insertLocation + ? singlePositionBlock + : BlockUtils.constantBlock(TestBlockFactory.getNonBreakingInstance(), randomLiteral(type).value(), 1); + builder.copyFrom(copyFrom, 0, 1); + } + return builder.build(); + } } } From 7e43605e381da204fe5b3fe6f9833583fd2d230c Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 23 Jan 2025 13:04:38 -0500 Subject: [PATCH 24/29] Esql Support date nanos on date diff function (#120645) Resolves #109999 This adds support for date nanos in the date diff function, as well as mixed nanos/millis use cases. 
--------- Co-authored-by: elasticsearchmachine --- docs/changelog/120645.yaml | 6 + .../kibana/definition/date_diff.json | 144 +++++++++++++ .../esql/functions/types/date_diff.asciidoc | 6 + .../src/main/resources/date_nanos.csv-spec | 25 +++ .../date/DateDiffConstantMillisEvaluator.java | 168 ++++++++++++++++ .../DateDiffConstantMillisNanosEvaluator.java | 168 ++++++++++++++++ .../date/DateDiffConstantNanosEvaluator.java | 168 ++++++++++++++++ .../DateDiffConstantNanosMillisEvaluator.java | 168 ++++++++++++++++ .../scalar/date/DateDiffMillisEvaluator.java | 190 ++++++++++++++++++ .../date/DateDiffMillisNanosEvaluator.java | 190 ++++++++++++++++++ .../scalar/date/DateDiffNanosEvaluator.java | 190 ++++++++++++++++++ .../date/DateDiffNanosMillisEvaluator.java | 190 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 +- .../function/scalar/date/DateDiff.java | 115 +++++++++-- .../scalar/date/DateDiffErrorTests.java | 2 +- .../scalar/date/DateDiffFunctionTests.java | 7 +- .../function/scalar/date/DateDiffTests.java | 103 +++++++++- 17 files changed, 1823 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/120645.yaml create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java diff --git a/docs/changelog/120645.yaml b/docs/changelog/120645.yaml new file mode 100644 index 000000000000..a5ee79de6cb5 --- /dev/null +++ b/docs/changelog/120645.yaml @@ -0,0 +1,6 @@ +pr: 120645 +summary: Esql Support date nanos on date diff function +area: ES|QL +type: enhancement +issues: + - 109999 diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json index d32028d45534..2738ec839022 100644 --- a/docs/reference/esql/functions/kibana/definition/date_diff.json +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -28,6 +28,102 @@ "variadic" : false, "returnType" : "integer" }, + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + 
"params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, { "params" : [ { @@ -42,6 +138,30 @@ "optional" : false, "description" : "A string representing a start timestamp" }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, { "name" : "endTimestamp", "type" : "date", @@ -51,6 +171,30 @@ ], "variadic" : false, "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" } ], "examples" : [ diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc index b0a4818f412a..b557d5a34258 100644 --- a/docs/reference/esql/functions/types/date_diff.asciidoc +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -6,5 +6,11 @@ |=== unit | startTimestamp | endTimestamp | result keyword | date | date | integer +keyword | date | date_nanos | integer +keyword | date_nanos | date | integer +keyword | date_nanos | date_nanos | integer text | date | date | integer +text | date | date_nanos | integer +text | date_nanos | date | integer +text | date_nanos | date_nanos | integer |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index b2a063e509a8..1f4e555bd5d8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -1249,3 +1249,28 @@ sv_nanos:date_nanos | a:keyword | b:keyword | c:keywo 2023-03-23T12:15:03.360103847Z | 2023-03-23T12:15:03.360Z | 2023-03-23 | 2023-03-23T12:15:03.360103847Z 2023-03-23T12:15:03.360103847Z | 2023-03-23T12:15:03.360Z | 2023-03-23 | 2023-03-23T12:15:03.360103847Z ; + +Date Nanos Date Diff +required_capability: date_nanos_date_diff +required_capability: to_date_nanos + +FROM date_nanos +| EVAL n = MV_MAX(nanos) +| EVAL diff_sec = DATE_DIFF("seconds", TO_DATE_NANOS("2023-10-23T12:15:03.360103847Z"), n) +| EVAL diff_sec_m = DATE_DIFF("seconds", TO_DATETIME("2023-10-23T12:15:03.360103847Z"), n) +| KEEP diff_sec, diff_sec_m, n; +ignoreOrder:true + +# Note - it is expected that the millisecond diff is slightly different due to rounding. +diff_sec:integer | diff_sec_m:integer | n:date_nanos +5998 | 5998 | 2023-10-23T13:55:01.543123456Z +5932 | 5932 | 2023-10-23T13:53:55.832987654Z +5871 | 5871 | 2023-10-23T13:52:55.015787878Z +5811 | 5811 | 2023-10-23T13:51:54.732102837Z +4711 | 4711 | 2023-10-23T13:33:34.937193000Z +745 | 745 | 2023-10-23T12:27:28.948000000Z +0 | 0 | 2023-10-23T12:15:03.360103847Z +0 | 0 | 2023-10-23T12:15:03.360103847Z +-18489600 | -18489599 | 2023-03-23T12:15:03.360103847Z +-18489600 | -18489599 | 2023-03-23T12:15:03.360103847Z +; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java new file mode 100644 index 000000000000..0ff047f9bd81 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantMillisEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillis(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampVector, + LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillis(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp 
+ ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffConstantMillisEvaluator get(DriverContext context) { + return new DateDiffConstantMillisEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java new file mode 100644 index 000000000000..880531ca5370 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator endTimestampNanos; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantMillisNanosEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestampMillis, + EvalOperator.ExpressionEvaluator endTimestampNanos, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampMillisBlock = (LongBlock) startTimestampMillis.eval(page)) { + try (LongBlock endTimestampNanosBlock = (LongBlock) endTimestampNanos.eval(page)) { + LongVector startTimestampMillisVector = startTimestampMillisBlock.asVector(); + if (startTimestampMillisVector == null) { + return eval(page.getPositionCount(), startTimestampMillisBlock, endTimestampNanosBlock); + } + LongVector endTimestampNanosVector = endTimestampNanosBlock.asVector(); + if (endTimestampNanosVector == null) { + return eval(page.getPositionCount(), startTimestampMillisBlock, endTimestampNanosBlock); + } + return eval(page.getPositionCount(), startTimestampMillisVector, endTimestampNanosVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampMillisBlock, + LongBlock endTimestampNanosBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampMillisBlock.getValueCount(p) != 1) { + if (startTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.getValueCount(p) != 1) { + if (endTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillisNanos(this.datePartFieldUnit, startTimestampMillisBlock.getLong(startTimestampMillisBlock.getFirstValueIndex(p)), endTimestampNanosBlock.getLong(endTimestampNanosBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampMillisVector, + LongVector endTimestampNanosVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillisNanos(this.datePartFieldUnit, startTimestampMillisVector.getLong(p), endTimestampNanosVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + 
warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantMillisNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestampMillis, endTimestampNanos); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampNanos; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestampMillis, + EvalOperator.ExpressionEvaluator.Factory endTimestampNanos) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + } + + @Override + public DateDiffConstantMillisNanosEvaluator get(DriverContext context) { + return new DateDiffConstantMillisNanosEvaluator(source, datePartFieldUnit, startTimestampMillis.get(context), endTimestampNanos.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantMillisNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java new file mode 100644 index 000000000000..99f7d1cb2e24 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantNanosEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanos(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampVector, + LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanos(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", 
endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffConstantNanosEvaluator get(DriverContext context) { + return new DateDiffConstantNanosEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java new file mode 100644 index 000000000000..842930a040ed --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator endTimestampMillis; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantNanosMillisEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestampNanos, + EvalOperator.ExpressionEvaluator endTimestampMillis, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampNanosBlock = (LongBlock) startTimestampNanos.eval(page)) { + try (LongBlock endTimestampMillisBlock = (LongBlock) endTimestampMillis.eval(page)) { + LongVector startTimestampNanosVector = startTimestampNanosBlock.asVector(); + if (startTimestampNanosVector == null) { + return eval(page.getPositionCount(), startTimestampNanosBlock, endTimestampMillisBlock); + } + LongVector endTimestampMillisVector = endTimestampMillisBlock.asVector(); + if (endTimestampMillisVector == null) { + return eval(page.getPositionCount(), startTimestampNanosBlock, endTimestampMillisBlock); + } + return eval(page.getPositionCount(), startTimestampNanosVector, endTimestampMillisVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampNanosBlock, + LongBlock endTimestampMillisBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampNanosBlock.getValueCount(p) != 1) { + if (startTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.getValueCount(p) != 1) { + if (endTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanosMillis(this.datePartFieldUnit, startTimestampNanosBlock.getLong(startTimestampNanosBlock.getFirstValueIndex(p)), endTimestampMillisBlock.getLong(endTimestampMillisBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampNanosVector, + LongVector endTimestampMillisVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanosMillis(this.datePartFieldUnit, startTimestampNanosVector.getLong(p), endTimestampMillisVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + 
warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantNanosMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestampNanos, endTimestampMillis); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampMillis; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestampNanos, + EvalOperator.ExpressionEvaluator.Factory endTimestampMillis) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + } + + @Override + public DateDiffConstantNanosMillisEvaluator get(DriverContext context) { + return new DateDiffConstantNanosMillisEvaluator(source, datePartFieldUnit, startTimestampNanos.get(context), endTimestampMillis.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantNanosMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java new file mode 100644 index 000000000000..a464d0c5cafc --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. + */ +public final class DateDiffMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if 
(startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillis(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampVector, LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillis(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffMillisEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffMillisEvaluator get(DriverContext context) { + return new DateDiffMillisEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffMillisEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java new file mode 100644 index 
000000000000..4586e2cb720f --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. + */ +public final class DateDiffMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator endTimestampNanos; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffMillisNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestampMillis, + EvalOperator.ExpressionEvaluator endTimestampNanos, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampMillisBlock = (LongBlock) startTimestampMillis.eval(page)) { + try (LongBlock endTimestampNanosBlock = (LongBlock) endTimestampNanos.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampMillisBlock, endTimestampNanosBlock); + } + LongVector startTimestampMillisVector = startTimestampMillisBlock.asVector(); + if (startTimestampMillisVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampMillisBlock, endTimestampNanosBlock); + } + LongVector endTimestampNanosVector = endTimestampNanosBlock.asVector(); + if (endTimestampNanosVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampMillisBlock, endTimestampNanosBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampMillisVector, endTimestampNanosVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, + LongBlock startTimestampMillisBlock, LongBlock endTimestampNanosBlock) { + try(IntBlock.Builder result = 
driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampMillisBlock.getValueCount(p) != 1) { + if (startTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.getValueCount(p) != 1) { + if (endTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillisNanos(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampMillisBlock.getLong(startTimestampMillisBlock.getFirstValueIndex(p)), endTimestampNanosBlock.getLong(endTimestampNanosBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampMillisVector, LongVector endTimestampNanosVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillisNanos(unitVector.getBytesRef(p, unitScratch), startTimestampMillisVector.getLong(p), endTimestampNanosVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffMillisNanosEvaluator[" + "unit=" + unit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestampMillis, endTimestampNanos); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampNanos; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestampMillis, + EvalOperator.ExpressionEvaluator.Factory endTimestampNanos) { + this.source = source; + this.unit = unit; + this.startTimestampMillis 
= startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + } + + @Override + public DateDiffMillisNanosEvaluator get(DriverContext context) { + return new DateDiffMillisNanosEvaluator(source, unit.get(context), startTimestampMillis.get(context), endTimestampNanos.get(context), context); + } + + @Override + public String toString() { + return "DateDiffMillisNanosEvaluator[" + "unit=" + unit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java new file mode 100644 index 000000000000..95a54c3a24ec --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanos(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector 
unitVector, + LongVector startTimestampVector, LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanos(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffNanosEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffNanosEvaluator get(DriverContext context) { + return new DateDiffNanosEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffNanosEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java new file mode 100644 index 000000000000..a7694647aec5 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. + */ +public final class DateDiffNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator endTimestampMillis; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffNanosMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestampNanos, + EvalOperator.ExpressionEvaluator endTimestampMillis, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampNanosBlock = (LongBlock) startTimestampNanos.eval(page)) { + try (LongBlock endTimestampMillisBlock = (LongBlock) endTimestampMillis.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampNanosBlock, endTimestampMillisBlock); + } + LongVector startTimestampNanosVector = startTimestampNanosBlock.asVector(); + if (startTimestampNanosVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampNanosBlock, endTimestampMillisBlock); + } + LongVector endTimestampMillisVector = endTimestampMillisBlock.asVector(); + if (endTimestampMillisVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampNanosBlock, endTimestampMillisBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampNanosVector, endTimestampMillisVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, + LongBlock startTimestampNanosBlock, LongBlock endTimestampMillisBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue 
position; + } + if (startTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampNanosBlock.getValueCount(p) != 1) { + if (startTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.getValueCount(p) != 1) { + if (endTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanosMillis(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampNanosBlock.getLong(startTimestampNanosBlock.getFirstValueIndex(p)), endTimestampMillisBlock.getLong(endTimestampMillisBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampNanosVector, LongVector endTimestampMillisVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanosMillis(unitVector.getBytesRef(p, unitScratch), startTimestampNanosVector.getLong(p), endTimestampMillisVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffNanosMillisEvaluator[" + "unit=" + unit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestampNanos, endTimestampMillis); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampMillis; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestampNanos, + EvalOperator.ExpressionEvaluator.Factory endTimestampMillis) { + this.source = source; + this.unit = unit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + } + + @Override + public DateDiffNanosMillisEvaluator get(DriverContext context) { + return new DateDiffNanosMillisEvaluator(source, unit.get(context), startTimestampNanos.get(context), endTimestampMillis.get(context), context); + } + + @Override + public String toString() { + return "DateDiffNanosMillisEvaluator[" + "unit=" + unit + ", startTimestampNanos=" + startTimestampNanos + ", 
endTimestampMillis=" + endTimestampMillis + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 08e0f0cf473e..e4c591f8f6b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -503,7 +503,10 @@ public class EsqlCapabilities { * Support running date format function on nanosecond dates */ DATE_NANOS_DATE_FORMAT(), - + /** + * support date diff function on date nanos type, and mixed nanos/millis + */ + DATE_NANOS_DATE_DIFF(), /** * DATE_PARSE supports reading timezones */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index b588832aba4c..4d843ea7180a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -11,11 +11,13 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -41,8 +43,9 @@ import java.util.function.BiFunction; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToInt; /** @@ -168,10 +171,14 @@ public class DateDiff extends EsqlScalarFunction { @Param(name = "unit", type = { "keyword", "text" }, description = "Time difference unit") Expression unit, @Param( name = "startTimestamp", - type = { "date" }, + type = { "date", "date_nanos" }, description = "A string representing a start timestamp" ) Expression startTimestamp, - @Param(name = "endTimestamp", type = { "date" }, description = "A string representing an end timestamp") Expression endTimestamp + @Param( + name = "endTimestamp", + type = { "date", "date_nanos" }, + description = "A string representing an end timestamp" + ) Expression endTimestamp ) { super(source, List.of(unit, startTimestamp, 
endTimestamp)); this.unit = unit; @@ -213,33 +220,115 @@ public class DateDiff extends EsqlScalarFunction { return endTimestamp; } - @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) - static int process(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + @Evaluator(extraName = "ConstantMillis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillis(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { ZonedDateTime zdtStart = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestamp), UTC); ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestamp), UTC); return datePartFieldUnit.diff(zdtStart, zdtEnd); } - @Evaluator(warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) - static int process(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { - return process(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + @Evaluator(extraName = "Millis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillis(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + return processMillis(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + } + + @Evaluator(extraName = "ConstantNanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanos(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(DateUtils.toInstant(startTimestamp), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(DateUtils.toInstant(endTimestamp), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(extraName = "Nanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanos(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + return processNanos(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + } + + @Evaluator(extraName = "ConstantNanosMillis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanosMillis(@Fixed Part datePartFieldUnit, long startTimestampNanos, long endTimestampMillis) + throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(DateUtils.toInstant(startTimestampNanos), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestampMillis), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(extraName = "NanosMillis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanosMillis(BytesRef unit, long startTimestampNanos, long endTimestampMillis) throws IllegalArgumentException { + return processNanosMillis(Part.resolve(unit.utf8ToString()), startTimestampNanos, endTimestampMillis); + } + + @Evaluator(extraName = "ConstantMillisNanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillisNanos(@Fixed Part datePartFieldUnit, long startTimestampMillis, long endTimestampNanos) + throws IllegalArgumentException { + ZonedDateTime zdtStart = 
ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestampMillis), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(DateUtils.toInstant(endTimestampNanos), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(extraName = "MillisNanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillisNanos(BytesRef unit, long startTimestampMillis, long endTimestampNanos) throws IllegalArgumentException { + return processMillisNanos(Part.resolve(unit.utf8ToString()), startTimestampMillis, endTimestampNanos); + } + + @FunctionalInterface + public interface DateDiffFactory { + ExpressionEvaluator.Factory build( + Source source, + ExpressionEvaluator.Factory unitsEvaluator, + ExpressionEvaluator.Factory startTimestampEvaluator, + ExpressionEvaluator.Factory endTimestampEvaluator + ); + } + + @FunctionalInterface + public interface DateDiffConstantFactory { + ExpressionEvaluator.Factory build( + Source source, + Part unitsEvaluator, + ExpressionEvaluator.Factory startTimestampEvaluator, + ExpressionEvaluator.Factory endTimestampEvaluator + ); } @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + if (startTimestamp.dataType() == DATETIME && endTimestamp.dataType() == DATETIME) { + return toEvaluator(toEvaluator, DateDiffConstantMillisEvaluator.Factory::new, DateDiffMillisEvaluator.Factory::new); + } else if (startTimestamp.dataType() == DATE_NANOS && endTimestamp.dataType() == DATE_NANOS) { + return toEvaluator(toEvaluator, DateDiffConstantNanosEvaluator.Factory::new, DateDiffNanosEvaluator.Factory::new); + } else if (startTimestamp.dataType() == DATE_NANOS && endTimestamp.dataType() == DATETIME) { + return toEvaluator(toEvaluator, DateDiffConstantNanosMillisEvaluator.Factory::new, DateDiffNanosMillisEvaluator.Factory::new); + } else if (startTimestamp.dataType() == DATETIME && endTimestamp.dataType() == DATE_NANOS) { + return toEvaluator(toEvaluator, DateDiffConstantMillisNanosEvaluator.Factory::new, DateDiffMillisNanosEvaluator.Factory::new); + } + throw new UnsupportedOperationException( + "Invalid types [" + + startTimestamp.dataType() + + ", " + + endTimestamp.dataType() + + "] " + + "If you see this error, there is a bug in DateDiff.resolveType()" + ); + } + + private ExpressionEvaluator.Factory toEvaluator( + ToEvaluator toEvaluator, + DateDiffConstantFactory constantFactory, + DateDiffFactory dateDiffFactory + ) { ExpressionEvaluator.Factory startTimestampEvaluator = toEvaluator.apply(startTimestamp); ExpressionEvaluator.Factory endTimestampEvaluator = toEvaluator.apply(endTimestamp); if (unit.foldable()) { try { Part datePartField = Part.resolve(((BytesRef) unit.fold(toEvaluator.foldCtx())).utf8ToString()); - return new DateDiffConstantEvaluator.Factory(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); + return constantFactory.build(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); } catch (IllegalArgumentException e) { throw new InvalidArgumentException("invalid unit format for [{}]: {}", sourceText(), e.getMessage()); } } ExpressionEvaluator.Factory unitEvaluator = toEvaluator.apply(unit); - return new DateDiffEvaluator.Factory(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); + return dateDiffFactory.build(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); } @Override @@ -248,8 +337,10 @@ public class DateDiff extends EsqlScalarFunction { return new TypeResolution("Unresolved 
children"); } - TypeResolution resolution = isString(unit, sourceText(), FIRST).and(isDate(startTimestamp, sourceText(), SECOND)) - .and(isDate(endTimestamp, sourceText(), THIRD)); + String operationName = sourceText(); + TypeResolution resolution = isString(unit, sourceText(), FIRST).and( + TypeResolutions.isType(startTimestamp, DataType::isDate, operationName, SECOND, "datetime or date_nanos") + ).and(TypeResolutions.isType(endTimestamp, DataType::isDate, operationName, THIRD, "datetime or date_nanos")); if (resolution.unresolved()) { return resolution; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java index a3a808de277d..7f70c6e8cd37 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java @@ -36,7 +36,7 @@ public class DateDiffErrorTests extends ErrorsForCasesWithoutExamplesTestCase { if (i == 0) { return "string"; } - return "datetime"; + return "datetime or date_nanos"; })); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java index e194443a8bc2..7380ac08f85a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java @@ -18,7 +18,10 @@ import static org.hamcrest.Matchers.containsString; public class DateDiffFunctionTests extends ESTestCase { public void testDateDiffFunctionErrorUnitNotValid() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> DateDiff.processMillis(new BytesRef("sseconds"), 0, 0) + ); assertThat( e.getMessage(), containsString( @@ -27,7 +30,7 @@ public class DateDiffFunctionTests extends ESTestCase { ) ); - e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); + e = expectThrows(IllegalArgumentException.class, () -> DateDiff.processMillis(new BytesRef("not-valid-unit"), 0, 0)); assertThat( e.getMessage(), containsString( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index e2e2f0572c7a..e23283d89957 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import 
org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -101,7 +102,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { // Units as Keyword case return List.of( new TestCaseSupplier( - "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( @@ -109,15 +110,60 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(expected) ) ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.KEYWORD, DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffNanosEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.KEYWORD, DataType.DATE_NANOS, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") + ), + "DateDiffNanosMillisEvaluator[unit=Attribute[channel=0], startTimestampNanos=Attribute[channel=1], " + + "endTimestampMillis=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffMillisNanosEvaluator[unit=Attribute[channel=0], startTimestampMillis=Attribute[channel=1], " + + "endTimestampNanos=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), // Units as text case new TestCaseSupplier( - "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, List.of(DataType.TEXT, DataType.DATETIME, DataType.DATETIME), () -> new 
TestCaseSupplier.TestCase( List.of( @@ -125,11 +171,56 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(expected) ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.TEXT, DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffNanosEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.TEXT, DataType.DATE_NANOS, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") + ), + "DateDiffNanosMillisEvaluator[unit=Attribute[channel=0], startTimestampNanos=Attribute[channel=1], " + + "endTimestampMillis=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.TEXT, DataType.DATETIME, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffMillisNanosEvaluator[unit=Attribute[channel=0], startTimestampMillis=Attribute[channel=1], " + + "endTimestampNanos=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) ) ); } @@ -146,7 +237,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(null) @@ -163,7 +254,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - 
"DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(null) From bee1a4b2efc3aeef978b9a6908599f03c66941a9 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Thu, 23 Jan 2025 15:09:03 -0500 Subject: [PATCH 25/29] [ML] Migrate stream to core error parsing (#120722) Avoid trapping Throwable by rethrowing it on another thread, allowing us to reuse the `generateFailureXContent` for Exceptions and match the new 9.0 format. --- docs/changelog/120722.yaml | 5 ++ ...rverSentEventsRestActionListenerTests.java | 3 +- .../ServerSentEventsRestActionListener.java | 58 +++++-------------- 3 files changed, 20 insertions(+), 46 deletions(-) create mode 100644 docs/changelog/120722.yaml diff --git a/docs/changelog/120722.yaml b/docs/changelog/120722.yaml new file mode 100644 index 000000000000..4bdd65b0937e --- /dev/null +++ b/docs/changelog/120722.yaml @@ -0,0 +1,5 @@ +pr: 120722 +summary: Migrate stream to core error parsing +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index 5fc4448c8094..69912a967fd2 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -85,8 +85,7 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { private static final Exception expectedException = new IllegalStateException("hello there"); private static final String expectedExceptionAsServerSentEvent = """ {\ - "error":{"root_cause":[{"type":"illegal_state_exception","reason":"hello there",\ - "caused_by":{"type":"illegal_state_exception","reason":"hello there"}}],\ + "error":{"root_cause":[{"type":"illegal_state_exception","reason":"hello there"}],\ "type":"illegal_state_exception","reason":"hello there"},"status":500\ }"""; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 62cbcf902a9e..6991e1325f3b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -40,15 +40,10 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Iterator; -import java.util.Map; import java.util.Objects; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE; -import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE; -import static org.elasticsearch.rest.RestController.ERROR_TRACE_DEFAULT; - /** * A version of {@link org.elasticsearch.rest.action.RestChunkedToXContentListener} that reads from a {@link Flow.Publisher} 
and encodes * the response in Server-Sent Events. @@ -154,48 +149,23 @@ public class ServerSentEventsRestActionListener implements ActionListener Iterators.concat(ChunkedToXContentHelper.startObject(), ChunkedToXContentHelper.chunk((b, p) -> { - // Render the exception with a simple message - if (channel.detailedErrorsEnabled() == false) { - String message = "No ElasticsearchException found"; - var inner = t; - for (int counter = 0; counter < 10 && inner != null; counter++) { - if (inner instanceof ElasticsearchException) { - message = inner.getClass().getSimpleName() + "[" + inner.getMessage() + "]"; - break; - } - inner = inner.getCause(); - } - return b.field("error", message); - } - var errorParams = p; - if (errorParams.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && status != RestStatus.UNAUTHORIZED) { - errorParams = new ToXContent.DelegatingMapParams( - Map.of(REST_EXCEPTION_SKIP_STACK_TRACE, "false", REST_EXCEPTION_SKIP_CAUSE, "true"), - params - ); - } - - // Render the exception with all details - final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(t); - b.startObject("error"); - { - b.startArray("root_cause"); - for (ElasticsearchException rootCause : rootCauses) { - b.startObject(); - rootCause.toXContent(b, errorParams); - b.endObject(); - } - b.endArray(); - } - ElasticsearchException.generateThrowableXContent(b, errorParams, t); - return b.endObject(); - }), ChunkedToXContentHelper.field("status", status.getStatus()), ChunkedToXContentHelper.endObject()); + Exception e; + if (t instanceof Exception) { + e = (Exception) t; + } else { + // if not exception, then error, and we should not let it escape. rethrow on another thread, and inform the user we're stopping. + ExceptionsHelper.maybeDieOnAnotherThread(t); + e = new RuntimeException("Fatal error while streaming response", t); + } + return params -> Iterators.concat( + ChunkedToXContentHelper.startObject(), + Iterators.single((b, p) -> ElasticsearchException.generateFailureXContent(b, p, e, channel.detailedErrorsEnabled())), + Iterators.single((b, p) -> b.field("status", status.getStatus())), + ChunkedToXContentHelper.endObject() + ); } private void requestNextChunk(ActionListener listener) { From c7d8862c75dab684c1997f486ccf869dee5459f3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 24 Jan 2025 08:49:40 +1100 Subject: [PATCH 26/29] Mute org.elasticsearch.xpack.esql.action.CrossClusterAsyncEnrichStopIT testEnrichAfterStop #120757 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ae61311fdb4d..ef564633d1fc 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -235,6 +235,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120668 - class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/119882 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncEnrichStopIT + method: testEnrichAfterStop + issue: https://github.com/elastic/elasticsearch/issues/120757 # Examples: # From 36292d3f12f7fddd9445fb9ead75b061ed7e1bf7 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Fri, 24 Jan 2025 08:59:22 +1000 Subject: [PATCH 27/29] Improve memory aspects of enrich cache (#120256) This commit reduces the occupied heap space of the enrich cache and corrects inaccuracies in tracking the occupied heap space (for cache size 
limitation purposes). --------- Co-authored-by: Joe Gallo --- docs/changelog/120256.yaml | 7 + .../enrich/EnrichProcessorMaxMatchesIT.java | 191 ++++++++++++++++++ .../xpack/enrich/AbstractEnrichProcessor.java | 37 ++-- .../xpack/enrich/EnrichCache.java | 116 +++++------ .../xpack/enrich/EnrichProcessorFactory.java | 43 ++-- .../xpack/enrich/GeoMatchProcessor.java | 7 +- .../xpack/enrich/MatchProcessor.java | 5 +- .../xpack/enrich/EnrichCacheTests.java | 151 +++----------- .../xpack/enrich/GeoMatchProcessorTests.java | 12 +- .../xpack/enrich/MatchProcessorTests.java | 12 +- 10 files changed, 347 insertions(+), 234 deletions(-) create mode 100644 docs/changelog/120256.yaml create mode 100644 x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java diff --git a/docs/changelog/120256.yaml b/docs/changelog/120256.yaml new file mode 100644 index 000000000000..c4ee5ab1705c --- /dev/null +++ b/docs/changelog/120256.yaml @@ -0,0 +1,7 @@ +pr: 120256 +summary: Improve memory aspects of enrich cache +area: Ingest Node +type: enhancement +issues: + - 96050 + - 120021 diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java new file mode 100644 index 000000000000..230e5e4dd3c6 --- /dev/null +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; +import static org.elasticsearch.xpack.enrich.AbstractEnrichTestCase.createSourceIndices; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichProcessorMaxMatchesIT extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder() + // TODO Change this to run with security enabled + // https://github.com/elastic/elasticsearch/issues/75940 + .put(XPackSettings.SECURITY_ENABLED.getKey(), false) + .build(); + } + + public void testEnrichCacheValuesAndMaxMatches() { + // this test is meant to be much less ignorable than a mere comment in the code, since the behavior here is tricky. + + // there's an interesting edge case where two processors could be using the same policy and search, etc, + // but that they have a different number of max_matches -- if we're not careful about how we implement caching, + // then we could miss that edge case and return the wrong results from the cache. 
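The comment above describes the failure mode this test guards against. As a minimal illustration (an editor's sketch with hypothetical types, not code from this patch), a cache key that omits max_matches lets two processors that differ only in max_matches collide on the same entry:

```java
// Hypothetical sketch: why max_matches must participate in the cache key.
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class MaxMatchesCollision {
    record KeyWithoutMax(String enrichIndex, Object lookupValue) {}
    record KeyWithMax(String enrichIndex, Object lookupValue, int maxMatches) {}

    public static void main(String[] args) {
        Map<KeyWithoutMax, List<String>> broken = new HashMap<>();
        // The max_matches=1 processor caches its single-hit result first...
        broken.put(new KeyWithoutMax(".enrich-kv-1", "k1"), List.of("v1"));
        // ...and the max_matches=8 processor then wrongly reuses it, losing "v2".
        System.out.println(broken.get(new KeyWithoutMax(".enrich-kv-1", "k1"))); // [v1]

        Map<KeyWithMax, List<String>> fixed = new HashMap<>();
        fixed.put(new KeyWithMax(".enrich-kv-1", "k1", 1), List.of("v1"));
        // A different max_matches now yields a different key: a miss, as it should be.
        System.out.println(fixed.get(new KeyWithMax(".enrich-kv-1", "k1", 8))); // null
    }
}
```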
+ + // Ensure enrich cache is empty + var statsRequest = new EnrichStatsAction.Request(TEST_REQUEST_TIMEOUT); + var statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); + assertThat(statsResponse.getCacheStats().size(), equalTo(1)); + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).misses(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).hits(), equalTo(0L)); + + String policyName = "kv"; + String sourceIndexName = "kv"; + + var enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(sourceIndexName), "key", List.of("value")); + + // Create source index and add two documents: + createSourceIndices(client(), enrichPolicy); + { + IndexRequest indexRequest = new IndexRequest(sourceIndexName); + indexRequest.create(true); + indexRequest.source(""" + { + "key": "k1", + "value": "v1" + } + """, XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + } + { + IndexRequest indexRequest = new IndexRequest(sourceIndexName); + indexRequest.create(true); + indexRequest.source(""" + { + "key": "k1", + "value": "v2" + } + """, XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + } + + // Store policy and execute it: + var putPolicyRequest = new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet(); + var executePolicyRequest = new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, executePolicyRequest).actionGet(); + + { + // run a single enrich processor to fill the cache, note that the default max_matches is 1 (so it's not given explicitly here) + var simulatePipelineRequest = jsonSimulatePipelineRequest(""" + { + "pipeline": { + "processors" : [ + { + "enrich": { + "policy_name": "kv", + "field": "key", + "target_field": "result" + } + } + ] + }, + "docs": [ + { + "_source": { + "key": "k1" + } + } + ] + } + """); + var response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); + var result = (SimulateDocumentBaseResult) response.getResults().get(0); + assertThat(result.getFailure(), nullValue()); + // it's not actually important in this specific test whether the result is v1 or v2 + assertThat(result.getIngestDocument().getFieldValue("result.value", String.class), containsString("v")); + } + + { + // run two enrich processors with different max_matches, and see if we still get the right behavior + var simulatePipelineRequest = jsonSimulatePipelineRequest(""" + { + "pipeline": { + "processors" : [ + { + "enrich": { + "policy_name": "kv", + "field": "key", + "target_field": "result" + } + }, + { + "enrich": { + "policy_name": "kv", + "field": "key", + "target_field": "results", + "max_matches": 8 + } + } + ] + }, + "docs": [ + { + "_source": { + "key": "k1" + } + } + ] + } + """); + var response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); + var result = (SimulateDocumentBaseResult) response.getResults().get(0); + assertThat(result.getFailure(), nullValue()); + // it's not actually important in this specific test whether the result is v1 or v2 + assertThat(result.getIngestDocument().getFieldValue("result.value", String.class), containsString("v")); + + // this is the important 
part of the test -- did the max_matches=1 case pollute the cache for the max_matches=8 case? + @SuppressWarnings("unchecked") + List> results = (List>) result.getIngestDocument().getSource().get("results"); + List values = results.stream().map(m -> m.get("value")).toList(); + // if these assertions fail, it probably means you were fussing about with the EnrichCache.CacheKey and tried removing + // the max_matches accounting from it + assertThat(values, containsInAnyOrder("v1", "v2")); + assertThat(values, hasSize(2)); + } + + statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); + assertThat(statsResponse.getCacheStats().size(), equalTo(1)); + // there are two items in the cache, the single result from max_matches 1 (implied), and the multi-result from max_matches 8 + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(2L)); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java index ddcad949b6a7..c2bcc6718495 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java @@ -20,22 +20,24 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; +import java.util.function.Supplier; public abstract class AbstractEnrichProcessor extends AbstractProcessor { private final String policyName; - private final BiConsumer>, Exception>> searchRunner; + private final EnrichProcessorFactory.SearchRunner searchRunner; private final TemplateScript.Factory field; private final TemplateScript.Factory targetField; private final boolean ignoreMissing; private final boolean overrideEnabled; protected final String matchField; protected final int maxMatches; + private final String indexAlias; protected AbstractEnrichProcessor( String tag, String description, - BiConsumer>, Exception>> searchRunner, + EnrichProcessorFactory.SearchRunner searchRunner, String policyName, TemplateScript.Factory field, TemplateScript.Factory targetField, @@ -53,6 +55,8 @@ public abstract class AbstractEnrichProcessor extends AbstractProcessor { this.overrideEnabled = overrideEnabled; this.matchField = matchField; this.maxMatches = maxMatches; + // note: since the policyName determines the indexAlias, we can calculate this once + this.indexAlias = EnrichPolicy.getBaseName(policyName); } public abstract QueryBuilder getQueryBuilder(Object fieldValue); @@ -68,20 +72,23 @@ public abstract class AbstractEnrichProcessor extends AbstractProcessor { return; } - QueryBuilder queryBuilder = getQueryBuilder(value); - ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(queryBuilder); - SearchSourceBuilder searchBuilder = new SearchSourceBuilder(); - searchBuilder.from(0); - searchBuilder.size(maxMatches); - searchBuilder.trackScores(false); - searchBuilder.fetchSource(true); - searchBuilder.query(constantScore); - SearchRequest req = new SearchRequest(); - req.indices(EnrichPolicy.getBaseName(getPolicyName())); - req.preference(Preference.LOCAL.type()); - req.source(searchBuilder); + Supplier searchRequestSupplier = () -> { + QueryBuilder queryBuilder = getQueryBuilder(value); + ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(queryBuilder); + SearchSourceBuilder searchBuilder = new 
SearchSourceBuilder(); + searchBuilder.from(0); + searchBuilder.size(maxMatches); + searchBuilder.trackScores(false); + searchBuilder.fetchSource(true); + searchBuilder.query(constantScore); + SearchRequest req = new SearchRequest(); + req.indices(indexAlias); + req.preference(Preference.LOCAL.type()); + req.source(searchBuilder); + return req; + }; - searchRunner.accept(req, (searchHits, e) -> { + searchRunner.accept(value, maxMatches, searchRequestSupplier, (searchHits, e) -> { if (e != null) { handler.accept(null, e); return; diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java index a2899813aa42..d11ca41b3fba 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java @@ -8,16 +8,12 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexAbstraction; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; @@ -26,9 +22,8 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.LongSupplier; import java.util.function.ToLongBiFunction; @@ -36,28 +31,24 @@ import java.util.function.ToLongBiFunction; * A simple cache for enrich that uses {@link Cache}. There is one instance of this cache and * multiple enrich processors with different policies will use this cache. *
<p>
- * The key of the cache is based on the search request and the enrich index that will be used. - * Search requests that enrich generates target the alias for an enrich policy, this class - * resolves the alias to the actual enrich index and uses that for the cache key. This way - * no stale entries will be returned if a policy execution happens and a new enrich index is created. - * <p>
* There is no cleanup mechanism of stale entries in case a new enrich index is created * as part of a policy execution. This shouldn't be needed as cache entries for prior enrich * indices will be eventually evicted, because these entries will not end up being used. The * latest enrich index name will be used as cache key after an enrich policy execution. - * (Also a cleanup mechanism also wouldn't be straightforward to implement, + * (Also a cleanup mechanism wouldn't be straightforward to implement, * since there is no easy check to see that an enrich index used as cache key no longer is the - * current enrich index the enrich alias of an policy refers to. It would require checking + * current enrich index that the enrich alias of a policy refers to. It would require checking * all cached entries on each cluster state update) */ public final class EnrichCache { + private static final CacheValue EMPTY_CACHE_VALUE = new CacheValue(List.of(), CacheKey.CACHE_KEY_SIZE); + private final Cache cache; private final LongSupplier relativeNanoTimeProvider; private final AtomicLong hitsTimeInNanos = new AtomicLong(0); private final AtomicLong missesTimeInNanos = new AtomicLong(0); private final AtomicLong sizeInBytes = new AtomicLong(0); - private volatile Metadata metadata; EnrichCache(long maxSize) { this(maxSize, System::nanoTime); @@ -89,30 +80,36 @@ public final class EnrichCache { } /** - * This method notifies the given listener of the value in this cache for the given searchRequest. If there is no value in the cache - * for the searchRequest, then the new cache value is computed using searchResponseFetcher. - * @param searchRequest The key for the cache request + * This method notifies the given listener of the value in this cache for the given search parameters. If there is no value in the cache + * for these search parameters, then the new cache value is computed using searchResponseFetcher. + * + * @param enrichIndex The enrich index from which the results will be retrieved + * @param lookupValue The value that will be used in the search + * @param maxMatches The max number of matches that the search will return * @param searchResponseFetcher The function used to compute the value to be put in the cache, if there is no value in the cache already * @param listener A listener to be notified of the value in the cache */ public void computeIfAbsent( - SearchRequest searchRequest, - BiConsumer> searchResponseFetcher, + String enrichIndex, + Object lookupValue, + int maxMatches, + Consumer> searchResponseFetcher, ActionListener>> listener ) { // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
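The "intentionally non-locking" comment above is worth unpacking: a hit reads the cache without any coordination, and on a miss two racing threads may both run the search and both put the same entry, which is accepted as cheaper than locking. A compact sketch of that pattern follows (an editor's illustration with generic names, not the patch's code):

```java
// Sketch of the deliberately non-locking get-then-put cache access pattern.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

class NonLockingCache<K, V> {
    private final Map<K, V> cache = new ConcurrentHashMap<>();

    V getOrCompute(K key, Function<K, V> compute) {
        V hit = cache.get(key);
        if (hit != null) {
            return hit; // hit: no locking, no allocation
        }
        // Two threads can race past the get() and both compute the value.
        // The computed value is deterministic for a given key, so re-putting
        // the same key/value is harmless and avoids the cost of locking.
        V value = compute.apply(key);
        cache.put(key, value);
        return value;
    }

    public static void main(String[] args) {
        NonLockingCache<String, Integer> c = new NonLockingCache<>();
        System.out.println(c.getOrCompute("k", String::length)); // computes: 1
        // Second call is a hit; this function would throw if it were invoked.
        System.out.println(c.getOrCompute("k", s -> { throw new AssertionError(); }));
    }
}
```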
long cacheStart = relativeNanoTimeProvider.getAsLong(); - List> response = get(searchRequest); + var cacheKey = new CacheKey(enrichIndex, lookupValue, maxMatches); + List> response = get(cacheKey); long cacheRequestTime = relativeNanoTimeProvider.getAsLong() - cacheStart; if (response != null) { hitsTimeInNanos.addAndGet(cacheRequestTime); listener.onResponse(response); } else { final long retrieveStart = relativeNanoTimeProvider.getAsLong(); - searchResponseFetcher.accept(searchRequest, ActionListener.wrap(resp -> { - CacheValue value = toCacheValue(resp); - put(searchRequest, value); - List> copy = deepCopy(value.hits, false); + searchResponseFetcher.accept(ActionListener.wrap(resp -> { + CacheValue cacheValue = toCacheValue(resp); + put(cacheKey, cacheValue); + List> copy = deepCopy(cacheValue.hits, false); long databaseQueryAndCachePutTime = relativeNanoTimeProvider.getAsLong() - retrieveStart; missesTimeInNanos.addAndGet(cacheRequestTime + databaseQueryAndCachePutTime); listener.onResponse(copy); @@ -121,10 +118,7 @@ public final class EnrichCache { } // non-private for unit testing only - List> get(SearchRequest searchRequest) { - String enrichIndex = getEnrichIndexKey(searchRequest); - CacheKey cacheKey = new CacheKey(enrichIndex, searchRequest); - + List> get(CacheKey cacheKey) { CacheValue response = cache.get(cacheKey); if (response != null) { return deepCopy(response.hits, false); @@ -134,18 +128,11 @@ public final class EnrichCache { } // non-private for unit testing only - void put(SearchRequest searchRequest, CacheValue cacheValue) { - String enrichIndex = getEnrichIndexKey(searchRequest); - CacheKey cacheKey = new CacheKey(enrichIndex, searchRequest); - + void put(CacheKey cacheKey, CacheValue cacheValue) { cache.put(cacheKey, cacheValue); sizeInBytes.addAndGet(cacheValue.sizeInBytes); } - void setMetadata(Metadata metadata) { - this.metadata = metadata; - } - public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { Cache.CacheStats cacheStats = cache.stats(); return new EnrichStatsAction.Response.CacheStats( @@ -160,18 +147,13 @@ public final class EnrichCache { ); } - private String getEnrichIndexKey(SearchRequest searchRequest) { - String alias = searchRequest.indices()[0]; - IndexAbstraction ia = metadata.getIndicesLookup().get(alias); - if (ia == null) { - throw new IndexNotFoundException("no generated enrich index [" + alias + "]"); - } - return ia.getIndices().get(0).getName(); - } - static CacheValue toCacheValue(SearchResponse response) { + if (response.getHits().getHits().length == 0) { + return EMPTY_CACHE_VALUE; + } List> result = new ArrayList<>(response.getHits().getHits().length); - long size = 0; + // Include the size of the cache key. + long size = CacheKey.CACHE_KEY_SIZE; for (SearchHit hit : response.getHits()) { // There is a cost of decompressing source here plus caching it. // We do it first so we don't decompress it twice. 
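The hunk above makes `toCacheValue` charge every entry a fixed cache-key overhead and short-circuit empty responses to one shared constant. A reduced sketch of that accounting (an editor's illustration; the sizes and names are simplified, with the 256-byte constant mirroring the patch's `CacheKey.CACHE_KEY_SIZE`):

```java
// Sketch of the shared-empty-value and size-accounting pattern in toCacheValue.
import java.util.List;
import java.util.Map;

class CacheValueSketch {
    record CacheValue(List<Map<String, Object>> hits, long sizeInBytes) {}

    // Assumed flat key overhead, mirroring CacheKey.CACHE_KEY_SIZE in the patch.
    static final long CACHE_KEY_SIZE = 256L;
    // One shared instance for every empty result: no per-miss allocation.
    static final CacheValue EMPTY = new CacheValue(List.of(), CACHE_KEY_SIZE);

    static CacheValue toCacheValue(List<Map<String, Object>> hits, long hitsBytes) {
        if (hits.isEmpty()) {
            return EMPTY;
        }
        // Non-empty entries pay for their hits plus the key overhead.
        return new CacheValue(List.copyOf(hits), CACHE_KEY_SIZE + hitsBytes);
    }

    public static void main(String[] args) {
        System.out.println(toCacheValue(List.of(), 0L) == EMPTY);                        // true
        System.out.println(toCacheValue(List.of(Map.of("k", "v")), 64L).sizeInBytes()); // 320
    }
}
```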
@@ -209,28 +191,26 @@ public final class EnrichCache { } } - private static class CacheKey { - - final String enrichIndex; - final SearchRequest searchRequest; - - private CacheKey(String enrichIndex, SearchRequest searchRequest) { - this.enrichIndex = enrichIndex; - this.searchRequest = searchRequest; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CacheKey cacheKey = (CacheKey) o; - return enrichIndex.equals(cacheKey.enrichIndex) && searchRequest.equals(cacheKey.searchRequest); - } - - @Override - public int hashCode() { - return Objects.hash(enrichIndex, searchRequest); - } + /** + * The cache key consists of the (variable) parameters that are used to construct a search request for the enrich lookup. We define a + * custom record to group these fields to avoid constructing and storing the much larger + * {@link org.elasticsearch.action.search.SearchRequest}. + * + * @param enrichIndex The enrich index (i.e. not the alias, but the concrete index that the alias points to) + * @param lookupValue The value that is used to find matches in the enrich index + * @param maxMatches The max number of matches that the enrich lookup should return. This changes the size of the search response and + * should thus be included in the cache key + */ + // Visibility for testing + record CacheKey(String enrichIndex, Object lookupValue, int maxMatches) { + /** + * In reality, the size in bytes of the cache key is a function of the {@link CacheKey#lookupValue} field plus some constant for + * the object itself, the string reference for the enrich index (but not the string itself because it's taken from the metadata), + * and the integer for the max number of matches. However, by defining a static cache key size, we can make the + * {@link EnrichCache#EMPTY_CACHE_VALUE} static as well, which allows us to avoid having to instantiate new cache values for + * empty results and thus save some heap space. 
+ */ + private static final long CACHE_KEY_SIZE = 256L; } // Visibility for testing diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java index 9890a96aae82..0c1ad73c96c2 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.Processor; import org.elasticsearch.script.ScriptService; @@ -29,6 +30,7 @@ import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ENRICH_ORIGIN; @@ -50,12 +52,12 @@ final class EnrichProcessorFactory implements Processor.Factory, Consumer processorFactories, String tag, String description, Map config) throws Exception { - String policyName = ConfigurationUtils.readStringProperty(TYPE, tag, config, "policy_name"); - String policyAlias = EnrichPolicy.getBaseName(policyName); + final String policyName = ConfigurationUtils.readStringProperty(TYPE, tag, config, "policy_name"); + final String indexAlias = EnrichPolicy.getBaseName(policyName); if (metadata == null) { throw new IllegalStateException("enrich processor factory has not yet been initialized with cluster state"); } - IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(policyAlias); + IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(indexAlias); if (indexAbstraction == null) { throw new IllegalArgumentException("no enrich index exists for policy with name [" + policyName + "]"); } @@ -78,7 +80,7 @@ final class EnrichProcessorFactory implements Processor.Factory, Consumer 128) { throw ConfigurationUtils.newConfigurationException(TYPE, tag, "max_matches", "should be between 1 and 128"); } - BiConsumer>, Exception>> searchRunner = createSearchRunner(client, enrichCache); + var searchRunner = createSearchRunner(indexAlias, client, enrichCache); switch (policyType) { case EnrichPolicy.MATCH_TYPE: case EnrichPolicy.RANGE_TYPE: @@ -121,25 +123,40 @@ final class EnrichProcessorFactory implements Processor.Factory, Consumer>, Exception>> createSearchRunner( - Client client, - EnrichCache enrichCache - ) { + private SearchRunner createSearchRunner(String indexAlias, Client client, EnrichCache enrichCache) { Client originClient = new OriginSettingClient(client, ENRICH_ORIGIN); - return (req, handler) -> { + return (value, maxMatches, reqSupplier, handler) -> { // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
enrichCache.computeIfAbsent( - req, - (searchRequest, searchResponseActionListener) -> originClient.execute( + getEnrichIndexKey(indexAlias), + value, + maxMatches, + (searchResponseActionListener) -> originClient.execute( EnrichCoordinatorProxyAction.INSTANCE, - searchRequest, + reqSupplier.get(), searchResponseActionListener ), ActionListener.wrap(resp -> handler.accept(resp, null), e -> handler.accept(null, e)) ); }; } + + private String getEnrichIndexKey(String indexAlias) { + IndexAbstraction ia = metadata.getIndicesLookup().get(indexAlias); + if (ia == null) { + throw new IndexNotFoundException("no generated enrich index [" + indexAlias + "]"); + } + return ia.getIndices().get(0).getName(); + } + + public interface SearchRunner { + void accept( + Object value, + int maxMatches, + Supplier searchRequestSupplier, + BiConsumer>, Exception> handler + ); + } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java index dd164c630495..998b06e870b7 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.enrich; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.ShapeRelation; @@ -15,10 +14,6 @@ import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.TemplateScript; -import java.util.List; -import java.util.Map; -import java.util.function.BiConsumer; - public final class GeoMatchProcessor extends AbstractEnrichProcessor { private final ShapeRelation shapeRelation; @@ -27,7 +22,7 @@ public final class GeoMatchProcessor extends AbstractEnrichProcessor { GeoMatchProcessor( String tag, String description, - BiConsumer>, Exception>> searchRunner, + EnrichProcessorFactory.SearchRunner searchRunner, String policyName, TemplateScript.Factory field, TemplateScript.Factory targetField, diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java index 76156c84c22b..b8b2f1b17fa8 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java @@ -6,22 +6,19 @@ */ package org.elasticsearch.xpack.enrich; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.script.TemplateScript; import java.util.List; -import java.util.Map; -import java.util.function.BiConsumer; public final class MatchProcessor extends AbstractEnrichProcessor { MatchProcessor( String tag, String description, - BiConsumer>, Exception>> searchRunner, + EnrichProcessorFactory.SearchRunner searchRunner, String policyName, TemplateScript.Factory field, TemplateScript.Factory targetField, diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java 
b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java index 19af929017a3..7125dfd45eaf 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java @@ -7,23 +7,14 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.AliasMetadata; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; import java.io.IOException; @@ -35,7 +26,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -45,47 +35,19 @@ import static org.hamcrest.Matchers.sameInstance; public class EnrichCacheTests extends ESTestCase { public void testCaching() { - // Emulate cluster metadata: - // (two enrich indices with corresponding alias entries) - var metadata = Metadata.builder() - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build()) - ) - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build()) - ) - .build(); - // Emulated search requests that an enrich processor could generate: // (two unique searches for two enrich policies) - var searchRequest1 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1")) - ); - var searchRequest2 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "2")) - ); - var searchRequest3 = new SearchRequest(EnrichPolicy.getBaseName("policy2")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1")) - ); - var searchRequest4 = new SearchRequest(EnrichPolicy.getBaseName("policy2")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "2")) - ); + var cacheKey1 = new EnrichCache.CacheKey("policy1-1", "1", 1); + var cacheKey2 = new EnrichCache.CacheKey("policy1-1", "2", 1); + var cacheKey3 = new EnrichCache.CacheKey("policy2-1", "1", 1); + var cacheKey4 = new 
EnrichCache.CacheKey("policy2-1", "2", 1); // Emulated search response (content doesn't matter, since it isn't used, it just a cache entry) EnrichCache.CacheValue searchResponse = new EnrichCache.CacheValue(List.of(Map.of("test", "entry")), 1L); EnrichCache enrichCache = new EnrichCache(3); - enrichCache.setMetadata(metadata); - enrichCache.put(searchRequest1, searchResponse); - enrichCache.put(searchRequest2, searchResponse); - enrichCache.put(searchRequest3, searchResponse); + enrichCache.put(cacheKey1, searchResponse); + enrichCache.put(cacheKey2, searchResponse); + enrichCache.put(cacheKey3, searchResponse); var cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(0L)); @@ -93,10 +55,10 @@ public class EnrichCacheTests extends ESTestCase { assertThat(cacheStats.evictions(), equalTo(0L)); assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L)); - assertThat(enrichCache.get(searchRequest1), notNullValue()); - assertThat(enrichCache.get(searchRequest2), notNullValue()); - assertThat(enrichCache.get(searchRequest3), notNullValue()); - assertThat(enrichCache.get(searchRequest4), nullValue()); + assertThat(enrichCache.get(cacheKey1), notNullValue()); + assertThat(enrichCache.get(cacheKey2), notNullValue()); + assertThat(enrichCache.get(cacheKey3), notNullValue()); + assertThat(enrichCache.get(cacheKey4), nullValue()); cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(3L)); @@ -104,7 +66,7 @@ public class EnrichCacheTests extends ESTestCase { assertThat(cacheStats.evictions(), equalTo(0L)); assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L)); - enrichCache.put(searchRequest4, searchResponse); + enrichCache.put(cacheKey4, searchResponse); cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(3L)); @@ -112,41 +74,27 @@ public class EnrichCacheTests extends ESTestCase { assertThat(cacheStats.evictions(), equalTo(1L)); assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L)); - // Simulate enrich policy execution, which should make current cache entries unused. 
- metadata = Metadata.builder() - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-2") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build()) - ) - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-2") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build()) - ) - .build(); - enrichCache.setMetadata(metadata); + cacheKey1 = new EnrichCache.CacheKey("policy1-2", "1", 1); + cacheKey2 = new EnrichCache.CacheKey("policy1-2", "2", 1); + cacheKey3 = new EnrichCache.CacheKey("policy2-2", "1", 1); + cacheKey4 = new EnrichCache.CacheKey("policy2-2", "2", 1); // Because enrich index has changed, cache can't serve cached entries - assertThat(enrichCache.get(searchRequest1), nullValue()); - assertThat(enrichCache.get(searchRequest2), nullValue()); - assertThat(enrichCache.get(searchRequest3), nullValue()); - assertThat(enrichCache.get(searchRequest4), nullValue()); + assertThat(enrichCache.get(cacheKey1), nullValue()); + assertThat(enrichCache.get(cacheKey2), nullValue()); + assertThat(enrichCache.get(cacheKey3), nullValue()); + assertThat(enrichCache.get(cacheKey4), nullValue()); // Add new entries using new enrich index name as key - enrichCache.put(searchRequest1, searchResponse); - enrichCache.put(searchRequest2, searchResponse); - enrichCache.put(searchRequest3, searchResponse); + enrichCache.put(cacheKey1, searchResponse); + enrichCache.put(cacheKey2, searchResponse); + enrichCache.put(cacheKey3, searchResponse); // Entries can now be served: - assertThat(enrichCache.get(searchRequest1), notNullValue()); - assertThat(enrichCache.get(searchRequest2), notNullValue()); - assertThat(enrichCache.get(searchRequest3), notNullValue()); - assertThat(enrichCache.get(searchRequest4), nullValue()); + assertThat(enrichCache.get(cacheKey1), notNullValue()); + assertThat(enrichCache.get(cacheKey2), notNullValue()); + assertThat(enrichCache.get(cacheKey3), notNullValue()); + assertThat(enrichCache.get(cacheKey4), nullValue()); cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(6L)); @@ -156,30 +104,8 @@ public class EnrichCacheTests extends ESTestCase { } public void testComputeIfAbsent() throws InterruptedException { - // Emulate cluster metadata: - // (two enrich indices with corresponding alias entries) - var metadata = Metadata.builder() - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build()) - ) - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build()) - ) - .build(); - // Emulated search requests that an enrich processor could generate: // (two unique searches for two enrich policies) - var searchRequest1 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1")) - ); final List> searchResponseMap = List.of( Map.of("key1", "value1", "key2", "value2"), Map.of("key3", "value3", "key4", "value4") @@ -187,12 +113,11 @@ 
public class EnrichCacheTests extends ESTestCase { final AtomicLong testNanoTime = new AtomicLong(0); // We use a relative time provider that increments 1ms every time it is called. So each operation appears to take 1ms EnrichCache enrichCache = new EnrichCache(3, () -> testNanoTime.addAndGet(TimeValue.timeValueMillis(1).getNanos())); - enrichCache.setMetadata(metadata); { CountDownLatch queriedDatabaseLatch = new CountDownLatch(1); CountDownLatch notifiedOfResultLatch = new CountDownLatch(1); - enrichCache.computeIfAbsent(searchRequest1, (searchRequest, searchResponseActionListener) -> { + enrichCache.computeIfAbsent("policy1-1", "1", 1, (searchResponseActionListener) -> { SearchResponse searchResponse = convertToSearchResponse(searchResponseMap); searchResponseActionListener.onResponse(searchResponse); searchResponse.decRef(); @@ -222,7 +147,7 @@ public class EnrichCacheTests extends ESTestCase { { CountDownLatch notifiedOfResultLatch = new CountDownLatch(1); - enrichCache.computeIfAbsent(searchRequest1, (searchRequest, searchResponseActionListener) -> { + enrichCache.computeIfAbsent("policy1-1", "1", 1, (searchResponseActionListener) -> { fail("Expected no call to the database because item should have been in the cache"); }, new ActionListener<>() { @Override @@ -326,22 +251,4 @@ public class EnrichCacheTests extends ESTestCase { assertArrayEquals(new byte[] { 1, 2, 3 }, (byte[]) result.get("embedded_object")); } - public void testEnrichIndexNotExist() { - // Emulate cluster metadata: - var metadata = Metadata.builder().build(); - - // Emulated search request on a non-exist enrich index that an enrich processor could generate - var searchRequest = new SearchRequest(EnrichPolicy.getBaseName("policy-enrich-index-not-generated")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("test", "query")) - ); - // Emulated search response (content doesn't matter, since it isn't used, it just a cache entry) - EnrichCache.CacheValue searchResponse = new EnrichCache.CacheValue(List.of(Map.of("test", "entry")), 1L); - - EnrichCache enrichCache = new EnrichCache(1); - enrichCache.setMetadata(metadata); - - IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> enrichCache.put(searchRequest, searchResponse)); - assertThat(e.getMessage(), containsString("no generated enrich index [.enrich-policy-enrich-index-not-generated]")); - } - } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java index 5642e685a592..fcf2bc3c1429 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java @@ -27,6 +27,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; +import java.util.function.Supplier; import static org.elasticsearch.xpack.enrich.MatchProcessorTests.str; import static org.hamcrest.Matchers.emptyArray; @@ -139,7 +140,7 @@ public class GeoMatchProcessorTests extends ESTestCase { } - private static final class MockSearchFunction implements BiConsumer>, Exception>> { + private static final class MockSearchFunction implements EnrichProcessorFactory.SearchRunner { private final List> mockResponse; private final SetOnce capturedRequest; private final Exception exception; @@ -157,8 +158,13 @@ public class GeoMatchProcessorTests 
extends ESTestCase { } @Override - public void accept(SearchRequest request, BiConsumer>, Exception> handler) { - capturedRequest.set(request); + public void accept( + Object value, + int maxMatches, + Supplier searchRequestSupplier, + BiConsumer>, Exception> handler + ) { + capturedRequest.set(searchRequestSupplier.get()); if (exception != null) { handler.accept(null, exception); } else { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index 0d7f900188ba..b4d3ec15d31d 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -25,6 +25,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; +import java.util.function.Supplier; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; @@ -376,7 +377,7 @@ public class MatchProcessorTests extends ESTestCase { assertThat(entry.get("tld"), equalTo("co")); } - private static final class MockSearchFunction implements BiConsumer>, Exception>> { + private static final class MockSearchFunction implements EnrichProcessorFactory.SearchRunner { private final List> mockResponse; private final SetOnce capturedRequest; private final Exception exception; @@ -394,8 +395,13 @@ public class MatchProcessorTests extends ESTestCase { } @Override - public void accept(SearchRequest request, BiConsumer>, Exception> handler) { - capturedRequest.set(request); + public void accept( + Object value, + int maxMatches, + Supplier searchRequestSupplier, + BiConsumer>, Exception> handler + ) { + capturedRequest.set(searchRequestSupplier.get()); if (exception != null) { handler.accept(null, exception); } else { From b88f1eddc40f875201f39c4b6b5b0fd57eae8d35 Mon Sep 17 00:00:00 2001 From: Jordan Powers Date: Thu, 23 Jan 2025 15:32:51 -0800 Subject: [PATCH 28/29] Counted keyword: inherit source keep mode from index settings (#120678) This patch adds a property to CountedKeywordMapper to track the synthetic_source_keep index setting. This property is then used to properly implement synthetic source support in the counted_keyword field type, with fallback to the ignore_source mechanism when synthetic_source_keep is set in either the field mapping or the index settings. 
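The heart of the change is a small precedence rule: an explicit `synthetic_source_keep` on the field mapping wins, otherwise the field inherits the index-level `index.mapping.synthetic_source_keep` setting, and any resolved mode other than `none` sends the field down the fallback ignore-source path (visible in the diff below as `sourceKeepMode().orElse(indexSourceKeepMode)`). A minimal standalone sketch of that rule, where `SourceKeepMode` mirrors the mapper's enum and `resolveKeepMode`/`usesNativeSyntheticSource` are illustrative helper names rather than methods from this patch:

    import java.util.Optional;

    public class KeepModeResolutionSketch {
        // Mirrors org.elasticsearch.index.mapper.Mapper.SourceKeepMode.
        enum SourceKeepMode { NONE, ARRAYS, ALL }

        // The field-level mapping parameter takes precedence; absent that,
        // the index-level setting is inherited.
        static SourceKeepMode resolveKeepMode(Optional<SourceKeepMode> fieldLevel, SourceKeepMode indexLevel) {
            return fieldLevel.orElse(indexLevel);
        }

        // Native synthetic source applies only when the resolved mode is NONE;
        // any other mode falls back to the ignore-source mechanism.
        static boolean usesNativeSyntheticSource(SourceKeepMode resolved) {
            return resolved == SourceKeepMode.NONE;
        }

        public static void main(String[] args) {
            // No field-level override: the index default (ARRAYS here) is
            // inherited, so the field takes the fallback path.
            SourceKeepMode resolved = resolveKeepMode(Optional.empty(), SourceKeepMode.ARRAYS);
            System.out.println(resolved + " native=" + usesNativeSyntheticSource(resolved)); // ARRAYS native=false

            // An explicit field-level "none" overrides the index default and
            // keeps the native counted_keyword implementation.
            resolved = resolveKeepMode(Optional.of(SourceKeepMode.NONE), SourceKeepMode.ARRAYS);
            System.out.println(resolved + " native=" + usesNativeSyntheticSource(resolved)); // NONE native=true
        }
    }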
--- .../index/mapper/MapperTestCase.java | 6 +- .../CountedKeywordFieldMapper.java | 25 +- .../CountedKeywordFieldMapperTests.java | 55 +- .../CountedTermsAggregatorTests.java | 5 +- .../counted_keyword/30_synthetic_source.yml | 531 ++++++++++++++++-- 5 files changed, 553 insertions(+), 69 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index bb48b0031483..a62af5729a09 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1077,12 +1077,12 @@ public abstract class MapperTestCase extends MapperServiceTestCase { this(b -> b.value(inputValue), b -> b.value(result), b -> b.value(blockLoaderResults), mapping); } - private void buildInput(XContentBuilder b) throws IOException { + public void buildInput(XContentBuilder b) throws IOException { b.field("field"); inputValue.accept(b); } - private void buildInputArray(XContentBuilder b, int elementCount) throws IOException { + public void buildInputArray(XContentBuilder b, int elementCount) throws IOException { b.startArray("field"); for (int i = 0; i < elementCount; i++) { inputValue.accept(b); @@ -1369,7 +1369,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase { assertThat(syntheticSource(mapper, b -> b.startArray("field").endArray()), equalTo(expected)); } - private boolean shouldUseIgnoreMalformed() { + protected boolean shouldUseIgnoreMalformed() { // 5% of test runs use ignore_malformed return supportsIgnoreMalformed() && randomDouble() <= 0.05; } diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index 1a765ca06efb..d38fa456582b 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -76,8 +76,7 @@ import static org.elasticsearch.common.lucene.Lucene.KEYWORD_ANALYZER; * 2 for each key (one per document), a counted_terms aggregation on a counted_keyword field will consider * the actual count and report a count of 3 for each key.
* - * Synthetic source is supported, but uses the fallback "ignore source" infrastructure unless the source_keep_mode is - * explicitly set to none in the field mapping parameters. + * Synthetic source is fully supported.
*/ public class CountedKeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "counted_keyword"; @@ -274,9 +273,11 @@ public class CountedKeywordFieldMapper extends FieldMapper { public static class Builder extends FieldMapper.Builder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).mappedFieldType.isIndexed(), true); private final Parameter> meta = Parameter.metaParam(); + private final SourceKeepMode indexSourceKeepMode; - protected Builder(String name) { + protected Builder(String name, SourceKeepMode indexSourceKeepMode) { super(name); + this.indexSourceKeepMode = indexSourceKeepMode; } @Override @@ -306,7 +307,8 @@ public class CountedKeywordFieldMapper extends FieldMapper { countFieldMapper.fieldType() ), builderParams(this, context), - countFieldMapper + countFieldMapper, + indexSourceKeepMode ); } } @@ -386,21 +388,26 @@ public class CountedKeywordFieldMapper extends FieldMapper { } } - public static TypeParser PARSER = new TypeParser((n, c) -> new CountedKeywordFieldMapper.Builder(n)); + public static TypeParser PARSER = new TypeParser( + (n, c) -> new CountedKeywordFieldMapper.Builder(n, c.getIndexSettings().sourceKeepMode()) + ); private final FieldType fieldType; private final BinaryFieldMapper countFieldMapper; + private final SourceKeepMode indexSourceKeepMode; protected CountedKeywordFieldMapper( String simpleName, FieldType fieldType, MappedFieldType mappedFieldType, BuilderParams builderParams, - BinaryFieldMapper countFieldMapper + BinaryFieldMapper countFieldMapper, + SourceKeepMode indexSourceKeepMode ) { super(simpleName, mappedFieldType, builderParams); this.fieldType = fieldType; this.countFieldMapper = countFieldMapper; + this.indexSourceKeepMode = indexSourceKeepMode; } @Override @@ -482,7 +489,7 @@ public class CountedKeywordFieldMapper extends FieldMapper { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName()).init(this); + return new Builder(leafName(), indexSourceKeepMode).init(this); } @Override @@ -492,8 +499,8 @@ public class CountedKeywordFieldMapper extends FieldMapper { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var keepMode = sourceKeepMode(); - if (keepMode.isPresent() == false || keepMode.get() != SourceKeepMode.NONE) { + var keepMode = sourceKeepMode().orElse(indexSourceKeepMode); + if (keepMode != SourceKeepMode.NONE) { return super.syntheticSourceSupport(); } diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java index c99edcf7352f..176311565ec8 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java @@ -10,6 +10,8 @@ package org.elasticsearch.xpack.countedkeyword; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.DocumentMapper; @@ -20,12 +22,15 @@ import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.search.lookup.SourceFilter; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.junit.AssumptionViolatedException; import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.stream.Stream; import static org.hamcrest.Matchers.equalTo; @@ -75,7 +80,6 @@ public class CountedKeywordFieldMapperTests extends MapperTestCase { DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { b.startObject("field"); minimalMapping(b); - b.field("synthetic_source_keep", "none"); b.endObject(); })).documentMapper(); @@ -94,7 +98,6 @@ public class CountedKeywordFieldMapperTests extends MapperTestCase { DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { b.startObject("field"); minimalMapping(b); - b.field("synthetic_source_keep", "none"); b.endObject(); })).documentMapper(); @@ -114,19 +117,32 @@ public class CountedKeywordFieldMapperTests extends MapperTestCase { assertThat(syntheticSource(mapper, new SourceFilter(null, new String[] { "field" }), buildInput), equalTo("{}")); } - @Override - public void testSyntheticSourceKeepAll() throws IOException { - // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" - } + public void testSyntheticSourceIndexLevelKeepArrays() throws IOException { + SyntheticSourceExample example = syntheticSourceSupportForKeepTests(shouldUseIgnoreMalformed()).example(1); + XContentBuilder mappings = mapping(b -> { + b.startObject("field"); + example.mapping().accept(b); + b.endObject(); + }); - @Override - public void testSyntheticSourceKeepArrays() throws IOException { - // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" - } + var settings = Settings.builder() + .put("index.mapping.source.mode", "synthetic") + .put("index.mapping.synthetic_source_keep", "arrays") + .build(); + DocumentMapper mapperAll = createMapperService(getVersion(), settings, () -> true, mappings).documentMapper(); - @Override - public void testSyntheticSourceKeepNone() throws IOException { - // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" + int elementCount = randomIntBetween(2, 5); + CheckedConsumer buildInput = (XContentBuilder builder) -> { + example.buildInputArray(builder, elementCount); + }; + + var builder = XContentFactory.jsonBuilder(); + builder.startObject(); + buildInput.accept(builder); + builder.endObject(); + String expected = Strings.toString(builder); + String actual = syntheticSource(mapperAll, buildInput); + assertThat(actual, equalTo(expected)); } @Override @@ -151,16 +167,21 @@ public class CountedKeywordFieldMapperTests extends MapperTestCase { return new SyntheticSourceExample(in, out, this::mapping); } + private final Set previousValues = new HashSet<>(); + private Tuple generateValue() { - String v = ESTestCase.randomAlphaOfLength(5); + String v; + if (previousValues.size() > 0 && randomBoolean()) { + v = randomFrom(previousValues); + } else { + v = ESTestCase.randomAlphaOfLength(5); + previousValues.add(v); + } return Tuple.tuple(v, v); } private void mapping(XContentBuilder b) throws IOException { minimalMapping(b); - // For now, synthetic source is only supported when "synthetic_source_keep" is "none". 
- // Once we implement true synthetic source support, we should remove this. - b.field("synthetic_source_keep", "none"); } @Override diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java index ef11c7dd3e9d..11dcff0bfac7 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.SourceToParse; @@ -40,7 +41,9 @@ public class CountedTermsAggregatorTests extends AggregatorTestCase { } public void testAggregatesCountedKeywords() throws Exception { - FieldMapper mapper = new CountedKeywordFieldMapper.Builder("stacktraces").build(MapperBuilderContext.root(false, false)); + FieldMapper mapper = new CountedKeywordFieldMapper.Builder("stacktraces", Mapper.SourceKeepMode.NONE).build( + MapperBuilderContext.root(false, false) + ); MappedFieldType fieldType = mapper.fieldType(); CountedTermsAggregationBuilder aggregationBuilder = new CountedTermsAggregationBuilder("st").field("stacktraces"); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml index 7ade369893f4..df85362df5fa 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml @@ -1,4 +1,4 @@ -setup: +"Source values are mutated as expected": - requires: cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] reason: "Feature implemented" @@ -14,7 +14,6 @@ setup: properties: events: type: counted_keyword - synthetic_source_keep: none - do: @@ -53,71 +52,525 @@ setup: id: "6" body: { "events": [null, null]} + - do: + index: + index: test-events + id: "7" + body: { "events": [["a", "b"], "a", ["c"], [["b"], "c"]]} + - do: indices.refresh: { } ---- -"Source values are mutated as expected": - - do: - search: - index: test-events - body: - query: - ids: - values: [1] - - match: - hits.hits.0._source: - events: ["a", "a", "b", "c"] + - do: + search: + index: test-events + body: + query: + ids: + values: [1] + - match: + hits.hits.0._source: + events: ["a", "a", "b", "c"] - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [2] - - match: + - match: hits.hits.0._source: events: ["a", "b", "b", "b", "c"] - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [3] - - match: + - match: hits.hits.0._source: events: ["a", "b", "c", "c"] - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [4] - - match: + - match: hits.hits.0._source: 
events: "a" - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [5] - - match: + - match: hits.hits.0._source: {} - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [6] - - match: + - match: hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [7] + - match: + hits.hits.0._source: + events: ["a", "a", "b", "b", "c", "c"] + +--- + +"synthetic_source_keep value is respected": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + events: + type: counted_keyword + synthetic_source_keep: all + + - do: + index: + index: test-events + id: "1" + body: { "events": [ "a", "b", "a", "c" ] } + + - do: + index: + index: test-events + id: "2" + body: { "events": [ "b", "b", "c", "a", "b" ] } + + - do: + index: + index: test-events + id: "3" + body: { "events": [ "c", "a", null, "b", null, "c" ] } + + - do: + index: + index: test-events + id: "4" + body: { "events": [ "a" ] } + + - do: + index: + index: test-events + id: "5" + body: { "events": [ ] } + + - do: + index: + index: test-events + id: "6" + body: { "events": [ null, null ] } + + - do: + index: + index: test-events + id: "7" + body: { "events": [["a", "b"], "a", ["c"], [["b"], "c"]]} + + - do: + indices.refresh: { } + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + events: [ "a", "b", "a", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + events: [ "b", "b", "c", "a", "b" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + events: [ "c", "a", null, "b", null, "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + events: [ "a" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: + events: [ ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 6 ] + - match: + hits.hits.0._source: + events: [ null, null ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 7 ] + - match: + hits.hits.0._source: + events: [["a", "b"], "a", ["c"], [["b"], "c"]] + +--- + +"synthetic_source_keep value is not inherited": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + event-object: + type: object + synthetic_source_keep: arrays + properties: + event-object-2: + type: object + properties: + events: + type: counted_keyword + - do: + index: + index: test-events + id: "1" + body: { "event-object": { "event-object-2": { "events": [ "a", "b", "a", "c" ] } } } + + - do: + index: + index: test-events + id: "2" + body: { "event-object": { "event-object-2": { "events": [ "b", "b", "c", "a", "b" ] } } } + + - do: + index: + index: test-events + id: "3" + body: { "event-object": { "event-object-2": { "events": [ "c", "a", null, "b", null, "c" ] } } } + + - do: + index: + index: test-events + id: "4" + body: { 
"event-object": { "event-object-2": { "events": [ "a" ] } } } + + - do: + index: + index: test-events + id: "5" + body: { "event-object": { "event-object-2": { "events": [ ] } } } + + - do: + index: + index: test-events + id: "6" + body: { "event-object": { "event-object-2": { "events": [ null, null ] } } } + + - do: + index: + index: test-events + id: "7" + body: { "event-object": { "event-object-2": { "events": [["a", "b"], "a", ["c"], [["b"], "c"]] } } } + + - do: + indices.refresh: { } + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "a", "b", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "b", "b", "b", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "b", "c", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: "a" + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 6 ] + - match: + hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 7 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "a", "b", "b", "c", "c" ] + +--- + +"Index-level synthetic_source_keep value is respected": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mapping.synthetic_source_keep: arrays + mappings: + properties: + events: + type: counted_keyword + + - do: + index: + index: test-events + id: "1" + body: { "events": [ "a", "b", "a", "c" ] } + + - do: + index: + index: test-events + id: "2" + body: { "events": [ "b", "b", "c", "a", "b" ] } + + - do: + index: + index: test-events + id: "3" + body: { "events": [ "c", "a", null, "b", null, "c" ] } + + - do: + index: + index: test-events + id: "4" + body: { "events": [ "a" ] } + + - do: + index: + index: test-events + id: "5" + body: { "events": [ ] } + + - do: + index: + index: test-events + id: "6" + body: { "events": [ null, null ] } + + - do: + index: + index: test-events + id: "7" + body: { "events": [ [ "a", "b" ], "a", [ "c" ], [ [ "b" ], "c" ] ] } + + - do: + indices.refresh: { } + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + events: [ "a", "b", "a", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + events: [ "b", "b", "c", "a", "b" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + events: [ "c", "a", null, "b", null, "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + events: [ "a" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: + events: [ ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 6 ] + - match: + 
hits.hits.0._source: + events: [ null, null ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 7 ] + - match: + hits.hits.0._source: + events: [["a", "b"], "a", ["c"], [["b"], "c"]] From bc67124a9084142b6a988ae95d9c38660e9389d5 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 23 Jan 2025 17:40:50 -0600 Subject: [PATCH 29/29] Increasing the default value of migrate.data_stream_reindex_max_request_per_second (#120758) --- .../migrate/action/ReindexDataStreamIndexTransportAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index fc2ca0364e8a..b915eb3cd3e2 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -57,7 +57,7 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio public static final Setting<Float> REINDEX_MAX_REQUESTS_PER_SECOND_SETTING = new Setting<>( REINDEX_MAX_REQUESTS_PER_SECOND_KEY, - Float.toString(10f), + Float.toString(1000f), s -> { if (s.equals("-1")) { return Float.POSITIVE_INFINITY;