Mirror of https://github.com/elastic/elasticsearch.git (synced 2025-04-25 15:47:23 -04:00)
ESQL: Add row counts to profile results (#120134)
Closes https://github.com/elastic/elasticsearch/issues/119969

- Renamed "pages_in/out" to "pages_received/emitted" to standardize the names across most operators. **There are still "pages_processed" operators**; maybe it would make sense to rename those as well?
- Added "pages_received/emitted" to the TopN operator, which was missing them.
- Added "rows_received/emitted" to most operators.
- Added a test to ensure all operators with a status provide those metrics.
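The new fields show up in the per-operator `status` objects of ES|QL profile results. A minimal sketch of the shape most operator `Status` classes take after this change, using an illustrative class name (each real operator in the diff below keeps its own extra fields such as `process_nanos`, `noops`, or `limit`):

```java
import java.io.IOException;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;

// Illustrative only: not a class from the PR, just the common shape of its Status changes.
class ExampleOperatorStatus implements ToXContentObject {
    private final int pagesProcessed;  // page count, already reported before this PR
    private final long rowsReceived;   // new: rows this operator has received
    private final long rowsEmitted;    // new: rows this operator has emitted

    ExampleOperatorStatus(int pagesProcessed, long rowsReceived, long rowsEmitted) {
        this.pagesProcessed = pagesProcessed;
        this.rowsReceived = rowsReceived;
        this.rowsEmitted = rowsEmitted;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field("pages_processed", pagesProcessed);
        builder.field("rows_received", rowsReceived);
        builder.field("rows_emitted", rowsEmitted);
        return builder.endObject();
    }
}
```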
Commit b7ab8f8bb7 (parent 1f182e7a2a)
37 changed files with 846 additions and 191 deletions
@@ -1,4 +1,5 @@
 import org.elasticsearch.gradle.internal.test.TestUtil
+import org.elasticsearch.gradle.OS
 
 /*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
@@ -77,7 +78,7 @@ tasks.register("copyPainless", Copy) {
 }
 
 tasks.named("run").configure {
-executable = "${buildParams.runtimeJavaHome.get()}/bin/java"
+executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + (OS.current() == OS.WINDOWS ? '.exe' : '')
 args << "-Dplugins.dir=${buildDir}/plugins" << "-Dtests.index=${buildDir}/index"
 dependsOn "copyExpression", "copyPainless", configurations.nativeLib
 systemProperty 'es.nativelibs.path', TestUtil.getTestLibraryPath(file("../libs/native/libraries/build/platform/").toString())

@@ -156,6 +156,7 @@ public class TransportVersions {
 public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_00_0);
 public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_00_0);
 public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_00_0);
+public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_00_0);
 
 /*
 * STOP! READ THIS FIRST! No, really,

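The `ESQL_PROFILE_ROWS_PROCESSED` constant added above is what keeps the new counters safe in mixed-version clusters: each `Status` in the hunks below only reads or writes the row counts when the stream's transport version is on or after it, and falls back to 0 when talking to an older node. A condensed sketch of that gating; the wrapper class and method names here are illustrative, the PR inlines the same pattern into every operator's `Status`:

```java
import java.io.IOException;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

// Illustrative helper, not part of the PR: version-gated serialization of the row counts.
final class RowCountSerialization {

    static void writeRowCounts(StreamOutput out, long rowsReceived, long rowsEmitted) throws IOException {
        // Only newer nodes understand the extra fields, so skip them for older peers.
        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
            out.writeVLong(rowsReceived);
            out.writeVLong(rowsEmitted);
        }
    }

    static long[] readRowCounts(StreamInput in) throws IOException {
        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
            return new long[] { in.readVLong(), in.readVLong() };
        }
        return new long[] { 0L, 0L }; // older nodes never sent row counts
    }
}
```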
@@ -138,7 +138,6 @@ public class LuceneCountOperator extends LuceneOperator {
 Page page = null;
 // emit only one page
 if (remainingDocs <= 0 && pagesEmitted == 0) {
-pagesEmitted++;
 LongBlock count = null;
 BooleanBlock seen = null;
 try {

@@ -151,7 +151,6 @@ final class LuceneMinMaxOperator extends LuceneOperator {
 Page page = null;
 // emit only one page
 if (remainingDocs <= 0 && pagesEmitted == 0) {
-pagesEmitted++;
 Block result = null;
 BooleanBlock seen = null;
 try {

@@ -68,6 +68,10 @@ public abstract class LuceneOperator extends SourceOperator {
 long processingNanos;
 int pagesEmitted;
 boolean doneCollecting;
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
 
 protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) {
 this.blockFactory = blockFactory;
@@ -115,7 +119,12 @@
 @Override
 public final Page getOutput() {
 try {
-return getCheckedOutput();
+Page page = getCheckedOutput();
+if (page != null) {
+pagesEmitted++;
+rowsEmitted += page.getPositionCount();
+}
+return page;
 } catch (IOException ioe) {
 throw new UncheckedIOException(ioe);
 }
@@ -252,6 +261,7 @@
 private final int sliceMin;
 private final int sliceMax;
 private final int current;
+private final long rowsEmitted;
 
 private Status(LuceneOperator operator) {
 processedSlices = operator.processedSlices;
@@ -276,6 +286,7 @@
 current = scorer.position;
 }
 pagesEmitted = operator.pagesEmitted;
+rowsEmitted = operator.rowsEmitted;
 }
 
 Status(
@@ -288,7 +299,8 @@
 int pagesEmitted,
 int sliceMin,
 int sliceMax,
-int current
+int current,
+long rowsEmitted
 ) {
 this.processedSlices = processedSlices;
 this.processedQueries = processedQueries;
@@ -300,6 +312,7 @@
 this.sliceMin = sliceMin;
 this.sliceMax = sliceMax;
 this.current = current;
+this.rowsEmitted = rowsEmitted;
 }
 
 Status(StreamInput in) throws IOException {
@@ -318,6 +331,11 @@
 sliceMin = in.readVInt();
 sliceMax = in.readVInt();
 current = in.readVInt();
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsEmitted = in.readVLong();
+} else {
+rowsEmitted = 0;
+}
 }
 
 @Override
@@ -336,6 +354,9 @@
 out.writeVInt(sliceMin);
 out.writeVInt(sliceMax);
 out.writeVInt(current);
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
@@ -383,6 +404,10 @@
 return current;
 }
 
+public long rowsEmitted() {
+return rowsEmitted;
+}
+
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
@@ -399,6 +424,7 @@
 builder.field("slice_min", sliceMin);
 builder.field("slice_max", sliceMax);
 builder.field("current", current);
+builder.field("rows_emitted", rowsEmitted);
 return builder.endObject();
 }
 
@@ -416,12 +442,13 @@
 && pagesEmitted == status.pagesEmitted
 && sliceMin == status.sliceMin
 && sliceMax == status.sliceMax
-&& current == status.current;
+&& current == status.current
+&& rowsEmitted == status.rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current);
+return Objects.hash(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current, rowsEmitted);
 }
 
 @Override

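Worth noting: page and row accounting for the Lucene source operators is now centralized in `LuceneOperator.getOutput()` (second hunk above), which counts every non-null page returned by `getCheckedOutput()`. That is why the per-subclass `pagesEmitted++` lines disappear from `LuceneCountOperator` and `LuceneMinMaxOperator` above and from `LuceneSourceOperator` and `LuceneTopNSourceOperator` below. The wrapper, as it appears in the diff:

```java
// Excerpt of the new LuceneOperator.getOutput() from the hunk above.
@Override
public final Page getOutput() {
    try {
        Page page = getCheckedOutput();
        if (page != null) {
            pagesEmitted++;                          // counted once, in the base class
            rowsEmitted += page.getPositionCount();  // new row counter
        }
        return page;
    } catch (IOException ioe) {
        throw new UncheckedIOException(ioe);
    }
}
```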
@@ -175,7 +175,6 @@ public class LuceneSourceOperator extends LuceneOperator {
 }
 Page page = null;
 if (currentPagePos >= minPageSize || remainingDocs <= 0 || scorer.isDone()) {
-pagesEmitted++;
 IntBlock shard = null;
 IntBlock leaf = null;
 IntVector docs = null;

@@ -240,7 +240,6 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
 Releasables.closeExpectNoException(shard, segments, docs, docBlock, scores);
 }
 }
-pagesEmitted++;
 return page;
 }
 

@@ -546,8 +546,8 @@ public class ValuesSourceReaderOperator extends AbstractPageMappingOperator {
 }
 
 @Override
-protected Status status(long processNanos, int pagesProcessed) {
-return new Status(new TreeMap<>(readersBuilt), processNanos, pagesProcessed);
+protected Status status(long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) {
+return new Status(new TreeMap<>(readersBuilt), processNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 public static class Status extends AbstractPageMappingOperator.Status {
@@ -559,8 +559,8 @@
 
 private final Map<String, Integer> readersBuilt;
 
-Status(Map<String, Integer> readersBuilt, long processNanos, int pagesProcessed) {
-super(processNanos, pagesProcessed);
+Status(Map<String, Integer> readersBuilt, long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) {
+super(processNanos, pagesProcessed, rowsReceived, rowsEmitted);
 this.readersBuilt = readersBuilt;
 }
 

@@ -37,6 +37,14 @@ public abstract class AbstractPageMappingOperator implements Operator {
 * Count of pages that have been processed by this operator.
 */
 private int pagesProcessed;
+/**
+* Count of rows this operator has received.
+*/
+private long rowsReceived;
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
 
 protected abstract Page process(Page page);
 
@@ -52,6 +60,7 @@
 public final void addInput(Page page) {
 assert prev == null : "has pending input page";
 prev = page;
+rowsReceived += page.getPositionCount();
 }
 
 @Override
@@ -75,6 +84,9 @@
 long start = System.nanoTime();
 Page p = process(prev);
 pagesProcessed++;
+if (p != null) {
+rowsEmitted += p.getPositionCount();
+}
 processNanos += System.nanoTime() - start;
 prev = null;
 return p;
@@ -82,11 +94,11 @@
 
 @Override
 public final Status status() {
-return status(processNanos, pagesProcessed);
+return status(processNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
-protected Status status(long processNanos, int pagesProcessed) {
-return new Status(processNanos, pagesProcessed);
+protected Status status(long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) {
+return new Status(processNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 @Override
@@ -105,15 +117,26 @@
 
 private final long processNanos;
 private final int pagesProcessed;
+private final long rowsReceived;
+private final long rowsEmitted;
 
-public Status(long processNanos, int pagesProcessed) {
+public Status(long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) {
 this.processNanos = processNanos;
 this.pagesProcessed = pagesProcessed;
+this.rowsReceived = rowsReceived;
+this.rowsEmitted = rowsEmitted;
 }
 
 protected Status(StreamInput in) throws IOException {
 processNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0;
 pagesProcessed = in.readVInt();
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsReceived = in.readVLong();
+rowsEmitted = in.readVLong();
+} else {
+rowsReceived = 0;
+rowsEmitted = 0;
+}
 }
 
 @Override
@@ -122,6 +145,10 @@
 out.writeVLong(processNanos);
 }
 out.writeVInt(pagesProcessed);
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsReceived);
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
@@ -133,6 +160,14 @@
 return pagesProcessed;
 }
 
+public long rowsReceived() {
+return rowsReceived;
+}
+
+public long rowsEmitted() {
+return rowsEmitted;
+}
+
 public long processNanos() {
 return processNanos;
 }
@@ -153,7 +188,7 @@
 if (builder.humanReadable()) {
 builder.field("process_time", TimeValue.timeValueNanos(processNanos));
 }
-return builder.field("pages_processed", pagesProcessed);
+return builder.field("pages_processed", pagesProcessed).field("rows_received", rowsReceived).field("rows_emitted", rowsEmitted);
 }
 
 @Override
@@ -161,12 +196,15 @@
 if (this == o) return true;
 if (o == null || getClass() != o.getClass()) return false;
 Status status = (Status) o;
-return processNanos == status.processNanos && pagesProcessed == status.pagesProcessed;
+return processNanos == status.processNanos
+&& pagesProcessed == status.pagesProcessed
+&& rowsReceived == status.rowsReceived
+&& rowsEmitted == status.rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(processNanos, pagesProcessed);
+return Objects.hash(processNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 @Override

@@ -49,6 +49,16 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator
 */
 private int pagesEmitted;
 
+/**
+* Count of rows this operator has received.
+*/
+private long rowsReceived;
+
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
+
 /**
 * Build and Iterator of results for a new page.
 */
@@ -82,6 +92,7 @@
 }
 next = new RuntimeTrackingIterator(receive(page));
 pagesReceived++;
+rowsReceived += page.getPositionCount();
 }
 
 @Override
@@ -101,16 +112,23 @@
 }
 Page ret = next.next();
 pagesEmitted++;
+rowsEmitted += ret.getPositionCount();
 return ret;
 }
 
 @Override
 public final AbstractPageMappingToIteratorOperator.Status status() {
-return status(processNanos, pagesReceived, pagesEmitted);
+return status(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
 }
 
-protected AbstractPageMappingToIteratorOperator.Status status(long processNanos, int pagesReceived, int pagesEmitted) {
-return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted);
+protected AbstractPageMappingToIteratorOperator.Status status(
+long processNanos,
+int pagesReceived,
+int pagesEmitted,
+long rowsReceived,
+long rowsEmitted
+) {
+return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
 }
 
 @Override
@@ -154,17 +172,28 @@
 private final long processNanos;
 private final int pagesReceived;
 private final int pagesEmitted;
+private final long rowsReceived;
+private final long rowsEmitted;
 
-public Status(long processNanos, int pagesProcessed, int pagesEmitted) {
+public Status(long processNanos, int pagesProcessed, int pagesEmitted, long rowsReceived, long rowsEmitted) {
 this.processNanos = processNanos;
 this.pagesReceived = pagesProcessed;
 this.pagesEmitted = pagesEmitted;
+this.rowsReceived = rowsReceived;
+this.rowsEmitted = rowsEmitted;
 }
 
 protected Status(StreamInput in) throws IOException {
 processNanos = in.readVLong();
 pagesReceived = in.readVInt();
 pagesEmitted = in.readVInt();
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsReceived = in.readVLong();
+rowsEmitted = in.readVLong();
+} else {
+rowsReceived = 0;
+rowsEmitted = 0;
+}
 }
 
 @Override
@@ -172,6 +201,10 @@
 out.writeVLong(processNanos);
 out.writeVInt(pagesReceived);
 out.writeVInt(pagesEmitted);
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsReceived);
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
@@ -187,6 +220,14 @@
 return pagesEmitted;
 }
 
+public long rowsReceived() {
+return rowsReceived;
+}
+
+public long rowsEmitted() {
+return rowsEmitted;
+}
+
 public long processNanos() {
 return processNanos;
 }
@@ -207,8 +248,10 @@
 if (builder.humanReadable()) {
 builder.field("process_time", TimeValue.timeValueNanos(processNanos));
 }
-builder.field("pages_received", pagesReceived);
-return builder.field("pages_emitted", pagesEmitted);
+return builder.field("pages_received", pagesReceived)
+.field("pages_emitted", pagesEmitted)
+.field("rows_received", rowsReceived)
+.field("rows_emitted", rowsEmitted);
 }
 
 @Override
@@ -216,12 +259,16 @@
 if (this == o) return true;
 if (o == null || getClass() != o.getClass()) return false;
 AbstractPageMappingToIteratorOperator.Status status = (AbstractPageMappingToIteratorOperator.Status) o;
-return processNanos == status.processNanos && pagesReceived == status.pagesReceived && pagesEmitted == status.pagesEmitted;
+return processNanos == status.processNanos
+&& pagesReceived == status.pagesReceived
+&& pagesEmitted == status.pagesEmitted
+&& rowsReceived == status.rowsReceived
+&& rowsEmitted == status.rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(processNanos, pagesReceived, pagesEmitted);
+return Objects.hash(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
 }
 
 @Override

@@ -58,6 +58,14 @@ public class AggregationOperator implements Operator {
 * Count of pages this operator has processed.
 */
 private int pagesProcessed;
+/**
+* Count of rows this operator has received.
+*/
+private long rowsReceived;
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
 
 public record AggregationOperatorFactory(List<Factory> aggregators, AggregatorMode mode) implements OperatorFactory {
 
@@ -106,12 +114,16 @@
 page.releaseBlocks();
 aggregationNanos += System.nanoTime() - start;
 pagesProcessed++;
+rowsReceived += page.getPositionCount();
 }
 }
 
 @Override
 public Page getOutput() {
 Page p = output;
+if (p != null) {
+rowsEmitted += p.getPositionCount();
+}
 this.output = null;
 return p;
 }
@@ -181,7 +193,7 @@
 
 @Override
 public Operator.Status status() {
-return new Status(aggregationNanos, aggregationFinishNanos, pagesProcessed);
+return new Status(aggregationNanos, aggregationFinishNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 public static class Status implements Operator.Status {
@@ -204,6 +216,14 @@
 * Count of pages this operator has processed.
 */
 private final int pagesProcessed;
+/**
+* Count of rows this operator has received.
+*/
+private final long rowsReceived;
+/**
+* Count of rows this operator has emitted.
+*/
+private final long rowsEmitted;
 
 /**
 * Build.
@@ -211,10 +231,12 @@
 * @param aggregationFinishNanos Nanoseconds this operator has spent running the aggregations.
 * @param pagesProcessed Count of pages this operator has processed.
 */
-public Status(long aggregationNanos, long aggregationFinishNanos, int pagesProcessed) {
+public Status(long aggregationNanos, long aggregationFinishNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) {
 this.aggregationNanos = aggregationNanos;
 this.aggregationFinishNanos = aggregationFinishNanos;
 this.pagesProcessed = pagesProcessed;
+this.rowsReceived = rowsReceived;
+this.rowsEmitted = rowsEmitted;
 }
 
 protected Status(StreamInput in) throws IOException {
@@ -225,6 +247,13 @@
 aggregationFinishNanos = null;
 }
 pagesProcessed = in.readVInt();
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsReceived = in.readVLong();
+rowsEmitted = in.readVLong();
+} else {
+rowsReceived = 0;
+rowsEmitted = 0;
+}
 }
 
 @Override
@@ -234,6 +263,10 @@
 out.writeOptionalVLong(aggregationFinishNanos);
 }
 out.writeVInt(pagesProcessed);
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsReceived);
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
@@ -262,6 +295,20 @@
 return pagesProcessed;
 }
 
+/**
+* Count of rows this operator has received.
+*/
+public long rowsReceived() {
+return rowsReceived;
+}
+
+/**
+* Count of rows this operator has emitted.
+*/
+public long rowsEmitted() {
+return rowsEmitted;
+}
+
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
@@ -277,6 +324,8 @@
 );
 }
 builder.field("pages_processed", pagesProcessed);
+builder.field("rows_received", rowsReceived);
+builder.field("rows_emitted", rowsEmitted);
 return builder.endObject();
 
 }
@@ -287,13 +336,15 @@
 if (o == null || getClass() != o.getClass()) return false;
 Status status = (Status) o;
 return aggregationNanos == status.aggregationNanos
+&& Objects.equals(aggregationFinishNanos, status.aggregationFinishNanos)
 && pagesProcessed == status.pagesProcessed
-&& Objects.equals(aggregationFinishNanos, status.aggregationFinishNanos);
+&& rowsReceived == status.rowsReceived
+&& rowsEmitted == status.rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(aggregationNanos, aggregationFinishNanos, pagesProcessed);
+return Objects.hash(aggregationNanos, aggregationFinishNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 @Override

@@ -99,6 +99,14 @@ public class HashAggregationOperator implements Operator {
 * Count of pages this operator has processed.
 */
 private int pagesProcessed;
+/**
+* Count of rows this operator has received.
+*/
+private long rowsReceived;
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
 
 @SuppressWarnings("this-escape")
 public HashAggregationOperator(
@@ -187,12 +195,16 @@
 } finally {
 page.releaseBlocks();
 pagesProcessed++;
+rowsReceived += page.getPositionCount();
 }
 }
 
 @Override
 public Page getOutput() {
 Page p = output;
+if (p != null) {
+rowsEmitted += p.getPositionCount();
+}
 output = null;
 return p;
 }
@@ -246,7 +258,7 @@
 
 @Override
 public Operator.Status status() {
-return new Status(hashNanos, aggregationNanos, pagesProcessed);
+return new Status(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 protected static void checkState(boolean condition, String msg) {
@@ -288,23 +300,43 @@
 * Count of pages this operator has processed.
 */
 private final int pagesProcessed;
+/**
+* Count of rows this operator has received.
+*/
+private final long rowsReceived;
+/**
+* Count of rows this operator has emitted.
+*/
+private final long rowsEmitted;
 
 /**
 * Build.
 * @param hashNanos Nanoseconds this operator has spent hashing grouping keys.
 * @param aggregationNanos Nanoseconds this operator has spent running the aggregations.
 * @param pagesProcessed Count of pages this operator has processed.
+* @param rowsReceived Count of rows this operator has received.
+* @param rowsEmitted Count of rows this operator has emitted.
 */
-public Status(long hashNanos, long aggregationNanos, int pagesProcessed) {
+public Status(long hashNanos, long aggregationNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) {
 this.hashNanos = hashNanos;
 this.aggregationNanos = aggregationNanos;
 this.pagesProcessed = pagesProcessed;
+this.rowsReceived = rowsReceived;
+this.rowsEmitted = rowsEmitted;
 }
 
 protected Status(StreamInput in) throws IOException {
 hashNanos = in.readVLong();
 aggregationNanos = in.readVLong();
 pagesProcessed = in.readVInt();
+
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsReceived = in.readVLong();
+rowsEmitted = in.readVLong();
+} else {
+rowsReceived = 0;
+rowsEmitted = 0;
+}
 }
 
 @Override
@@ -312,6 +344,11 @@
 out.writeVLong(hashNanos);
 out.writeVLong(aggregationNanos);
 out.writeVInt(pagesProcessed);
+
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsReceived);
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
@@ -340,6 +377,20 @@
 return pagesProcessed;
 }
 
+/**
+* Count of rows this operator has received.
+*/
+public long rowsReceived() {
+return rowsReceived;
+}
+
+/**
+* Count of rows this operator has emitted.
+*/
+public long rowsEmitted() {
+return rowsEmitted;
+}
+
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
@@ -352,6 +403,8 @@
 builder.field("aggregation_time", TimeValue.timeValueNanos(aggregationNanos));
 }
 builder.field("pages_processed", pagesProcessed);
+builder.field("rows_received", rowsReceived);
+builder.field("rows_emitted", rowsEmitted);
 return builder.endObject();
 
 }
@@ -361,12 +414,16 @@
 if (this == o) return true;
 if (o == null || getClass() != o.getClass()) return false;
 Status status = (Status) o;
-return hashNanos == status.hashNanos && aggregationNanos == status.aggregationNanos && pagesProcessed == status.pagesProcessed;
+return hashNanos == status.hashNanos
+&& aggregationNanos == status.aggregationNanos
+&& pagesProcessed == status.pagesProcessed
+&& rowsReceived == status.rowsReceived
+&& rowsEmitted == status.rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(hashNanos, aggregationNanos, pagesProcessed);
+return Objects.hash(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 @Override

@@ -37,6 +37,16 @@ public class LimitOperator implements Operator {
 */
 private int pagesProcessed;
 
+/**
+* Count of rows this operator has received.
+*/
+private long rowsReceived;
+
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
+
 private Page lastInput;
 
 private boolean finished;
@@ -67,6 +77,7 @@
 public void addInput(Page page) {
 assert lastInput == null : "has pending input page";
 lastInput = page;
+rowsReceived += page.getPositionCount();
 }
 
 @Override
@@ -117,13 +128,14 @@
 }
 lastInput = null;
 pagesProcessed++;
+rowsEmitted += result.getPositionCount();
 
 return result;
 }
 
 @Override
 public Status status() {
-return new Status(limit, limitRemaining, pagesProcessed);
+return new Status(limit, limitRemaining, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 @Override
@@ -160,16 +172,35 @@
 */
 private final int pagesProcessed;
 
-protected Status(int limit, int limitRemaining, int pagesProcessed) {
+/**
+* Count of rows this operator has received.
+*/
+private final long rowsReceived;
+
+/**
+* Count of rows this operator has emitted.
+*/
+private final long rowsEmitted;
+
+protected Status(int limit, int limitRemaining, int pagesProcessed, long rowsReceived, long rowsEmitted) {
 this.limit = limit;
 this.limitRemaining = limitRemaining;
 this.pagesProcessed = pagesProcessed;
+this.rowsReceived = rowsReceived;
+this.rowsEmitted = rowsEmitted;
 }
 
 protected Status(StreamInput in) throws IOException {
 limit = in.readVInt();
 limitRemaining = in.readVInt();
 pagesProcessed = in.readVInt();
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsReceived = in.readVLong();
+rowsEmitted = in.readVLong();
+} else {
+rowsReceived = 0;
+rowsEmitted = 0;
+}
 }
 
 @Override
@@ -177,6 +208,10 @@
 out.writeVInt(limit);
 out.writeVInt(limitRemaining);
 out.writeVInt(pagesProcessed);
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsReceived);
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
@@ -205,12 +240,28 @@
 return pagesProcessed;
 }
 
+/**
+* Count of rows this operator has received.
+*/
+public long rowsReceived() {
+return rowsReceived;
+}
+
+/**
+* Count of rows this operator has emitted.
+*/
+public long rowsEmitted() {
+return rowsEmitted;
+}
+
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
 builder.field("limit", limit);
 builder.field("limit_remaining", limitRemaining);
 builder.field("pages_processed", pagesProcessed);
+builder.field("rows_received", rowsReceived);
+builder.field("rows_emitted", rowsEmitted);
 return builder.endObject();
 }
 
@@ -219,12 +270,16 @@
 if (this == o) return true;
 if (o == null || getClass() != o.getClass()) return false;
 Status status = (Status) o;
-return limit == status.limit && limitRemaining == status.limitRemaining && pagesProcessed == status.pagesProcessed;
+return limit == status.limit
+&& limitRemaining == status.limitRemaining
+&& pagesProcessed == status.pagesProcessed
+&& rowsReceived == status.rowsReceived
+&& rowsEmitted == status.rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(limit, limitRemaining, pagesProcessed);
+return Objects.hash(limit, limitRemaining, pagesProcessed, rowsReceived, rowsEmitted);
 }
 
 @Override

@@ -69,10 +69,21 @@ public class MvExpandOperator implements Operator {
 private int nextItemOnExpanded = 0;
 
 /**
-* Count of pages that have been processed by this operator.
+* Count of pages that this operator has received.
 */
-private int pagesIn;
-private int pagesOut;
+private int pagesReceived;
+/**
+* Count of pages this operator has emitted.
+*/
+private int pagesEmitted;
+/**
+* Count of rows this operator has received.
+*/
+private long rowsReceived;
+/**
+* Count of rows this operator has emitted.
+*/
+private long rowsEmitted;
 
 public MvExpandOperator(int channel, int pageSize) {
 this.channel = channel;
@@ -82,10 +93,18 @@
 
 @Override
 public final Page getOutput() {
+Page result = getOutputInternal();
+if (result != null) {
+pagesEmitted++;
+rowsEmitted += result.getPositionCount();
+}
+return result;
+}
+
+private Page getOutputInternal() {
 if (prev == null) {
 return null;
 }
-pagesOut++;
 
 if (expandedBlock == null) {
 /*
@@ -214,7 +233,8 @@
 assert prev == null : "has pending input page";
 prev = page;
 this.expandingBlock = prev.getBlock(channel);
-pagesIn++;
+pagesReceived++;
+rowsReceived += page.getPositionCount();
 }
 
 @Override
@@ -229,7 +249,7 @@
 
 @Override
 public final Status status() {
-return new Status(pagesIn, pagesOut, noops);
+return new Status(pagesReceived, pagesEmitted, noops, rowsReceived, rowsEmitted);
 }
 
 @Override
@@ -248,9 +268,11 @@
 
 public static final class Status implements Operator.Status {
 
-private final int pagesIn;
-private final int pagesOut;
+private final int pagesReceived;
+private final int pagesEmitted;
 private final int noops;
+private final long rowsReceived;
+private final long rowsEmitted;
 
 public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
 Operator.Status.class,
@@ -258,31 +280,46 @@
 Status::new
 );
 
-Status(int pagesIn, int pagesOut, int noops) {
-this.pagesIn = pagesIn;
-this.pagesOut = pagesOut;
+Status(int pagesReceived, int pagesEmitted, int noops, long rowsReceived, long rowsEmitted) {
+this.pagesReceived = pagesReceived;
+this.pagesEmitted = pagesEmitted;
 this.noops = noops;
+this.rowsReceived = rowsReceived;
+this.rowsEmitted = rowsEmitted;
 }
 
 Status(StreamInput in) throws IOException {
-pagesIn = in.readVInt();
-pagesOut = in.readVInt();
+pagesReceived = in.readVInt();
+pagesEmitted = in.readVInt();
 noops = in.readVInt();
+if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+rowsReceived = in.readVLong();
+rowsEmitted = in.readVLong();
+} else {
+rowsReceived = 0;
+rowsEmitted = 0;
+}
 }
 
 @Override
 public void writeTo(StreamOutput out) throws IOException {
-out.writeVInt(pagesIn);
-out.writeVInt(pagesOut);
+out.writeVInt(pagesReceived);
+out.writeVInt(pagesEmitted);
 out.writeVInt(noops);
+if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+out.writeVLong(rowsReceived);
+out.writeVLong(rowsEmitted);
+}
 }
 
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
-builder.field("pages_in", pagesIn);
-builder.field("pages_out", pagesOut);
+builder.field("pages_received", pagesReceived);
+builder.field("pages_emitted", pagesEmitted);
 builder.field("noops", noops);
+builder.field("rows_received", rowsReceived);
+builder.field("rows_emitted", rowsEmitted);
 return builder.endObject();
 }
 
@@ -304,20 +341,32 @@
 return false;
 }
 Status status = (Status) o;
-return noops == status.noops && pagesIn == status.pagesIn && pagesOut == status.pagesOut;
+return noops == status.noops
+&& pagesReceived == status.pagesReceived
+&& pagesEmitted == status.pagesEmitted
+&& rowsReceived == status.rowsReceived
+&& rowsEmitted == status.rowsEmitted;
 }
 
-public int pagesIn() {
-return pagesIn;
+public int pagesReceived() {
+return pagesReceived;
 }
 
-public int pagesOut() {
-return pagesOut;
+public int pagesEmitted() {
+return pagesEmitted;
+}
+
+public long rowsReceived() {
+return rowsReceived;
+}
+
+public long rowsEmitted() {
+return rowsEmitted;
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(noops, pagesIn, pagesOut);
+return Objects.hash(noops, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
 }
 
 @Override

@@ -33,7 +33,8 @@ public class ExchangeSinkOperator extends SinkOperator {
 
     private final ExchangeSink sink;
     private final Function<Page, Page> transformer;
-    private int pagesAccepted;
+    private int pagesReceived;
+    private long rowsReceived;
 
     public record ExchangeSinkOperatorFactory(Supplier<ExchangeSink> exchangeSinks, Function<Page, Page> transformer)
         implements
@@ -81,7 +82,8 @@ public class ExchangeSinkOperator extends SinkOperator {
 
     @Override
     protected void doAddInput(Page page) {
-        pagesAccepted++;
+        pagesReceived++;
+        rowsReceived += page.getPositionCount();
         sink.addPage(transformer.apply(page));
     }
 
@@ -97,7 +99,7 @@ public class ExchangeSinkOperator extends SinkOperator {
 
     @Override
     public Status status() {
-        return new Status(pagesAccepted);
+        return new Status(pagesReceived, rowsReceived);
     }
 
     public static class Status implements Operator.Status {
@@ -107,19 +109,31 @@ public class ExchangeSinkOperator extends SinkOperator {
             Status::new
         );
 
-        private final int pagesAccepted;
+        private final int pagesReceived;
+        private final long rowsReceived;
 
-        Status(int pagesAccepted) {
-            this.pagesAccepted = pagesAccepted;
+        Status(int pagesReceived, long rowsReceived) {
+            this.pagesReceived = pagesReceived;
+            this.rowsReceived = rowsReceived;
        }
 
         Status(StreamInput in) throws IOException {
-            pagesAccepted = in.readVInt();
+            pagesReceived = in.readVInt();
+
+            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+                rowsReceived = in.readVLong();
+            } else {
+                rowsReceived = 0;
+            }
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(pagesAccepted);
+            out.writeVInt(pagesReceived);
+
+            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+                out.writeVLong(rowsReceived);
+            }
         }
 
         @Override
@@ -127,14 +141,19 @@ public class ExchangeSinkOperator extends SinkOperator {
             return ENTRY.name;
         }
 
-        public int pagesAccepted() {
-            return pagesAccepted;
+        public int pagesReceived() {
+            return pagesReceived;
+        }
+
+        public long rowsReceived() {
+            return rowsReceived;
         }
 
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
-            builder.field("pages_accepted", pagesAccepted);
+            builder.field("pages_received", pagesReceived);
+            builder.field("rows_received", rowsReceived);
             return builder.endObject();
         }
 
@@ -143,12 +162,12 @@ public class ExchangeSinkOperator extends SinkOperator {
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Status status = (Status) o;
-            return pagesAccepted == status.pagesAccepted;
+            return pagesReceived == status.pagesReceived && rowsReceived == status.rowsReceived;
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(pagesAccepted);
+            return Objects.hash(pagesReceived, rowsReceived);
         }
 
         @Override
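The ExchangeSinkOperator change above, like every other status class touched in this diff, follows the same backward-compatible wire pattern: the new row counters are only written when the remote side's transport version knows ESQL_PROFILE_ROWS_PROCESSED, and readers fall back to 0 when talking to an older node. A reduced sketch of just that pattern follows; ExampleStatus is a hypothetical class that is not part of this change, and it only uses the StreamInput/StreamOutput and TransportVersions calls that already appear in the hunks above.

import java.io.IOException;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

// Hypothetical, reduced status: one pre-existing counter plus one counter gated on the new transport version.
public class ExampleStatus implements Writeable {
    private final int pagesReceived;  // always on the wire
    private final long rowsReceived;  // only sent between nodes that know ESQL_PROFILE_ROWS_PROCESSED

    public ExampleStatus(int pagesReceived, long rowsReceived) {
        this.pagesReceived = pagesReceived;
        this.rowsReceived = rowsReceived;
    }

    public ExampleStatus(StreamInput in) throws IOException {
        pagesReceived = in.readVInt();
        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
            rowsReceived = in.readVLong();
        } else {
            rowsReceived = 0; // an older sender never wrote the counter
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(pagesReceived);
        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
            out.writeVLong(rowsReceived); // an older receiver would not know how to read it
        }
    }
}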
@@ -32,6 +32,7 @@ public class ExchangeSourceOperator extends SourceOperator {
     private final ExchangeSource source;
     private IsBlockedResult isBlocked = NOT_BLOCKED;
     private int pagesEmitted;
+    private long rowsEmitted;
 
     public record ExchangeSourceOperatorFactory(Supplier<ExchangeSource> exchangeSources) implements SourceOperatorFactory {
 
@@ -55,6 +56,7 @@ public class ExchangeSourceOperator extends SourceOperator {
         final var page = source.pollPage();
         if (page != null) {
             pagesEmitted++;
+            rowsEmitted += page.getPositionCount();
         }
         return page;
     }
@@ -92,7 +94,7 @@ public class ExchangeSourceOperator extends SourceOperator {
 
     @Override
     public Status status() {
-        return new Status(source.bufferSize(), pagesEmitted);
+        return new Status(source.bufferSize(), pagesEmitted, rowsEmitted);
     }
 
     public static class Status implements Operator.Status {
@@ -104,21 +106,33 @@ public class ExchangeSourceOperator extends SourceOperator {
 
         private final int pagesWaiting;
         private final int pagesEmitted;
+        private final long rowsEmitted;
 
-        Status(int pagesWaiting, int pagesEmitted) {
+        Status(int pagesWaiting, int pagesEmitted, long rowsEmitted) {
             this.pagesWaiting = pagesWaiting;
             this.pagesEmitted = pagesEmitted;
+            this.rowsEmitted = rowsEmitted;
         }
 
         Status(StreamInput in) throws IOException {
             pagesWaiting = in.readVInt();
             pagesEmitted = in.readVInt();
+
+            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+                rowsEmitted = in.readVLong();
+            } else {
+                rowsEmitted = 0;
+            }
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeVInt(pagesWaiting);
             out.writeVInt(pagesEmitted);
+
+            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+                out.writeVLong(rowsEmitted);
+            }
         }
 
         @Override
@@ -134,11 +148,16 @@ public class ExchangeSourceOperator extends SourceOperator {
             return pagesEmitted;
         }
 
+        public long rowsEmitted() {
+            return rowsEmitted;
+        }
+
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
             builder.field("pages_waiting", pagesWaiting);
             builder.field("pages_emitted", pagesEmitted);
+            builder.field("rows_emitted", rowsEmitted);
             return builder.endObject();
         }
 
@@ -147,12 +166,12 @@ public class ExchangeSourceOperator extends SourceOperator {
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Status status = (Status) o;
-            return pagesWaiting == status.pagesWaiting && pagesEmitted == status.pagesEmitted;
+            return pagesWaiting == status.pagesWaiting && pagesEmitted == status.pagesEmitted && rowsEmitted == status.rowsEmitted;
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(pagesWaiting, pagesEmitted);
+            return Objects.hash(pagesWaiting, pagesEmitted, rowsEmitted);
         }
 
         @Override
@@ -278,6 +278,26 @@ public class TopNOperator implements Operator, Accountable {
 
     private Iterator<Page> output;
 
+    /**
+     * Count of pages that have been received by this operator.
+     */
+    private int pagesReceived;
+
+    /**
+     * Count of pages that have been emitted by this operator.
+     */
+    private int pagesEmitted;
+
+    /**
+     * Count of rows this operator has received.
+     */
+    private long rowsReceived;
+
+    /**
+     * Count of rows this operator has emitted.
+     */
+    private long rowsEmitted;
+
     public TopNOperator(
         BlockFactory blockFactory,
         CircuitBreaker breaker,
@@ -368,7 +388,9 @@ public class TopNOperator implements Operator, Accountable {
                 spare = inputQueue.insertWithOverflow(spare);
             }
         } finally {
-            Releasables.close(() -> page.releaseBlocks());
+            page.releaseBlocks();
+            pagesReceived++;
+            rowsReceived += page.getPositionCount();
         }
     }
 
@@ -491,11 +513,14 @@ public class TopNOperator implements Operator, Accountable {
 
     @Override
    public Page getOutput() {
-        if (output != null && output.hasNext()) {
-            return output.next();
-        }
-        return null;
+        if (output == null || output.hasNext() == false) {
+            return null;
+        }
+        Page ret = output.next();
+        pagesEmitted++;
+        rowsEmitted += ret.getPositionCount();
+        return ret;
     }
 
     @Override
     public void close() {
@@ -531,7 +556,7 @@ public class TopNOperator implements Operator, Accountable {
 
     @Override
     public Status status() {
-        return new TopNOperatorStatus(inputQueue.size(), ramBytesUsed());
+        return new TopNOperatorStatus(inputQueue.size(), ramBytesUsed(), pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
     }
 
     @Override
@@ -27,21 +27,55 @@ public class TopNOperatorStatus implements Operator.Status {
     );
     private final int occupiedRows;
     private final long ramBytesUsed;
+    private final int pagesReceived;
+    private final int pagesEmitted;
+    private final long rowsReceived;
+    private final long rowsEmitted;
 
-    public TopNOperatorStatus(int occupiedRows, long ramBytesUsed) {
+    public TopNOperatorStatus(
+        int occupiedRows,
+        long ramBytesUsed,
+        int pagesReceived,
+        int pagesEmitted,
+        long rowsReceived,
+        long rowsEmitted
+    ) {
         this.occupiedRows = occupiedRows;
         this.ramBytesUsed = ramBytesUsed;
+        this.pagesReceived = pagesReceived;
+        this.pagesEmitted = pagesEmitted;
+        this.rowsReceived = rowsReceived;
+        this.rowsEmitted = rowsEmitted;
     }
 
     TopNOperatorStatus(StreamInput in) throws IOException {
         this.occupiedRows = in.readVInt();
         this.ramBytesUsed = in.readVLong();
+
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+            this.pagesReceived = in.readVInt();
+            this.pagesEmitted = in.readVInt();
+            this.rowsReceived = in.readVLong();
+            this.rowsEmitted = in.readVLong();
+        } else {
+            this.pagesReceived = 0;
+            this.pagesEmitted = 0;
+            this.rowsReceived = 0;
+            this.rowsEmitted = 0;
+        }
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeVInt(occupiedRows);
         out.writeVLong(ramBytesUsed);
+
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) {
+            out.writeVInt(pagesReceived);
+            out.writeVInt(pagesEmitted);
+            out.writeVLong(rowsReceived);
+            out.writeVLong(rowsEmitted);
+        }
     }
 
     @Override
@@ -57,12 +91,32 @@ public class TopNOperatorStatus implements Operator.Status {
         return ramBytesUsed;
     }
 
+    public int pagesReceived() {
+        return pagesReceived;
+    }
+
+    public int pagesEmitted() {
+        return pagesEmitted;
+    }
+
+    public long rowsReceived() {
+        return rowsReceived;
+    }
+
+    public long rowsEmitted() {
+        return rowsEmitted;
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         builder.field("occupied_rows", occupiedRows);
         builder.field("ram_bytes_used", ramBytesUsed);
         builder.field("ram_used", ByteSizeValue.ofBytes(ramBytesUsed));
+        builder.field("pages_received", pagesReceived);
+        builder.field("pages_emitted", pagesEmitted);
+        builder.field("rows_received", rowsReceived);
+        builder.field("rows_emitted", rowsEmitted);
         return builder.endObject();
     }
 
@@ -72,12 +126,17 @@ public class TopNOperatorStatus implements Operator.Status {
             return false;
         }
         TopNOperatorStatus that = (TopNOperatorStatus) o;
-        return occupiedRows == that.occupiedRows && ramBytesUsed == that.ramBytesUsed;
+        return occupiedRows == that.occupiedRows
+            && ramBytesUsed == that.ramBytesUsed
+            && pagesReceived == that.pagesReceived
+            && pagesEmitted == that.pagesEmitted
+            && rowsReceived == that.rowsReceived
+            && rowsEmitted == that.rowsEmitted;
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(occupiedRows, ramBytesUsed);
+        return Objects.hash(occupiedRows, ramBytesUsed, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
     }
 
     @Override
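As a small usage sketch (not part of this change set, and assuming only the public constructor, getters and Strings.toString(...) that appear in the hunks and tests in this diff), the new TopN counters can be read back and rendered like this when compiled against the compute module:

import org.elasticsearch.common.Strings;
import org.elasticsearch.compute.operator.topn.TopNOperatorStatus;

public class TopNStatusSketch {
    public static void main(String[] args) {
        // occupiedRows, ramBytesUsed, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted
        TopNOperatorStatus status = new TopNOperatorStatus(10, 2000, 123, 123, 111, 222);
        System.out.println(status.rowsReceived() + " rows received, " + status.rowsEmitted() + " rows emitted");
        // Serializes the status to JSON with the same field names asserted in TopNOperatorStatusTests
        System.out.println(Strings.toString(status));
    }
}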
@@ -20,7 +20,19 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTestCase<LuceneSourceOperator.Status> {
     public static LuceneSourceOperator.Status simple() {
-        return new LuceneSourceOperator.Status(2, Set.of("*:*"), new TreeSet<>(List.of("a:0", "a:1")), 1002, 0, 1, 5, 123, 99990, 8000);
+        return new LuceneSourceOperator.Status(
+            2,
+            Set.of("*:*"),
+            new TreeSet<>(List.of("a:0", "a:1")),
+            1002,
+            0,
+            1,
+            5,
+            123,
+            99990,
+            8000,
+            222
+        );
     }
 
     public static String simpleToJson() {
@@ -41,7 +53,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest
               "pages_emitted" : 5,
               "slice_min" : 123,
               "slice_max" : 99990,
-              "current" : 8000
+              "current" : 8000,
+              "rows_emitted" : 222
             }""";
     }
 
@@ -66,7 +79,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest
             randomNonNegativeInt(),
             randomNonNegativeInt(),
             randomNonNegativeInt(),
-            randomNonNegativeInt()
+            randomNonNegativeInt(),
+            randomNonNegativeLong()
         );
     }
 
@@ -100,7 +114,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest
         int sliceMin = instance.sliceMin();
         int sliceMax = instance.sliceMax();
         int current = instance.current();
-        switch (between(0, 9)) {
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 10)) {
             case 0 -> processedSlices = randomValueOtherThan(processedSlices, ESTestCase::randomNonNegativeInt);
             case 1 -> processedQueries = randomValueOtherThan(processedQueries, LuceneSourceOperatorStatusTests::randomProcessedQueries);
             case 2 -> processedShards = randomValueOtherThan(processedShards, LuceneSourceOperatorStatusTests::randomProcessedShards);
@@ -111,6 +126,7 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest
             case 7 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt);
             case 8 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt);
             case 9 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt);
+            case 10 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
             default -> throw new UnsupportedOperationException();
         }
         return new LuceneSourceOperator.Status(
@@ -123,7 +139,8 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTest
             pagesEmitted,
             sliceMin,
             sliceMax,
-            current
+            current,
+            rowsEmitted
         );
     }
 }
@@ -20,7 +20,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializingTestCase<ValuesSourceReaderOperator.Status> {
     public static ValuesSourceReaderOperator.Status simple() {
-        return new ValuesSourceReaderOperator.Status(Map.of("ReaderType", 3), 1022323, 123);
+        return new ValuesSourceReaderOperator.Status(Map.of("ReaderType", 3), 1022323, 123, 111, 222);
     }
 
     public static String simpleToJson() {
@@ -31,7 +31,9 @@ public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializi
               },
               "process_nanos" : 1022323,
               "process_time" : "1ms",
-              "pages_processed" : 123
+              "pages_processed" : 123,
+              "rows_received" : 111,
+              "rows_emitted" : 222
             }""";
     }
 
@@ -46,7 +48,13 @@ public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializi
 
     @Override
     public ValuesSourceReaderOperator.Status createTestInstance() {
-        return new ValuesSourceReaderOperator.Status(randomReadersBuilt(), randomNonNegativeLong(), randomNonNegativeInt());
+        return new ValuesSourceReaderOperator.Status(
+            randomReadersBuilt(),
+            randomNonNegativeLong(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     private Map<String, Integer> randomReadersBuilt() {
@@ -63,12 +71,16 @@ public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializi
         Map<String, Integer> readersBuilt = instance.readersBuilt();
         long processNanos = instance.processNanos();
         int pagesProcessed = instance.pagesProcessed();
-        switch (between(0, 2)) {
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 4)) {
             case 0 -> readersBuilt = randomValueOtherThan(readersBuilt, this::randomReadersBuilt);
             case 1 -> processNanos = randomValueOtherThan(processNanos, ESTestCase::randomNonNegativeLong);
             case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
+            case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+            case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
             default -> throw new UnsupportedOperationException();
         }
-        return new ValuesSourceReaderOperator.Status(readersBuilt, processNanos, pagesProcessed);
+        return new ValuesSourceReaderOperator.Status(readersBuilt, processNanos, pagesProcessed, rowsReceived, rowsEmitted);
     }
 }
@@ -16,7 +16,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class AbstractPageMappingOperatorStatusTests extends AbstractWireSerializingTestCase<AbstractPageMappingOperator.Status> {
     public static AbstractPageMappingOperator.Status simple() {
-        return new AbstractPageMappingOperator.Status(200012, 123);
+        return new AbstractPageMappingOperator.Status(200012, 123, 111, 222);
     }
 
     public static String simpleToJson() {
@@ -24,7 +24,9 @@ public class AbstractPageMappingOperatorStatusTests extends AbstractWireSerializ
             {
              "process_nanos" : 200012,
              "process_time" : "200micros",
-              "pages_processed" : 123
+              "pages_processed" : 123,
+              "rows_received" : 111,
+              "rows_emitted" : 222
            }""";
    }
 
@@ -39,18 +41,27 @@ public class AbstractPageMappingOperatorStatusTests extends AbstractWireSerializ
 
     @Override
     public AbstractPageMappingOperator.Status createTestInstance() {
-        return new AbstractPageMappingOperator.Status(randomNonNegativeLong(), randomNonNegativeInt());
+        return new AbstractPageMappingOperator.Status(
+            randomNonNegativeLong(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
     protected AbstractPageMappingOperator.Status mutateInstance(AbstractPageMappingOperator.Status instance) {
         long processNanos = instance.processNanos();
         int pagesProcessed = instance.pagesProcessed();
-        switch (between(0, 1)) {
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 3)) {
             case 0 -> processNanos = randomValueOtherThan(processNanos, ESTestCase::randomNonNegativeLong);
             case 1 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
+            case 2 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+            case 3 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
             default -> throw new UnsupportedOperationException();
         }
-        return new AbstractPageMappingOperator.Status(processNanos, pagesProcessed);
+        return new AbstractPageMappingOperator.Status(processNanos, pagesProcessed, rowsReceived, rowsEmitted);
     }
 }
@@ -17,7 +17,7 @@ import static org.hamcrest.Matchers.equalTo;
 public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWireSerializingTestCase<
     AbstractPageMappingToIteratorOperator.Status> {
     public static AbstractPageMappingToIteratorOperator.Status simple() {
-        return new AbstractPageMappingToIteratorOperator.Status(200012, 123, 204);
+        return new AbstractPageMappingToIteratorOperator.Status(200012, 123, 204, 111, 222);
     }
 
     public static String simpleToJson() {
@@ -26,7 +26,9 @@ public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWi
              "process_nanos" : 200012,
              "process_time" : "200micros",
              "pages_received" : 123,
-              "pages_emitted" : 204
+              "pages_emitted" : 204,
+              "rows_received" : 111,
+              "rows_emitted" : 222
            }""";
    }
 
@@ -41,7 +43,13 @@ public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWi
 
     @Override
     public AbstractPageMappingToIteratorOperator.Status createTestInstance() {
-        return new AbstractPageMappingToIteratorOperator.Status(randomNonNegativeLong(), randomNonNegativeInt(), randomNonNegativeInt());
+        return new AbstractPageMappingToIteratorOperator.Status(
+            randomNonNegativeLong(),
+            randomNonNegativeInt(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
@@ -49,12 +57,16 @@ public class AbstractPageMappingToIteratorOperatorStatusTests extends AbstractWi
         long processNanos = instance.processNanos();
         int pagesReceived = instance.pagesReceived();
         int pagesEmitted = instance.pagesEmitted();
-        switch (between(0, 2)) {
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 4)) {
             case 0 -> processNanos = randomValueOtherThan(processNanos, ESTestCase::randomNonNegativeLong);
             case 1 -> pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt);
             case 2 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt);
+            case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+            case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
             default -> throw new UnsupportedOperationException();
         }
-        return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted);
+        return new AbstractPageMappingToIteratorOperator.Status(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
     }
 }
@@ -16,7 +16,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class AggregationOperatorStatusTests extends AbstractWireSerializingTestCase<AggregationOperator.Status> {
     public static AggregationOperator.Status simple() {
-        return new AggregationOperator.Status(200012, 400036, 123);
+        return new AggregationOperator.Status(200012, 400036, 123, 111, 222);
     }
 
     public static String simpleToJson() {
@@ -26,7 +26,9 @@ public class AggregationOperatorStatusTests extends AbstractWireSerializingTestC
              "aggregation_time" : "200micros",
              "aggregation_finish_nanos" : 400036,
              "aggregation_finish_time" : "400micros",
-              "pages_processed" : 123
+              "pages_processed" : 123,
+              "rows_received" : 111,
+              "rows_emitted" : 222
            }""";
    }
 
@@ -41,7 +43,13 @@ public class AggregationOperatorStatusTests extends AbstractWireSerializingTestC
 
     @Override
     public AggregationOperator.Status createTestInstance() {
-        return new AggregationOperator.Status(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeInt());
+        return new AggregationOperator.Status(
+            randomNonNegativeLong(),
+            randomNonNegativeLong(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
@@ -49,12 +57,16 @@ public class AggregationOperatorStatusTests extends AbstractWireSerializingTestC
         long aggregationNanos = instance.aggregationNanos();
         long aggregationFinishNanos = instance.aggregationFinishNanos();
         int pagesProcessed = instance.pagesProcessed();
-        switch (between(0, 2)) {
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 4)) {
             case 0 -> aggregationNanos = randomValueOtherThan(aggregationNanos, ESTestCase::randomNonNegativeLong);
             case 1 -> aggregationFinishNanos = randomValueOtherThan(aggregationFinishNanos, ESTestCase::randomNonNegativeLong);
             case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
+            case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+            case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
             default -> throw new UnsupportedOperationException();
         }
-        return new AggregationOperator.Status(aggregationNanos, aggregationFinishNanos, pagesProcessed);
+        return new AggregationOperator.Status(aggregationNanos, aggregationFinishNanos, pagesProcessed, rowsReceived, rowsEmitted);
    }
 }
@@ -8,10 +8,20 @@
 package org.elasticsearch.compute.operator;
 
 import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.xcontent.ToXContent;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentType;
 import org.hamcrest.Matcher;
 
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.both;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.matchesPattern;
 
 /**
@@ -74,6 +84,38 @@ public abstract class AnyOperatorTestCase extends ComputeTestCase {
         }
     }
 
+    /**
+     * Ensures that the Operator.Status of this operator has the standard fields.
+     */
+    public void testOperatorStatus() throws IOException {
+        DriverContext driverContext = driverContext();
+        try (var operator = simple().get(driverContext)) {
+            Operator.Status status = operator.status();
+
+            assumeTrue("Operator does not provide a status", status != null);
+
+            var xContent = XContentType.JSON.xContent();
+            try (var xContentBuilder = XContentBuilder.builder(xContent)) {
+                status.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
+
+                var bytesReference = BytesReference.bytes(xContentBuilder);
+                var map = XContentHelper.convertToMap(bytesReference, false, xContentBuilder.contentType()).v2();
+
+                if (operator instanceof SourceOperator) {
+                    assertThat(map, hasKey("pages_emitted"));
+                    assertThat(map, hasKey("rows_emitted"));
+                } else if (operator instanceof SinkOperator) {
+                    assertThat(map, hasKey("pages_received"));
+                    assertThat(map, hasKey("rows_received"));
+                } else {
+                    assertThat(map, either(hasKey("pages_processed")).or(both(hasKey("pages_received")).and(hasKey("pages_emitted"))));
+                    assertThat(map, hasKey("rows_received"));
+                    assertThat(map, hasKey("rows_emitted"));
+                }
+            }
+        }
+    }
+
     /**
      * A {@link DriverContext} with a nonBreakingBigArrays.
      */
@@ -16,7 +16,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class HashAggregationOperatorStatusTests extends AbstractWireSerializingTestCase<HashAggregationOperator.Status> {
     public static HashAggregationOperator.Status simple() {
-        return new HashAggregationOperator.Status(500012, 200012, 123);
+        return new HashAggregationOperator.Status(500012, 200012, 123, 111, 222);
    }
 
     public static String simpleToJson() {
@@ -26,7 +26,9 @@ public class HashAggregationOperatorStatusTests extends AbstractWireSerializingT
              "hash_time" : "500micros",
              "aggregation_nanos" : 200012,
              "aggregation_time" : "200micros",
-              "pages_processed" : 123
+              "pages_processed" : 123,
+              "rows_received" : 111,
+              "rows_emitted" : 222
            }""";
    }
 
@@ -41,7 +43,13 @@ public class HashAggregationOperatorStatusTests extends AbstractWireSerializingT
 
     @Override
     public HashAggregationOperator.Status createTestInstance() {
-        return new HashAggregationOperator.Status(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeInt());
+        return new HashAggregationOperator.Status(
+            randomNonNegativeLong(),
+            randomNonNegativeLong(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
@@ -49,12 +57,16 @@ public class HashAggregationOperatorStatusTests extends AbstractWireSerializingT
         long hashNanos = instance.hashNanos();
         long aggregationNanos = instance.aggregationNanos();
         int pagesProcessed = instance.pagesProcessed();
-        switch (between(0, 2)) {
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 4)) {
             case 0 -> hashNanos = randomValueOtherThan(hashNanos, ESTestCase::randomNonNegativeLong);
             case 1 -> aggregationNanos = randomValueOtherThan(aggregationNanos, ESTestCase::randomNonNegativeLong);
             case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
+            case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+            case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
             default -> throw new UnsupportedOperationException();
         }
-        return new HashAggregationOperator.Status(hashNanos, aggregationNanos, pagesProcessed);
+        return new HashAggregationOperator.Status(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted);
    }
 }
@@ -10,6 +10,7 @@ package org.elasticsearch.compute.operator;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 
@@ -17,8 +18,8 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class LimitStatusTests extends AbstractWireSerializingTestCase<LimitOperator.Status> {
     public void testToXContent() {
-        assertThat(Strings.toString(new LimitOperator.Status(10, 1, 1)), equalTo("""
-            {"limit":10,"limit_remaining":1,"pages_processed":1}"""));
+        assertThat(Strings.toString(new LimitOperator.Status(10, 1, 1, 111, 222)), equalTo("""
+            {"limit":10,"limit_remaining":1,"pages_processed":1,"rows_received":111,"rows_emitted":222}"""));
     }
 
     @Override
@@ -28,7 +29,13 @@ public class LimitStatusTests extends AbstractWireSerializingTestCase<LimitOpera
 
     @Override
     protected LimitOperator.Status createTestInstance() {
-        return new LimitOperator.Status(between(0, Integer.MAX_VALUE), between(0, Integer.MAX_VALUE), between(0, Integer.MAX_VALUE));
+        return new LimitOperator.Status(
+            randomNonNegativeInt(),
+            randomNonNegativeInt(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
@@ -36,19 +43,27 @@ public class LimitStatusTests extends AbstractWireSerializingTestCase<LimitOpera
         int limit = instance.limit();
         int limitRemaining = instance.limitRemaining();
         int pagesProcessed = instance.pagesProcessed();
-        switch (between(0, 2)) {
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 4)) {
             case 0:
-                limit = randomValueOtherThan(limit, () -> between(0, Integer.MAX_VALUE));
+                limit = randomValueOtherThan(limit, ESTestCase::randomNonNegativeInt);
                 break;
             case 1:
-                limitRemaining = randomValueOtherThan(limitRemaining, () -> between(0, Integer.MAX_VALUE));
+                limitRemaining = randomValueOtherThan(limitRemaining, ESTestCase::randomNonNegativeInt);
                 break;
             case 2:
-                pagesProcessed = randomValueOtherThan(pagesProcessed, () -> between(0, Integer.MAX_VALUE));
+                pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
+                break;
+            case 3:
+                rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+                break;
+            case 4:
+                rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
                 break;
             default:
                 throw new IllegalArgumentException();
         }
-        return new LimitOperator.Status(limit, limitRemaining, pagesProcessed);
+        return new LimitOperator.Status(limit, limitRemaining, pagesProcessed, rowsReceived, rowsEmitted);
     }
 }
@@ -16,12 +16,12 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class MvExpandOperatorStatusTests extends AbstractWireSerializingTestCase<MvExpandOperator.Status> {
     public static MvExpandOperator.Status simple() {
-        return new MvExpandOperator.Status(10, 15, 9);
+        return new MvExpandOperator.Status(10, 15, 9, 111, 222);
     }
 
     public static String simpleToJson() {
         return """
-            {"pages_in":10,"pages_out":15,"noops":9}""";
+            {"pages_received":10,"pages_emitted":15,"noops":9,"rows_received":111,"rows_emitted":222}""";
     }
 
     public void testToXContent() {
@@ -35,32 +35,30 @@ public class MvExpandOperatorStatusTests extends AbstractWireSerializingTestCase
 
     @Override
     public MvExpandOperator.Status createTestInstance() {
-        return new MvExpandOperator.Status(randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeInt());
+        return new MvExpandOperator.Status(
+            randomNonNegativeInt(),
+            randomNonNegativeInt(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
     protected MvExpandOperator.Status mutateInstance(MvExpandOperator.Status instance) {
-        switch (between(0, 2)) {
-            case 0:
-                return new MvExpandOperator.Status(
-                    randomValueOtherThan(instance.pagesIn(), ESTestCase::randomNonNegativeInt),
-                    instance.pagesOut(),
-                    instance.noops()
-                );
-            case 1:
-                return new MvExpandOperator.Status(
-                    instance.pagesIn(),
-                    randomValueOtherThan(instance.pagesOut(), ESTestCase::randomNonNegativeInt),
-                    instance.noops()
-                );
-            case 2:
-                return new MvExpandOperator.Status(
-                    instance.pagesIn(),
-                    instance.pagesOut(),
-                    randomValueOtherThan(instance.noops(), ESTestCase::randomNonNegativeInt)
-                );
-            default:
-                throw new UnsupportedOperationException();
+        int pagesReceived = instance.pagesReceived();
+        int pagesEmitted = instance.pagesEmitted();
+        int noops = instance.noops();
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 4)) {
+            case 0 -> pagesReceived = randomValueOtherThan(instance.pagesReceived(), ESTestCase::randomNonNegativeInt);
+            case 1 -> pagesEmitted = randomValueOtherThan(instance.pagesEmitted(), ESTestCase::randomNonNegativeInt);
+            case 2 -> noops = randomValueOtherThan(instance.noops(), ESTestCase::randomNonNegativeInt);
+            case 3 -> rowsReceived = randomValueOtherThan(instance.rowsReceived(), ESTestCase::randomNonNegativeLong);
+            case 4 -> rowsEmitted = randomValueOtherThan(instance.rowsEmitted(), ESTestCase::randomNonNegativeLong);
+            default -> throw new UnsupportedOperationException();
         }
+        return new MvExpandOperator.Status(pagesReceived, pagesEmitted, noops, rowsReceived, rowsEmitted);
     }
 }
@@ -210,8 +210,8 @@ public class MvExpandOperatorTests extends OperatorTestCase {
         assertThat(result, hasSize(1));
         assertThat(valuesAtPositions(result.get(0).getBlock(0), 0, 2), equalTo(List.of(List.of(1), List.of(2))));
         MvExpandOperator.Status status = op.status();
-        assertThat(status.pagesIn(), equalTo(1));
-        assertThat(status.pagesOut(), equalTo(1));
+        assertThat(status.pagesReceived(), equalTo(1));
+        assertThat(status.pagesEmitted(), equalTo(1));
         assertThat(status.noops(), equalTo(1));
     }
 
@@ -223,8 +223,8 @@ public class MvExpandOperatorTests extends OperatorTestCase {
         assertThat(result, hasSize(1));
         assertThat(valuesAtPositions(result.get(0).getBlock(0), 0, 2), equalTo(List.of(List.of(1), List.of(2))));
         MvExpandOperator.Status status = op.status();
-        assertThat(status.pagesIn(), equalTo(1));
-        assertThat(status.pagesOut(), equalTo(1));
+        assertThat(status.pagesReceived(), equalTo(1));
+        assertThat(status.pagesEmitted(), equalTo(1));
         assertThat(status.noops(), equalTo(0));
         result.forEach(Page::releaseBlocks);
     }
@@ -212,7 +212,8 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase {
         // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion.
         List<Page> origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance());
 
-        List<Page> results = drive(simple().get(context), input.iterator(), context);
+        var operator = simple().get(context);
+        List<Page> results = drive(operator, input.iterator(), context);
         assertSimpleOutput(origInput, results);
         assertThat(context.breaker().getUsed(), equalTo(0L));
 
@@ -10,6 +10,7 @@ package org.elasticsearch.compute.operator.exchange;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 
@@ -21,13 +22,14 @@ public class ExchangeSinkOperatorStatusTests extends AbstractWireSerializingTest
     }
 
     public static ExchangeSinkOperator.Status simple() {
-        return new ExchangeSinkOperator.Status(10);
+        return new ExchangeSinkOperator.Status(10, 111);
     }
 
     public static String simpleToJson() {
         return """
             {
-              "pages_accepted" : 10
+              "pages_received" : 10,
+              "rows_received" : 111
             }""";
     }
 
@@ -38,11 +40,18 @@ public class ExchangeSinkOperatorStatusTests extends AbstractWireSerializingTest
 
     @Override
     public ExchangeSinkOperator.Status createTestInstance() {
-        return new ExchangeSinkOperator.Status(between(0, Integer.MAX_VALUE));
+        return new ExchangeSinkOperator.Status(randomNonNegativeInt(), randomNonNegativeLong());
     }
 
     @Override
     protected ExchangeSinkOperator.Status mutateInstance(ExchangeSinkOperator.Status instance) throws IOException {
-        return new ExchangeSinkOperator.Status(randomValueOtherThan(instance.pagesAccepted(), () -> between(0, Integer.MAX_VALUE)));
+        int pagesReceived = instance.pagesReceived();
+        long rowsReceived = instance.rowsReceived();
+        switch (between(0, 1)) {
+            case 0 -> pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt);
+            case 1 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+            default -> throw new UnsupportedOperationException();
        }
+        return new ExchangeSinkOperator.Status(pagesReceived, rowsReceived);
     }
 }
@@ -10,6 +10,7 @@ package org.elasticsearch.compute.operator.exchange;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 
@@ -17,8 +18,8 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class ExchangeSourceOperatorStatusTests extends AbstractWireSerializingTestCase<ExchangeSourceOperator.Status> {
     public void testToXContent() {
-        assertThat(Strings.toString(new ExchangeSourceOperator.Status(0, 10)), equalTo("""
-            {"pages_waiting":0,"pages_emitted":10}"""));
+        assertThat(Strings.toString(new ExchangeSourceOperator.Status(0, 10, 111)), equalTo("""
+            {"pages_waiting":0,"pages_emitted":10,"rows_emitted":111}"""));
     }
 
     @Override
@@ -28,24 +29,20 @@ public class ExchangeSourceOperatorStatusTests extends AbstractWireSerializingTe
 
     @Override
     protected ExchangeSourceOperator.Status createTestInstance() {
-        return new ExchangeSourceOperator.Status(between(0, Integer.MAX_VALUE), between(0, Integer.MAX_VALUE));
+        return new ExchangeSourceOperator.Status(randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeLong());
     }
 
     @Override
     protected ExchangeSourceOperator.Status mutateInstance(ExchangeSourceOperator.Status instance) throws IOException {
-        switch (between(0, 1)) {
-            case 0:
-                return new ExchangeSourceOperator.Status(
-                    randomValueOtherThan(instance.pagesWaiting(), () -> between(0, Integer.MAX_VALUE)),
-                    instance.pagesEmitted()
-                );
-            case 1:
-                return new ExchangeSourceOperator.Status(
-                    instance.pagesWaiting(),
-                    randomValueOtherThan(instance.pagesEmitted(), () -> between(0, Integer.MAX_VALUE))
-                );
-            default:
-                throw new UnsupportedOperationException();
+        int pagesWaiting = instance.pagesWaiting();
+        int pagesEmitted = instance.pagesEmitted();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 2)) {
+            case 0 -> pagesWaiting = randomValueOtherThan(pagesWaiting, ESTestCase::randomNonNegativeInt);
+            case 1 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt);
+            case 2 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
+            default -> throw new UnsupportedOperationException();
         }
+        return new ExchangeSourceOperator.Status(pagesWaiting, pagesEmitted, rowsEmitted);
     }
 }
@@ -10,13 +10,30 @@ package org.elasticsearch.compute.operator.topn;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.ESTestCase;
 
 import static org.hamcrest.Matchers.equalTo;
 
 public class TopNOperatorStatusTests extends AbstractWireSerializingTestCase<TopNOperatorStatus> {
+    public static TopNOperatorStatus simple() {
+        return new TopNOperatorStatus(10, 2000, 123, 123, 111, 222);
+    }
+
+    public static String simpleToJson() {
+        return """
+            {
+              "occupied_rows" : 10,
+              "ram_bytes_used" : 2000,
+              "ram_used" : "1.9kb",
+              "pages_received" : 123,
+              "pages_emitted" : 123,
+              "rows_received" : 111,
+              "rows_emitted" : 222
+            }""";
+    }
+
     public void testToXContent() {
-        assertThat(Strings.toString(new TopNOperatorStatus(10, 2000)), equalTo("""
-            {"occupied_rows":10,"ram_bytes_used":2000,"ram_used":"1.9kb"}"""));
+        assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson()));
     }
 
     @Override
@@ -26,23 +43,46 @@ public class TopNOperatorStatusTests extends AbstractWireSerializingTestCase<Top
 
     @Override
     protected TopNOperatorStatus createTestInstance() {
-        return new TopNOperatorStatus(randomNonNegativeInt(), randomNonNegativeLong());
+        return new TopNOperatorStatus(
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeInt(),
+            randomNonNegativeInt(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
     }
 
     @Override
     protected TopNOperatorStatus mutateInstance(TopNOperatorStatus instance) {
         int occupiedRows = instance.occupiedRows();
         long ramBytesUsed = instance.ramBytesUsed();
-        switch (between(0, 1)) {
+        int pagesReceived = instance.pagesReceived();
+        int pagesEmitted = instance.pagesEmitted();
+        long rowsReceived = instance.rowsReceived();
+        long rowsEmitted = instance.rowsEmitted();
+        switch (between(0, 5)) {
             case 0:
-                occupiedRows = randomValueOtherThan(occupiedRows, () -> randomNonNegativeInt());
+                occupiedRows = randomValueOtherThan(occupiedRows, ESTestCase::randomNonNegativeInt);
                 break;
             case 1:
-                ramBytesUsed = randomValueOtherThan(ramBytesUsed, () -> randomNonNegativeLong());
+                ramBytesUsed = randomValueOtherThan(ramBytesUsed, ESTestCase::randomNonNegativeLong);
+                break;
+            case 2:
+                pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt);
+                break;
+            case 3:
+                pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt);
+                break;
+            case 4:
+                rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
+                break;
+            case 5:
+                rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
                 break;
             default:
                 throw new IllegalArgumentException();
         }
-        return new TopNOperatorStatus(occupiedRows, ramBytesUsed);
+        return new TopNOperatorStatus(occupiedRows, ramBytesUsed, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
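
The constructor call in simple() above is positional, so the mapping from argument to reported metric is easy to misread. Below is a hedged sketch of that mapping using the same values; the record name is made up and this is not the production TopNOperatorStatus class.

// Sketch only: names the six positional values used by simple() above and the
// profile keys they correspond to in simpleToJson().
record TopNStatusSketch(
    int occupiedRows,    // "occupied_rows"  : rows currently buffered by the operator
    long ramBytesUsed,   // "ram_bytes_used" : memory attributed to those rows
    int pagesReceived,   // "pages_received" : pages fed into the operator
    int pagesEmitted,    // "pages_emitted"  : pages produced by the operator
    long rowsReceived,   // "rows_received"  : rows fed into the operator
    long rowsEmitted     // "rows_emitted"   : rows produced by the operator
) {
    public static void main(String[] args) {
        System.out.println(new TopNStatusSketch(10, 2000, 123, 123, 111, 222));
    }
}

The extra "ram_used" key in the expected JSON is just the human-readable rendering of ram_bytes_used (2000 bytes is roughly 1.9kb).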
@@ -574,23 +574,35 @@ public class RestEsqlIT extends RestEsqlTestCase {
                 .entry("slice_min", 0)
                 .entry("current", DocIdSetIterator.NO_MORE_DOCS)
                 .entry("pages_emitted", greaterThan(0))
+                .entry("rows_emitted", greaterThan(0))
                 .entry("processing_nanos", greaterThan(0))
                 .entry("processed_queries", List.of("*:*"));
             case "ValuesSourceReaderOperator" -> basicProfile().entry("readers_built", matchesMap().extraOk());
             case "AggregationOperator" -> matchesMap().entry("pages_processed", greaterThan(0))
+                .entry("rows_received", greaterThan(0))
+                .entry("rows_emitted", greaterThan(0))
                 .entry("aggregation_nanos", greaterThan(0))
                 .entry("aggregation_finish_nanos", greaterThan(0));
-            case "ExchangeSinkOperator" -> matchesMap().entry("pages_accepted", greaterThan(0));
-            case "ExchangeSourceOperator" -> matchesMap().entry("pages_emitted", greaterThan(0)).entry("pages_waiting", 0);
+            case "ExchangeSinkOperator" -> matchesMap().entry("pages_received", greaterThan(0)).entry("rows_received", greaterThan(0));
+            case "ExchangeSourceOperator" -> matchesMap().entry("pages_waiting", 0)
+                .entry("pages_emitted", greaterThan(0))
+                .entry("rows_emitted", greaterThan(0));
             case "ProjectOperator", "EvalOperator" -> basicProfile();
             case "LimitOperator" -> matchesMap().entry("pages_processed", greaterThan(0))
                 .entry("limit", 1000)
-                .entry("limit_remaining", 999);
+                .entry("limit_remaining", 999)
+                .entry("rows_received", greaterThan(0))
+                .entry("rows_emitted", greaterThan(0));
             case "OutputOperator" -> null;
             case "TopNOperator" -> matchesMap().entry("occupied_rows", 0)
+                .entry("pages_received", greaterThan(0))
+                .entry("pages_emitted", greaterThan(0))
+                .entry("rows_received", greaterThan(0))
+                .entry("rows_emitted", greaterThan(0))
                 .entry("ram_used", instanceOf(String.class))
                 .entry("ram_bytes_used", greaterThan(0));
             case "LuceneTopNSourceOperator" -> matchesMap().entry("pages_emitted", greaterThan(0))
+                .entry("rows_emitted", greaterThan(0))
                 .entry("current", greaterThan(0))
                 .entry("processed_slices", greaterThan(0))
                 .entry("processed_shards", List.of("rest-esql-test:0"))
@@ -611,7 +623,10 @@ public class RestEsqlIT extends RestEsqlTestCase {
     }
 
     private MapMatcher basicProfile() {
-        return matchesMap().entry("pages_processed", greaterThan(0)).entry("process_nanos", greaterThan(0));
+        return matchesMap().entry("pages_processed", greaterThan(0))
+            .entry("process_nanos", greaterThan(0))
+            .entry("rows_received", greaterThan(0))
+            .entry("rows_emitted", greaterThan(0));
     }
 
     private void assertException(String query, String... errorMessages) throws IOException {
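
The matchers above pin down which status keys each operator now reports in the query profile. As an illustration of how a client might consume the new counters, here is a hedged sketch that tallies rows_received/rows_emitted from an already-parsed profile map. The class and method names are invented for the example, and the "drivers" -> "operators" -> "status" nesting is an assumption about the profile layout rather than something defined by this change.

import java.util.List;
import java.util.Map;

// Hedged sketch, not part of this change: reads the new per-operator row
// counters out of an already-parsed profile map.
public class ProfileRowTally {

    @SuppressWarnings("unchecked")
    static void print(Map<String, Object> profile) {
        List<Map<String, Object>> drivers = (List<Map<String, Object>>) profile.getOrDefault("drivers", List.of());
        for (Map<String, Object> driver : drivers) {
            List<Map<String, Object>> operators = (List<Map<String, Object>>) driver.getOrDefault("operators", List.of());
            for (Map<String, Object> op : operators) {
                Map<String, Object> status = (Map<String, Object>) op.get("status");
                if (status == null) {
                    continue; // e.g. OutputOperator reports no status in the test above
                }
                // Sources only report rows_emitted; pass-through operators report both.
                Object rowsReceived = status.get("rows_received");
                Object rowsEmitted = status.get("rows_emitted");
                System.out.printf("%s: rows_received=%s rows_emitted=%s%n", op.get("operator"), rowsReceived, rowsEmitted);
            }
        }
    }

    public static void main(String[] args) {
        // Tiny synthetic example using the key names asserted in the test above.
        Map<String, Object> status = Map.of("pages_waiting", 0, "pages_emitted", 3, "rows_emitted", 111);
        Map<String, Object> operator = Map.of("operator", "ExchangeSourceOperator", "status", status);
        Map<String, Object> driver = Map.of("operators", List.of(operator));
        Map<String, Object> profile = Map.of("drivers", List.of(driver));
        print(profile);
    }
}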
@@ -160,7 +160,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
             }
             if (o.operator().equals("ExchangeSinkOperator")) {
                 ExchangeSinkOperator.Status oStatus = (ExchangeSinkOperator.Status) o.status();
-                assertThat(oStatus.pagesAccepted(), greaterThanOrEqualTo(0));
+                assertThat(oStatus.pagesReceived(), greaterThanOrEqualTo(0));
                 exchangeSinks++;
             }
         }
@@ -63,7 +63,12 @@ public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCa
         String name = randomAlphaOfLength(4);
         Operator.Status status = randomBoolean()
             ? null
-            : new AbstractPageMappingOperator.Status(randomNonNegativeLong(), between(0, Integer.MAX_VALUE));
+            : new AbstractPageMappingOperator.Status(
+                randomNonNegativeLong(),
+                randomNonNegativeInt(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong()
+            );
         return new DriverStatus.OperatorStatus(name, status);
     }
 }
@@ -683,7 +683,7 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase<E
                     20021,
                     20000,
                     12,
-                    List.of(new DriverStatus.OperatorStatus("asdf", new AbstractPageMappingOperator.Status(10021, 10))),
+                    List.of(new DriverStatus.OperatorStatus("asdf", new AbstractPageMappingOperator.Status(10021, 10, 111, 222))),
                     DriverSleeps.empty()
                 )
             )
@@ -722,7 +722,9 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase<E
               "operator" : "asdf",
               "status" : {
                 "process_nanos" : 10021,
-                "pages_processed" : 10
+                "pages_processed" : 10,
+                "rows_received" : 111,
+                "rows_emitted" : 222
               }
             }
           ],
@@ -20,10 +20,21 @@ public class NamedWriteablesTests extends ESTestCase {
     public void testTopNStatus() throws Exception {
         try (EsqlPlugin plugin = new EsqlPlugin()) {
            NamedWriteableRegistry registry = new NamedWriteableRegistry(plugin.getNamedWriteables());
-            TopNOperatorStatus origin = new TopNOperatorStatus(randomNonNegativeInt(), randomNonNegativeLong());
+            TopNOperatorStatus origin = new TopNOperatorStatus(
+                randomNonNegativeInt(),
+                randomNonNegativeLong(),
+                randomNonNegativeInt(),
+                randomNonNegativeInt(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong()
+            );
             TopNOperatorStatus copy = (TopNOperatorStatus) copyNamedWriteable(origin, registry, Operator.Status.class);
             assertThat(copy.occupiedRows(), equalTo(origin.occupiedRows()));
             assertThat(copy.ramBytesUsed(), equalTo(origin.ramBytesUsed()));
+            assertThat(copy.pagesReceived(), equalTo(origin.pagesReceived()));
+            assertThat(copy.pagesEmitted(), equalTo(origin.pagesEmitted()));
+            assertThat(copy.rowsReceived(), equalTo(origin.rowsReceived()));
+            assertThat(copy.rowsEmitted(), equalTo(origin.rowsEmitted()));
         }
     }
 }
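
The test above verifies that the widened status survives a named-writeable round trip with every accessor intact. As a plain-Java illustration of the same invariant (the write order must match the read order), and explicitly not the Elasticsearch wire format, here is a small sketch with invented names:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the round-trip invariant checked above: every field written must
// be read back in the same order, or the copy silently diverges from origin.
public class RoundTripSketch {

    record Status(int occupiedRows, long ramBytesUsed, int pagesReceived, int pagesEmitted, long rowsReceived, long rowsEmitted) {}

    static byte[] write(Status s) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeInt(s.occupiedRows());
            out.writeLong(s.ramBytesUsed());
            out.writeInt(s.pagesReceived());
            out.writeInt(s.pagesEmitted());
            out.writeLong(s.rowsReceived());
            out.writeLong(s.rowsEmitted());
        }
        return bytes.toByteArray();
    }

    static Status read(byte[] data) throws IOException {
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data))) {
            return new Status(in.readInt(), in.readLong(), in.readInt(), in.readInt(), in.readLong(), in.readLong());
        }
    }

    public static void main(String[] args) throws IOException {
        Status origin = new Status(10, 2000, 123, 123, 111, 222);
        Status copy = read(write(origin));
        // Mirrors the per-accessor assertions in the test above.
        System.out.println(origin.equals(copy));
    }
}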