Mirror of https://github.com/elastic/elasticsearch.git, synced 2025-06-27 17:10:22 -04:00
Add benchmark script (#126596)
Adds a simple script to run benchmarks for ESQL and collect their results. The script has a `--test` mode which takes about ten minutes. Running without `--test` takes about four hours and fifteen minutes. To speed up `--test`, I reworked the "self test" that each benchmark runs to be optional, and disabled it in `--test` mode.
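A usage sketch (my reading of the script below: it cds into its own directory, so it can be launched from a checkout root, and it leaves one JMH JSON result file per benchmark in benchmarks/build/benchmarks/):

# Quick, inaccurate pass (about ten minutes): 50ms measurements, self tests skipped.
./benchmarks/run.sh --test

# Full run (about four and a quarter hours).
./benchmarks/run.sh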
Parent d870f42c90
Commit 85749d606c
16 changed files with 255 additions and 31 deletions
benchmarks/run.sh (new executable file)
@@ -0,0 +1,44 @@
+#!/bin/bash
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the "Elastic License
+# 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+# Public License v 1"; you may not use this file except in compliance with, at
+# your election, the "Elastic License 2.0", the "GNU Affero General Public
+# License v3.0 only", or the "Server Side Public License, v 1".
+#
+
+EXTRA=""
+POSITIONAL_ARGS=()
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        --test)
+            # Get inaccurate results quickly by shortening all measurements
+            # to 50ms each and skip self tests.
+            EXTRA="-r 50ms -w 50ms -jvmArgsAppend -DskipSelfTest=true"
+            shift
+            ;;
+        *)
+            POSITIONAL_ARGS+=("$1")
+            shift
+            ;;
+    esac
+done
+
+set -- "${POSITIONAL_ARGS[@]}"
+
+run() {
+    ../gradlew run --args "$2 -rf json $EXTRA"
+    mv jmh-result.json build/benchmarks/$1.json
+}
+
+cd "$(dirname "$0")"
+mkdir -p build/benchmarks
+run 'esql_agg' 'AggregatorBenchmark -pgrouping=none,longs -pfilter=none -pblockType=vector_longs,half_null_longs'
+run 'esql_block_keep_mask' 'BlockKeepMaskBenchmark -pdataTypeAndBlockKind=BytesRef/array,BytesRef/vector,long/array,long/vector'
+run 'esql_block_read' 'BlockReadBenchmark -paccessType=sequential'
+run 'esql_eval' 'EvalBenchmark'
+run 'esql_parse_ip' 'ParseIpBenchmark'
+run 'esql_topn' 'TopNBenchmark'
+run 'esql_values_agg' 'ValuesAggregatorBenchmark'
+run 'esql_values_source_reader' 'ValuesSourceReaderBenchmark'
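For reference, the options assembled in EXTRA are standard JMH flags, as I read them: `-r` and `-w` shorten each measurement and warmup iteration to 50ms, `-rf json` writes the `jmh-result.json` file that `run()` moves into `build/benchmarks/`, and `-jvmArgsAppend -DskipSelfTest=true` forwards a system property to the forked benchmark JVM so the static-initializer guards shown in the diffs below can skip the self tests.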
AggregatorBenchmark.java
@@ -113,6 +113,12 @@ public class AggregatorBenchmark {
 
     static {
         // Smoke test all the expected values and force loading subclasses more like prod
+        if (false == "true".equals(System.getProperty("skipSelfTest"))) {
+            selfTest();
+        }
+    }
+
+    static void selfTest() {
         try {
             for (String grouping : AggregatorBenchmark.class.getField("grouping").getAnnotationsByType(Param.class)[0].value()) {
                 for (String op : AggregatorBenchmark.class.getField("op").getAnnotationsByType(Param.class)[0].value()) {
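The self tests drive every parameter combination by reading the JMH `@Param` values back off the public fields via reflection, so the smoke test and a real benchmark run can never drift apart. A small, self-contained illustration of that idiom (hypothetical class and values, not from the commit):

import org.openjdk.jmh.annotations.Param;

public class ParamReflectionDemo {
    // In a real benchmark JMH injects each of these values in turn; the self
    // test reads the same list reflectively and loops over it itself.
    @Param({ "none", "longs" })
    public String grouping;

    public static void main(String[] args) throws NoSuchFieldException {
        String[] values = ParamReflectionDemo.class.getField("grouping").getAnnotationsByType(Param.class)[0].value();
        for (String value : values) {
            System.out.println(value); // prints: none, longs
        }
    }
}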
BlockBenchmark.java
@@ -37,6 +37,7 @@ import org.elasticsearch.compute.data.LongVector;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.Random;
+import java.util.stream.IntStream;
 
 public class BlockBenchmark {
     /**
@@ -112,7 +113,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = blockFactory.newBooleanArrayBlock(
                     values,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING
                 );
@@ -120,7 +121,7 @@ public class BlockBenchmark {
                 case "array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
 
                     blocks[blockIndex] = blockFactory.newBooleanArrayBlock(
                         values,
@@ -141,7 +142,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = new BooleanBigArrayBlock(
                     valuesBigArray,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING,
                     blockFactory
@@ -150,7 +151,7 @@ public class BlockBenchmark {
                 case "big-array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
                     BitArray valuesBigArray = new BitArray(totalPositions, BigArrays.NON_RECYCLING_INSTANCE);
                     for (int i = 0; i < values.length; i++) {
                         if (values[i]) {
@@ -211,7 +212,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = blockFactory.newBytesRefArrayBlock(
                     values,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING
                 );
@@ -219,7 +220,7 @@ public class BlockBenchmark {
                 case "array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
 
                     blocks[blockIndex] = blockFactory.newBytesRefArrayBlock(
                         values,
@@ -257,7 +258,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = blockFactory.newDoubleArrayBlock(
                     values,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING
                 );
@@ -265,7 +266,7 @@ public class BlockBenchmark {
                 case "array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
 
                     blocks[blockIndex] = blockFactory.newDoubleArrayBlock(
                         values,
@@ -284,7 +285,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = new DoubleBigArrayBlock(
                     valuesBigArray,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING,
                     blockFactory
@@ -293,7 +294,7 @@ public class BlockBenchmark {
                 case "big-array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
                     DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false);
                     for (int i = 0; i < values.length; i++) {
                         valuesBigArray.set(i, values[i]);
@@ -344,7 +345,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = blockFactory.newIntArrayBlock(
                     values,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING
                 );
@@ -352,7 +353,7 @@ public class BlockBenchmark {
                 case "array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
 
                     blocks[blockIndex] = blockFactory.newIntArrayBlock(
                         values,
@@ -371,7 +372,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = new IntBigArrayBlock(
                     valuesBigArray,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING,
                     blockFactory
@@ -380,7 +381,7 @@ public class BlockBenchmark {
                 case "big-array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
                     IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false);
                     for (int i = 0; i < values.length; i++) {
                         valuesBigArray.set(i, values[i]);
@@ -431,7 +432,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = blockFactory.newLongArrayBlock(
                     values,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING
                 );
@@ -439,7 +440,7 @@ public class BlockBenchmark {
                 case "array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
 
                     blocks[blockIndex] = blockFactory.newLongArrayBlock(
                         values,
@@ -458,7 +459,7 @@ public class BlockBenchmark {
                 blocks[blockIndex] = new LongBigArrayBlock(
                     valuesBigArray,
                     totalPositions,
-                    null,
+                    IntStream.rangeClosed(0, totalPositions).toArray(),
                     null,
                     Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING,
                     blockFactory
@@ -467,7 +468,7 @@ public class BlockBenchmark {
                 case "big-array-multivalue-null" -> {
                     int[] firstValueIndexes = randomFirstValueIndexes(totalPositions);
                     int positionCount = firstValueIndexes.length - 1;
-                    BitSet nulls = randomNulls(positionCount);
+                    BitSet nulls = nullsFromFirstValues(firstValueIndexes);
                     LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false);
                     for (int i = 0; i < values.length; i++) {
                         valuesBigArray.set(i, values[i]);
@@ -526,10 +527,10 @@ public class BlockBenchmark {
         return firstValueIndexes.stream().mapToInt(x -> x).toArray();
     }
 
-    private static BitSet randomNulls(int positionCount) {
-        BitSet nulls = new BitSet(positionCount);
-        for (int i = 0; i < positionCount; i++) {
-            if (random.nextDouble() < NULL_PERCENTAGE) {
+    private static BitSet nullsFromFirstValues(int[] firstValueIndexes) {
+        BitSet nulls = new BitSet(firstValueIndexes.length - 1);
+        for (int i = 0; i < firstValueIndexes.length - 1; i++) {
+            if (firstValueIndexes[i + 1] - firstValueIndexes[i] == 1 && random.nextDouble() < NULL_PERCENTAGE) {
                 nulls.set(i);
             }
         }
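The reasoning behind `nullsFromFirstValues`, as I read the diff: in an array block, position i spans values[firstValueIndexes[i] .. firstValueIndexes[i + 1]), so a null position must still occupy exactly one slot. The old `randomNulls` picked null positions independently of the value layout, which could flag a multivalue position as null and produce a block the new self tests would reject; the replacement only considers single-slot positions. A tiny self-contained sketch of that invariant (names and the NULL_PERCENTAGE value assumed for illustration):

import java.util.BitSet;
import java.util.Random;

class NullsDemo {
    static final double NULL_PERCENTAGE = 0.5; // assumed value, for illustration
    static final Random random = new Random(42);

    // Position i spans values[firstValueIndexes[i] .. firstValueIndexes[i + 1]),
    // so only positions of width 1 are eligible to be null.
    static BitSet nullsFromFirstValues(int[] firstValueIndexes) {
        BitSet nulls = new BitSet(firstValueIndexes.length - 1);
        for (int i = 0; i < firstValueIndexes.length - 1; i++) {
            if (firstValueIndexes[i + 1] - firstValueIndexes[i] == 1 && random.nextDouble() < NULL_PERCENTAGE) {
                nulls.set(i);
            }
        }
        return nulls;
    }

    public static void main(String[] args) {
        // Three positions of widths 1, 3, and 1: position 1 is multivalued.
        int[] firstValueIndexes = { 0, 1, 4, 5 };
        System.out.println(nullsFromFirstValues(firstValueIndexes)); // never contains 1
    }
}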
BlockKeepMaskBenchmark.java
@@ -42,7 +42,13 @@ import java.util.concurrent.TimeUnit;
 @Fork(1)
 public class BlockKeepMaskBenchmark extends BlockBenchmark {
     static {
-        // Smoke test all the expected values and force loading subclasses more like prod
+        if (false == "true".equals(System.getProperty("skipSelfTest"))) {
+            // Smoke test all the expected values and force loading subclasses more like prod
+            selfTest();
+        }
+    }
+
+    static void selfTest() {
         int totalPositions = 10;
         for (String paramString : RELEVANT_TYPE_BLOCK_COMBINATIONS) {
             String[] params = paramString.split("/");
BlockReadBenchmark.java
@@ -26,6 +26,13 @@ import java.util.stream.IntStream;
 @Fork(1)
 public class BlockReadBenchmark extends BlockBenchmark {
     static {
+        if (false == "true".equals(System.getProperty("skipSelfTest"))) {
+            // Smoke test all the expected values and force loading subclasses more like prod
+            selfTest();
+        }
+    }
+
+    static void selfTest() {
         // Smoke test all the expected values and force loading subclasses more like prod
         int totalPositions = 10;
         long[] actualCheckSums = new long[NUM_BLOCKS_PER_ITERATION];
EvalBenchmark.java
@@ -94,8 +94,10 @@ public class EvalBenchmark {
 
     static {
         LogConfigurator.configureESLogging();
-        // Smoke test all the expected values and force loading subclasses more like prod
-        selfTest();
+        if (false == "true".equals(System.getProperty("skipSelfTest"))) {
+            // Smoke test all the expected values and force loading subclasses more like prod
+            selfTest();
+        }
     }
 
     static void selfTest() {
TopNBenchmark.java
@@ -69,6 +69,10 @@ public class TopNBenchmark {
 
     static {
         // Smoke test all the expected values and force loading subclasses more like prod
+        selfTest();
+    }
+
+    static void selfTest() {
         try {
             for (String data : TopNBenchmark.class.getField("data").getAnnotationsByType(Param.class)[0].value()) {
                 for (String topCount : TopNBenchmark.class.getField("topCount").getAnnotationsByType(Param.class)[0].value()) {
@@ -98,8 +102,8 @@ public class TopNBenchmark {
             case DOUBLES -> List.of(ElementType.DOUBLE);
             case BOOLEANS -> List.of(ElementType.BOOLEAN);
             case BYTES_REFS -> List.of(ElementType.BYTES_REF);
-            case TWO_LONGS -> List.of(ElementType.INT, ElementType.INT);
-            case LONGS_AND_BYTES_REFS -> List.of(ElementType.INT, ElementType.BYTES_REF);
+            case TWO_LONGS -> List.of(ElementType.LONG, ElementType.LONG);
+            case LONGS_AND_BYTES_REFS -> List.of(ElementType.LONG, ElementType.BYTES_REF);
             default -> throw new IllegalArgumentException("unsupported data type [" + data + "]");
         };
         List<TopNEncoder> encoders = switch (data) {
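A small correctness fix rides along here: `TWO_LONGS` and `LONGS_AND_BYTES_REFS` previously built `INT` element types, so those cases were not exercising the long-valued code paths their names describe.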
@@ -127,7 +131,7 @@ public class TopNBenchmark {
     }
 
     private static void checkExpected(int topCount, List<Page> pages) {
-        if (topCount != pages.size()) {
+        if (topCount != pages.stream().mapToLong(Page::getPositionCount).sum()) {
             throw new AssertionError("expected [" + topCount + "] but got [" + pages.size() + "]");
         }
     }
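The old check compared the top-N row count against the number of result pages, which only holds when every page carries a single row; the new check sums positions across pages. A self-contained illustration with a toy Page (not the real org.elasticsearch.compute.data.Page):

import java.util.List;

class CheckExpectedDemo {
    // Toy stand-in for the real Page class: just a row count per page.
    record Page(int positionCount) {
        int getPositionCount() {
            return positionCount;
        }
    }

    public static void main(String[] args) {
        // A top 10 can come back split across pages, e.g. 4 + 4 + 2 rows.
        List<Page> pages = List.of(new Page(4), new Page(4), new Page(2));
        int topCount = 10;
        System.out.println(topCount != pages.size()); // true: 3 pages, so the old check misfires
        System.out.println(topCount == pages.stream().mapToLong(Page::getPositionCount).sum()); // true: 10 rows total
    }
}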
@@ -191,7 +195,7 @@ public class TopNBenchmark {
         try (Operator operator = operator(data, topCount)) {
             Page page = page(data);
             for (int i = 0; i < 1024; i++) {
-                operator.addInput(page);
+                operator.addInput(page.shallowCopy());
             }
             operator.finish();
             List<Page> results = new ArrayList<>();
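The `shallowCopy()` change matters because pages sit on reference-counted blocks: an operator is entitled to release every page it is handed, so feeding the identical `page` instance 1024 times would release the same blocks repeatedly. A shallow copy gives each `addInput` call its own reference to the shared data; at least, that is the usual contract for this API, since the diff only shows the call change.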
ValuesAggregatorBenchmark.java
@@ -81,6 +81,13 @@ public class ValuesAggregatorBenchmark {
     );
 
     static {
+        if (false == "true".equals(System.getProperty("skipSelfTest"))) {
+            // Smoke test all the expected values and force loading subclasses more like prod
+            selfTest();
+        }
+    }
+
+    static void selfTest() {
         // Smoke test all the expected values and force loading subclasses more like prod
         try {
             for (String groups : ValuesAggregatorBenchmark.class.getField("groups").getAnnotationsByType(Param.class)[0].value()) {
ValuesSourceReaderBenchmark.java
@@ -92,6 +92,10 @@ public class ValuesSourceReaderBenchmark {
 
     static {
         // Smoke test all the expected values and force loading subclasses more like prod
+        selfTest();
+    }
+
+    static void selfTest() {
         try {
             ValuesSourceReaderBenchmark benchmark = new ValuesSourceReaderBenchmark();
             benchmark.setupIndex();
@@ -263,7 +267,42 @@ public class ValuesSourceReaderBenchmark {
             null,
             null,
             false
-        ).blockLoader(null);
+        ).blockLoader(new MappedFieldType.BlockLoaderContext() {
+            @Override
+            public String indexName() {
+                return "benchmark";
+            }
+
+            @Override
+            public MappedFieldType.FieldExtractPreference fieldExtractPreference() {
+                return MappedFieldType.FieldExtractPreference.NONE;
+            }
+
+            @Override
+            public IndexSettings indexSettings() {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public SearchLookup lookup() {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public Set<String> sourcePaths(String name) {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public String parentField(String field) {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
+                throw new UnsupportedOperationException();
+            }
+        });
     }
 
     /**
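The benchmark previously passed a null `BlockLoaderContext`; the new anonymous implementation supplies just the answers the loader actually needs (`indexName()` and a `NONE` field-extract preference) and throws `UnsupportedOperationException` from everything else, so any code path that unexpectedly consults the rest fails fast rather than silently changing what is measured.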
AggregatorBenchmarkTests.java (new file)
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.compute.operator;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class AggregatorBenchmarkTests extends ESTestCase {
+    public void test() {
+        AggregatorBenchmark.selfTest();
+    }
+}
BlockKeepMaskBenchmarkTests.java (new file)
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.compute.operator;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class BlockKeepMaskBenchmarkTests extends ESTestCase {
+    public void test() {
+        BlockKeepMaskBenchmark.selfTest();
+    }
+}
BlockReadBenchmarkTests.java (new file)
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.compute.operator;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class BlockReadBenchmarkTests extends ESTestCase {
+    public void test() {
+        BlockReadBenchmark.selfTest();
+    }
+}
EvalBenchmarkTests.java
@@ -12,7 +12,7 @@ package org.elasticsearch.benchmark.compute.operator;
 import org.elasticsearch.test.ESTestCase;
 
 public class EvalBenchmarkTests extends ESTestCase {
-    public void testSelfTest() {
+    public void test() {
         EvalBenchmark.selfTest();
     }
 }
TopNBenchmarkTests.java (new file)
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.compute.operator;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class TopNBenchmarkTests extends ESTestCase {
+    public void test() {
+        TopNBenchmark.selfTest();
+    }
+}
ValuesAggregatorBenchmarkTests.java (new file)
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.compute.operator;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class ValuesAggregatorBenchmarkTests extends ESTestCase {
+    public void test() {
+        ValuesAggregatorBenchmark.selfTest();
+    }
+}
ValuesSourceReaderBenchmarkTests.java (new file)
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.benchmark.compute.operator;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class ValuesSourceReaderBenchmarkTests extends ESTestCase {
+    public void test() {
+        ValuesSourceReaderBenchmark.selfTest();
+    }
+}
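Taken together, the new *BenchmarkTests classes run each benchmark's `selfTest()` as an ordinary `ESTestCase` unit test, so the smoke tests presumably keep running in the regular test suite even when `run.sh --test` disables them inside the benchmark JVM via `-DskipSelfTest=true`.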