Create a pipeline to run micro-benchmarks periodically (#128507)
parent cdc74748dc, commit b909a503a5
3 changed files with 57 additions and 9 deletions
.buildkite/pipelines/periodic-micro-benchmarks.yml (new file, 10 additions):
@@ -0,0 +1,10 @@
+steps:
+  - label: periodic-micro-benchmarks
+    command: |
+      .ci/scripts/run-gradle.sh -p benchmarks/ run --args 'org.elasticsearch.benchmark._nightly -rf json -rff build/result.json'
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: custom-32-98304
+      buildDirectory: /dev/shm/bk
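
The --args string is handed straight to the JMH runner: org.elasticsearch.benchmark._nightly acts as an include pattern, so only benchmarks whose fully qualified names fall under the relocated _nightly package are executed, and -rf json -rff build/result.json writes the results as JSON to build/result.json. As a rough, hypothetical sketch (not part of this commit; the class name and workload are invented), any JMH benchmark class placed under that package would be picked up by the nightly run:

package org.elasticsearch.benchmark._nightly;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

import java.util.concurrent.TimeUnit;

// Hypothetical example: a JMH benchmark in the _nightly package matches the
// include pattern used by the periodic pipeline above.
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Benchmark)
public class ExampleNightlyBenchmark {

    private int[] values;

    @Setup
    public void setup() {
        // Build a small, deterministic workload once per benchmark run.
        values = new int[1024];
        for (int i = 0; i < values.length; i++) {
            values[i] = i * 31;
        }
    }

    @Benchmark
    public void sumValues(Blackhole blackhole) {
        long sum = 0;
        for (int v : values) {
            sum += v;
        }
        // Consume the result so the JIT cannot eliminate the loop.
        blackhole.consume(sum);
    }
}
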
QueryPlanningBenchmark.java:
@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.benchmark.esql;
+package org.elasticsearch.benchmark._nightly.esql;
 
 import org.elasticsearch.common.logging.LogConfigurator;
 import org.elasticsearch.common.settings.Settings;
@@ -67,9 +67,9 @@ public class QueryPlanningBenchmark {
     }
 
     private PlanTelemetry telemetry;
-    private EsqlParser parser;
-    private Analyzer analyzer;
-    private LogicalPlanOptimizer optimizer;
+    private EsqlParser defaultParser;
+    private Analyzer manyFieldsAnalyzer;
+    private LogicalPlanOptimizer defaultOptimizer;
 
     @Setup
     public void setup() {
@@ -100,8 +100,8 @@ public class QueryPlanningBenchmark {
         var functionRegistry = new EsqlFunctionRegistry();
 
         telemetry = new PlanTelemetry(functionRegistry);
-        parser = new EsqlParser();
-        analyzer = new Analyzer(
+        defaultParser = new EsqlParser();
+        manyFieldsAnalyzer = new Analyzer(
             new AnalyzerContext(
                 config,
                 functionRegistry,
@@ -112,10 +112,10 @@ public class QueryPlanningBenchmark {
             ),
             new Verifier(new Metrics(functionRegistry), new XPackLicenseState(() -> 0L))
         );
-        optimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(config, FoldContext.small()));
+        defaultOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(config, FoldContext.small()));
     }
 
-    private LogicalPlan plan(String query) {
+    private LogicalPlan plan(EsqlParser parser, Analyzer analyzer, LogicalPlanOptimizer optimizer, String query) {
         var parsed = parser.createStatement(query, new QueryParams(), telemetry);
         var analyzed = analyzer.analyze(parsed);
         var optimized = optimizer.optimize(analyzed);
@@ -124,6 +124,6 @@ public class QueryPlanningBenchmark {
 
     @Benchmark
    public void manyFields(Blackhole blackhole) {
-        blackhole.consume(plan("FROM test | LIMIT 10"));
+        blackhole.consume(plan(defaultParser, manyFieldsAnalyzer, defaultOptimizer, "FROM test | LIMIT 10"));
     }
 }
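
With the parser, analyzer and optimizer passed in as arguments, plan(...) is no longer tied to a single fixed set of components, so further benchmark variants can reuse it with different setups. A minimal, hypothetical sketch of such a variant (the fewFieldsAnalyzer field and fewFields method are illustrative only, not part of this commit; the analyzer would be built in setup() alongside manyFieldsAnalyzer):

// Hypothetical fragment inside QueryPlanningBenchmark, not part of this commit.
// A second analyzer covering a smaller schema can reuse the shared parser and
// optimizer through the parameterized plan(...) helper.
private Analyzer fewFieldsAnalyzer;

@Benchmark
public void fewFields(Blackhole blackhole) {
    blackhole.consume(plan(defaultParser, fewFieldsAnalyzer, defaultOptimizer, "FROM test | LIMIT 10"));
}
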
Buildkite pipeline catalog:
@@ -263,3 +263,41 @@ spec:
         Daily:
           branch: main
           cronline: "0 12 * * * America/New_York"
+---
+# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
+apiVersion: backstage.io/v1alpha1
+kind: Resource
+metadata:
+  name: buildkite-pipeline-elasticsearch-periodic-micro-benchmarks
+  description: Runs periodic micro benchmarks from the main branch
+  links:
+    - title: Pipeline
+      url: https://buildkite.com/elastic/elasticsearch-periodic-micro-benchmarks
+spec:
+  type: buildkite-pipeline
+  system: buildkite
+  owner: group:elasticsearch-team
+  implementation:
+    apiVersion: buildkite.elastic.dev/v1
+    kind: Pipeline
+    metadata:
+      description: ":elasticsearch: Runs nightly micro benchmarks from the main branch"
+      name: elasticsearch / periodic / micro-benchmarks
+    spec:
+      repository: elastic/elasticsearch
+      pipeline_file: .buildkite/pipelines/periodic-micro-benchmarks.yml
+      branch_configuration: main
+      teams:
+        elasticsearch-team: {}
+        ml-core: {}
+        everyone:
+          access_level: BUILD_AND_READ
+      provider_settings:
+        build_branches: false
+        build_pull_requests: false
+        publish_commit_status: false
+      trigger_mode: none
+      schedules:
+        Daily:
+          branch: main
+          cronline: "@daily"