Add initial buildkite pipeline for Benchmark (#16190)

skeleton pipeline for benchmark
kaisecheng 2024-05-31 15:17:50 +01:00 committed by GitHub
parent 2a7f059754
commit 1d4038b27f
7 changed files with 159 additions and 0 deletions

View file

@@ -0,0 +1,14 @@
agents:
  provider: gcp
  imageProject: elastic-images-prod
  image: family/platform-ingest-logstash-ubuntu-2204
  machineType: "n2-standard-16"
  diskSizeGb: 100
  diskType: pd-ssd

steps:
  - label: "Benchmark Snapshot"
    retry:
      automatic:
        - limit: 3
    command: .buildkite/scripts/benchmark/main.sh --all

View file

@@ -0,0 +1,15 @@
http.enabled: false
filebeat.inputs:
  - type: log
    symlinks: true
    paths:
      - "/usr/share/filebeat/flog/*.log"
logging.level: info
output.logstash:
  hosts:
    - "localhost:5044"
  ttl: 10ms
  bulk_max_size: 2048
# queue.mem:
#   events: 4096
#   flush.min_events: 2048
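The input path points at flog output under /usr/share/filebeat/flog/, so the benchmark presumably pre-generates log files there before Filebeat starts shipping. A hedged sketch of producing such input with the mingrammer/flog generator (the generator and its flags are an assumption based on the path; the actual invocation is not part of this commit):

# Hypothetical: generate ~1M apache_common log lines for Filebeat to pick up
docker run --rm -v /usr/share/filebeat/flog:/flog mingrammer/flog \
  -t log -f apache_common -n 1000000 -o /flog/flog.log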

View file

@@ -0,0 +1,9 @@
api.http.host: 0.0.0.0
pipeline.workers: ${WORKER}
pipeline.batch.size: ${BATCH_SIZE}
queue.type: ${QTYPE}
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username: ${MONITOR_ES_USER}
xpack.monitoring.elasticsearch.password: ${MONITOR_ES_PW}
xpack.monitoring.elasticsearch.hosts: ["${MONITOR_ES_HOST}"]
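This logstash.yml is fully parameterized through environment variables, so the harness has to export them before Logstash starts. A minimal sketch with placeholder values (the variable names are the ones referenced above; the values are illustrative only):

# Illustrative values only; the real ones come from the benchmark scripts and Buildkite secrets
export WORKER=4 BATCH_SIZE=125 QTYPE=memory
export MONITOR_ES_HOST="https://monitoring-es.example:9200"
export MONITOR_ES_USER="logstash_monitor" MONITOR_ES_PW="changeme"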

View file

@@ -0,0 +1,44 @@
- pipeline.id: main
  config.string: |
    input {
      beats {
        port => 5044
      }
    }
    output {
      elasticsearch {
        hosts => [ "${BENCHMARK_ES_HOST}" ]
        user => "${BENCHMARK_ES_USER}"
        password => "${BENCHMARK_ES_PW}"
      }
    }
- pipeline.id: node_stats
  config.string: |
    input {
      http_poller {
        urls => {
          NodeStats => {
            method => get
            url => "http://localhost:9600/_node/stats"
          }
        }
        schedule => { every => "30s" }
        codec => "json"
      }
    }
    filter {
      mutate {
        remove_field => [ "host", "[pipelines][.monitoring-logstash]", "event" ]
        add_field => { "[benchmark][label]" => "${QTYPE}_w${WORKER}b${BATCH_SIZE}" }
      }
    }
    output {
      elasticsearch {
        hosts => [ "${BENCHMARK_ES_HOST}" ]
        user => "${BENCHMARK_ES_USER}"
        password => "${BENCHMARK_ES_PW}"
        data_stream_type => "metrics"
        data_stream_dataset => "nodestats"
        data_stream_namespace => "logstash"
      }
    }
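The node_stats pipeline polls the local monitoring API every 30 seconds, stamps each document with a ${QTYPE}_w${WORKER}b${BATCH_SIZE} label, and indexes it into the metrics-nodestats-logstash data stream (type-dataset-namespace from the three data_stream_* options). To spot-check what is being collected during a run, something like the following works (jq and the field path are illustrative):

# Same endpoint the http_poller input hits every 30 seconds
curl -s http://localhost:9600/_node/stats | jq '.pipelines.main.events'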

View file

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -euo pipefail

main() {
  echo "hello world"
  echo "$@"
}

main "$@"

View file

@@ -0,0 +1,14 @@
#!/usr/bin/env bash

# print the machine architecture, normalized to x86_64 / aarch64
arch() { uname -m | sed -e "s|amd|x86_|" -e "s|arm|aarch|"; }

# return the min/max of the remaining arguments; $1 is a sort flag
# usage:
#   g: float; h: human readable (K/M/G); d: dictionary; M: month
#   min -g 3 2 5 1
#   max -g 1.5 5.2 2.5 1.2 5.7
#   max -g "null" "0"
#   min -h 25M 13G 99K 1098M
min() { printf "%s\n" "${@:2}" | sort "$1" | head -n1 ; }
max() { min "${1}r" "${@:2}" ; }
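Once the file is sourced, the helpers behave as follows (outputs assume GNU sort, which understands the -g and -h keys used here):

$ source util.sh        # whichever path this helper file lives at
$ min -g 3 2 5 1
1
$ max -g 1.5 5.2 2.5 1.2 5.7
5.7
$ min -h 25M 13G 99K 1098M
99K
$ arch                  # on an arm64/aarch64 host
aarch64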

View file

@@ -31,6 +31,7 @@ spec:
    - resource:logstash-dra-staging-pipeline
    - resource:logstash-linux-jdk-matrix-pipeline
    - resource:logstash-windows-jdk-matrix-pipeline
    - resource:logstash-benchmark-pipeline

# ***********************************
# Declare serverless IT pipeline
@@ -589,3 +590,56 @@ spec:
# *******************************
# SECTION END: Scheduler pipeline
# *******************************

# ***********************************
# Declare Benchmark pipeline
# ***********************************
---
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json
apiVersion: backstage.io/v1alpha1
kind: Resource
metadata:
  name: logstash-benchmark-pipeline
  description: Buildkite pipeline for the Logstash benchmark
  links:
    - title: 'Logstash Benchmark (Daily, Auto) pipeline'
      url: https://buildkite.com/elastic/logstash-benchmark-pipeline
spec:
  type: buildkite-pipeline
  owner: group:logstash
  system: platform-ingest
  implementation:
    apiVersion: buildkite.elastic.dev/v1
    kind: Pipeline
    metadata:
      name: logstash-benchmark-pipeline
      description: ':logstash: The Benchmark pipeline'
    spec:
      repository: elastic/logstash
      pipeline_file: ".buildkite/benchmark_pipeline.yml"
      maximum_timeout_in_minutes: 90
      provider_settings:
        trigger_mode: none # don't trigger jobs from github activity
      env:
        ELASTIC_SLACK_NOTIFICATIONS_ENABLED: 'false'
        SLACK_NOTIFICATIONS_CHANNEL: '#logstash-build'
        SLACK_NOTIFICATIONS_ON_SUCCESS: 'false'
        SLACK_NOTIFICATIONS_SKIP_FOR_RETRIES: 'true'
      teams:
        ingest-fp:
          access_level: MANAGE_BUILD_AND_READ
        logstash:
          access_level: MANAGE_BUILD_AND_READ
        ingest-eng-prod:
          access_level: MANAGE_BUILD_AND_READ
        everyone:
          access_level: READ_ONLY
      schedules:
        Daily benchmark on main branch:
          branch: main
          cronline: 30 04 * * * # daily at 04:30 UTC
          message: Daily trigger of Benchmark Pipeline
# *******************************
# SECTION END: Benchmark pipeline
# *******************************
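Because trigger_mode is none, the pipeline only runs from the daily schedule or a manual trigger. A minimal sketch of a manual trigger through the Buildkite REST API, assuming an API token with the write_builds scope (the org and pipeline slugs are taken from the Buildkite URL above):

# Hypothetical manual trigger; requires a Buildkite API token with write_builds scope
curl -X POST "https://api.buildkite.com/v2/organizations/elastic/pipelines/logstash-benchmark-pipeline/builds" \
  -H "Authorization: Bearer ${BUILDKITE_API_TOKEN}" \
  -d '{"commit": "HEAD", "branch": "main", "message": "manual benchmark run"}'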