Merge branch 'main' into metricsdb. Upgrade otel to 1.50.0

# Conflicts:
#	gradle/verification-metadata.xml
This commit is contained in:
Jonas Kunz 2025-06-27 10:05:32 +02:00
commit 3857b27a80
No known key found for this signature in database
3512 changed files with 99416 additions and 27205 deletions

View file

@ -94,6 +94,16 @@ if [[ "${USE_PROD_DOCKER_CREDENTIALS:-}" == "true" ]]; then
fi
fi
if [[ "${USE_PERF_CREDENTIALS:-}" == "true" ]]; then
PERF_METRICS_HOST=$(vault read -field=es_host /secret/ci/elastic-elasticsearch/microbenchmarks-metrics)
PERF_METRICS_USERNAME=$(vault read -field=es_user /secret/ci/elastic-elasticsearch/microbenchmarks-metrics)
PERF_METRICS_PASSWORD=$(vault read -field=es_password /secret/ci/elastic-elasticsearch/microbenchmarks-metrics)
export PERF_METRICS_HOST
export PERF_METRICS_USERNAME
export PERF_METRICS_PASSWORD
fi
# Authenticate to the Docker Hub public read-only registry
if which docker > /dev/null 2>&1; then
DOCKERHUB_REGISTRY_USERNAME="$(vault read -field=username secret/ci/elastic-elasticsearch/docker_hub_public_ro_credentials)"

View file

@ -4,7 +4,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
notify:

View file

@ -4,7 +4,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- wait
@ -13,7 +13,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part2
@ -21,7 +21,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part3
@ -29,7 +29,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part4
@ -37,17 +37,26 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part5
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart5
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part6
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart6
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- group: bwc-snapshots
steps:
- label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
@ -58,11 +67,13 @@ steps:
BWC_VERSION: $BWC_LIST
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: "{{matrix.BWC_VERSION}}"
- label: bc-upgrade
command: ".buildkite/scripts/run-bc-upgrade-tests.sh"
- group: lucene-compat
steps:
- label: "{{matrix.LUCENE_VERSION}} / lucene-compat"
@ -78,7 +89,7 @@ steps:
- "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
@ -89,7 +100,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- wait

View file

@ -5,7 +5,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- wait
@ -14,7 +14,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part2
@ -22,7 +22,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part3
@ -30,7 +30,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part4
@ -38,17 +38,26 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part5
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart5
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: part6
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart6
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- group: bwc-snapshots
steps:
- label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
@ -56,14 +65,16 @@ steps:
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: ["8.17.7", "8.18.3", "8.19.0", "9.0.2", "9.1.0"]
BWC_VERSION: ["8.17.9", "8.18.4", "8.19.0", "9.0.4", "9.1.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: "{{matrix.BWC_VERSION}}"
- label: bc-upgrade
command: ".buildkite/scripts/run-bc-upgrade-tests.sh"
- group: lucene-compat
steps:
- label: "{{matrix.LUCENE_VERSION}} / lucene-compat"
@ -79,7 +90,7 @@ steps:
- "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
@ -90,7 +101,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- wait

View file

@ -12,7 +12,7 @@ steps:
UPDATE_ES_LUCENE_SNAPSHOT: "true"
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- wait

View file

@ -4,7 +4,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- wait: null
@ -13,7 +13,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: part2
@ -21,7 +21,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: part3
@ -29,7 +29,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: part4
@ -37,12 +37,20 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: part5
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart5
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: part6
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart6
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
@ -60,7 +68,7 @@ steps:
- 8.10.0
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
@ -70,6 +78,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -4,7 +4,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
matrix:

View file

@ -5,7 +5,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
matrix:

View file

@ -2,9 +2,12 @@ steps:
- label: periodic-micro-benchmarks
command: |
.ci/scripts/run-gradle.sh :benchmarks:run --args 'org.elasticsearch.benchmark._nightly -rf json -rff build/result.json'
.buildkite/scripts/index-micro-benchmark-results.sh
env:
USE_PERF_CREDENTIALS: "true"
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -5,7 +5,7 @@
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}

View file

@ -12,7 +12,6 @@ steps:
- oraclelinux-8
- oraclelinux-9
- sles-15
- ubuntu-2004
- ubuntu-2204
- ubuntu-2404
- rocky-8

View file

@ -13,7 +13,6 @@ steps:
- oraclelinux-8
- oraclelinux-9
- sles-15
- ubuntu-2004
- ubuntu-2204
- ubuntu-2404
- rocky-8
@ -38,7 +37,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -54,7 +53,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -70,7 +69,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -86,7 +85,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -102,7 +101,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -118,7 +117,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -134,7 +133,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -150,7 +149,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -166,7 +165,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -182,7 +181,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -198,7 +197,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -214,7 +213,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -230,7 +229,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -246,7 +245,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -262,7 +261,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -278,7 +277,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -294,7 +293,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -303,37 +302,37 @@ steps:
env:
BWC_VERSION: 8.16.6
- label: "{{matrix.image}} / 8.17.7 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.7
- label: "{{matrix.image}} / 8.17.9 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.9
timeout_in_minutes: 300
matrix:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
machineType: custom-16-32768
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 8.17.7
BWC_VERSION: 8.17.9
- label: "{{matrix.image}} / 8.18.3 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.3
- label: "{{matrix.image}} / 8.18.4 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.4
timeout_in_minutes: 300
matrix:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
machineType: custom-16-32768
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 8.18.3
BWC_VERSION: 8.18.4
- label: "{{matrix.image}} / 8.19.0 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.19.0
@ -342,7 +341,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
@ -351,21 +350,21 @@ steps:
env:
BWC_VERSION: 8.19.0
- label: "{{matrix.image}} / 9.0.2 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.2
- label: "{{matrix.image}} / 9.0.4 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.4
timeout_in_minutes: 300
matrix:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
machineType: custom-16-32768
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 9.0.2
BWC_VERSION: 9.0.4
- label: "{{matrix.image}} / 9.1.0 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.1.0
@ -374,7 +373,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}

View file

@ -12,7 +12,6 @@ steps:
- oraclelinux-8
- oraclelinux-9
- sles-15
- ubuntu-2004
- ubuntu-2204
- ubuntu-2404
- rocky-8
@ -45,6 +44,7 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
- checkRestCompat
agents:
provider: gcp
@ -63,7 +63,6 @@ steps:
setup:
image:
- almalinux-8-aarch64
- ubuntu-2004-aarch64
- ubuntu-2404-aarch64
GRADLE_TASK:
- checkPart1
@ -71,6 +70,7 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
- checkRestCompat
agents:
provider: aws

View file

@ -3,7 +3,7 @@
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true

View file

@ -6,7 +6,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- label: encryption-at-rest
@ -14,7 +14,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- label: eql-correctness
@ -22,7 +22,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: example-plugins
@ -33,7 +33,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- group: java-fips-matrix
@ -51,10 +51,11 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
- checkRestCompat
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -70,7 +71,7 @@ steps:
BWC_VERSION: $BWC_LIST
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -93,10 +94,11 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
- checkRestCompat
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -113,7 +115,7 @@ steps:
BWC_VERSION: $BWC_LIST
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -124,7 +126,7 @@ steps:
timeout_in_minutes: 360
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- label: single-processor-node-tests
@ -132,7 +134,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- group: third-party tests
@ -148,7 +150,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / azure
@ -162,7 +164,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / gcs
@ -176,7 +178,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / geoip
@ -185,7 +187,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / s3
@ -199,7 +201,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- group: lucene-compat
@ -217,7 +219,7 @@ steps:
- "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
@ -230,7 +232,7 @@ steps:
timeout_in_minutes: 20
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
if: build.branch == "main" || build.branch == "8.19" || build.branch == "7.17"
@ -239,7 +241,7 @@ steps:
timeout_in_minutes: 15
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-2
- label: check-branch-protection-rules
command: .buildkite/scripts/branch-protection.sh

View file

@ -7,7 +7,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -26,7 +26,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -45,7 +45,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -64,7 +64,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -83,7 +83,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -102,7 +102,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -121,7 +121,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -140,7 +140,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -159,7 +159,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -178,7 +178,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -197,7 +197,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -216,7 +216,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -235,7 +235,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -254,7 +254,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -273,7 +273,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -292,7 +292,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -311,7 +311,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -325,17 +325,17 @@ steps:
- signal_reason: agent_stop
limit: 3
- label: 8.17.7 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.7#bwcTest
- label: 8.17.9 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.9#bwcTest
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
env:
BWC_VERSION: 8.17.7
BWC_VERSION: 8.17.9
retry:
automatic:
- exit_status: "-1"
@ -344,17 +344,17 @@ steps:
- signal_reason: agent_stop
limit: 3
- label: 8.18.3 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.3#bwcTest
- label: 8.18.4 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.4#bwcTest
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
env:
BWC_VERSION: 8.18.3
BWC_VERSION: 8.18.4
retry:
automatic:
- exit_status: "-1"
@ -368,7 +368,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -382,17 +382,17 @@ steps:
- signal_reason: agent_stop
limit: 3
- label: 9.0.2 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.2#bwcTest
- label: 9.0.4 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.4#bwcTest
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
env:
BWC_VERSION: 9.0.2
BWC_VERSION: 9.0.4
retry:
automatic:
- exit_status: "-1"
@ -406,7 +406,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
preemptible: true
@ -425,7 +425,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- label: encryption-at-rest
@ -433,7 +433,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- label: eql-correctness
@ -441,7 +441,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- label: example-plugins
@ -452,7 +452,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
- group: java-fips-matrix
@ -470,10 +470,11 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
- checkRestCompat
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -486,10 +487,10 @@ steps:
setup:
ES_RUNTIME_JAVA:
- openjdk21
BWC_VERSION: ["8.17.7", "8.18.3", "8.19.0", "9.0.2", "9.1.0"]
BWC_VERSION: ["8.17.9", "8.18.4", "8.19.0", "9.0.4", "9.1.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -512,10 +513,11 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
- checkRestCompat
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -529,10 +531,10 @@ steps:
ES_RUNTIME_JAVA:
- openjdk21
- openjdk23
BWC_VERSION: ["8.17.7", "8.18.3", "8.19.0", "9.0.2", "9.1.0"]
BWC_VERSION: ["8.17.9", "8.18.4", "8.19.0", "9.0.4", "9.1.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
@ -543,7 +545,7 @@ steps:
timeout_in_minutes: 360
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- label: single-processor-node-tests
@ -551,7 +553,7 @@ steps:
timeout_in_minutes: 420
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304
- group: third-party tests
@ -567,7 +569,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / azure
@ -581,7 +583,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / gcs
@ -595,7 +597,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / geoip
@ -604,7 +606,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- label: third-party / s3
@ -618,7 +620,7 @@ steps:
timeout_in_minutes: 30
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
- group: lucene-compat
@ -636,7 +638,7 @@ steps:
- "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:
@ -649,7 +651,7 @@ steps:
timeout_in_minutes: 20
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-8
buildDirectory: /dev/shm/bk
if: build.branch == "main" || build.branch == "8.19" || build.branch == "7.17"
@ -658,7 +660,7 @@ steps:
timeout_in_minutes: 15
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n2-standard-2
- label: check-branch-protection-rules
command: .buildkite/scripts/branch-protection.sh

View file

@ -22,6 +22,6 @@ steps:
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,9 +6,21 @@ config:
steps:
- group: bwc-snapshots
steps:
- label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
key: "bwc-snapshots"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest
- label: "{{matrix.BWC_VERSION}} / Part 1 / bwc-snapshots"
key: "bwc-snapshots-part1"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTestPart1
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: "{{matrix.BWC_VERSION}} / Part 2 / bwc-snapshots"
key: "bwc-snapshots-part2"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTestPart2
timeout_in_minutes: 300
matrix:
setup:
@ -18,3 +30,52 @@ steps:
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: "{{matrix.BWC_VERSION}} / Part 3 / bwc-snapshots"
key: "bwc-snapshots-part3"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTestPart3
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: "{{matrix.BWC_VERSION}} / Part 4 / bwc-snapshots"
key: "bwc-snapshots-part4"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTestPart5
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: "{{matrix.BWC_VERSION}} / Part 5 / bwc-snapshots"
key: "bwc-snapshots-part5"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTestPart5
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
- label: "{{matrix.BWC_VERSION}} / Part 6 / bwc-snapshots"
key: "bwc-snapshots-part6"
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTestPart6
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk

View file

@ -8,6 +8,6 @@ steps:
timeout_in_minutes: 20
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -9,6 +9,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -4,6 +4,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -13,6 +13,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -10,6 +10,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -13,7 +13,7 @@ steps:
setup:
image:
- rhel-8
- ubuntu-2004
- ubuntu-2404
PACKAGING_TASK:
- destructiveDistroTest.docker
- destructiveDistroTest.packages

View file

@ -15,7 +15,6 @@ steps:
- oraclelinux-8
- oraclelinux-9
- sles-15
- ubuntu-2004
- ubuntu-2204
- ubuntu-2404
- rocky-8

View file

@ -12,7 +12,7 @@ steps:
setup:
image:
- rocky-8
- ubuntu-2004
- ubuntu-2404
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}

View file

@ -6,7 +6,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: aws
imagePrefix: elasticsearch-ubuntu-2004-aarch64
imagePrefix: elasticsearch-ubuntu-2404-aarch64
instanceType: m6g.8xlarge
diskSizeGb: 350
diskType: gp3

View file

@ -8,6 +8,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,6 +6,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,7 +6,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: aws
imagePrefix: elasticsearch-ubuntu-2004-aarch64
imagePrefix: elasticsearch-ubuntu-2404-aarch64
instanceType: m6g.8xlarge
diskSizeGb: 350
diskType: gp3

View file

@ -8,6 +8,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -4,6 +4,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,7 +6,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: aws
imagePrefix: elasticsearch-ubuntu-2004-aarch64
imagePrefix: elasticsearch-ubuntu-2404-aarch64
instanceType: m6g.8xlarge
diskSizeGb: 350
diskType: gp3

View file

@ -8,6 +8,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,6 +6,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,7 +6,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: aws
imagePrefix: elasticsearch-ubuntu-2004-aarch64
imagePrefix: elasticsearch-ubuntu-2404-aarch64
instanceType: m6g.8xlarge
diskSizeGb: 350
diskType: gp3

View file

@ -8,6 +8,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk

View file

@ -6,6 +6,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: n1-standard-32
buildDirectory: /dev/shm/bk

View file

@ -6,7 +6,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: aws
imagePrefix: elasticsearch-ubuntu-2004-aarch64
imagePrefix: elasticsearch-ubuntu-2404-aarch64
instanceType: m6g.8xlarge
diskSizeGb: 350
diskType: gp3

View file

@ -8,6 +8,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -6,6 +6,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -0,0 +1,13 @@
config:
allow-labels: "test-arm"
steps:
- label: part-6-arm
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart6
timeout_in_minutes: 300
agents:
provider: aws
imagePrefix: elasticsearch-ubuntu-2004-aarch64
instanceType: m6g.8xlarge
diskSizeGb: 350
diskType: gp3
diskName: /dev/sda1

View file

@ -0,0 +1,13 @@
config:
allow-labels:
- Team:Security
- test-fips
steps:
- label: part-6-fips
command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart6
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -0,0 +1,14 @@
config:
allow-labels: "test-windows"
steps:
- label: part-6-windows
command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-windows-2022
machineType: custom-32-98304
diskType: pd-ssd
diskSizeGb: 350
env:
GRADLE_TASK: checkPart6

View file

@ -0,0 +1,10 @@
steps:
- label: part-6
command: |
.ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart6
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -0,0 +1,6 @@
steps:
- label: pr-upgrade
command: ".buildkite/scripts/run-pr-upgrade-tests.sh"
agents:
image: "docker.elastic.co/ci-agent-images/eck-region/buildkite-agent:1.5"
memory: "4G"

View file

@ -9,6 +9,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -15,8 +15,9 @@ steps:
- checkPart3
- checkPart4
- checkPart5
- checkPart6
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
diskSizeGb: 350
machineType: custom-32-98304

View file

@ -6,6 +6,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -4,6 +4,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -71,6 +71,7 @@ echo --- Building release artifacts
buildReleaseArtifacts \
exportCompressedDockerImages \
exportDockerContexts \
:zipAggregation \
:distribution:generateDependenciesReport
PATH="$PATH:${JAVA_HOME}/bin" # Required by the following script

View file

@ -0,0 +1,10 @@
#!/bin/bash

# Index JMH microbenchmark results into the performance metrics cluster.
# Reads the JSON array produced by the benchmarks build and posts one
# document per benchmark, stamped with the current time in epoch millis.
#
# Expects PERF_METRICS_HOST, PERF_METRICS_USERNAME and PERF_METRICS_PASSWORD
# in the environment (exported by the Buildkite pre-command hook when
# USE_PERF_CREDENTIALS=true).

# Consistent with the other CI scripts in this commit; aborts on unset
# credentials instead of posting to "https:///..." with empty auth.
set -euo pipefail

jq -c '.[]' "benchmarks/build/result.json" | while read -r doc; do
  doc=$(echo "$doc" | jq --argjson timestamp "$(date +%s000)" '. + {"@timestamp": $timestamp}')
  echo "Indexing $(echo "$doc" | jq -r '.benchmark')"
  # --fail surfaces HTTP 4xx/5xx instead of silently discarding the error
  # body; the || keeps indexing best-effort so one bad document does not
  # fail the whole upload (and the build) under `set -e`.
  curl -s --fail -X POST "https://$PERF_METRICS_HOST/metrics-microbenchmarks-default/_doc" \
    -u "$PERF_METRICS_USERNAME:$PERF_METRICS_PASSWORD" \
    -H 'Content-Type: application/json' \
    -d "$doc" \
    || echo "WARNING: failed to index benchmark document" >&2
done

View file

@ -12,7 +12,7 @@ exports[`generatePipelines should generate correct pipelines with a non-docs cha
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -63,7 +63,7 @@ exports[`generatePipelines should generate correct pipelines with only docs chan
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -89,7 +89,7 @@ exports[`generatePipelines should generate correct pipelines with full BWC expan
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -104,7 +104,7 @@ exports[`generatePipelines should generate correct pipelines with full BWC expan
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -119,7 +119,7 @@ exports[`generatePipelines should generate correct pipelines with full BWC expan
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -134,7 +134,7 @@ exports[`generatePipelines should generate correct pipelines with full BWC expan
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -149,7 +149,7 @@ exports[`generatePipelines should generate correct pipelines with full BWC expan
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -214,7 +214,7 @@ exports[`generatePipelines should generate correct pipelines with a non-docs cha
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},
@ -268,7 +268,7 @@ exports[`generatePipelines should generate correct pipelines with a non-docs cha
{
"agents": {
"buildDirectory": "/dev/shm/bk",
"image": "family/elasticsearch-ubuntu-2004",
"image": "family/elasticsearch-ubuntu-2404",
"machineType": "custom-32-98304",
"provider": "gcp",
},

View file

@ -14,7 +14,7 @@ steps:
BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:

View file

@ -9,6 +9,6 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk

View file

@ -10,7 +10,7 @@ steps:
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
image: family/elasticsearch-ubuntu-2404
machineType: custom-32-98304
buildDirectory: /dev/shm/bk
env:

View file

@ -9,8 +9,13 @@ export BEATS_DIR=$(pwd)/distribution/docker/build/artifacts/beats
mkdir -p ${BEATS_DIR}
curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-arm64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-arm64.tar.gz
curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
curl --fail -o "${BEATS_DIR}/metricbeat-fips-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-fips-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
curl --fail -o "${BEATS_DIR}/metricbeat-fips-${ES_VERSION}-linux-arm64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-fips-${ES_VERSION}-SNAPSHOT-linux-arm64.tar.gz
curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-arm64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-arm64.tar.gz
curl --fail -o "${BEATS_DIR}/filebeat-fips-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/filebeat/filebeat-fips-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
curl --fail -o "${BEATS_DIR}/filebeat-fips-${ES_VERSION}-linux-arm64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/filebeat/filebeat-fips-${ES_VERSION}-SNAPSHOT-linux-arm64.tar.gz
# Fetch ML artifacts
export ML_IVY_REPO=$(mktemp -d)

View file

@ -0,0 +1,72 @@
#!/bin/bash
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the "Elastic License
# 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
# Public License v 1"; you may not use this file except in compliance with, at
# your election, the "Elastic License 2.0", the "GNU Affero General Public
# License v3.0 only", or the "Server Side Public License, v 1".
#
set -euo pipefail
echo "Selecting the most recent build from branch [$BUILDKITE_BRANCH]."
# Select the most recent build from the current branch.
# We collect snapshots, order by date, then collect BCs, order by date, and concat them; then we select the last.
# So if we have one (or more) BC, we will always prefer to use that. Otherwise we will use the latest snapshot.
MANIFEST_URL="$(curl -s https://artifacts.elastic.co/releases/TfEVhiaBGqR64ie0g0r0uUwNAbEQMu1Z/future-releases/stack.json |
jq ".releases[] |
select(.branch == \"$BUILDKITE_BRANCH\") |
select(.active_release == true) |
((.snapshots | to_entries | sort_by(.value.completed_at)) +
(.build_candidates | to_entries | sort_by(.value.completed_at))) |
last | .value.manifest_url")"
if [[ -z "$MANIFEST_URL" ]]; then
echo "No snapshots or build candidates for branch [$BUILDKITE_BRANCH]."
echo "Skipping BC upgrade tests."
exit 0
fi
echo "Getting build manifest from [$MANIFEST_URL]"
# Note: we use eval to perform variable substitution for the curl arguments, and command substitution to
# set the output variable. Double quotes are not enough in this case.
MANIFEST="$(eval "curl -s $MANIFEST_URL")"
if [[ -z "$MANIFEST" ]]; then
echo "Cannot get the build manifest from [$MANIFEST_URL]"
exit 1
fi
CURRENT_VERSION=$(sed -n 's/^elasticsearch[[:space:]]*=[[:space:]]*\(.*\)/\1/p' build-tools-internal/version.properties)
BC_VERSION=$(echo "$MANIFEST" | jq -r .version)
BC_BUILD_ID=$(echo "$MANIFEST" | jq -r .build_id)
BC_COMMIT_HASH=$(echo "$MANIFEST" | jq -r .projects.elasticsearch.commit_hash)
if [ "$CURRENT_VERSION-SNAPSHOT" != "$BC_VERSION" ]; then
echo "Version [$BC_VERSION] of BC (or snapshot) does not match current version [$CURRENT_VERSION] of branch [$BUILDKITE_BRANCH]."
echo "Skipping BC upgrade tests."
exit 0
fi
echo "Running BC upgrade tests on $BUILDKITE_BRANCH [$BC_VERSION] using BC (or snapshot) build of commit [$BC_COMMIT_HASH] with build id [$BC_BUILD_ID]."
cat <<EOF | buildkite-agent pipeline upload
steps:
- group: "bc-upgrade $BC_BUILD_ID -> $BUILDKITE_BRANCH"
steps:
- label: "bc-upgrade-tests-part{{matrix.PART}}"
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=${BC_VERSION} -Dtests.bwc.refspec.main=${BC_COMMIT_HASH} bcUpgradeTestPart{{matrix.PART}}
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
matrix:
setup:
PART: ["1", "2", "3", "4", "5", "6"]
EOF

View file

@ -0,0 +1,42 @@
#!/bin/bash
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the "Elastic License
# 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
# Public License v 1"; you may not use this file except in compliance with, at
# your election, the "Elastic License 2.0", the "GNU Affero General Public
# License v3.0 only", or the "Server Side Public License, v 1".
#
set -euo pipefail
if [[ -z "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then
echo "Not a pull request, skipping PR upgrade tests."
exit 0
fi
# Identify the merge base of the current commit (branch) and the base branch of the pull request.
# PR upgrade tests are run from the merge base to the current commit.
BASE_COMMIT=$(git merge-base $BUILDKITE_PULL_REQUEST_BASE_BRANCH $BUILDKITE_COMMIT)
VERSION=$(sed -n 's/^elasticsearch[[:space:]]*=[[:space:]]*\(.*\)/\1/p' build-tools-internal/version.properties)
echo "Running PR upgrade tests from $BUILDKITE_PULL_REQUEST_BASE_BRANCH [$BASE_COMMIT] to $BUILDKITE_BRANCH [$BUILDKITE_COMMIT]."
cat << EOF | buildkite-agent pipeline upload
steps:
- group: "pr-upgrade $BUILDKITE_PULL_REQUEST_BASE_BRANCH -> $BUILDKITE_BRANCH"
steps:
- label: "pr-upgrade-part-{{matrix.PART}}"
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=${VERSION}-SNAPSHOT -Dtests.bwc.refspec.main=${BASE_COMMIT} bcUpgradeTestPart{{matrix.PART}}
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
matrix:
setup:
PART: ["1", "2", "3", "4", "5", "6"]
EOF

View file

@ -16,8 +16,8 @@ BWC_VERSION:
- "8.14.3"
- "8.15.5"
- "8.16.6"
- "8.17.7"
- "8.18.3"
- "8.17.9"
- "8.18.4"
- "8.19.0"
- "9.0.2"
- "9.0.4"
- "9.1.0"

View file

@ -1,6 +1,6 @@
BWC_VERSION:
- "8.17.7"
- "8.18.3"
- "8.17.9"
- "8.18.4"
- "8.19.0"
- "9.0.2"
- "9.0.4"
- "9.1.0"

View file

@ -16,4 +16,4 @@ jobs:
deployments: write
id-token: write
contents: read
pull-requests: read
pull-requests: write

View file

@ -0,0 +1,71 @@
name: "Docs preview comment"
on:
pull_request_target:
types: [opened, reopened, synchronize]
permissions:
contents: read
issues: write
pull-requests: write
jobs:
preview-links:
runs-on: ubuntu-latest
steps:
- name: Comment preview links for changed docs
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const pr = context.payload.pull_request;
const prNum = pr.number;
const owner = context.repo.owner;
const repo = context.repo.repo;
const base = `https://docs-v3-preview.elastic.dev/${owner}/${repo}/pull/${prNum}`;
// 1) List all files in this PR
const { data: files } = await github.rest.pulls.listFiles({
owner, repo, pull_number: prNum
});
// 2) Filter to only added/modified .md files (skip removed and _snippets/)
const links = files
.filter(f =>
f.status !== 'removed' &&
/\.md$/i.test(f.filename) &&
!/(^|\/)_snippets\//i.test(f.filename)
)
.map(f => {
let p = f.filename.replace(/^docs\//, '').replace(/\/index\.md$/i, '/');
if (p === f.filename.replace(/^docs\//, '')) p = p.replace(/\.md$/i, '');
return `- [\`${f.filename}\`](${base}/${p})`;
});
if (!links.length) return; // nothing to do
// 3) Build the comment body
const body = [
"🔍 **Preview links for changed docs:**",
"",
...links,
"",
"🔔 *The preview site may take up to **3 minutes** to finish building. These links will become live once it completes.*"
].join("\n");
// 4) Post or update a single bot comment
const { data: comments } = await github.rest.issues.listComments({
owner, repo, issue_number: prNum
});
const existing = comments.find(c =>
c.user.type === 'Bot' &&
c.body.startsWith("🔍 **Preview links for changed docs:**")
);
if (existing) {
await github.rest.issues.updateComment({
owner, repo,
comment_id: existing.id,
body
});
} else {
await github.rest.issues.createComment({
owner, repo,
issue_number: prNum,
body
});
}

View file

@ -168,16 +168,13 @@ You can import the Elasticsearch project into IntelliJ IDEA via:
#### Checkstyle
If you have the [Checkstyle] plugin installed, you can configure IntelliJ to
check the Elasticsearch code. However, the Checkstyle configuration file does
not work by default with the IntelliJ plugin, so instead an IDE-specific config
file is generated automatically after IntelliJ finishes syncing. You can
manually generate the file with `./gradlew configureIdeCheckstyle` in case
it is removed due to a `./gradlew clean` or other action.
IntelliJ should automatically configure Checkstyle. It does so by running
`configureIdeCheckstyle` on import, which generates the `.idea/checkstyle-idea.xml`
configuration file that IntelliJ points Checkstyle at.
IntelliJ should be automatically configured to use the generated rules after
import via the `.idea/checkstyle-idea.xml` configuration file. No further
action is required.
Things like `./gradlew clean` or `git clean -xdf` can nuke the file. You can
regenerate it by running `./gradlew -Didea.active=true configureIdeCheckstyle`,
but generally shouldn't have to.
#### Formatting
@ -206,6 +203,18 @@ Alternative manual steps for IntelliJ.
3. Navigate to the file `build-conventions/formatterConfig.xml`
4. Click "OK"
#### Options
When importing to IntelliJ, we offer a few options that can be used to
configure the behaviour of the import:
| Property | Description | Values (* = default) |
|--------------------------------------------|------------------------------------------------------------------------------------------------------|----------------------|
| `org.elasticsearch.idea-configuration-cache` | Should IntelliJ enable the Gradle Configuration cache to speed up builds when generating run configs | *`true`, `false` |
| `org.elasticsearch.idea-delegate-to-gradle` | Should IntelliJ use Gradle for all generated run / test configs or prompt each time | `true`, *`false` |
These options can be set anywhere on the Gradle config path including in `~/.gradle/gradle.properties`
### REST endpoint conventions
Elasticsearch typically uses singular nouns rather than plurals in URLs.

View file

@ -41,6 +41,7 @@ dependencies {
}
api(project(':libs:h3'))
api(project(':modules:aggregations'))
implementation project(':modules:mapper-extras');
api(project(':x-pack:plugin:esql-core'))
api(project(':x-pack:plugin:core'))
api(project(':x-pack:plugin:esql'))

View file

@ -73,6 +73,7 @@ public class AggregatorBenchmark {
static final int BLOCK_LENGTH = 8 * 1024;
private static final int OP_COUNT = 1024;
private static final int GROUPS = 5;
private static final int TOP_N_LIMIT = 3;
private static final BlockFactory blockFactory = BlockFactory.getInstance(
new NoopCircuitBreaker("noop"),
@ -90,6 +91,7 @@ public class AggregatorBenchmark {
private static final String TWO_ORDINALS = "two_" + ORDINALS;
private static final String LONGS_AND_BYTES_REFS = LONGS + "_and_" + BYTES_REFS;
private static final String TWO_LONGS_AND_BYTES_REFS = "two_" + LONGS + "_and_" + BYTES_REFS;
private static final String TOP_N_LONGS = "top_n_" + LONGS;
private static final String VECTOR_DOUBLES = "vector_doubles";
private static final String HALF_NULL_DOUBLES = "half_null_doubles";
@ -147,7 +149,8 @@ public class AggregatorBenchmark {
TWO_BYTES_REFS,
TWO_ORDINALS,
LONGS_AND_BYTES_REFS,
TWO_LONGS_AND_BYTES_REFS }
TWO_LONGS_AND_BYTES_REFS,
TOP_N_LONGS }
)
public String grouping;
@ -161,8 +164,7 @@ public class AggregatorBenchmark {
public String filter;
private static Operator operator(DriverContext driverContext, String grouping, String op, String dataType, String filter) {
if (grouping.equals("none")) {
if (grouping.equals(NONE)) {
return new AggregationOperator(
List.of(supplier(op, dataType, filter).aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)),
driverContext
@ -188,6 +190,9 @@ public class AggregatorBenchmark {
new BlockHash.GroupSpec(1, ElementType.LONG),
new BlockHash.GroupSpec(2, ElementType.BYTES_REF)
);
case TOP_N_LONGS -> List.of(
new BlockHash.GroupSpec(0, ElementType.LONG, false, new BlockHash.TopNDef(0, true, true, TOP_N_LIMIT))
);
default -> throw new IllegalArgumentException("unsupported grouping [" + grouping + "]");
};
return new HashAggregationOperator(
@ -271,10 +276,14 @@ public class AggregatorBenchmark {
case BOOLEANS -> 2;
default -> GROUPS;
};
int availableGroups = switch (grouping) {
case TOP_N_LONGS -> TOP_N_LIMIT;
default -> groups;
};
switch (op) {
case AVG -> {
DoubleBlock dValues = (DoubleBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long sum = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum();
long count = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).count();
@ -286,7 +295,7 @@ public class AggregatorBenchmark {
}
case COUNT -> {
LongBlock lValues = (LongBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).count() * opCount;
if (lValues.getLong(g) != expected) {
@ -296,7 +305,7 @@ public class AggregatorBenchmark {
}
case COUNT_DISTINCT -> {
LongBlock lValues = (LongBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).distinct().count();
long count = lValues.getLong(g);
@ -310,7 +319,7 @@ public class AggregatorBenchmark {
switch (dataType) {
case LONGS -> {
LongBlock lValues = (LongBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
if (lValues.getLong(g) != (long) g) {
throw new AssertionError(prefix + "expected [" + g + "] but was [" + lValues.getLong(g) + "]");
}
@ -318,7 +327,7 @@ public class AggregatorBenchmark {
}
case DOUBLES -> {
DoubleBlock dValues = (DoubleBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
if (dValues.getDouble(g) != (long) g) {
throw new AssertionError(prefix + "expected [" + g + "] but was [" + dValues.getDouble(g) + "]");
}
@ -331,7 +340,7 @@ public class AggregatorBenchmark {
switch (dataType) {
case LONGS -> {
LongBlock lValues = (LongBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).max().getAsLong();
if (lValues.getLong(g) != expected) {
@ -341,7 +350,7 @@ public class AggregatorBenchmark {
}
case DOUBLES -> {
DoubleBlock dValues = (DoubleBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).max().getAsLong();
if (dValues.getDouble(g) != expected) {
@ -356,7 +365,7 @@ public class AggregatorBenchmark {
switch (dataType) {
case LONGS -> {
LongBlock lValues = (LongBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum() * opCount;
if (lValues.getLong(g) != expected) {
@ -366,7 +375,7 @@ public class AggregatorBenchmark {
}
case DOUBLES -> {
DoubleBlock dValues = (DoubleBlock) values;
for (int g = 0; g < groups; g++) {
for (int g = 0; g < availableGroups; g++) {
long group = g;
long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum() * opCount;
if (dValues.getDouble(g) != expected) {
@ -391,6 +400,14 @@ public class AggregatorBenchmark {
}
}
}
case TOP_N_LONGS -> {
LongBlock groups = (LongBlock) block;
for (int g = 0; g < TOP_N_LIMIT; g++) {
if (groups.getLong(g) != (long) g) {
throw new AssertionError(prefix + "bad group expected [" + g + "] but was [" + groups.getLong(g) + "]");
}
}
}
case INTS -> {
IntBlock groups = (IntBlock) block;
for (int g = 0; g < GROUPS; g++) {
@ -495,7 +512,7 @@ public class AggregatorBenchmark {
private static Page page(BlockFactory blockFactory, String grouping, String blockType) {
Block dataBlock = dataBlock(blockFactory, blockType);
if (grouping.equals("none")) {
if (grouping.equals(NONE)) {
return new Page(dataBlock);
}
List<Block> blocks = groupingBlocks(grouping, blockType);
@ -564,7 +581,7 @@ public class AggregatorBenchmark {
default -> throw new UnsupportedOperationException("bad grouping [" + grouping + "]");
};
return switch (grouping) {
case LONGS -> {
case TOP_N_LONGS, LONGS -> {
var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
for (int v = 0; v < valuesPerGroup; v++) {

View file

@ -29,6 +29,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ProvidedIdFieldMapper;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptCompiler;
import org.elasticsearch.script.ScriptContext;
@ -38,11 +39,16 @@ import org.elasticsearch.xcontent.XContentParserConfiguration;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class MapperServiceFactory {
public static MapperService create(String mappings) {
return create(mappings, Collections.emptyList());
}
public static MapperService create(String mappings, List<MapperPlugin> mapperPlugins) {
Settings settings = Settings.builder()
.put("index.number_of_replicas", 0)
.put("index.number_of_shards", 1)
@ -51,7 +57,7 @@ public class MapperServiceFactory {
.build();
IndexMetadata meta = IndexMetadata.builder("index").settings(settings).build();
IndexSettings indexSettings = new IndexSettings(meta, settings);
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
MapperRegistry mapperRegistry = new IndicesModule(mapperPlugins).getMapperRegistry();
SimilarityService similarityService = new SimilarityService(indexSettings, null, Map.of());
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, BitsetFilterCache.Listener.NOOP);

View file

@ -0,0 +1,123 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.benchmark.vector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.util.VectorUtil;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.simdvec.ES91Int4VectorsScorer;
import org.elasticsearch.simdvec.internal.vectorization.ESVectorizationProvider;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import java.io.IOException;
import java.nio.file.Files;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
// first iteration is complete garbage, so make sure we really warmup
@Warmup(iterations = 4, time = 1)
// real iterations. not useful to spend tons of time here, better to fork more
@Measurement(iterations = 5, time = 1)
// engage some noise reduction
@Fork(value = 1)
public class Int4ScorerBenchmark {

    static {
        LogConfigurator.configureESLogging(); // native access requires logging to be initialized
    }

    @Param({ "384", "702", "1024" })
    int dims;

    // number of 4-bit quantized vectors written to (and scored from) the index input
    int numVectors = 200;
    // number of query vectors each benchmark invocation scores against all vectors
    int numQueries = 10;

    byte[] scratch;
    byte[][] binaryVectors;
    byte[][] binaryQueries;
    ES91Int4VectorsScorer scorer;
    Directory dir;
    IndexInput in;

    /**
     * Generates {@link #numVectors} random 4-bit quantized vectors, persists them to a
     * memory-mapped file, and generates {@link #numQueries} random query vectors.
     */
    @Setup
    public void setup() throws IOException {
        binaryVectors = new byte[numVectors][dims];
        dir = new MMapDirectory(Files.createTempDirectory("vectorData"));
        try (IndexOutput out = dir.createOutput("vectors", IOContext.DEFAULT)) {
            for (byte[] binaryVector : binaryVectors) {
                for (int i = 0; i < dims; i++) {
                    // 4-bit quantization
                    binaryVector[i] = (byte) ThreadLocalRandom.current().nextInt(16);
                }
                out.writeBytes(binaryVector, 0, binaryVector.length);
            }
        }
        in = dir.openInput("vectors", IOContext.DEFAULT);
        // BUGFIX: this loop previously iterated binaryVectors a second time, re-randomizing
        // the in-memory vectors (diverging from what was written to disk) and leaving every
        // query vector all-zero — so every dot product was trivially 0 and the benchmark
        // measured only loop/IO overhead. Fill the query vectors instead.
        binaryQueries = new byte[numQueries][dims];
        for (byte[] binaryQuery : binaryQueries) {
            for (int i = 0; i < dims; i++) {
                // 4-bit quantization
                binaryQuery[i] = (byte) ThreadLocalRandom.current().nextInt(16);
            }
        }
        scratch = new byte[dims];
        scorer = ESVectorizationProvider.getInstance().newES91Int4VectorsScorer(in, dims);
    }

    @TearDown
    public void teardown() throws IOException {
        IOUtils.close(dir, in);
    }

    /** Baseline: read each vector into a heap array and score with VectorUtil. */
    @Benchmark
    @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
    public void scoreFromArray(Blackhole bh) throws IOException {
        for (int j = 0; j < numQueries; j++) {
            in.seek(0);
            for (int i = 0; i < numVectors; i++) {
                in.readBytes(scratch, 0, dims);
                bh.consume(VectorUtil.int4DotProduct(binaryQueries[j], scratch));
            }
        }
    }

    /** Scores directly off the memory segment backing the index input (no heap copy). */
    @Benchmark
    @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
    public void scoreFromMemorySegmentOnlyVector(Blackhole bh) throws IOException {
        for (int j = 0; j < numQueries; j++) {
            in.seek(0);
            for (int i = 0; i < numVectors; i++) {
                bh.consume(scorer.int4DotProduct(binaryQueries[j]));
            }
        }
    }
}

View file

@ -126,7 +126,10 @@ public class OSQScorerBenchmark {
in.readFloats(corrections, 0, corrections.length);
int addition = Short.toUnsignedInt(in.readShort());
float score = scorer.score(
result,
result.lowerInterval(),
result.upperInterval(),
result.quantizedComponentSum(),
result.additionalCorrection(),
VectorSimilarityFunction.EUCLIDEAN,
centroidDp,
corrections[0],
@ -150,7 +153,10 @@ public class OSQScorerBenchmark {
in.readFloats(corrections, 0, corrections.length);
int addition = Short.toUnsignedInt(in.readShort());
float score = scorer.score(
result,
result.lowerInterval(),
result.upperInterval(),
result.quantizedComponentSum(),
result.additionalCorrection(),
VectorSimilarityFunction.EUCLIDEAN,
centroidDp,
corrections[0],
@ -175,7 +181,10 @@ public class OSQScorerBenchmark {
in.readFloats(corrections, 0, corrections.length);
int addition = Short.toUnsignedInt(in.readShort());
float score = scorer.score(
result,
result.lowerInterval(),
result.upperInterval(),
result.quantizedComponentSum(),
result.additionalCorrection(),
VectorSimilarityFunction.EUCLIDEAN,
centroidDp,
corrections[0],
@ -196,7 +205,16 @@ public class OSQScorerBenchmark {
for (int j = 0; j < numQueries; j++) {
in.seek(0);
for (int i = 0; i < numVectors; i += 16) {
scorer.scoreBulk(binaryQueries[j], result, VectorSimilarityFunction.EUCLIDEAN, centroidDp, scratchScores);
scorer.scoreBulk(
binaryQueries[j],
result.lowerInterval(),
result.upperInterval(),
result.quantizedComponentSum(),
result.additionalCorrection(),
VectorSimilarityFunction.EUCLIDEAN,
centroidDp,
scratchScores
);
bh.consume(scratchScores);
}
}

View file

@ -0,0 +1,110 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.benchmark.xcontent;
import org.elasticsearch.benchmark.index.mapper.MapperServiceFactory;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
/**
 * Benchmark to measure indexing performance of keyword fields. Used to measure performance impact of skipping
 * UTF-8 to UTF-16 conversion during document parsing.
 */
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
@Fork(1)
@Threads(1)
@Warmup(iterations = 1)
@Measurement(iterations = 5)
public class OptimizedTextBenchmark {
    static {
        // For Elasticsearch900Lucene101Codec:
        LogConfigurator.loadLog4jPlugins();
        LogConfigurator.configureESLogging();
        LogConfigurator.setNodeName("test");
    }

    // Alphabet used to generate random field values. Hoisted to a constant —
    // it was previously re-created inside randomValue() on every call.
    private static final String CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 ";

    /**
     * Total number of documents to index.
     */
    @Param("1048576")
    private int nDocs;

    private MapperService mapperService;
    private SourceToParse[] sources;

    // Single RNG reused for all generated values. Previously a new Random was
    // instantiated per randomValue() call, i.e. once per document (~1M times in setup).
    private final Random random = new Random();

    /** Returns a random string of {@code length} characters drawn from {@link #CHARS}. */
    private String randomValue(int length) {
        StringBuilder builder = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            builder.append(CHARS.charAt(random.nextInt(CHARS.length())));
        }
        return builder.toString();
    }

    /**
     * Builds a mapper service with a single {@code match_only_text} field and
     * pre-generates {@link #nDocs} JSON sources so parsing cost is excluded
     * from document generation.
     */
    @Setup(Level.Trial)
    public void setup() throws IOException {
        mapperService = MapperServiceFactory.create("""
            {
                "_doc": {
                    "dynamic": false,
                    "properties": {
                        "field": {
                            "type": "match_only_text"
                        }
                    }
                }
            }
            """, List.of(new MapperExtrasPlugin()));
        sources = new SourceToParse[nDocs];
        for (int i = 0; i < nDocs; i++) {
            XContentBuilder b = XContentFactory.jsonBuilder();
            b.startObject().field("field", randomValue(512)).endObject();
            sources[i] = new SourceToParse(UUIDs.randomBase64UUID(), BytesReference.bytes(b), XContentType.JSON);
        }
    }

    /** Parses every pre-generated document; the parse result is the measured work. */
    @Benchmark
    public void indexDocuments(final Blackhole bh) {
        final var mapper = mapperService.documentMapper();
        for (int i = 0; i < nDocs; i++) {
            bh.consume(mapper.parse(sources[i]));
        }
    }
}

View file

@ -4,6 +4,9 @@
{
"branch": "main"
},
{
"branch": "9.1"
},
{
"branch": "9.0"
},

View file

@ -17,88 +17,89 @@ buildscript {
}
plugins {
id 'java-gradle-plugin'
id 'java-test-fixtures'
id 'eclipse'
id 'java-gradle-plugin'
id 'java-test-fixtures'
id 'eclipse'
}
group = "org.elasticsearch"
// This project contains Checkstyle rule implementations used by IDEs which use a Java 11 runtime
java {
targetCompatibility = 11
sourceCompatibility = 11
targetCompatibility = 17
sourceCompatibility = 17
}
gradlePlugin {
// We already configure publication and we don't need or want the one that comes
// with the java-gradle-plugin
automatedPublishing = false
plugins {
internalLicenseheaders {
id = 'elasticsearch.internal-licenseheaders'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.precommit.LicenseHeadersPrecommitPlugin'
}
eclipse {
id = 'elasticsearch.eclipse'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.EclipseConventionPlugin'
}
publish {
id = 'elasticsearch.publish'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.PublishPlugin'
}
licensing {
id = 'elasticsearch.licensing'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.LicensingPlugin'
}
buildTools {
id = 'elasticsearch.build-tools'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.BuildToolsConventionsPlugin'
}
versions {
id = 'elasticsearch.versions'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.VersionPropertiesPlugin'
}
formatting {
id = 'elasticsearch.formatting'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.precommit.FormattingPrecommitPlugin'
}
// We already configure publication and we don't need or want the one that comes
// with the java-gradle-plugin
automatedPublishing = false
plugins {
internalLicenseheaders {
id = 'elasticsearch.internal-licenseheaders'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.precommit.LicenseHeadersPrecommitPlugin'
}
eclipse {
id = 'elasticsearch.eclipse'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.EclipseConventionPlugin'
}
publish {
id = 'elasticsearch.publish'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.PublishPlugin'
}
licensing {
id = 'elasticsearch.licensing'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.LicensingPlugin'
}
buildTools {
id = 'elasticsearch.build-tools'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.BuildToolsConventionsPlugin'
}
versions {
id = 'elasticsearch.versions'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.VersionPropertiesPlugin'
}
formatting {
id = 'elasticsearch.formatting'
implementationClass = 'org.elasticsearch.gradle.internal.conventions.precommit.FormattingPrecommitPlugin'
}
}
}
repositories {
mavenCentral()
gradlePluginPortal()
mavenCentral()
gradlePluginPortal()
}
dependencies {
api buildLibs.maven.model
api buildLibs.shadow.plugin
api buildLibs.apache.rat
compileOnly buildLibs.checkstyle
constraints {
api("org.eclipse.platform:org.eclipse.osgi:3.18.300") {
because("Use the same version as we do in spotless gradle plugin at runtime")
}
}
api(buildLibs.spotless.plugin) {
exclude module: "groovy-xml"
api buildLibs.maven.model
api buildLibs.shadow.plugin
api buildLibs.apache.rat
api buildLibs.nmcp
compileOnly buildLibs.checkstyle
constraints {
api("org.eclipse.platform:org.eclipse.osgi:3.18.300") {
because("Use the same version as we do in spotless gradle plugin at runtime")
}
}
api(buildLibs.spotless.plugin) {
exclude module: "groovy-xml"
}
}
project.getPlugins().withType(JavaBasePlugin.class) {
java.getModularity().getInferModulePath().set(false);
eclipse.getClasspath().getFile().whenMerged { classpath ->
/*
* give each source folder a unique corresponding output folder
* outside of the usual `build` folder. We can't put the build
* in the usual build folder because eclipse becomes *very* sad
* if we delete it. Which `gradlew clean` does all the time.
*/
classpath.getEntries().findAll{ s -> s instanceof SourceFolder }.eachWithIndex { s, i ->
s.setOutput("out/eclipse" + i)
}
java.getModularity().getInferModulePath().set(false);
eclipse.getClasspath().getFile().whenMerged { classpath ->
/*
* give each source folder a unique corresponding output folder
* outside of the usual `build` folder. We can't put the build
* in the usual build folder because eclipse becomes *very* sad
* if we delete it. Which `gradlew clean` does all the time.
*/
classpath.getEntries().findAll { s -> s instanceof SourceFolder }.eachWithIndex { s, i ->
s.setOutput("out/eclipse" + i)
}
}
}
tasks.withType(JavaCompile).configureEach {

View file

@ -14,6 +14,8 @@ import groovy.util.Node;
import com.github.jengelman.gradle.plugins.shadow.ShadowExtension;
import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin;
import nmcp.NmcpPlugin;
import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
import org.elasticsearch.gradle.internal.conventions.precommit.PomValidationPrecommitPlugin;
import org.elasticsearch.gradle.internal.conventions.util.Util;
@ -27,6 +29,7 @@ import org.gradle.api.plugins.BasePluginExtension;
import org.gradle.api.plugins.ExtensionContainer;
import org.gradle.api.plugins.JavaLibraryPlugin;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.provider.MapProperty;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
@ -65,6 +68,7 @@ public class PublishPlugin implements Plugin<Project> {
project.getPluginManager().apply(MavenPublishPlugin.class);
project.getPluginManager().apply(PomValidationPrecommitPlugin.class);
project.getPluginManager().apply(LicensingPlugin.class);
project.getPluginManager().apply(NmcpPlugin.class);
configureJavadocJar(project);
configureSourcesJar(project);
configurePomGeneration(project);
@ -82,6 +86,11 @@ public class PublishPlugin implements Plugin<Project> {
publication.from(project.getComponents().getByName("java"));
}
});
project.getPlugins().withType(JavaPlugin.class, plugin -> {
var javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class);
javaPluginExtension.withJavadocJar();
javaPluginExtension.withSourcesJar();
});
@SuppressWarnings("unchecked")
var projectLicenses = (MapProperty<String, Provider<String>>) project.getExtensions().getExtraProperties().get("projectLicenses");
publication.getPom().withXml(xml -> {

View file

@ -1,7 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=d7042b3c11565c192041fc8c4703f541b888286404b4f267138c1d094d8ecdca
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.1-all.zip
distributionSha256Sum=443c9c8ee2ac1ee0e11881a40f2376d79c66386264a44b24a9f8ca67e633375f
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-all.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

View file

@ -123,8 +123,6 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
then:
result.task(":assemble").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world.jar").exists()
file("build/distributions/hello-world-javadoc.jar").exists()
file("build/distributions/hello-world-sources.jar").exists()
assertValidJar(file("build/distributions/hello-world.jar"))
}
@ -162,7 +160,6 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
result.task(":forbiddenPatterns").outcome == TaskOutcome.SUCCESS
result.task(":validateModule").outcome == TaskOutcome.SUCCESS
result.task(":splitPackagesAudit").outcome == TaskOutcome.SUCCESS
result.task(":validateElasticPom").outcome == TaskOutcome.SUCCESS
// disabled but check for being on the task graph
result.task(":forbiddenApisMain").outcome == TaskOutcome.SKIPPED
result.task(":checkstyleMain").outcome == TaskOutcome.SKIPPED

View file

@ -9,11 +9,16 @@
package org.elasticsearch.gradle.internal
import spock.lang.IgnoreIf
import org.elasticsearch.gradle.fixtures.AbstractGradleFuncTest
import org.gradle.testkit.runner.TaskOutcome
import org.xmlunit.builder.DiffBuilder
import org.xmlunit.builder.Input
// Ignoring this test on windows due to what appears to be a bug in the gradle testkit runner.
// https://github.com/elastic/elasticsearch/issues/129100
@IgnoreIf({ os.isWindows() })
class PublishPluginFuncTest extends AbstractGradleFuncTest {
def setup() {
@ -23,9 +28,212 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
configurationCacheCompatible = false
}
def "artifacts and tweaked pom is published"() {
def "project with plugin applied is considered for maven central publication"() {
given:
// required for JarHell to work
subProject(":libs:some-public-lib") << """
plugins {
id 'elasticsearch.java'
id 'elasticsearch.publish'
}
group = 'org.acme'
version = '1.0'
"""
subProject(":libs:some-other-lib") << """
plugins {
id 'elasticsearch.java'
id 'elasticsearch.publish'
}
group = 'org.acme.xpack'
version = '1.0'
"""
subProject(":libs:some-private-lib") << """
plugins {
id 'elasticsearch.java'
}
group = 'org.acme.xpack'
version = '1.0'
"""
buildFile << """
plugins {
id 'com.gradleup.nmcp.aggregation'
}
version = "1.0"
group = 'org.acme'
description = "custom project description"
nmcpAggregation {
centralPortal {
username = 'acme'
password = 'acmepassword'
// publish manually from the portal
publishingType = "USER_MANAGED"
}
// this breaks project isolation but this is broken in elasticsearch build atm anyhow.
publishAllProjectsProbablyBreakingProjectIsolation()
}
"""
when:
def result = gradleRunner(':zipAggregation').build()
then:
result.task(":zipAggregation").outcome == TaskOutcome.SUCCESS
file("build/nmcp/zip/aggregation.zip").exists()
def zip = zip("build/nmcp/zip/aggregation.zip")
zip.files().findAll { it.isDirectory() == false }.collect { it.name }.sort() == [
"org/acme/some-public-lib/1.0/some-public-lib-1.0-javadoc.jar",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-javadoc.jar.md5",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-javadoc.jar.sha1",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-javadoc.jar.sha256",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-javadoc.jar.sha512",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-sources.jar",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-sources.jar.md5",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-sources.jar.sha1",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-sources.jar.sha256",
"org/acme/some-public-lib/1.0/some-public-lib-1.0-sources.jar.sha512",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.jar",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.jar.md5",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.jar.sha1",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.jar.sha256",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.jar.sha512",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.module",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.module.md5",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.module.sha1",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.module.sha256",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.module.sha512",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.pom",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.pom.md5",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.pom.sha1",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.pom.sha256",
"org/acme/some-public-lib/1.0/some-public-lib-1.0.pom.sha512",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-javadoc.jar",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-javadoc.jar.md5",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-javadoc.jar.sha1",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-javadoc.jar.sha256",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-javadoc.jar.sha512",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-sources.jar",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-sources.jar.md5",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-sources.jar.sha1",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-sources.jar.sha256",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0-sources.jar.sha512",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.jar",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.jar.md5",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.jar.sha1",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.jar.sha256",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.jar.sha512",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.module",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.module.md5",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.module.sha1",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.module.sha256",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.module.sha512",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.pom",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.pom.md5",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.pom.sha1",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.pom.sha256",
"org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.pom.sha512"
]
assertXmlEquals(zip.file("org/acme/some-public-lib/1.0/some-public-lib-1.0.pom").read(),"""
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
<!-- is to indicate to Gradle or any Gradle module metadata file consumer -->
<!-- that they should prefer consuming it instead. -->
<!-- do_not_remove: published-with-gradle-metadata -->
<modelVersion>4.0.0</modelVersion>
<groupId>org.acme</groupId>
<artifactId>some-public-lib</artifactId>
<version>1.0</version>
<name>some-public-lib</name>
<description/>
<url>unknown</url>
<scm>
<url>unknown</url>
</scm>
<inceptionYear>2009</inceptionYear>
<licenses>
<license>
<name>Elastic License 2.0</name>
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
<license>
<name>GNU Affero General Public License Version 3</name>
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
<distribution>repo</distribution>
</license>
</licenses>
<developers>
<developer>
<name>Elastic</name>
<url>https://www.elastic.co</url>
</developer>
</developers>
</project>
""")
assertXmlEquals(zip.file("org/acme/xpack/some-other-lib/1.0/some-other-lib-1.0.pom").read(),"""
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
<!-- is to indicate to Gradle or any Gradle module metadata file consumer -->
<!-- that they should prefer consuming it instead. -->
<!-- do_not_remove: published-with-gradle-metadata -->
<modelVersion>4.0.0</modelVersion>
<groupId>org.acme.xpack</groupId>
<artifactId>some-other-lib</artifactId>
<version>1.0</version>
<name>some-other-lib</name>
<description/>
<url>unknown</url>
<scm>
<url>unknown</url>
</scm>
<inceptionYear>2009</inceptionYear>
<licenses>
<license>
<name>Elastic License 2.0</name>
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
<license>
<name>GNU Affero General Public License Version 3</name>
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
<distribution>repo</distribution>
</license>
</licenses>
<developers>
<developer>
<name>Elastic</name>
<url>https://www.elastic.co</url>
</developer>
</developers>
</project>
""")
}
def "artifacts and tweaked pom is published"() {
given:
buildFile << """
plugins {
id 'elasticsearch.java'
id 'elasticsearch.publish'
@ -36,17 +244,17 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
description = "custom project description"
"""
when:
def result = gradleRunner('assemble').build()
when:
def result = gradleRunner('assemble').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0.jar").exists()
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0.jar").exists()
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -88,12 +296,12 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
</developer>
</developers>
</project>"""
)
}
)
}
def "hides runtime dependencies and handles shadow dependencies"() {
given:
buildFile << """
def "hides runtime dependencies and handles shadow dependencies"() {
given:
buildFile << """
plugins {
id 'elasticsearch.java'
id 'elasticsearch.publish'
@ -121,18 +329,18 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
description = 'shadowed project'
"""
when:
def result = gradleRunner('assemble', '--stacktrace').build()
when:
def result = gradleRunner('assemble', '--stacktrace').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0-original.jar").exists()
file("build/distributions/hello-world-1.0.jar").exists()
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0-original.jar").exists()
file("build/distributions/hello-world-1.0.jar").exists()
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.acme</groupId>
@ -177,14 +385,14 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
</dependency>
</dependencies>
</project>"""
)
}
)
}
def "handles project shadow dependencies"() {
given:
settingsFile << "include ':someLib'"
file('someLib').mkdirs()
buildFile << """
def "handles project shadow dependencies"() {
given:
settingsFile << "include ':someLib'"
file('someLib').mkdirs()
buildFile << """
plugins {
id 'elasticsearch.java'
id 'elasticsearch.publish'
@ -211,18 +419,18 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
description = 'with shadowed dependencies'
"""
when:
def result = gradleRunner(':assemble', '--stacktrace').build()
when:
def result = gradleRunner(':assemble', '--stacktrace').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0-original.jar").exists()
file("build/distributions/hello-world-1.0.jar").exists()
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0-original.jar").exists()
file("build/distributions/hello-world-1.0.jar").exists()
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.acme</groupId>
@ -267,16 +475,16 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
</dependency>
</dependencies>
</project>"""
)
}
)
}
def "generates artifacts for shadowed elasticsearch plugin"() {
given:
// we use the esplugin plugin in this test that is not configuration cache compatible yet
configurationCacheCompatible = false
file('license.txt') << "License file"
file('notice.txt') << "Notice file"
buildFile << """
def "generates artifacts for shadowed elasticsearch plugin"() {
given:
// we use the esplugin plugin in this test that is not configuration cache compatible yet
configurationCacheCompatible = false
file('license.txt') << "License file"
file('notice.txt') << "Notice file"
buildFile << """
plugins {
id 'elasticsearch.internal-es-plugin'
id 'elasticsearch.publish'
@ -305,18 +513,18 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
group = 'org.acme'
"""
when:
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build()
when:
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-plugin-1.0-original.jar").exists()
file("build/distributions/hello-world-plugin-1.0.jar").exists()
file("build/distributions/hello-world-plugin-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-plugin-1.0-sources.jar").exists()
file("build/distributions/hello-world-plugin-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-plugin-1.0.pom").text, """
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-plugin-1.0-original.jar").exists()
file("build/distributions/hello-world-plugin-1.0.jar").exists()
file("build/distributions/hello-world-plugin-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-plugin-1.0-sources.jar").exists()
file("build/distributions/hello-world-plugin-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-plugin-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -358,16 +566,16 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
</developer>
</developers>
</project>"""
)
}
)
}
def "generates pom for elasticsearch plugin"() {
given:
// we use the esplugin plugin in this test that is not configuration cache compatible yet
configurationCacheCompatible = false
file('license.txt') << "License file"
file('notice.txt') << "Notice file"
buildFile << """
def "generates pom for elasticsearch plugin"() {
given:
// we use the esplugin plugin in this test that is not configuration cache compatible yet
configurationCacheCompatible = false
file('license.txt') << "License file"
file('notice.txt') << "Notice file"
buildFile << """
plugins {
id 'elasticsearch.internal-es-plugin'
id 'elasticsearch.publish'
@ -387,14 +595,14 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
group = 'org.acme'
"""
when:
def result = gradleRunner('generatePom').build()
when:
def result = gradleRunner('generatePom').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-plugin-2.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-plugin-2.0.pom").text, """
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-plugin-2.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-plugin-2.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -436,14 +644,14 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
</developer>
</developers>
</project>"""
)
}
)
}
def "generated pom can be validated"() {
given:
// scm info only added for internal builds
internalBuild()
buildFile << """
def "generated pom can be validated"() {
given:
// scm info only added for internal builds
internalBuild()
buildFile << """
buildParams.setGitOrigin(project.providers.provider(() -> "https://some-repo.com/repo.git"))
apply plugin:'elasticsearch.java'
apply plugin:'elasticsearch.publish'
@ -455,14 +663,14 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
ext.projectLicenses.set(['The Apache Software License, Version 2.0': project.providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
"""
when:
def result = gradleRunner('generatePom', 'validateElasticPom').build()
when:
def result = gradleRunner('generatePom', 'validateElasticPom').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -494,30 +702,31 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
</developer>
</developers>
</project>"""
)
}
)
}
private boolean assertXmlEquals(String toTest, String expected) {
def diff = DiffBuilder.compare(Input.fromString(expected))
.ignoreWhitespace()
.ignoreComments()
.normalizeWhitespace()
.withTest(Input.fromString(toTest))
.build()
diff.differences.each { difference ->
println difference
}
if (diff.differences.size() > 0) {
println """ given:
private boolean assertXmlEquals(String toTest, String expected) {
def diff = DiffBuilder.compare(Input.fromString(expected))
.ignoreWhitespace()
.ignoreComments()
.normalizeWhitespace()
.withTest(Input.fromString(toTest))
.build()
diff.differences.each { difference ->
println difference
}
if (diff.differences.size() > 0) {
println """ given:
$toTest
"""
println """ expected:
println """ expected:
$expected
"""
}
assert diff.hasDifferences() == false
true
}
assert diff.hasDifferences() == false
true
}
}

View file

@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
// Opt-in backwards-compatibility upgrade test: registered unconditionally, but
// only actually runs when CI supplies the `tests.bwc.main.version` system
// property (see the onlyIf spec below).
tasks.register("bcUpgradeTest", StandaloneRestIntegTestTask) {
  // We use a phony version here as the real version is provided via `tests.bwc.main.version` system property
  usesBwcDistribution(Version.fromString("0.0.0"))
  // Keep the old-cluster marker in sync with the phony distribution version above.
  systemProperty("tests.old_cluster_version", "0.0.0")
  onlyIf("tests.bwc.main.version system property exists") { System.getProperty("tests.bwc.main.version") != null }
}

View file

@ -180,6 +180,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
// this path is produced by the extractLibs task above
String testLibraryPath = TestUtil.getTestLibraryPath("${elasticsearchProject.left()}/libs/native/libraries/build/platform")
def enableIdeaCC = providers.gradleProperty("org.elasticsearch.idea-configuration-cache").getOrElse("true").toBoolean()
def delegateToGradle = providers.gradleProperty("org.elasticsearch.idea-delegate-to-gradle").getOrElse("false").toBoolean()
idea {
project {
vcs = 'Git'
@ -188,7 +189,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
settings {
delegateActions {
delegateBuildRunToGradle = false
testRunner = 'choose_per_test'
testRunner = delegateToGradle ? 'gradle' : 'choose_per_test'
}
taskTriggers {
afterSync tasks.named('configureIdeCheckstyle'),

View file

@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal;
import org.elasticsearch.gradle.internal.conventions.LicensingPlugin;
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
@ -59,9 +60,9 @@ public class BuildPlugin implements Plugin<Project> {
}
project.getPluginManager().apply("elasticsearch.java");
project.getPluginManager().apply("elasticsearch.publish");
project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
project.getPluginManager().apply(DependenciesInfoPlugin.class);
project.getPluginManager().apply(LicensingPlugin.class);
project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
InternalPrecommitTasks.create(project, true);

View file

@ -10,28 +10,31 @@
package org.elasticsearch.gradle.internal;
import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin;
import org.elasticsearch.gradle.internal.precommit.DependencyLicensesTask;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.attributes.Category;
import org.gradle.api.attributes.Usage;
import org.gradle.api.plugins.JavaPlugin;
public class DependenciesInfoPlugin implements Plugin<Project> {
public static String USAGE_ATTRIBUTE = "DependenciesInfo";
@Override
public void apply(final Project project) {
project.getPlugins().apply(CompileOnlyResolvePlugin.class);
var depsInfo = project.getTasks().register("dependenciesInfo", DependenciesInfoTask.class);
depsInfo.configure(t -> {
t.setRuntimeConfiguration(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
t.setCompileOnlyConfiguration(
project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME)
);
t.getConventionMapping().map("mappings", () -> {
var depLic = project.getTasks().named("dependencyLicenses", DependencyLicensesTask.class);
return depLic.get().getMappings();
});
var runtimeConfiguration = project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);
t.getRuntimeArtifacts().set(project.getProviders().provider(() -> runtimeConfiguration.getIncoming().getArtifacts()));
t.getClasspath().from(runtimeConfiguration);
var compileOnlyConfiguration = project.getConfigurations()
.getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME);
t.getCompileOnlyArtifacts().set(project.getProviders().provider(() -> compileOnlyConfiguration.getIncoming().getArtifacts()));
t.getClasspath().from(compileOnlyConfiguration);
});
Configuration dependenciesInfoFilesConfiguration = project.getConfigurations().create("dependenciesInfoFiles");
dependenciesInfoFilesConfiguration.setCanBeResolved(false);
@ -43,6 +46,9 @@ public class DependenciesInfoPlugin implements Plugin<Project> {
)
);
dependenciesInfoFilesConfiguration.attributes(
attributes -> attributes.attribute(Usage.USAGE_ATTRIBUTE, project.getObjects().named(Usage.class, USAGE_ATTRIBUTE))
);
project.getArtifacts().add("dependenciesInfoFiles", depsInfo);
}

View file

@ -11,19 +11,22 @@ package org.elasticsearch.gradle.internal;
import org.elasticsearch.gradle.internal.precommit.DependencyLicensesTask;
import org.elasticsearch.gradle.internal.precommit.LicenseAnalyzer;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.DependencySet;
import org.gradle.api.artifacts.ModuleVersionIdentifier;
import org.gradle.api.artifacts.ArtifactCollection;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.artifacts.component.ModuleComponentIdentifier;
import org.gradle.api.file.ConfigurableFileCollection;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.ProjectLayout;
import org.gradle.api.internal.ConventionTask;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.MapProperty;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.TaskAction;
@ -34,6 +37,7 @@ import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@ -51,7 +55,50 @@ import javax.inject.Inject;
* <li>license: <a href="https://spdx.org/licenses/">SPDX license</a> identifier, custom license or UNKNOWN.</li>
* </ul>
*/
public class DependenciesInfoTask extends ConventionTask {
public abstract class DependenciesInfoTask extends ConventionTask {
@Inject
public abstract ProviderFactory getProviderFactory();
/**
* We have to use ArtifactCollection instead of ResolvedArtifactResult here as we're running
* into an issue in Gradle: https://github.com/gradle/gradle/issues/27582
*/
@Internal
abstract Property<ArtifactCollection> getRuntimeArtifacts();
@Input
public Provider<Set<ModuleComponentIdentifier>> getRuntimeModules() {
return mapToModuleComponentIdentifiers(getRuntimeArtifacts().get());
}
@Internal
abstract Property<ArtifactCollection> getCompileOnlyArtifacts();
@Input
public Provider<Set<ModuleComponentIdentifier>> getCompileOnlyModules() {
return mapToModuleComponentIdentifiers(getCompileOnlyArtifacts().get());
}
/**
* We need to track file inputs here from the configurations we inspect to ensure we don't miss any
* artifact transforms that might be applied, and to avoid failures caused by a missing task
* dependency on jar-generating tasks.
* */
@InputFiles
abstract ConfigurableFileCollection getClasspath();
private Provider<Set<ModuleComponentIdentifier>> mapToModuleComponentIdentifiers(ArtifactCollection artifacts) {
return getProviderFactory().provider(
() -> artifacts.getArtifacts()
.stream()
.map(r -> r.getId())
.filter(id -> id instanceof ModuleComponentIdentifier)
.map(id -> (ModuleComponentIdentifier) id)
.collect(Collectors.toSet())
);
}
private final DirectoryProperty licensesDir;
@ -60,22 +107,6 @@ public class DependenciesInfoTask extends ConventionTask {
private LinkedHashMap<String, String> mappings;
public Configuration getRuntimeConfiguration() {
return runtimeConfiguration;
}
public void setRuntimeConfiguration(Configuration runtimeConfiguration) {
this.runtimeConfiguration = runtimeConfiguration;
}
public Configuration getCompileOnlyConfiguration() {
return compileOnlyConfiguration;
}
public void setCompileOnlyConfiguration(Configuration compileOnlyConfiguration) {
this.compileOnlyConfiguration = compileOnlyConfiguration;
}
/**
* Directory to read license files
*/
@ -102,17 +133,6 @@ public class DependenciesInfoTask extends ConventionTask {
this.outputFile = outputFile;
}
/**
* Dependencies to gather information from.
*/
@InputFiles
private Configuration runtimeConfiguration;
/**
* We subtract compile-only dependencies.
*/
@InputFiles
private Configuration compileOnlyConfiguration;
@Inject
public DependenciesInfoTask(ProjectLayout projectLayout, ObjectFactory objectFactory, ProviderFactory providerFactory) {
this.licensesDir = objectFactory.directoryProperty();
@ -123,22 +143,18 @@ public class DependenciesInfoTask extends ConventionTask {
@TaskAction
public void generateDependenciesInfo() throws IOException {
final DependencySet runtimeDependencies = runtimeConfiguration.getAllDependencies();
// we have to resolve the transitive dependencies and create a group:artifactId:version map
final Set<String> compileOnlyArtifacts = compileOnlyConfiguration.getResolvedConfiguration()
.getResolvedArtifacts()
.stream()
.map(r -> {
ModuleVersionIdentifier id = r.getModuleVersion().getId();
return id.getGroup() + ":" + id.getName() + ":" + id.getVersion();
})
.collect(Collectors.toSet());
final Set<String> compileOnlyIds = getCompileOnlyModules().map(
set -> set.stream()
.map(id -> id.getModuleIdentifier().getGroup() + ":" + id.getModuleIdentifier().getName() + ":" + id.getVersion())
.collect(Collectors.toSet())
).get();
final StringBuilder output = new StringBuilder();
for (final Dependency dep : runtimeDependencies) {
Map<String, String> mappings = getMappings().get();
for (final ModuleComponentIdentifier dep : getRuntimeModules().get()) {
// we do not need compile-only dependencies here
if (compileOnlyArtifacts.contains(dep.getGroup() + ":" + dep.getName() + ":" + dep.getVersion())) {
String moduleName = dep.getModuleIdentifier().getName();
if (compileOnlyIds.contains(dep.getGroup() + ":" + moduleName + ":" + dep.getVersion())) {
continue;
}
@ -147,25 +163,20 @@ public class DependenciesInfoTask extends ConventionTask {
continue;
}
final String url = createURL(dep.getGroup(), dep.getName(), dep.getVersion());
final String dependencyName = DependencyLicensesTask.getDependencyName(getMappings(), dep.getName());
getLogger().info("mapped dependency " + dep.getGroup() + ":" + dep.getName() + " to " + dependencyName + " for license info");
final String url = createURL(dep.getGroup(), moduleName, dep.getVersion());
final String dependencyName = DependencyLicensesTask.getDependencyName(mappings, moduleName);
getLogger().info("mapped dependency " + dep.getGroup() + ":" + moduleName + " to " + dependencyName + " for license info");
final String licenseType = getLicenseType(dep.getGroup(), dependencyName);
output.append(dep.getGroup() + ":" + dep.getName() + "," + dep.getVersion() + "," + url + "," + licenseType + "\n");
output.append(dep.getGroup() + ":" + moduleName + "," + dep.getVersion() + "," + url + "," + licenseType + "\n");
}
Files.writeString(outputFile.toPath(), output.toString(), StandardOpenOption.CREATE);
}
@Input
public LinkedHashMap<String, String> getMappings() {
return mappings;
}
public void setMappings(LinkedHashMap<String, String> mappings) {
this.mappings = mappings;
}
@Optional
public abstract MapProperty<String, String> getMappings();
/**
* Create an URL on <a href="https://repo1.maven.org/maven2/">Maven Central</a>

View file

@ -7,7 +7,7 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
package org.elasticsearch.gradle.internal.dependencies.patches;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
@ -16,7 +16,7 @@ public class MethodReplacement extends MethodVisitor {
private final MethodVisitor delegate;
private final Runnable bodyWriter;
MethodReplacement(MethodVisitor delegate, Runnable bodyWriter) {
public MethodReplacement(MethodVisitor delegate, Runnable bodyWriter) {
super(Opcodes.ASM9);
this.delegate = delegate;
this.bodyWriter = bodyWriter;

View file

@ -24,9 +24,11 @@ import java.util.Enumeration;
import java.util.HexFormat;
import java.util.Locale;
import java.util.function.Function;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.stream.Collectors;
import static org.objectweb.asm.ClassWriter.COMPUTE_FRAMES;
@ -60,6 +62,10 @@ public class Utils {
}
}
public static void patchJar(File inputJar, File outputJar, Collection<PatcherInfo> patchers) {
patchJar(inputJar, outputJar, patchers, false);
}
/**
* Patches the classes in the input JAR file, using the collection of patchers. Each patcher specifies a target class (its jar entry
* name) and the SHA256 digest on the class bytes.
@ -69,8 +75,11 @@ public class Utils {
* @param inputFile the JAR file to patch
* @param outputFile the output (patched) JAR file
* @param patchers list of patcher info (classes to patch (jar entry name + optional SHA256 digest) and ASM visitor to transform them)
* @param unsignJar whether to remove class signatures from the JAR Manifest; set this to true when patching a signed JAR,
* otherwise the patched classes will fail to load at runtime due to mismatched signatures.
* @see <a href="https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html">Understanding Signing and Verification</a>
*/
public static void patchJar(File inputFile, File outputFile, Collection<PatcherInfo> patchers) {
public static void patchJar(File inputFile, File outputFile, Collection<PatcherInfo> patchers, boolean unsignJar) {
var classPatchers = patchers.stream().collect(Collectors.toMap(PatcherInfo::jarEntryName, Function.identity()));
var mismatchedClasses = new ArrayList<MismatchInfo>();
try (JarFile jarFile = new JarFile(inputFile); JarOutputStream jos = new JarOutputStream(new FileOutputStream(outputFile))) {
@ -101,9 +110,23 @@ public class Utils {
);
}
} else {
// Read the entry's data and write it to the new JAR
try (InputStream is = jarFile.getInputStream(entry)) {
is.transferTo(jos);
if (unsignJar && entryName.equals("META-INF/MANIFEST.MF")) {
var manifest = new Manifest(is);
for (var manifestEntry : manifest.getEntries().entrySet()) {
var nonSignatureAttributes = new Attributes();
for (var attribute : manifestEntry.getValue().entrySet()) {
if (attribute.getKey().toString().endsWith("Digest") == false) {
nonSignatureAttributes.put(attribute.getKey(), attribute.getValue());
}
}
manifestEntry.setValue(nonSignatureAttributes);
}
manifest.write(jos);
} else if (unsignJar == false || entryName.matches("META-INF/.*\\.SF") == false) {
// Read the entry's data and write it to the new JAR
is.transferTo(jos);
}
}
}
jos.closeEntry();

View file

@ -0,0 +1,61 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.gradle.internal.dependencies.patches.azurecore;
import org.elasticsearch.gradle.internal.dependencies.patches.PatcherInfo;
import org.elasticsearch.gradle.internal.dependencies.patches.Utils;
import org.gradle.api.artifacts.transform.CacheableTransform;
import org.gradle.api.artifacts.transform.InputArtifact;
import org.gradle.api.artifacts.transform.TransformAction;
import org.gradle.api.artifacts.transform.TransformOutputs;
import org.gradle.api.artifacts.transform.TransformParameters;
import org.gradle.api.file.FileSystemLocation;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.Classpath;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.List;
import java.util.regex.Pattern;
import static org.elasticsearch.gradle.internal.dependencies.patches.PatcherInfo.classPatcher;
/**
 * Artifact transform that rewrites the {@code azure-core} jar, replacing selected
 * class files with patched versions. Artifacts whose file name does not match the
 * azure-core jar pattern are passed through unchanged.
 */
@CacheableTransform
public abstract class AzureCoreClassPatcher implements TransformAction<TransformParameters.None> {

    /** Regex matched against the artifact file name to select the jar to patch. */
    private static final String TARGET_JAR_PATTERN = "azure-core-[\\d.]*\\.jar";

    /** Patchers to apply; each pins the expected SHA-256 of the class bytes it rewrites. */
    private static final List<PatcherInfo> PATCHERS = List.of(
        classPatcher(
            "com/azure/core/implementation/ImplUtils.class",
            "7beda5bdff5ea460cfc08721a188cf07d16e0c987dae45401fca7abf4e6e6c0e",
            ImplUtilsPatcher::new
        )
    );

    /** The artifact (jar file) this transform is invoked on. */
    @Classpath
    @InputArtifact
    public abstract Provider<FileSystemLocation> getInputArtifact();

    @Override
    public void transform(@NotNull TransformOutputs outputs) {
        File artifact = getInputArtifact().get().getAsFile();
        String artifactName = artifact.getName();
        if (Pattern.matches(TARGET_JAR_PATTERN, artifactName) == false) {
            // Not the azure-core jar: forward the input artifact untouched.
            System.out.println("Skipping " + artifactName);
            outputs.file(getInputArtifact());
            return;
        }
        System.out.println("Patching " + artifactName);
        File patchedJar = outputs.file(artifactName.replace(".jar", "-patched.jar"));
        // `true` = unsign the jar: signature digests would no longer match the patched classes.
        Utils.patchJar(artifact, patchedJar, PATCHERS, true);
    }
}

View file

@ -0,0 +1,34 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.gradle.internal.dependencies.patches.azurecore;
import org.elasticsearch.gradle.internal.dependencies.patches.MethodReplacement;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
class ImplUtilsPatcher extends ClassVisitor {
ImplUtilsPatcher(ClassVisitor classVisitor) {
super(Opcodes.ASM9, classVisitor);
}
public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
// `addShutdownHook` invokes `java.lang.Runtime.addShutdownHook`, which is forbidden (i.e. it will throw an Entitlements error).
// We replace the method body here with `return null`.
if (name.equals("addShutdownHookSafely")) {
return new MethodReplacement(mv, () -> {
mv.visitInsn(Opcodes.ACONST_NULL);
mv.visitInsn(Opcodes.ARETURN);
});
}
return mv;
}
}

View file

@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
import org.elasticsearch.gradle.internal.dependencies.patches.MethodReplacement;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;

View file

@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
import org.elasticsearch.gradle.internal.dependencies.patches.MethodReplacement;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;

View file

@ -86,6 +86,7 @@
"Rollup",
"SQL",
"Search",
"Searchable Snapshots",
"Security",
"Snapshot/Restore",
"Stats",

View file

@ -61,3 +61,7 @@ org.apache.logging.log4j.message.ParameterizedMessage#<init>(java.lang.String, j
@defaultMessage Use WriteLoadForecaster#getForecastedWriteLoad instead
org.elasticsearch.cluster.metadata.IndexMetadata#getForecastedWriteLoad()
@defaultMessage Use org.elasticsearch.index.codec.vectors.OptimizedScalarQuantizer instead
org.apache.lucene.util.quantization.OptimizedScalarQuantizer#<init>(org.apache.lucene.index.VectorSimilarityFunction, float, int)
org.apache.lucene.util.quantization.OptimizedScalarQuantizer#<init>(org.apache.lucene.index.VectorSimilarityFunction)

View file

@ -160,7 +160,7 @@ org.elasticsearch.cluster.ClusterFeatures#clusterHasFeature(org.elasticsearch.cl
@defaultMessage Do not construct these records outside the source files they are declared in
org.elasticsearch.cluster.SnapshotsInProgress$ShardSnapshotStatus#<init>(java.lang.String, org.elasticsearch.cluster.SnapshotsInProgress$ShardState, org.elasticsearch.repositories.ShardGeneration, java.lang.String, org.elasticsearch.repositories.ShardSnapshotResult)
org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#<init>(java.lang.String, java.util.List, long, long, org.elasticsearch.cluster.SnapshotDeletionsInProgress$State, java.lang.String)
org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#<init>(org.elasticsearch.cluster.metadata.ProjectId, java.lang.String, java.util.List, long, long, org.elasticsearch.cluster.SnapshotDeletionsInProgress$State, java.lang.String)
@defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug
java.lang.Thread#<init>(java.lang.Runnable)

View file

@ -1 +1 @@
8.14.1
8.14.2

View file

@ -28,16 +28,17 @@ for(bundle in changelogBundles) {
def nonNotableHighlights = bundle.nonNotableHighlights
def unqualifiedVersion = bundle.unqualifiedVersion
def coming = !bundle.bundle.released
if (coming) {
print "\n"
print "```{applies_to}\n"
print "stack: coming ${version}\n"
print "```"
}
%>
## ${unqualifiedVersion} [elasticsearch-${versionForIds}-release-notes]
<%
if (coming) {
print "```{applies_to}\n"
print "stack: coming ${version}\n"
print "```"
print "\n"
}
if (!notableHighlights.isEmpty() || !nonNotableHighlights.isEmpty()) {
print "\n### Highlights [elasticsearch-${versionForIds}-highlights]\n"
}

View file

@ -20,10 +20,14 @@ import java.util.Objects;
import static org.elasticsearch.gradle.internal.release.GenerateReleaseNotesTask.getSortedBundlesWithUniqueChangelogs;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
public class ReleaseNotesGeneratorTest {
// Temporarily set this to `true` to regenerate test output files when they need to be updated
private final boolean UPDATE_EXPECTED_OUTPUT = false;
private static final List<String> CHANGE_TYPES = List.of(
"breaking",
"breaking-java",
@ -78,8 +82,9 @@ public class ReleaseNotesGeneratorTest {
public void testTemplate(String templateFilename, String outputFilename, List<ChangelogBundle> bundles) throws Exception {
// given:
final String outputFile = "/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest." + outputFilename;
final String template = getResource("/templates/" + templateFilename);
final String expectedOutput = getResource("/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest." + outputFilename);
final String expectedOutput = getResource(outputFile);
if (bundles == null) {
bundles = getBundles();
@ -91,7 +96,12 @@ public class ReleaseNotesGeneratorTest {
final String actualOutput = ReleaseNotesGenerator.generateFile(template, bundles);
// then:
assertThat(actualOutput, equalTo(expectedOutput));
if (UPDATE_EXPECTED_OUTPUT) {
writeResource(outputFile, actualOutput);
assertFalse("UPDATE_EXPECTED_OUTPUT should be set back to false after updating output", UPDATE_EXPECTED_OUTPUT);
} else {
assertThat(actualOutput, equalTo(expectedOutput));
}
}
private List<ChangelogBundle> getBundles() {
@ -176,4 +186,9 @@ public class ReleaseNotesGeneratorTest {
private String getResource(String name) throws Exception {
return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8);
}
private void writeResource(String name, String contents) throws Exception {
String path = "src/test/resources" + name;
Files.writeString(Paths.get(path), contents);
}
}

View file

@ -20,10 +20,10 @@ To check for security updates, go to [Security announcements for the Elastic sta
% ### Fixes [elasticsearch-next-fixes]
% *
## 9.1.0 [elasticsearch-9.1.0-release-notes]
```{applies_to}
stack: coming 9.1.0
```
## 9.1.0 [elasticsearch-9.1.0-release-notes]
### Highlights [elasticsearch-9.1.0-highlights]
@ -47,10 +47,10 @@ Search:
* [#52](https://github.com/elastic/elasticsearch/pull/52)
## 9.0.10 [elasticsearch-9.0.10-release-notes]
```{applies_to}
stack: coming 9.0.10
```
## 9.0.10 [elasticsearch-9.0.10-release-notes]
### Features and enhancements [elasticsearch-9.0.10-features-enhancements]

View file

@ -20,10 +20,10 @@ To check for security updates, go to [Security announcements for the Elastic sta
% ### Fixes [elasticsearch-next-fixes]
% *
## 9.0.10 [elasticsearch-9.0.10-release-notes]
```{applies_to}
stack: coming 9.0.10
```
## 9.0.10 [elasticsearch-9.0.10-release-notes]
### Features and enhancements [elasticsearch-9.0.10-features-enhancements]

View file

@ -1,5 +1,5 @@
elasticsearch = 9.1.0
lucene = 10.2.1
lucene = 10.2.2
bundled_jdk_vendor = openjdk
bundled_jdk = 24+36@1f9ff9062db4449d8ca828c504ffae90

View file

@ -10,6 +10,7 @@
package org.elasticsearch.gradle.fixtures
import org.apache.commons.io.FileUtils
import org.apache.commons.io.IOUtils
import org.elasticsearch.gradle.internal.test.BuildConfigurationAwareGradleRunner
import org.elasticsearch.gradle.internal.test.InternalAwareGradleRunner
import org.elasticsearch.gradle.internal.test.NormalizeOutputGradleRunner
@ -23,11 +24,14 @@ import spock.lang.Specification
import spock.lang.TempDir
import java.lang.management.ManagementFactory
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.io.File
import java.nio.file.Path
import java.util.jar.JarEntry
import java.util.jar.JarOutputStream
import java.util.zip.ZipEntry
import java.util.zip.ZipFile
import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString
@ -234,6 +238,64 @@ checkstyle = "com.puppycrawl.tools:checkstyle:10.3"
(it as TestResultExtension.ErrorListener).errorInfo != null }
}
ZipAssertion zip(String relativePath) {
File archiveFile = file(relativePath);
try (ZipFile zipFile = new ZipFile(archiveFile)) {
Map<String, ZipAssertionFile> files = zipFile.entries().collectEntries { ZipEntry entry ->
[(entry.name): new ZipAssertionFile(archiveFile, entry)]
}
return new ZipAssertion(files);
}
}
static class ZipAssertion {
private Map<String, ZipAssertionFile> files = new HashMap<>()
ZipAssertion(Map<String, ZipAssertionFile> files) {
this.files = files;
}
ZipAssertionFile file(String path) {
return this.files.get(path)
}
Collection<ZipAssertionFile> files() {
return files.values()
}
}
static class ZipAssertionFile {
private ZipEntry entry;
private File zipFile;
ZipAssertionFile(File zipFile, ZipEntry entry) {
this.entry = entry
this.zipFile = zipFile
}
boolean exists() {
entry == null
}
String getName() {
return entry.name
}
boolean isDirectory() {
return entry.isDirectory()
}
String read() {
try(ZipFile zipFile1 = new ZipFile(zipFile)) {
def inputStream = zipFile1.getInputStream(entry)
return IOUtils.toString(inputStream, StandardCharsets.UTF_8.name())
} catch (IOException e) {
throw new RuntimeException("Failed to read entry ${entry.name} from zip file ${zipFile.name}", e)
}
}
}
static class ProjectConfigurer {
private File projectDir

View file

@ -48,8 +48,34 @@ plugins {
id 'elasticsearch.internal-testclusters'
id 'elasticsearch.run'
id 'elasticsearch.run-ccs'
id 'elasticsearch.repositories'
id 'elasticsearch.release-tools'
id 'elasticsearch.versions'
id 'com.gradleup.nmcp.aggregation'
}
version = VersionProperties.elasticsearch
/**
* Here we package and aggregation zip file containing all maven artifacts we want to
* publish to maven central.
* The aggregation is done by picking all projects that have the elasticsearch.publish plugin applied,
* indicating the artifact is meant for beeing published to maven central.
* */
nmcpAggregation {
// this breaks project isolation but this is broken in elasticsearch build atm anyhow.
publishAllProjectsProbablyBreakingProjectIsolation()
}
tasks.named('zipAggregation').configure {
// put this in a place that works well with our DRA infrastructure
archiveFileName.unset();
archiveBaseName.set("elasticsearch-maven-aggregation")
archiveVersion.set(VersionProperties.elasticsearch)
destinationDirectory.set(layout.buildDirectory.dir("distributions"));
dependsOn gradle.includedBuild('build-tools').task(':zipElasticPublication')
from(zipTree(gradle.includedBuild('build-tools').task(':zipElasticPublication').resolveTask().archiveFile.get()))
}
/**
@ -295,7 +321,7 @@ allprojects {
tasks.register('resolveAllDependencies', ResolveAllDependencies) {
def ignoredPrefixes = [DistributionDownloadPlugin.ES_DISTRO_CONFIG_PREFIX, "jdbcDriver"]
configs = project.configurations.matching { config -> ignoredPrefixes.any { config.name.startsWith(it) } == false }
if(project.path == ':') {
if (project.path == ':') {
resolveJavaToolChain = true
// ensure we have best possible caching of bwc builds
@ -320,49 +346,50 @@ allprojects {
}
ext.withReleaseBuild = { Closure config ->
if(buildParams.snapshotBuild == false) {
if (buildParams.snapshotBuild == false) {
config.call()
}
}
def splitForCI = { proj, partString ->
proj.tasks.register("check$partString") {
dependsOn 'check'
withReleaseBuild {
dependsOn 'assemble'
}
}
proj.tasks.addRule("Pattern: v<BWC_VERSION>#bwcTest$partString") { name ->
if(name.endsWith("#bwcTest$partString")) {
proj.project.getTasks().register(name) {
task -> task.dependsOn(proj.tasks.named { tskName -> tskName == (name - partString) })
}
}
}
proj.tasks.register("bcUpgradeTest$partString") {
dependsOn tasks.matching { it.name == 'bcUpgradeTest' }
withReleaseBuild {
dependsOn 'assemble'
}
}
}
plugins.withId('lifecycle-base') {
if (project.path.startsWith(":x-pack:")) {
if (project.path.contains("security") || project.path.contains(":ml")) {
tasks.register('checkPart4') {
dependsOn 'check'
withReleaseBuild {
dependsOn 'assemble'
}
}
splitForCI(project, "Part4")
} else if (project.path == ":x-pack:plugin" || project.path.contains("ql") || project.path.contains("smoke-test")) {
tasks.register('checkPart3') {
dependsOn 'check'
withReleaseBuild {
dependsOn 'assemble'
}
}
splitForCI(project, "Part3")
} else if (project.path.contains("multi-node")) {
tasks.register('checkPart5') {
dependsOn 'check'
withReleaseBuild {
dependsOn 'assemble'
}
}
splitForCI(project, "Part5")
} else {
tasks.register('checkPart2') {
dependsOn 'check'
withReleaseBuild {
dependsOn 'assemble'
}
}
splitForCI(project, "Part2")
}
} else if(project.path.startsWith(":qa:")) {
splitForCI(project, "Part6")
} else {
tasks.register('checkPart1') {
dependsOn 'check'
withReleaseBuild {
dependsOn 'assemble'
}
}
splitForCI(project, "Part1")
}
tasks.register('functionalTests') {
dependsOn 'check'
@ -433,6 +460,10 @@ tasks.named("updateDaemonJvm") {
org.gradle.platform.Architecture.X86_64,
org.gradle.platform.OperatingSystem.WINDOWS
),
BuildPlatformFactory.of(
org.gradle.platform.Architecture.AARCH64,
org.gradle.platform.OperatingSystem.WINDOWS
),
// anyone still using x86 osx?
BuildPlatformFactory.of(
org.gradle.platform.Architecture.X86_64,

View file

@ -17,10 +17,6 @@ java {
group = "${group}.client.test"
// rest client sniffer is licenses under Apache 2.0
projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(layout.getSettingsDirectory().file('licenses/APACHE-LICENSE-2.0.txt').asFile)
dependencies {
api "org.apache.httpcomponents:httpcore:${versions.httpcore}"
api "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"

View file

@ -31,6 +31,9 @@ configurations {
attributes {
attribute(Category.CATEGORY_ATTRIBUTE, project.getObjects().named(Category.class, Category.DOCUMENTATION))
}
attributes {
attribute(Usage.USAGE_ATTRIBUTE, project.getObjects().named(Usage.class, DependenciesInfoPlugin.USAGE_ATTRIBUTE))
}
}
featuresMetadata {
attributes {

Some files were not shown because too many files have changed in this diff Show more