Mirror of https://github.com/elastic/logstash.git
Synced 2025-04-21 21:27:20 -04:00

Compare commits (257 commits)
1810 changed files with 21068 additions and 61100 deletions
@@ -35,48 +35,71 @@ steps:
      automatic:
        - limit: 3

-  - label: ":lab_coat: Integration Tests / part 1"
-    key: "integration-tests-part-1"
+  - label: ":lab_coat: Integration Tests / part 1-of-3"
+    key: "integration-tests-part-1-of-3"
    command: |
      set -euo pipefail

      source .buildkite/scripts/common/vm-agent.sh
-     ci/integration_tests.sh split 0
+     ci/integration_tests.sh split 0 3
    retry:
      automatic:
        - limit: 3

-  - label: ":lab_coat: Integration Tests / part 2"
-    key: "integration-tests-part-2"
+  - label: ":lab_coat: Integration Tests / part 2-of-3"
+    key: "integration-tests-part-2-of-3"
    command: |
      set -euo pipefail

      source .buildkite/scripts/common/vm-agent.sh
-     ci/integration_tests.sh split 1
+     ci/integration_tests.sh split 1 3
    retry:
      automatic:
        - limit: 3

-  - label: ":lab_coat: IT Persistent Queues / part 1"
-    key: "integration-tests-qa-part-1"
+  - label: ":lab_coat: Integration Tests / part 3-of-3"
+    key: "integration-tests-part-3-of-3"
+    command: |
+      set -euo pipefail
+
+      source .buildkite/scripts/common/vm-agent.sh
+      ci/integration_tests.sh split 2 3
+    retry:
+      automatic:
+        - limit: 3
+
+  - label: ":lab_coat: IT Persistent Queues / part 1-of-3"
+    key: "integration-tests-qa-part-1-of-3"
    command: |
      set -euo pipefail

      source .buildkite/scripts/common/vm-agent.sh
      export FEATURE_FLAG=persistent_queues
-     ci/integration_tests.sh split 0
+     ci/integration_tests.sh split 0 3
    retry:
      automatic:
        - limit: 3

-  - label: ":lab_coat: IT Persistent Queues / part 2"
-    key: "integration-tests-qa-part-2"
+  - label: ":lab_coat: IT Persistent Queues / part 2-of-3"
+    key: "integration-tests-qa-part-2-of-3"
    command: |
      set -euo pipefail

      source .buildkite/scripts/common/vm-agent.sh
      export FEATURE_FLAG=persistent_queues
-     ci/integration_tests.sh split 1
+     ci/integration_tests.sh split 1 3
    retry:
      automatic:
        - limit: 3
+
+  - label: ":lab_coat: IT Persistent Queues / part 3-of-3"
+    key: "integration-tests-qa-part-3-of-3"
+    command: |
+      set -euo pipefail
+
+      source .buildkite/scripts/common/vm-agent.sh
+      export FEATURE_FLAG=persistent_queues
+      ci/integration_tests.sh split 2 3
+    retry:
+      automatic:
+        - limit: 3
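The steps above now pass both a part index and a part count to `ci/integration_tests.sh split <index> <total>`. As a rough sketch only (not the actual script), a split of that shape could partition the spec files round-robin; the `qa/integration/specs` path and the selection rule below are assumptions:

```bash
#!/usr/bin/env bash
# Hypothetical illustration of a `split <index> <total>` partition; the real
# ci/integration_tests.sh may select its specs differently.
set -euo pipefail

index="${1:?0-based part index required}"   # e.g. 0, 1 or 2
total="${2:-3}"                             # total number of parts

# Deterministic ordering, then keep every Nth spec for this part (assumed layout).
mapfile -t specs < <(find qa/integration/specs -name '*_spec.rb' | sort)
for i in "${!specs[@]}"; do
  if (( i % total == index )); then
    echo "${specs[$i]}"    # the specs this part would run
  fi
done
```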
.buildkite/benchmark_marathon_pipeline.yml (new file, 11 lines)

@@ -0,0 +1,11 @@
agents:
  provider: gcp
  imageProject: elastic-images-prod
  image: family/platform-ingest-logstash-ubuntu-2204
  machineType: "n2-standard-16"
  diskSizeGb: 100
  diskType: pd-ssd

steps:
  - label: "Benchmark Marathon"
    command: .buildkite/scripts/benchmark/marathon.sh
@@ -5,7 +5,7 @@

env:
  DEFAULT_MATRIX_OS: "ubuntu-2204"
-  DEFAULT_MATRIX_JDK: "adoptiumjdk_17"
+  DEFAULT_MATRIX_JDK: "adoptiumjdk_21"

steps:
  - input: "Test Parameters"
@@ -60,20 +60,12 @@ steps:
          value: "adoptiumjdk_21"
        - label: "Adoptium JDK 17 (Eclipse Temurin)"
          value: "adoptiumjdk_17"
        - label: "Adoptium JDK 11 (Eclipse Temurin)"
          value: "adoptiumjdk_11"
        - label: "OpenJDK 21"
          value: "openjdk_21"
        - label: "OpenJDK 17"
          value: "openjdk_17"
        - label: "OpenJDK 11"
          value: "openjdk_11"
        - label: "Zulu 21"
          value: "zulu_21"
        - label: "Zulu 17"
          value: "zulu_17"
        - label: "Zulu 11"
          value: "zulu_11"

  - wait: ~
    if: build.source != "schedule" && build.source != "trigger_job"
@@ -14,7 +14,11 @@
  "skip_ci_labels": [ ],
  "skip_target_branches": [ ],
  "skip_ci_on_only_changed": [
-    "^docs/"
+    "^.github/",
+    "^docs/",
+    "^.mergify.yml$",
+    "^.pre-commit-config.yaml",
+    "\\.md$"
  ],
  "always_require_ci_on_changed": [ ]
}
@@ -22,10 +22,12 @@ steps:
  - label: ":rspec: Ruby unit tests"
    key: "ruby-unit-tests"
    agents:
-     image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci"
+     image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
      cpu: "4"
      memory: "8Gi"
      ephemeralStorage: "100Gi"
+     # Run as a non-root user
+     imageUID: "1002"
    retry:
      automatic:
        - limit: 3

@@ -79,8 +81,8 @@ steps:
    manual:
      allowed: true

-  - label: ":lab_coat: Integration Tests / part 1"
-    key: "integration-tests-part-1"
+  - label: ":lab_coat: Integration Tests / part 1-of-3"
+    key: "integration-tests-part-1-of-3"
    agents:
      image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
      cpu: "8"

@@ -95,10 +97,10 @@ steps:
      set -euo pipefail

      source .buildkite/scripts/common/container-agent.sh
-     ci/integration_tests.sh split 0
+     ci/integration_tests.sh split 0 3

-  - label: ":lab_coat: Integration Tests / part 2"
-    key: "integration-tests-part-2"
+  - label: ":lab_coat: Integration Tests / part 2-of-3"
+    key: "integration-tests-part-2-of-3"
    agents:
      image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
      cpu: "8"

@@ -113,10 +115,28 @@ steps:
      set -euo pipefail

      source .buildkite/scripts/common/container-agent.sh
-     ci/integration_tests.sh split 1
+     ci/integration_tests.sh split 1 3

-  - label: ":lab_coat: IT Persistent Queues / part 1"
-    key: "integration-tests-qa-part-1"
+  - label: ":lab_coat: Integration Tests / part 3-of-3"
+    key: "integration-tests-part-3-of-3"
+    agents:
+      image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
+      cpu: "8"
+      memory: "16Gi"
+      ephemeralStorage: "100Gi"
+      # Run as a non-root user
+      imageUID: "1002"
+    retry:
+      automatic:
+        - limit: 3
+    command: |
+      set -euo pipefail
+
+      source .buildkite/scripts/common/container-agent.sh
+      ci/integration_tests.sh split 2 3
+
+  - label: ":lab_coat: IT Persistent Queues / part 1-of-3"
+    key: "integration-tests-qa-part-1-of-3"
    agents:
      image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
      cpu: "8"

@@ -132,10 +152,10 @@ steps:

      source .buildkite/scripts/common/container-agent.sh
      export FEATURE_FLAG=persistent_queues
-     ci/integration_tests.sh split 0
+     ci/integration_tests.sh split 0 3

-  - label: ":lab_coat: IT Persistent Queues / part 2"
-    key: "integration-tests-qa-part-2"
+  - label: ":lab_coat: IT Persistent Queues / part 2-of-3"
+    key: "integration-tests-qa-part-2-of-3"
    agents:
      image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
      cpu: "8"

@@ -151,7 +171,26 @@ steps:

      source .buildkite/scripts/common/container-agent.sh
      export FEATURE_FLAG=persistent_queues
-     ci/integration_tests.sh split 1
+     ci/integration_tests.sh split 1 3
+
+  - label: ":lab_coat: IT Persistent Queues / part 3-of-3"
+    key: "integration-tests-qa-part-3-of-3"
+    agents:
+      image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci-no-root"
+      cpu: "8"
+      memory: "16Gi"
+      ephemeralStorage: "100Gi"
+      # Run as non root (logstash) user. UID is hardcoded in image.
+      imageUID: "1002"
+    retry:
+      automatic:
+        - limit: 3
+    command: |
+      set -euo pipefail
+
+      source .buildkite/scripts/common/container-agent.sh
+      export FEATURE_FLAG=persistent_queues
+      ci/integration_tests.sh split 2 3

  - label: ":lab_coat: x-pack unit tests"
    key: "x-pack-unit-tests"
.buildkite/scripts/benchmark/README.md (new file, 22 lines)

@@ -0,0 +1,22 @@
## Steps to set up a GCP instance to run the benchmark script
- Create an "n2-standard-16" instance with an Ubuntu image
- Install docker
  - `sudo snap install docker`
  - `sudo usermod -a -G docker $USER`
- Install jq
- Install vault
  - `sudo snap install vault`
  - `vault login --method github`
  - `vault kv get -format json secret/ci/elastic-logstash/benchmark`
- Set up the Elasticsearch index mapping and alias with `setup/*`
- Import the Kibana dashboards with `save-objects/*`
- Run the benchmark script (see the example below)
  - Send data to your own Elasticsearch. Customise `VAULT_PATH="secret/ci/elastic-logstash/your/path"`
  - Run the script `main.sh`
    - or run it in the background: `nohup bash -x main.sh > log.log 2>&1 &`

## Notes
- Benchmarks should only be compared using the same hardware setup.
- Please do not send test metrics to the benchmark cluster. You can set `VAULT_PATH` to send data and metrics to your own server.
- Run `all.sh` as a calibration run; it gives you a performance baseline across versions.
- [#16586](https://github.com/elastic/logstash/pull/16586) allows legacy monitoring using the configuration `xpack.monitoring.allow_legacy_collection: true`, which is not recognized in version 8. To run benchmarks in version 8, use the script of the corresponding branch (e.g. `8.16`) instead of `main` in buildkite.
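A minimal invocation sketch based on the usage notes in `main.sh` and `core.sh`; the vault path and the sweep values are placeholders you would replace with your own:

```bash
# Run a benchmark from the GCP instance described above and send results to your own cluster.
export VAULT_PATH="secret/ci/elastic-logstash/your/path"   # your own credentials path (placeholder)
export MULTIPLIERS="1,2,4"    # worker multipliers to sweep (core.sh default)
export BATCH_SIZES="500"      # pipeline.batch.size values to sweep (core.sh default)

# main.sh [FB_CNT] [QTYPE] [CPU] [MEM] — e.g. 4 filebeats, memory queue, 2 CPUs, 2 GB for the Logstash container
nohup bash -x main.sh 4 memory 2 2 > log.log 2>&1 &
tail -f log.log
```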
@@ -3,6 +3,7 @@ pipeline.workers: ${WORKER}
pipeline.batch.size: ${BATCH_SIZE}
queue.type: ${QTYPE}

+xpack.monitoring.allow_legacy_collection: true
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username: ${MONITOR_ES_USER}
xpack.monitoring.elasticsearch.password: ${MONITOR_ES_PW}
.buildkite/scripts/benchmark/config/uuid (new file, 1 line)

@@ -0,0 +1 @@
f74f1a28-25e9-494f-ba41-ca9f13d4446d
.buildkite/scripts/benchmark/core.sh (new executable file, 315 lines)

@@ -0,0 +1,315 @@
|
|||
#!/usr/bin/env bash
|
||||
set -eo pipefail
|
||||
|
||||
SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
|
||||
CONFIG_PATH="$SCRIPT_PATH/config"
|
||||
source "$SCRIPT_PATH/util.sh"
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 [FB_CNT] [QTYPE] [CPU] [MEM]"
|
||||
echo "Example: $0 4 {persisted|memory|all} 2 2"
|
||||
exit 1
|
||||
}
|
||||
|
||||
parse_args() {
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
if [ -z "$FB_CNT" ]; then
|
||||
FB_CNT=$1
|
||||
elif [ -z "$QTYPE" ]; then
|
||||
case $1 in
|
||||
all | persisted | memory)
|
||||
QTYPE=$1
|
||||
;;
|
||||
*)
|
||||
echo "Error: wrong queue type $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
elif [ -z "$CPU" ]; then
|
||||
CPU=$1
|
||||
elif [ -z "$MEM" ]; then
|
||||
MEM=$1
|
||||
else
|
||||
echo "Error: Too many arguments"
|
||||
usage
|
||||
fi
|
||||
shift
|
||||
done
|
||||
|
||||
# set default value
|
||||
# number of filebeat
|
||||
FB_CNT=${FB_CNT:-4}
|
||||
# all | persisted | memory
|
||||
QTYPE=${QTYPE:-all}
|
||||
CPU=${CPU:-4}
|
||||
MEM=${MEM:-4}
|
||||
XMX=$((MEM / 2))
|
||||
|
||||
IFS=','
|
||||
# worker multiplier: 1,2,4
|
||||
MULTIPLIERS="${MULTIPLIERS:-1,2,4}"
|
||||
read -ra MULTIPLIERS <<< "$MULTIPLIERS"
|
||||
BATCH_SIZES="${BATCH_SIZES:-500}"
|
||||
read -ra BATCH_SIZES <<< "$BATCH_SIZES"
|
||||
# tags to json array
|
||||
read -ra TAG_ARRAY <<< "$TAGS"
|
||||
JSON_TAGS=$(printf '"%s",' "${TAG_ARRAY[@]}" | sed 's/,$//')
|
||||
JSON_TAGS="[$JSON_TAGS]"
|
||||
|
||||
IFS=' '
|
||||
echo "filebeats: $FB_CNT, cpu: $CPU, mem: $MEM, Queue: $QTYPE, worker multiplier: ${MULTIPLIERS[@]}, batch size: ${BATCH_SIZES[@]}"
|
||||
}
|
||||
|
||||
get_secret() {
|
||||
VAULT_PATH=${VAULT_PATH:-secret/ci/elastic-logstash/benchmark}
|
||||
VAULT_DATA=$(vault kv get -format json $VAULT_PATH)
|
||||
BENCHMARK_ES_HOST=$(echo $VAULT_DATA | jq -r '.data.es_host')
|
||||
BENCHMARK_ES_USER=$(echo $VAULT_DATA | jq -r '.data.es_user')
|
||||
BENCHMARK_ES_PW=$(echo $VAULT_DATA | jq -r '.data.es_pw')
|
||||
|
||||
MONITOR_ES_HOST=$(echo $VAULT_DATA | jq -r '.data.monitor_es_host')
|
||||
MONITOR_ES_USER=$(echo $VAULT_DATA | jq -r '.data.monitor_es_user')
|
||||
MONITOR_ES_PW=$(echo $VAULT_DATA | jq -r '.data.monitor_es_pw')
|
||||
}
|
||||
|
||||
pull_images() {
|
||||
echo "--- Pull docker images"
|
||||
|
||||
if [[ -n "$LS_VERSION" ]]; then
|
||||
# pull image if it doesn't exist in local
|
||||
[[ -z $(docker images -q docker.elastic.co/logstash/logstash:$LS_VERSION) ]] && docker pull "docker.elastic.co/logstash/logstash:$LS_VERSION"
|
||||
else
|
||||
# pull the latest snapshot logstash image
|
||||
# select the SNAPSHOT artifact with the highest semantic version number
|
||||
LS_VERSION=$( curl --retry-all-errors --retry 5 --retry-delay 1 -s "https://storage.googleapis.com/artifacts-api/snapshots/main.json" | jq -r '.version' )
|
||||
BUILD_ID=$(curl --retry-all-errors --retry 5 --retry-delay 1 -s "https://storage.googleapis.com/artifacts-api/snapshots/main.json" | jq -r '.build_id')
|
||||
ARCH=$(arch)
|
||||
IMAGE_URL="https://snapshots.elastic.co/${BUILD_ID}/downloads/logstash/logstash-$LS_VERSION-docker-image-$ARCH.tar.gz"
|
||||
IMAGE_FILENAME="$LS_VERSION.tar.gz"
|
||||
|
||||
echo "Download $LS_VERSION from $IMAGE_URL"
|
||||
[[ ! -e $IMAGE_FILENAME ]] && curl -fsSL --retry-max-time 60 --retry 3 --retry-delay 5 -o "$IMAGE_FILENAME" "$IMAGE_URL"
|
||||
[[ -z $(docker images -q docker.elastic.co/logstash/logstash:$LS_VERSION) ]] && docker load -i "$IMAGE_FILENAME"
|
||||
fi
|
||||
|
||||
# pull filebeat image
|
||||
FB_DEFAULT_VERSION="8.13.4"
|
||||
FB_VERSION=${FB_VERSION:-$FB_DEFAULT_VERSION}
|
||||
docker pull "docker.elastic.co/beats/filebeat:$FB_VERSION"
|
||||
}
|
||||
|
||||
generate_logs() {
|
||||
FLOG_FILE_CNT=${FLOG_FILE_CNT:-4}
|
||||
SINGLE_SIZE=524288000
|
||||
TOTAL_SIZE="$((FLOG_FILE_CNT * SINGLE_SIZE))"
|
||||
FLOG_PATH="$SCRIPT_PATH/flog"
|
||||
mkdir -p $FLOG_PATH
|
||||
|
||||
if [[ ! -e "$FLOG_PATH/log${FLOG_FILE_CNT}.log" ]]; then
|
||||
echo "--- Generate logs in background. log: ${FLOG_FILE_CNT}, each size: 500mb"
|
||||
docker run -d --name=flog --rm -v $FLOG_PATH:/go/src/data mingrammer/flog -t log -w -o "/go/src/data/log.log" -b $TOTAL_SIZE -p $SINGLE_SIZE
|
||||
fi
|
||||
}
|
||||
|
||||
check_logs() {
|
||||
echo "--- Check log generation"
|
||||
|
||||
local cnt=0
|
||||
until [[ -e "$FLOG_PATH/log${FLOG_FILE_CNT}.log" || $cnt -gt 600 ]]; do
|
||||
echo "wait 30s" && sleep 30
|
||||
cnt=$((cnt + 30))
|
||||
done
|
||||
|
||||
ls -lah $FLOG_PATH
|
||||
}
|
||||
|
||||
start_logstash() {
|
||||
LS_CONFIG_PATH=$SCRIPT_PATH/ls/config
|
||||
mkdir -p $LS_CONFIG_PATH
|
||||
|
||||
cp $CONFIG_PATH/pipelines.yml $LS_CONFIG_PATH/pipelines.yml
|
||||
cp $CONFIG_PATH/logstash.yml $LS_CONFIG_PATH/logstash.yml
|
||||
cp $CONFIG_PATH/uuid $LS_CONFIG_PATH/uuid
|
||||
|
||||
LS_JAVA_OPTS=${LS_JAVA_OPTS:--Xmx${XMX}g}
|
||||
docker run -d --name=ls --net=host --cpus=$CPU --memory=${MEM}g -e LS_JAVA_OPTS="$LS_JAVA_OPTS" \
|
||||
-e QTYPE="$QTYPE" -e WORKER="$WORKER" -e BATCH_SIZE="$BATCH_SIZE" \
|
||||
-e BENCHMARK_ES_HOST="$BENCHMARK_ES_HOST" -e BENCHMARK_ES_USER="$BENCHMARK_ES_USER" -e BENCHMARK_ES_PW="$BENCHMARK_ES_PW" \
|
||||
-e MONITOR_ES_HOST="$MONITOR_ES_HOST" -e MONITOR_ES_USER="$MONITOR_ES_USER" -e MONITOR_ES_PW="$MONITOR_ES_PW" \
|
||||
-v $LS_CONFIG_PATH/logstash.yml:/usr/share/logstash/config/logstash.yml:ro \
|
||||
-v $LS_CONFIG_PATH/pipelines.yml:/usr/share/logstash/config/pipelines.yml:ro \
|
||||
-v $LS_CONFIG_PATH/uuid:/usr/share/logstash/data/uuid:ro \
|
||||
docker.elastic.co/logstash/logstash:$LS_VERSION
|
||||
}
|
||||
|
||||
start_filebeat() {
|
||||
for ((i = 0; i < FB_CNT; i++)); do
|
||||
FB_PATH="$SCRIPT_PATH/fb${i}"
|
||||
mkdir -p $FB_PATH
|
||||
|
||||
cp $CONFIG_PATH/filebeat.yml $FB_PATH/filebeat.yml
|
||||
|
||||
docker run -d --name=fb$i --net=host --user=root \
|
||||
-v $FB_PATH/filebeat.yml:/usr/share/filebeat/filebeat.yml \
|
||||
-v $SCRIPT_PATH/flog:/usr/share/filebeat/flog \
|
||||
docker.elastic.co/beats/filebeat:$FB_VERSION filebeat -e --strict.perms=false
|
||||
done
|
||||
}
|
||||
|
||||
capture_stats() {
|
||||
CURRENT=$(jq -r '.flow.output_throughput.current' $NS_JSON)
|
||||
local eps_1m=$(jq -r '.flow.output_throughput.last_1_minute' $NS_JSON)
|
||||
local eps_5m=$(jq -r '.flow.output_throughput.last_5_minutes' $NS_JSON)
|
||||
local worker_util=$(jq -r '.pipelines.main.flow.worker_utilization.last_1_minute' $NS_JSON)
|
||||
local worker_concurr=$(jq -r '.pipelines.main.flow.worker_concurrency.last_1_minute' $NS_JSON)
|
||||
local cpu_percent=$(jq -r '.process.cpu.percent' $NS_JSON)
|
||||
local heap=$(jq -r '.jvm.mem.heap_used_in_bytes' $NS_JSON)
|
||||
local non_heap=$(jq -r '.jvm.mem.non_heap_used_in_bytes' $NS_JSON)
|
||||
local q_event_cnt=$(jq -r '.pipelines.main.queue.events_count' $NS_JSON)
|
||||
local q_size=$(jq -r '.pipelines.main.queue.queue_size_in_bytes' $NS_JSON)
|
||||
TOTAL_EVENTS_OUT=$(jq -r '.pipelines.main.events.out' $NS_JSON)
|
||||
printf "current: %s, 1m: %s, 5m: %s, worker_utilization: %s, worker_concurrency: %s, cpu: %s, heap: %s, non-heap: %s, q_events: %s, q_size: %s, total_events_out: %s\n" \
|
||||
$CURRENT $eps_1m $eps_5m $worker_util $worker_concurr $cpu_percent $heap $non_heap $q_event_cnt $q_size $TOTAL_EVENTS_OUT
|
||||
}
|
||||
|
||||
aggregate_stats() {
|
||||
local file_glob="$SCRIPT_PATH/$NS_DIR/${QTYPE:0:1}_w${WORKER}b${BATCH_SIZE}_*.json"
|
||||
MAX_EPS_1M=$( jqmax '.flow.output_throughput.last_1_minute' "$file_glob" )
|
||||
MAX_EPS_5M=$( jqmax '.flow.output_throughput.last_5_minutes' "$file_glob" )
|
||||
MAX_WORKER_UTIL=$( jqmax '.pipelines.main.flow.worker_utilization.last_1_minute' "$file_glob" )
|
||||
MAX_WORKER_CONCURR=$( jqmax '.pipelines.main.flow.worker_concurrency.last_1_minute' "$file_glob" )
|
||||
MAX_Q_EVENT_CNT=$( jqmax '.pipelines.main.queue.events_count' "$file_glob" )
|
||||
MAX_Q_SIZE=$( jqmax '.pipelines.main.queue.queue_size_in_bytes' "$file_glob" )
|
||||
|
||||
AVG_CPU_PERCENT=$( jqavg '.process.cpu.percent' "$file_glob" )
|
||||
AVG_VIRTUAL_MEM=$( jqavg '.process.mem.total_virtual_in_bytes' "$file_glob" )
|
||||
AVG_HEAP=$( jqavg '.jvm.mem.heap_used_in_bytes' "$file_glob" )
|
||||
AVG_NON_HEAP=$( jqavg '.jvm.mem.non_heap_used_in_bytes' "$file_glob" )
|
||||
}
|
||||
|
||||
send_summary() {
|
||||
echo "--- Send summary to Elasticsearch"
|
||||
|
||||
# build json
|
||||
local timestamp
|
||||
timestamp=$(date -u +"%Y-%m-%dT%H:%M:%S")
|
||||
SUMMARY="{\"timestamp\": \"$timestamp\", \"version\": \"$LS_VERSION\", \"cpu\": \"$CPU\", \"mem\": \"$MEM\", \"workers\": \"$WORKER\", \"batch_size\": \"$BATCH_SIZE\", \"queue_type\": \"$QTYPE\""
|
||||
not_empty "$TOTAL_EVENTS_OUT" && SUMMARY="$SUMMARY, \"total_events_out\": \"$TOTAL_EVENTS_OUT\""
|
||||
not_empty "$MAX_EPS_1M" && SUMMARY="$SUMMARY, \"max_eps_1m\": \"$MAX_EPS_1M\""
|
||||
not_empty "$MAX_EPS_5M" && SUMMARY="$SUMMARY, \"max_eps_5m\": \"$MAX_EPS_5M\""
|
||||
not_empty "$MAX_WORKER_UTIL" && SUMMARY="$SUMMARY, \"max_worker_utilization\": \"$MAX_WORKER_UTIL\""
|
||||
not_empty "$MAX_WORKER_CONCURR" && SUMMARY="$SUMMARY, \"max_worker_concurrency\": \"$MAX_WORKER_CONCURR\""
|
||||
not_empty "$AVG_CPU_PERCENT" && SUMMARY="$SUMMARY, \"avg_cpu_percentage\": \"$AVG_CPU_PERCENT\""
|
||||
not_empty "$AVG_HEAP" && SUMMARY="$SUMMARY, \"avg_heap\": \"$AVG_HEAP\""
|
||||
not_empty "$AVG_NON_HEAP" && SUMMARY="$SUMMARY, \"avg_non_heap\": \"$AVG_NON_HEAP\""
|
||||
not_empty "$AVG_VIRTUAL_MEM" && SUMMARY="$SUMMARY, \"avg_virtual_memory\": \"$AVG_VIRTUAL_MEM\""
|
||||
not_empty "$MAX_Q_EVENT_CNT" && SUMMARY="$SUMMARY, \"max_queue_events\": \"$MAX_Q_EVENT_CNT\""
|
||||
not_empty "$MAX_Q_SIZE" && SUMMARY="$SUMMARY, \"max_queue_bytes_size\": \"$MAX_Q_SIZE\""
|
||||
not_empty "$TAGS" && SUMMARY="$SUMMARY, \"tags\": $JSON_TAGS"
|
||||
SUMMARY="$SUMMARY}"
|
||||
|
||||
tee summary.json << EOF
|
||||
{"index": {}}
|
||||
$SUMMARY
|
||||
EOF
|
||||
|
||||
# send to ES
|
||||
local resp
|
||||
local err_status
|
||||
resp=$(curl -s -X POST -u "$BENCHMARK_ES_USER:$BENCHMARK_ES_PW" "$BENCHMARK_ES_HOST/benchmark_summary/_bulk" -H 'Content-Type: application/json' --data-binary @"summary.json")
|
||||
echo "$resp"
|
||||
err_status=$(echo "$resp" | jq -r ".errors")
|
||||
if [[ "$err_status" == "true" ]]; then
|
||||
echo "Failed to send summary"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# $1: snapshot index
|
||||
node_stats() {
|
||||
NS_JSON="$SCRIPT_PATH/$NS_DIR/${QTYPE:0:1}_w${WORKER}b${BATCH_SIZE}_$1.json" # m_w8b1000_0.json
|
||||
|
||||
# curl inside container because docker on mac cannot resolve localhost to host network interface
|
||||
docker exec -i ls curl localhost:9600/_node/stats > "$NS_JSON" 2> /dev/null
|
||||
}
|
||||
|
||||
# $1: index
|
||||
snapshot() {
|
||||
node_stats $1
|
||||
capture_stats
|
||||
}
|
||||
|
||||
create_directory() {
|
||||
NS_DIR="fb${FB_CNT}c${CPU}m${MEM}" # fb4c4m4
|
||||
mkdir -p "$SCRIPT_PATH/$NS_DIR"
|
||||
}
|
||||
|
||||
queue() {
|
||||
for QTYPE in "persisted" "memory"; do
|
||||
worker
|
||||
done
|
||||
}
|
||||
|
||||
worker() {
|
||||
for m in "${MULTIPLIERS[@]}"; do
|
||||
WORKER=$((CPU * m))
|
||||
batch
|
||||
done
|
||||
}
|
||||
|
||||
batch() {
|
||||
for BATCH_SIZE in "${BATCH_SIZES[@]}"; do
|
||||
run_pipeline
|
||||
stop_pipeline
|
||||
done
|
||||
}
|
||||
|
||||
run_pipeline() {
|
||||
echo "--- Run pipeline. queue type: $QTYPE, worker: $WORKER, batch size: $BATCH_SIZE"
|
||||
|
||||
start_logstash
|
||||
start_filebeat
|
||||
docker ps
|
||||
|
||||
echo "(0) sleep 3m" && sleep 180
|
||||
snapshot "0"
|
||||
|
||||
for i in {1..8}; do
|
||||
echo "($i) sleep 30s" && sleep 30
|
||||
snapshot "$i"
|
||||
|
||||
# print docker log when ingestion rate is zero
|
||||
# remove '.' in number and return max val
|
||||
[[ $(max -g "${CURRENT/./}" "0") -eq 0 ]] &&
|
||||
docker logs fb0 &&
|
||||
docker logs ls
|
||||
done
|
||||
|
||||
aggregate_stats
|
||||
send_summary
|
||||
}
|
||||
|
||||
stop_pipeline() {
|
||||
echo "--- Stop Pipeline"
|
||||
|
||||
for ((i = 0; i < FB_CNT; i++)); do
|
||||
docker stop fb$i
|
||||
docker rm fb$i
|
||||
done
|
||||
|
||||
docker stop ls
|
||||
docker rm ls
|
||||
|
||||
curl -u "$BENCHMARK_ES_USER:$BENCHMARK_ES_PW" -X DELETE $BENCHMARK_ES_HOST/_data_stream/logs-generic-default
|
||||
echo " data stream deleted "
|
||||
|
||||
# TODO: clean page caches, reduce memory fragmentation
|
||||
# https://github.com/elastic/logstash/pull/16191#discussion_r1647050216
|
||||
}
|
||||
|
||||
clean_up() {
|
||||
# stop log generation if it has not done yet
|
||||
[[ -n $(docker ps | grep flog) ]] && docker stop flog || true
|
||||
# remove image
|
||||
docker image rm docker.elastic.co/logstash/logstash:$LS_VERSION
|
||||
}
|
|
@@ -15,9 +15,8 @@ set -eo pipefail
# - The script sends a summary of EPS and resource usage to index `benchmark_summary`
# *******************************************************

-SCRIPT_PATH="$(cd "$(dirname "$0")"; pwd)"
-CONFIG_PATH="$SCRIPT_PATH/config"
-source "$SCRIPT_PATH/util.sh"
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+source "$SCRIPT_PATH/core.sh"

## usage:
##   main.sh FB_CNT QTYPE CPU MEM
@ -36,272 +35,9 @@ source "$SCRIPT_PATH/util.sh"
|
|||
## MEM=4 # number of GB for Logstash container
|
||||
## QTYPE=memory # queue type to test {persisted|memory|all}
|
||||
## FB_CNT=4 # number of filebeats to use in benchmark
|
||||
usage() {
|
||||
echo "Usage: $0 [FB_CNT] [QTYPE] [CPU] [MEM]"
|
||||
echo "Example: $0 4 {persisted|memory|all} 2 2"
|
||||
exit 1
|
||||
}
|
||||
|
||||
parse_args() {
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
if [ -z "$FB_CNT" ]; then
|
||||
FB_CNT=$1
|
||||
elif [ -z "$QTYPE" ]; then
|
||||
case $1 in
|
||||
all | persisted | memory)
|
||||
QTYPE=$1
|
||||
;;
|
||||
*)
|
||||
echo "Error: wrong queue type $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
elif [ -z "$CPU" ]; then
|
||||
CPU=$1
|
||||
elif [ -z "$MEM" ]; then
|
||||
MEM=$1
|
||||
else
|
||||
echo "Error: Too many arguments"
|
||||
usage
|
||||
fi
|
||||
shift
|
||||
done
|
||||
|
||||
# set default value
|
||||
# number of filebeat
|
||||
FB_CNT=${FB_CNT:-4}
|
||||
# all | persisted | memory
|
||||
QTYPE=${QTYPE:-all}
|
||||
CPU=${CPU:-4}
|
||||
MEM=${MEM:-4}
|
||||
XMX=$((MEM / 2))
|
||||
|
||||
IFS=','
|
||||
# worker multiplier: 1,2,4
|
||||
MULTIPLIERS="${MULTIPLIERS:-1,2,4}"
|
||||
read -ra MULTIPLIERS <<< "$MULTIPLIERS"
|
||||
BATCH_SIZES="${BATCH_SIZES:-500}"
|
||||
read -ra BATCH_SIZES <<< "$BATCH_SIZES"
|
||||
|
||||
IFS=' '
|
||||
echo "filebeats: $FB_CNT, cpu: $CPU, mem: $MEM, Queue: $QTYPE, worker multiplier: ${MULTIPLIERS[@]}, batch size: ${BATCH_SIZES[@]}"
|
||||
}
|
||||
|
||||
get_secret() {
|
||||
VAULT_PATH=secret/ci/elastic-logstash/benchmark
|
||||
VAULT_DATA=$(vault kv get -format json $VAULT_PATH)
|
||||
BENCHMARK_ES_HOST=$(echo $VAULT_DATA | jq -r '.data.es_host')
|
||||
BENCHMARK_ES_USER=$(echo $VAULT_DATA | jq -r '.data.es_user')
|
||||
BENCHMARK_ES_PW=$(echo $VAULT_DATA | jq -r '.data.es_pw')
|
||||
|
||||
MONITOR_ES_HOST=$(echo $VAULT_DATA | jq -r '.data.monitor_es_host')
|
||||
MONITOR_ES_USER=$(echo $VAULT_DATA | jq -r '.data.monitor_es_user')
|
||||
MONITOR_ES_PW=$(echo $VAULT_DATA | jq -r '.data.monitor_es_pw')
|
||||
}
|
||||
|
||||
pull_images() {
|
||||
echo "--- Pull docker images"
|
||||
|
||||
# pull the latest snapshot logstash image
|
||||
if [[ -n "$LS_VERSION" ]]; then
|
||||
docker pull "docker.elastic.co/logstash/logstash:$LS_VERSION"
|
||||
else
|
||||
# select the SNAPSHOT artifact with the highest semantic version number
|
||||
LS_VERSION=$( curl --retry-all-errors --retry 5 --retry-delay 1 -s https://artifacts-api.elastic.co/v1/versions | jq -r '.versions | map(select(endswith("-SNAPSHOT"))) | max_by(rtrimstr("-SNAPSHOT")|split(".")|map(tonumber))' )
|
||||
BUILD_ID=$(curl --retry-all-errors --retry 5 --retry-delay 1 -s "https://artifacts-api.elastic.co/v1/versions/${LS_VERSION}/builds/latest" | jq -re '.build.build_id')
|
||||
ARCH=$(arch)
|
||||
IMAGE_URL="https://snapshots.elastic.co/${BUILD_ID}/downloads/logstash/logstash-$LS_VERSION-docker-image-$ARCH.tar.gz"
|
||||
IMAGE_FILENAME="$LS_VERSION.tar.gz"
|
||||
|
||||
echo "Download $LS_VERSION from $IMAGE_URL"
|
||||
[[ ! -e $IMAGE_FILENAME ]] && curl -fsSL --retry-max-time 60 --retry 3 --retry-delay 5 -o "$IMAGE_FILENAME" "$IMAGE_URL"
|
||||
[[ -z $(docker images -q docker.elastic.co/logstash/logstash:$LS_VERSION) ]] && docker load -i "$IMAGE_FILENAME"
|
||||
fi
|
||||
|
||||
# pull filebeat image
|
||||
FB_DEFAULT_VERSION="8.13.4"
|
||||
FB_VERSION=${FB_VERSION:-$FB_DEFAULT_VERSION}
|
||||
docker pull "docker.elastic.co/beats/filebeat:$FB_VERSION"
|
||||
}
|
||||
|
||||
generate_logs() {
|
||||
FLOG_PATH="$SCRIPT_PATH/flog"
|
||||
mkdir -p $FLOG_PATH
|
||||
|
||||
if [[ ! -e "$FLOG_PATH/log4.log" ]]; then
|
||||
echo "--- Generate logs in background. log: 5, size: 500mb"
|
||||
docker run -d --name=flog --rm -v $FLOG_PATH:/go/src/data mingrammer/flog -t log -w -o "/go/src/data/log.log" -b 2621440000 -p 524288000
|
||||
fi
|
||||
}
|
||||
|
||||
check_logs() {
|
||||
echo "--- Check log generation"
|
||||
|
||||
local cnt=0
|
||||
until [[ -e "$FLOG_PATH/log4.log" || $cnt -gt 600 ]]; do
|
||||
echo "wait 30s" && sleep 30
|
||||
cnt=$((cnt + 30))
|
||||
done
|
||||
|
||||
ls -lah $FLOG_PATH
|
||||
}
|
||||
|
||||
start_logstash() {
|
||||
LS_CONFIG_PATH=$SCRIPT_PATH/ls/config
|
||||
mkdir -p $LS_CONFIG_PATH
|
||||
|
||||
cp $CONFIG_PATH/pipelines.yml $LS_CONFIG_PATH/pipelines.yml
|
||||
cp $CONFIG_PATH/logstash.yml $LS_CONFIG_PATH/logstash.yml
|
||||
|
||||
LS_JAVA_OPTS=${LS_JAVA_OPTS:--Xmx${XMX}g}
|
||||
docker run -d --name=ls --net=host --cpus=$CPU --memory=${MEM}g -e LS_JAVA_OPTS="$LS_JAVA_OPTS" \
|
||||
-e QTYPE="$QTYPE" -e WORKER="$WORKER" -e BATCH_SIZE="$BATCH_SIZE" \
|
||||
-e BENCHMARK_ES_HOST="$BENCHMARK_ES_HOST" -e BENCHMARK_ES_USER="$BENCHMARK_ES_USER" -e BENCHMARK_ES_PW="$BENCHMARK_ES_PW" \
|
||||
-e MONITOR_ES_HOST="$MONITOR_ES_HOST" -e MONITOR_ES_USER="$MONITOR_ES_USER" -e MONITOR_ES_PW="$MONITOR_ES_PW" \
|
||||
-v $LS_CONFIG_PATH/logstash.yml:/usr/share/logstash/config/logstash.yml:ro \
|
||||
-v $LS_CONFIG_PATH/pipelines.yml:/usr/share/logstash/config/pipelines.yml:ro \
|
||||
docker.elastic.co/logstash/logstash:$LS_VERSION
|
||||
}
|
||||
|
||||
start_filebeat() {
|
||||
for ((i = 0; i < FB_CNT; i++)); do
|
||||
FB_PATH="$SCRIPT_PATH/fb${i}"
|
||||
mkdir -p $FB_PATH
|
||||
|
||||
cp $CONFIG_PATH/filebeat.yml $FB_PATH/filebeat.yml
|
||||
|
||||
docker run -d --name=fb$i --net=host --user=root \
|
||||
-v $FB_PATH/filebeat.yml:/usr/share/filebeat/filebeat.yml \
|
||||
-v $SCRIPT_PATH/flog:/usr/share/filebeat/flog \
|
||||
docker.elastic.co/beats/filebeat:$FB_VERSION filebeat -e --strict.perms=false
|
||||
done
|
||||
}
|
||||
|
||||
capture_stats() {
|
||||
CURRENT=$(jq -r '.flow.output_throughput.current' $NS_JSON)
|
||||
local eps_1m=$(jq -r '.flow.output_throughput.last_1_minute' $NS_JSON)
|
||||
local eps_5m=$(jq -r '.flow.output_throughput.last_5_minutes' $NS_JSON)
|
||||
local worker_util=$(jq -r '.pipelines.main.flow.worker_utilization.last_1_minute' $NS_JSON)
|
||||
local worker_concurr=$(jq -r '.pipelines.main.flow.worker_concurrency.last_1_minute' $NS_JSON)
|
||||
local cpu_percent=$(jq -r '.process.cpu.percent' $NS_JSON)
|
||||
local heap=$(jq -r '.jvm.mem.heap_used_in_bytes' $NS_JSON)
|
||||
local non_heap=$(jq -r '.jvm.mem.non_heap_used_in_bytes' $NS_JSON)
|
||||
local q_event_cnt=$(jq -r '.pipelines.main.queue.events_count' $NS_JSON)
|
||||
local q_size=$(jq -r '.pipelines.main.queue.queue_size_in_bytes' $NS_JSON)
|
||||
TOTAL_EVENTS_OUT=$(jq -r '.pipelines.main.events.out' $NS_JSON)
|
||||
printf "current: %s, 1m: %s, 5m: %s, worker_utilization: %s, worker_concurrency: %s, cpu: %s, heap: %s, non-heap: %s, q_events: %s, q_size: %s, total_events_out: %s\n" \
|
||||
$CURRENT $eps_1m $eps_5m $worker_util $worker_concurr $cpu_percent $heap $non_heap $q_event_cnt $q_size $TOTAL_EVENTS_OUT
|
||||
}
|
||||
|
||||
aggregate_stats() {
|
||||
local file_glob="$SCRIPT_PATH/$NS_DIR/${QTYPE:0:1}_w${WORKER}b${BATCH_SIZE}_*.json"
|
||||
MAX_EPS_1M=$( jqmax '.flow.output_throughput.last_1_minute' "$file_glob" )
|
||||
MAX_EPS_5M=$( jqmax '.flow.output_throughput.last_5_minutes' "$file_glob" )
|
||||
MAX_WORKER_UTIL=$( jqmax '.pipelines.main.flow.worker_utilization.last_1_minute' "$file_glob" )
|
||||
MAX_WORKER_CONCURR=$( jqmax '.pipelines.main.flow.worker_concurrency.last_1_minute' "$file_glob" )
|
||||
MAX_Q_EVENT_CNT=$( jqmax '.pipelines.main.queue.events_count' "$file_glob" )
|
||||
MAX_Q_SIZE=$( jqmax '.pipelines.main.queue.queue_size_in_bytes' "$file_glob" )
|
||||
|
||||
AVG_CPU_PERCENT=$( jqavg '.process.cpu.percent' "$file_glob" )
|
||||
AVG_VIRTUAL_MEM=$( jqavg '.process.mem.total_virtual_in_bytes' "$file_glob" )
|
||||
AVG_HEAP=$( jqavg '.jvm.mem.heap_used_in_bytes' "$file_glob" )
|
||||
AVG_NON_HEAP=$( jqavg '.jvm.mem.non_heap_used_in_bytes' "$file_glob" )
|
||||
}
|
||||
|
||||
send_summary() {
|
||||
echo "--- Send summary to Elasticsearch"
|
||||
|
||||
timestamp=$(date -u +"%Y-%m-%dT%H:%M:%S")
|
||||
tee summary.json << EOF
|
||||
{"index": {}}
|
||||
{"timestamp": "$timestamp", "version": "$LS_VERSION", "cpu": "$CPU", "mem": "$MEM", "workers": "$WORKER", "batch_size": "$BATCH_SIZE", "queue_type": "$QTYPE", "total_events_out": "$TOTAL_EVENTS_OUT", "max_eps_1m": "$MAX_EPS_1M", "max_eps_5m": "$MAX_EPS_5M", "max_worker_utilization": "$MAX_WORKER_UTIL", "max_worker_concurrency": "$MAX_WORKER_CONCURR", "avg_cpu_percentage": "$AVG_CPU_PERCENT", "avg_heap": "$AVG_HEAP", "avg_non_heap": "$AVG_NON_HEAP", "avg_virtual_memory": "$AVG_VIRTUAL_MEM", "max_queue_events": "$MAX_Q_EVENT_CNT", "max_queue_bytes_size": "$MAX_Q_SIZE"}
|
||||
EOF
|
||||
curl -X POST -u "$BENCHMARK_ES_USER:$BENCHMARK_ES_PW" "$BENCHMARK_ES_HOST/benchmark_summary/_bulk" -H 'Content-Type: application/json' --data-binary @"summary.json"
|
||||
echo ""
|
||||
}
|
||||
|
||||
# $1: snapshot index
|
||||
node_stats() {
|
||||
NS_JSON="$SCRIPT_PATH/$NS_DIR/${QTYPE:0:1}_w${WORKER}b${BATCH_SIZE}_$1.json" # m_w8b1000_0.json
|
||||
|
||||
# curl inside container because docker on mac cannot resolve localhost to host network interface
|
||||
docker exec -it ls curl localhost:9600/_node/stats > "$NS_JSON" 2> /dev/null
|
||||
}
|
||||
|
||||
# $1: index
|
||||
snapshot() {
|
||||
node_stats $1
|
||||
capture_stats
|
||||
}
|
||||
|
||||
create_directory() {
|
||||
NS_DIR="fb${FB_CNT}c${CPU}m${MEM}" # fb4c4m4
|
||||
mkdir -p "$SCRIPT_PATH/$NS_DIR"
|
||||
}
|
||||
|
||||
queue() {
|
||||
for QTYPE in "persisted" "memory"; do
|
||||
worker
|
||||
done
|
||||
}
|
||||
|
||||
worker() {
|
||||
for m in "${MULTIPLIERS[@]}"; do
|
||||
WORKER=$((CPU * m))
|
||||
batch
|
||||
done
|
||||
}
|
||||
|
||||
batch() {
|
||||
for BATCH_SIZE in "${BATCH_SIZES[@]}"; do
|
||||
run_pipeline
|
||||
stop_pipeline
|
||||
done
|
||||
}
|
||||
|
||||
run_pipeline() {
|
||||
echo "--- Run pipeline. queue type: $QTYPE, worker: $WORKER, batch size: $BATCH_SIZE"
|
||||
|
||||
start_logstash
|
||||
start_filebeat
|
||||
docker ps
|
||||
|
||||
echo "(0) sleep 3m" && sleep 180
|
||||
snapshot "0"
|
||||
|
||||
for i in {1..8}; do
|
||||
echo "($i) sleep 30s" && sleep 30
|
||||
snapshot "$i"
|
||||
|
||||
# print docker log when ingestion rate is zero
|
||||
# remove '.' in number and return max val
|
||||
[[ $(max -g "${CURRENT/./}" "0") -eq 0 ]] &&
|
||||
docker logs fb0 &&
|
||||
docker logs ls
|
||||
done
|
||||
|
||||
aggregate_stats
|
||||
send_summary
|
||||
}
|
||||
|
||||
stop_pipeline() {
|
||||
echo "--- Stop Pipeline"
|
||||
|
||||
for ((i = 0; i < FB_CNT; i++)); do
|
||||
docker stop fb$i
|
||||
docker rm fb$i
|
||||
done
|
||||
|
||||
docker stop ls
|
||||
docker rm ls
|
||||
|
||||
curl -u "$BENCHMARK_ES_USER:$BENCHMARK_ES_PW" -X DELETE $BENCHMARK_ES_HOST/_data_stream/logs-generic-default
|
||||
echo " data stream deleted "
|
||||
|
||||
# TODO: clean page caches, reduce memory fragmentation
|
||||
# https://github.com/elastic/logstash/pull/16191#discussion_r1647050216
|
||||
}
|
||||
|
||||
## FLOG_FILE_CNT=4 # number of files to generate for ingestion
|
||||
## VAULT_PATH=secret/path # vault path point to Elasticsearch credentials. The default value points to benchmark cluster.
|
||||
## TAGS=test,other # tags with "," separator.
|
||||
main() {
|
||||
parse_args "$@"
|
||||
get_secret
|
||||
|
@@ -317,8 +53,7 @@ main() {
    worker
  fi

-  # stop log generation if it has not done yet
-  [[ -n $(docker ps | grep flog) ]] && docker stop flog || true
+  clean_up
}

main "$@"
.buildkite/scripts/benchmark/marathon.sh (new executable file, 44 lines)

@@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -eo pipefail

# *******************************************************
# Run benchmark for versions that have flow metrics
# When the hardware changes, run the marathon task to establish a new baseline.
# Usage:
#   nohup bash -x all.sh > log.log 2>&1 &
# Accepted env vars:
#   STACK_VERSIONS=8.15.0,8.15.1,8.16.0-SNAPSHOT # versions to test, as a comma-separated string
# *******************************************************

SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
source "$SCRIPT_PATH/core.sh"

parse_stack_versions() {
  IFS=','
  STACK_VERSIONS="${STACK_VERSIONS:-8.6.0,8.7.0,8.8.0,8.9.0,8.10.0,8.11.0,8.12.0,8.13.0,8.14.0,8.15.0}"
  read -ra STACK_VERSIONS <<< "$STACK_VERSIONS"
}

main() {
  parse_stack_versions
  parse_args "$@"
  get_secret
  generate_logs
  check_logs

  USER_QTYPE="$QTYPE"

  for V in "${STACK_VERSIONS[@]}" ; do
    LS_VERSION="$V"
    QTYPE="$USER_QTYPE"
    pull_images
    create_directory
    if [[ $QTYPE == "all" ]]; then
      queue
    else
      worker
    fi
  done
}

main "$@"
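For a one-off marathon run outside Buildkite, the script can be launched the same way its usage comment suggests; the version list below is illustrative:

```bash
# Benchmark a custom list of stack versions in the background.
export STACK_VERSIONS="8.14.0,8.15.0,8.16.0-SNAPSHOT"   # comma-separated; defaults to 8.6.0 through 8.15.0
nohup bash -x .buildkite/scripts/benchmark/marathon.sh > marathon.log 2>&1 &
tail -f marathon.log
```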
.buildkite/scripts/benchmark/save-objects/CHANGELOG.md (new file, 8 lines)

@@ -0,0 +1,8 @@
## 20241210
Remove scripted field `5m_num` from dashboards

## 20240912
Updated runtime field `release` to return `true` when `version` contains "SNAPSHOT"

## 20240912
Initial dashboards
.buildkite/scripts/benchmark/save-objects/README.md (new file, 14 lines)

@@ -0,0 +1,14 @@
benchmark_objects.ndjson contains the following resources

- Dashboards
  - daily snapshot
  - released versions
- Data Views
  - benchmark
    - runtime fields

      | Field Name   | Type    | Comment                                                                              |
      |--------------|---------|--------------------------------------------------------------------------------------|
      | versions_num | long    | converts the semantic version to a number for graph sorting                           |
      | release      | boolean | `true` for released version, `false` for snapshot version. It is for graph filtering. |

To import objects to Kibana, navigate to Stack Management > Saved Objects and click Import

File diff suppressed because one or more lines are too long
.buildkite/scripts/benchmark/setup/alias (new file, 6 lines)

@@ -0,0 +1,6 @@
POST /_aliases
{
  "actions": [
    { "add": { "index": "benchmark_summary_v2", "alias": "benchmark_summary" } }
  ]
}
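The `setup/*` files are raw Elasticsearch requests (the alias above and the `benchmark_summary_v2` mapping below). A hedged sketch of applying the alias with curl, using the benchmark-cluster credentials that `get_secret` reads from vault; creating the target index first is an assumption about your setup:

```bash
# Assumes BENCHMARK_ES_HOST/USER/PW are already exported (e.g. by get_secret in core.sh).
curl -s -X PUT  -u "$BENCHMARK_ES_USER:$BENCHMARK_ES_PW" "$BENCHMARK_ES_HOST/benchmark_summary_v2"   # create index first (assumption)
curl -s -X POST -u "$BENCHMARK_ES_USER:$BENCHMARK_ES_PW" "$BENCHMARK_ES_HOST/_aliases" \
  -H 'Content-Type: application/json' \
  -d '{ "actions": [ { "add": { "index": "benchmark_summary_v2", "alias": "benchmark_summary" } } ] }'
```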
.buildkite/scripts/benchmark/setup/benchmark_summary_v2 (new file, 179 lines)

@@ -0,0 +1,179 @@
|
|||
PUT /benchmark_summary_v2/_mapping
|
||||
{
|
||||
"properties": {
|
||||
"avg_cpu_percentage": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"avg_heap": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"avg_non_heap": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"avg_virtual_memory": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"batch_size": {
|
||||
"type": "integer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"cpu": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"max_eps_1m": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"max_eps_5m": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"max_queue_bytes_size": {
|
||||
"type": "integer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"max_queue_events": {
|
||||
"type": "integer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"max_worker_concurrency": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"max_worker_utilization": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"mem": {
|
||||
"type": "float",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"queue_type": {
|
||||
"type": "text",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"tag": {
|
||||
"type": "text",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"timestamp": {
|
||||
"type": "date"
|
||||
},
|
||||
"total_events_out": {
|
||||
"type": "integer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"version": {
|
||||
"type": "text",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"workers": {
|
||||
"type": "integer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
},
|
||||
"tags" : {
|
||||
"type": "text",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword",
|
||||
"ignore_above": 256
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -30,3 +30,12 @@ jqavg() {
jqmax() {
  jq -r "$1 | select(. != null)" $2 | jq -s . | jq 'max'
}
+
+# return true if $1 is non-empty and not "null"
+not_empty() {
+  if [[ -n "$1" && "$1" != "null" ]]; then
+    return 0
+  else
+    return 1
+  fi
+}
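These helpers are combined when the summary document is built: `jqmax`/`jqavg` aggregate a jq path across the node-stats snapshots, and `not_empty` guards against appending absent metrics. A usage sketch mirroring `core.sh` (the file glob is illustrative):

```bash
file_glob="fb4c4m4/m_w8b500_*.json"   # snapshot files for one run (example name)
MAX_EPS_1M=$( jqmax '.flow.output_throughput.last_1_minute' "$file_glob" )
AVG_HEAP=$( jqavg '.jvm.mem.heap_used_in_bytes' "$file_glob" )

SUMMARY='{"queue_type": "memory"'
not_empty "$MAX_EPS_1M" && SUMMARY="$SUMMARY, \"max_eps_1m\": \"$MAX_EPS_1M\""   # skip the field when the metric is missing
not_empty "$AVG_HEAP"   && SUMMARY="$SUMMARY, \"avg_heap\": \"$AVG_HEAP\""
SUMMARY="$SUMMARY}"
echo "$SUMMARY"
```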
@@ -19,7 +19,7 @@ changed_files=$(git diff --name-only $previous_commit)
if [[ -n "$changed_files" ]] && [[ -z "$(echo "$changed_files" | grep -vE "$1")" ]]; then
  echo "All files compared to the previous commit [$previous_commit] match the specified regex: [$1]"
  echo "Files changed:"
- git diff --name-only HEAD^
+ git --no-pager diff --name-only HEAD^
  exit 0
else
  exit 1
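Together with the `skip_ci_on_only_changed` patterns added earlier, this check lets a build exit early when nothing outside the skip list changed. A sketch of the same logic inlined; the combined pattern and the `HEAD^` reference are illustrative, and the real script receives the regex as `$1`:

```bash
skip_pattern='^.github/|^docs/|^.mergify.yml$|^.pre-commit-config.yaml|\.md$'
changed_files=$(git diff --name-only HEAD^)

# Succeed only when there are changes and none of them fall outside the skip pattern.
if [[ -n "$changed_files" ]] && [[ -z "$(echo "$changed_files" | grep -vE "$skip_pattern")" ]]; then
  echo "Only skip-listed paths changed; CI can be skipped."
fi
```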
.buildkite/scripts/common/qualified-version.sh (new executable file, 29 lines)

@@ -0,0 +1,29 @@
#!/usr/bin/env bash

# ********************************************************
# Source this script to get the QUALIFIED_VERSION env var
# or execute it to receive the qualified version on stdout
# ********************************************************

set -euo pipefail

export QUALIFIED_VERSION="$(
  # Extract the version number from the versions.yml file
  # e.g.: 8.6.0
  printf '%s' "$(awk -F':' '{ if ("logstash" == $1) { gsub(/^ | $/,"",$2); printf $2; exit } }' versions.yml)"

  # Qualifier is passed from CI as an optional field and specifies the version postfix
  # in case of alpha or beta releases for staging builds only:
  # e.g: 8.0.0-alpha1
  printf '%s' "${VERSION_QUALIFIER:+-${VERSION_QUALIFIER}}"

  # add the SNAPSHOT tag unless WORKFLOW_TYPE=="staging" or RELEASE=="1"
  if [[ ! ( "${WORKFLOW_TYPE:-}" == "staging" || "${RELEASE:+$RELEASE}" == "1" ) ]]; then
    printf '%s' "-SNAPSHOT"
  fi
)"

# if invoked directly, output the QUALIFIED_VERSION to stdout
if [[ "$0" == "${BASH_SOURCE:-${ZSH_SCRIPT:-}}" ]]; then
  printf '%s' "${QUALIFIED_VERSION}"
fi
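The helper can be consumed either way, per its header comment — sourced for the env var, or executed for stdout (which is how the DRA scripts below use it):

```bash
# 1) source it and read the exported variable
source .buildkite/scripts/common/qualified-version.sh
echo "Building ${QUALIFIED_VERSION}"    # e.g. 8.16.0-SNAPSHOT, depending on versions.yml and WORKFLOW_TYPE

# 2) execute it and capture stdout
STACK_VERSION="$(.buildkite/scripts/common/qualified-version.sh)"
```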
@@ -26,21 +26,7 @@ rake artifact:docker_oss || error "artifact:docker_oss build failed."
rake artifact:docker_wolfi || error "artifact:docker_wolfi build failed."
rake artifact:dockerfiles || error "artifact:dockerfiles build failed."

-if [[ "$ARCH" != "aarch64" ]]; then
-  rake artifact:docker_ubi8 || error "artifact:docker_ubi8 build failed."
-fi
-
-if [[ "$WORKFLOW_TYPE" == "staging" ]] && [[ -n "$VERSION_QUALIFIER" ]]; then
-  # Qualifier is passed from CI as optional field and specify the version postfix
-  # in case of alpha or beta releases for staging builds only:
-  # e.g: 8.0.0-alpha1
-  STACK_VERSION="${STACK_VERSION}-${VERSION_QUALIFIER}"
-fi
-
-if [[ "$WORKFLOW_TYPE" == "snapshot" ]]; then
-  STACK_VERSION="${STACK_VERSION}-SNAPSHOT"
-fi
-
+STACK_VERSION="$(./$(dirname "$0")/../common/qualified-version.sh)"
info "Build complete, setting STACK_VERSION to $STACK_VERSION."

info "Saving tar.gz for docker images"

@@ -52,10 +38,6 @@ for file in build/logstash-*; do shasum $file;done
info "Uploading DRA artifacts in buildkite's artifact store ..."
# Note the deb, rpm tar.gz AARCH64 files generated has already been loaded by the build_packages.sh
images="logstash logstash-oss logstash-wolfi"
-if [ "$ARCH" != "aarch64" ]; then
-  # No logstash-ubi8 for AARCH64
-  images="logstash logstash-oss logstash-wolfi logstash-ubi8"
-fi
for image in ${images}; do
  buildkite-agent artifact upload "build/$image-${STACK_VERSION}-docker-image-${ARCH}.tar.gz"
done

@@ -63,7 +45,7 @@ done
# Upload 'docker-build-context.tar.gz' files only when build x86_64, otherwise they will be
# overwritten when building aarch64 (or viceversa).
if [ "$ARCH" != "aarch64" ]; then
-  for image in logstash logstash-oss logstash-wolfi logstash-ubi8 logstash-ironbank; do
+  for image in logstash logstash-oss logstash-wolfi logstash-ironbank; do
    buildkite-agent artifact upload "build/${image}-${STACK_VERSION}-docker-build-context.tar.gz"
  done
fi
@@ -23,17 +23,7 @@ esac

SKIP_DOCKER=1 rake artifact:all || error "rake artifact:all build failed."

-if [[ "$WORKFLOW_TYPE" == "staging" ]] && [[ -n "$VERSION_QUALIFIER" ]]; then
-  # Qualifier is passed from CI as optional field and specify the version postfix
-  # in case of alpha or beta releases for staging builds only:
-  # e.g: 8.0.0-alpha1
-  STACK_VERSION="${STACK_VERSION}-${VERSION_QUALIFIER}"
-fi
-
-if [[ "$WORKFLOW_TYPE" == "snapshot" ]]; then
-  STACK_VERSION="${STACK_VERSION}-SNAPSHOT"
-fi
-
+STACK_VERSION="$(./$(dirname "$0")/../common/qualified-version.sh)"
info "Build complete, setting STACK_VERSION to $STACK_VERSION."

info "Generated Artifacts"
@@ -11,10 +11,6 @@ function save_docker_tarballs {
  local arch="${1:?architecture required}"
  local version="${2:?stack-version required}"
  local images="logstash logstash-oss logstash-wolfi"
-  if [ "${arch}" != "aarch64" ]; then
-    # No logstash-ubi8 for AARCH64
-    images="logstash logstash-oss logstash-wolfi logstash-ubi8"
-  fi

  for image in ${images}; do
    tar_file="${image}-${version}-docker-image-${arch}.tar"

@@ -33,12 +29,12 @@ export JRUBY_OPTS="-J-Xmx4g"

# Extract the version number from the version.yml file
# e.g.: 8.6.0
-# The suffix part like alpha1 etc is managed by the optional VERSION_QUALIFIER_OPT environment variable
+# The suffix part like alpha1 etc is managed by the optional VERSION_QUALIFIER environment variable
STACK_VERSION=`cat versions.yml | sed -n 's/^logstash\:[[:space:]]\([[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\)$/\1/p'`

info "Agent is running on architecture [$(uname -i)]"

-export VERSION_QUALIFIER_OPT=${VERSION_QUALIFIER_OPT:-""}
+export VERSION_QUALIFIER=${VERSION_QUALIFIER:-""}
export DRA_DRY_RUN=${DRA_DRY_RUN:-""}

if [[ ! -z $DRA_DRY_RUN && $BUILDKITE_STEP_KEY == "logstash_publish_dra" ]]; then
@@ -42,24 +42,10 @@ if [ "$RELEASE_VER" != "7.17" ]; then
    :
fi

-# Deleting ubi8 for aarch64 for the time being. This image itself is not being built, and it is not expected
-# by the release manager.
-# See https://github.com/elastic/infra/blob/master/cd/release/release-manager/project-configs/8.5/logstash.gradle
-# for more details.
-# TODO filter it out when uploading artifacts instead
-rm -f build/logstash-ubi8-${STACK_VERSION}-docker-image-aarch64.tar.gz
-
info "Downloaded ARTIFACTS sha report"
for file in build/logstash-*; do shasum $file;done

-FINAL_VERSION=$STACK_VERSION
-if [[ -n "$VERSION_QUALIFIER" ]]; then
-  FINAL_VERSION="$FINAL_VERSION-${VERSION_QUALIFIER}"
-fi
-
-if [[ "$WORKFLOW_TYPE" == "snapshot" ]]; then
-  FINAL_VERSION="${STACK_VERSION}-SNAPSHOT"
-fi
+FINAL_VERSION="$(./$(dirname "$0")/../common/qualified-version.sh)"

mv build/distributions/dependencies-reports/logstash-${FINAL_VERSION}.csv build/distributions/dependencies-${FINAL_VERSION}.csv
@@ -155,7 +155,7 @@ ci/acceptance_tests.sh"""),

def acceptance_docker_steps()-> list[typing.Any]:
    steps = []
-    for flavor in ["full", "oss", "ubi8", "wolfi"]:
+    for flavor in ["full", "oss", "ubi", "wolfi"]:
        steps.append({
            "label": f":docker: {flavor} flavor acceptance",
            "agents": gcp_agent(vm_name="ubuntu-2204", image_prefix="family/platform-ingest-logstash"),
@@ -6,6 +6,7 @@ Health Report Integration test bootstrapper with Python script
"""
import os
import subprocess
import time
import util
import yaml

@@ -92,3 +93,19 @@ class Bootstrap:

        print(f"Logstash is running with PID: {process.pid}.")
        return process
+
+    def stop_logstash(self, process: subprocess.Popen):
+        start_time = time.time()  # in seconds
+        process.terminate()
+        for stdout_line in iter(process.stdout.readline, ""):
+            # print(f"STDOUT: {stdout_line.strip()}")
+            if "Logstash shut down" in stdout_line or "Logstash stopped" in stdout_line:
+                print(f"Logstash stopped.")
+                return None
+            # shutdown watcher keeps running, we should be good with considering time spent
+            if time.time() - start_time > 60:
+                print(f"Logstash didn't stop in 1min, sending SIGTERM signal.")
+                process.kill()
+            if time.time() - start_time > 70:
+                print(f"Logstash didn't stop over 1min, exiting.")
+                return None
@@ -6,11 +6,11 @@ class ConfigValidator:
    REQUIRED_KEYS = {
        "root": ["name", "config", "conditions", "expectation"],
        "config": ["pipeline.id", "config.string"],
-        "conditions": ["full_start_required"],
+        "conditions": ["full_start_required", "wait_seconds"],
        "expectation": ["status", "symptom", "indicators"],
        "indicators": ["pipelines"],
        "pipelines": ["status", "symptom", "indicators"],
-        "DYNAMIC": ["status", "symptom", "diagnosis", "impacts", "details"],
+        "DYNAMIC": ["status", "symptom", "diagnosis", "impacts", "details"],  # pipeline-id is a DYNAMIC
        "details": ["status"],
        "status": ["state"]
    }

@@ -19,7 +19,8 @@ class ConfigValidator:
        self.yaml_content = None

    def __has_valid_keys(self, data: any, key_path: str, repeated: bool) -> bool:
-        if isinstance(data, str) or isinstance(data, bool):  # we reached values
+        # we reached the value
+        if isinstance(data, str) or isinstance(data, bool) or isinstance(data, int) or isinstance(data, float):
            return True

        # we have two indicators section and for the next repeated ones, we go deeper
@@ -62,21 +62,23 @@ def main():
        print(f"Testing `{scenario_content.get('name')}` scenario.")
        scenario_name = scenario_content['name']

        is_full_start_required = next(sub.get('full_start_required') for sub in
                                      scenario_content.get('conditions') if 'full_start_required' in sub)
        is_full_start_required = scenario_content.get('conditions').get('full_start_required')
        wait_seconds = scenario_content.get('conditions').get('wait_seconds')
        config = scenario_content['config']
        if config is not None:
            bootstrap.apply_config(config)
        expectations = scenario_content.get("expectation")
        process = bootstrap.run_logstash(is_full_start_required)
        if process is not None:
            if wait_seconds is not None:
                print(f"Test requires waiting for `{wait_seconds}` seconds.")
                time.sleep(wait_seconds)  # wait for Logstash to start
            try:
                scenario_executor.on(scenario_name, expectations)
            except Exception as e:
                print(e)
                has_failed_scenario = True
            process.terminate()
            time.sleep(5)  # leave some window to terminate the process
            bootstrap.stop_logstash(process)

    if has_failed_scenario:
        # intentionally fail for visibility

@@ -4,14 +4,13 @@ set -euo pipefail

export PATH="/opt/buildkite-agent/.rbenv/bin:/opt/buildkite-agent/.pyenv/bin:/opt/buildkite-agent/.java/bin:$PATH"
export JAVA_HOME="/opt/buildkite-agent/.java"
export PYENV_VERSION="3.11.5"

eval "$(rbenv init -)"
eval "$(pyenv init -)"

echo "--- Installing pip"
sudo apt-get install python3-pip -y

echo "--- Installing dependencies"
python3 -mpip install -r .buildkite/scripts/health-report-tests/requirements.txt
python3 -m pip install -r .buildkite/scripts/health-report-tests/requirements.txt

echo "--- Running tests"
python3 .buildkite/scripts/health-report-tests/main.py

@@ -12,10 +12,12 @@ class ScenarioExecutor:
        pass

    def __has_intersection(self, expects, results):
        # TODO: this logic is aligned on the current Health API response
        # there is no guarantee that this method runs correctly if given multiple expects and results
        # we expect the entries of expects to exist in results
        for expect in expects:
            for result in results:
                if result.get('help_url') and "health-report-pipeline-status.html#" not in result.get('help_url'):
                if result.get('help_url') and "health-report-pipeline-" not in result.get('help_url'):
                    return False
                if not all(key in result and result[key] == value for key, value in expect.items()):
                    return False

@@ -8,7 +8,8 @@ config:
    pipeline.workers: 1
    pipeline.batch.size: 1
conditions:
  - full_start_required: true
  full_start_required: true
  wait_seconds: 5
expectation:
  status: "red"
  symptom: "1 indicator is unhealthy (`pipelines`)"

@@ -22,10 +23,10 @@ expectation:
      symptom: "The pipeline is unhealthy; 1 area is impacted and 1 diagnosis is available"
      diagnosis:
        - cause: "pipeline is not running, likely because it has encountered an error"
        - action: "view logs to determine the cause of abnormal pipeline shutdown"
          action: "view logs to determine the cause of abnormal pipeline shutdown"
      impacts:
        - description: "the pipeline is not currently processing"
        - impact_areas: ["pipeline_execution"]
          impact_areas: ["pipeline_execution"]
      details:
        status:
          state: "TERMINATED"

@@ -0,0 +1,38 @@
name: "Backpressured in 1min pipeline"
config:
  - pipeline.id: backpressure-1m-pp
    config.string: |
      input { heartbeat { interval => 0.1 } }
      filter { failure_injector { degrade_at => [filter] } }
      output { stdout {} }
    pipeline.workers: 1
    pipeline.batch.size: 1
conditions:
  full_start_required: true
  wait_seconds: 70 # give more seconds to make sure time is over the threshold, 1m in this case
expectation:
  status: "yellow"
  symptom: "1 indicator is concerning (`pipelines`)"
  indicators:
    pipelines:
      status: "yellow"
      symptom: "1 indicator is concerning (`backpressure-1m-pp`)"
      indicators:
        backpressure-1m-pp:
          status: "yellow"
          symptom: "The pipeline is concerning; 1 area is impacted and 1 diagnosis is available"
          diagnosis:
            - id: "logstash:health:pipeline:flow:worker_utilization:diagnosis:1m-blocked"
              cause: "pipeline workers have been completely blocked for at least one minute"
              action: "address bottleneck or add resources"
          impacts:
            - id: "logstash:health:pipeline:flow:impact:blocked_processing"
              severity: 2
              description: "the pipeline is blocked"
              impact_areas: ["pipeline_execution"]
          details:
            status:
              state: "RUNNING"
            flow:
              worker_utilization:
                last_1_minute: 100.0

@@ -0,0 +1,39 @@
name: "Backpressured in 5min pipeline"
config:
  - pipeline.id: backpressure-5m-pp
    config.string: |
      input { heartbeat { interval => 0.1 } }
      filter { failure_injector { degrade_at => [filter] } }
      output { stdout {} }
    pipeline.workers: 1
    pipeline.batch.size: 1
conditions:
  full_start_required: true
  wait_seconds: 310 # give more seconds to make sure time is over the threshold, 5m in this case
expectation:
  status: "red"
  symptom: "1 indicator is unhealthy (`pipelines`)"
  indicators:
    pipelines:
      status: "red"
      symptom: "1 indicator is unhealthy (`backpressure-5m-pp`)"
      indicators:
        backpressure-5m-pp:
          status: "red"
          symptom: "The pipeline is unhealthy; 1 area is impacted and 1 diagnosis is available"
          diagnosis:
            - id: "logstash:health:pipeline:flow:worker_utilization:diagnosis:5m-blocked"
              cause: "pipeline workers have been completely blocked for at least five minutes"
              action: "address bottleneck or add resources"
          impacts:
            - id: "logstash:health:pipeline:flow:impact:blocked_processing"
              severity: 1
              description: "the pipeline is blocked"
              impact_areas: ["pipeline_execution"]
          details:
            status:
              state: "RUNNING"
            flow:
              worker_utilization:
                last_1_minute: 100.0
                last_5_minutes: 100.0

@ -0,0 +1,67 @@
|
|||
name: "Multi pipeline"
|
||||
config:
|
||||
- pipeline.id: slow-start-pp-multipipeline
|
||||
config.string: |
|
||||
input { heartbeat {} }
|
||||
filter { failure_injector { degrade_at => [register] } }
|
||||
output { stdout {} }
|
||||
pipeline.workers: 1
|
||||
pipeline.batch.size: 1
|
||||
- pipeline.id: normally-terminated-pp-multipipeline
|
||||
config.string: |
|
||||
input { generator { count => 1 } }
|
||||
output { stdout {} }
|
||||
pipeline.workers: 1
|
||||
pipeline.batch.size: 1
|
||||
- pipeline.id: abnormally-terminated-pp-multipipeline
|
||||
config.string: |
|
||||
input { heartbeat { interval => 1 } }
|
||||
filter { failure_injector { crash_at => filter } }
|
||||
output { stdout {} }
|
||||
pipeline.workers: 1
|
||||
pipeline.batch.size: 1
|
||||
conditions:
|
||||
full_start_required: false
|
||||
wait_seconds: 10
|
||||
expectation:
|
||||
status: "red"
|
||||
symptom: "1 indicator is unhealthy (`pipelines`)"
|
||||
indicators:
|
||||
pipelines:
|
||||
status: "red"
|
||||
symptom: "1 indicator is unhealthy (`abnormally-terminated-pp-multipipeline`) and 2 indicators are concerning (`slow-start-pp-multipipeline`, `normally-terminated-pp-multipipeline`)"
|
||||
indicators:
|
||||
slow-start-pp-multipipeline:
|
||||
status: "yellow"
|
||||
symptom: "The pipeline is concerning; 1 area is impacted and 1 diagnosis is available"
|
||||
diagnosis:
|
||||
- cause: "pipeline is loading"
|
||||
action: "if pipeline does not come up quickly, you may need to check the logs to see if it is stalled"
|
||||
impacts:
|
||||
- impact_areas: ["pipeline_execution"]
|
||||
details:
|
||||
status:
|
||||
state: "LOADING"
|
||||
normally-terminated-pp-multipipeline:
|
||||
status: "yellow"
|
||||
symptom: "The pipeline is concerning; 1 area is impacted and 1 diagnosis is available"
|
||||
diagnosis:
|
||||
- cause: "pipeline has finished running because its inputs have been closed and events have been processed"
|
||||
action: "if you expect this pipeline to run indefinitely, you will need to configure its inputs to continue receiving or fetching events"
|
||||
impacts:
|
||||
- impact_areas: [ "pipeline_execution" ]
|
||||
details:
|
||||
status:
|
||||
state: "FINISHED"
|
||||
abnormally-terminated-pp-multipipeline:
|
||||
status: "red"
|
||||
symptom: "The pipeline is unhealthy; 1 area is impacted and 1 diagnosis is available"
|
||||
diagnosis:
|
||||
- cause: "pipeline is not running, likely because it has encountered an error"
|
||||
action: "view logs to determine the cause of abnormal pipeline shutdown"
|
||||
impacts:
|
||||
- description: "the pipeline is not currently processing"
|
||||
impact_areas: [ "pipeline_execution" ]
|
||||
details:
|
||||
status:
|
||||
state: "TERMINATED"
|
|
@@ -7,7 +7,8 @@ config:
    pipeline.workers: 1
    pipeline.batch.size: 1
conditions:
  - full_start_required: true
  full_start_required: true
  wait_seconds: 5
expectation:
  status: "yellow"
  symptom: "1 indicator is concerning (`pipelines`)"

@@ -21,7 +22,7 @@ expectation:
      symptom: "The pipeline is concerning; 1 area is impacted and 1 diagnosis is available"
      diagnosis:
        - cause: "pipeline has finished running because its inputs have been closed and events have been processed"
        - action: "if you expect this pipeline to run indefinitely, you will need to configure its inputs to continue receiving or fetching events"
          action: "if you expect this pipeline to run indefinitely, you will need to configure its inputs to continue receiving or fetching events"
      impacts:
        - impact_areas: ["pipeline_execution"]
      details:

@@ -8,7 +8,8 @@ config:
    pipeline.workers: 1
    pipeline.batch.size: 1
conditions:
  - full_start_required: false
  full_start_required: false
  wait_seconds: 0
expectation:
  status: "yellow"
  symptom: "1 indicator is concerning (`pipelines`)"

@@ -22,7 +23,7 @@ expectation:
      symptom: "The pipeline is concerning; 1 area is impacted and 1 diagnosis is available"
      diagnosis:
        - cause: "pipeline is loading"
        - action: "if pipeline does not come up quickly, you may need to check the logs to see if it is stalled"
          action: "if pipeline does not come up quickly, you may need to check the logs to see if it is stalled"
      impacts:
        - impact_areas: ["pipeline_execution"]
      details:

@ -4,6 +4,7 @@ from dataclasses import dataclass, field
|
|||
import os
|
||||
import sys
|
||||
import typing
|
||||
from functools import partial
|
||||
|
||||
from ruamel.yaml import YAML
|
||||
from ruamel.yaml.scalarstring import LiteralScalarString
|
||||
|
@ -177,17 +178,15 @@ class LinuxJobs(Jobs):
|
|||
super().__init__(os=os, jdk=jdk, group_key=group_key, agent=agent)
|
||||
|
||||
def all_jobs(self) -> list[typing.Callable[[], JobRetValues]]:
|
||||
return [
|
||||
self.init_annotation,
|
||||
self.java_unit_test,
|
||||
self.ruby_unit_test,
|
||||
self.integration_tests_part_1,
|
||||
self.integration_tests_part_2,
|
||||
self.pq_integration_tests_part_1,
|
||||
self.pq_integration_tests_part_2,
|
||||
self.x_pack_unit_tests,
|
||||
self.x_pack_integration,
|
||||
]
|
||||
jobs=list()
|
||||
jobs.append(self.init_annotation)
|
||||
jobs.append(self.java_unit_test)
|
||||
jobs.append(self.ruby_unit_test)
|
||||
jobs.extend(self.integration_test_parts(3))
|
||||
jobs.extend(self.pq_integration_test_parts(3))
|
||||
jobs.append(self.x_pack_unit_tests)
|
||||
jobs.append(self.x_pack_integration)
|
||||
return jobs
|
||||
|
||||
def prepare_shell(self) -> str:
|
||||
jdk_dir = f"/opt/buildkite-agent/.java/{self.jdk}"
|
||||
|
@ -259,17 +258,14 @@ ci/unit_tests.sh ruby
|
|||
retry=copy.deepcopy(ENABLED_RETRIES),
|
||||
)
|
||||
|
||||
def integration_tests_part_1(self) -> JobRetValues:
|
||||
return self.integration_tests(part=1)
|
||||
def integration_test_parts(self, parts) -> list[partial[JobRetValues]]:
|
||||
return [partial(self.integration_tests, part=idx+1, parts=parts) for idx in range(parts)]
|
||||
|
||||
def integration_tests_part_2(self) -> JobRetValues:
|
||||
return self.integration_tests(part=2)
|
||||
|
||||
def integration_tests(self, part: int) -> JobRetValues:
|
||||
step_name_human = f"Integration Tests - {part}"
|
||||
step_key = f"{self.group_key}-integration-tests-{part}"
|
||||
def integration_tests(self, part: int, parts: int) -> JobRetValues:
|
||||
step_name_human = f"Integration Tests - {part}/{parts}"
|
||||
step_key = f"{self.group_key}-integration-tests-{part}-of-{parts}"
|
||||
test_command = f"""
|
||||
ci/integration_tests.sh split {part-1}
|
||||
ci/integration_tests.sh split {part-1} {parts}
|
||||
"""
|
||||
|
||||
return JobRetValues(
|
||||
|
@ -281,18 +277,15 @@ ci/integration_tests.sh split {part-1}
|
|||
retry=copy.deepcopy(ENABLED_RETRIES),
|
||||
)
|
||||
|
||||
def pq_integration_tests_part_1(self) -> JobRetValues:
|
||||
return self.pq_integration_tests(part=1)
|
||||
def pq_integration_test_parts(self, parts) -> list[partial[JobRetValues]]:
|
||||
return [partial(self.pq_integration_tests, part=idx+1, parts=parts) for idx in range(parts)]
|
||||
|
||||
def pq_integration_tests_part_2(self) -> JobRetValues:
|
||||
return self.pq_integration_tests(part=2)
|
||||
|
||||
def pq_integration_tests(self, part: int) -> JobRetValues:
|
||||
step_name_human = f"IT Persistent Queues - {part}"
|
||||
step_key = f"{self.group_key}-it-persistent-queues-{part}"
|
||||
def pq_integration_tests(self, part: int, parts: int) -> JobRetValues:
|
||||
step_name_human = f"IT Persistent Queues - {part}/{parts}"
|
||||
step_key = f"{self.group_key}-it-persistent-queues-{part}-of-{parts}"
|
||||
test_command = f"""
|
||||
export FEATURE_FLAG=persistent_queues
|
||||
ci/integration_tests.sh split {part-1}
|
||||
ci/integration_tests.sh split {part-1} {parts}
|
||||
"""
|
||||
|
||||
return JobRetValues(
|
||||
|
|
|
@ -4,7 +4,7 @@ set -e
|
|||
|
||||
install_java() {
|
||||
# TODO: let's think about regularly creating a custom image for Logstash which may align on version.yml definitions
|
||||
sudo apt update && sudo apt install -y openjdk-17-jdk && sudo apt install -y openjdk-17-jre
|
||||
sudo apt update && sudo apt install -y openjdk-21-jdk && sudo apt install -y openjdk-21-jre
|
||||
}
|
||||
|
||||
install_java
|
||||
|
|
|
@ -4,22 +4,13 @@ set -e
|
|||
|
||||
TARGET_BRANCHES=("main")
|
||||
|
||||
install_java() {
|
||||
# TODO: let's think about using BK agent which has Java installed
|
||||
# Current caveat is Logstash BK agent doesn't support docker operations in it
|
||||
sudo apt update && sudo apt install -y openjdk-17-jdk && sudo apt install -y openjdk-17-jre
|
||||
install_java_11() {
|
||||
curl -L -s "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.24%2B8/OpenJDK11U-jdk_x64_linux_hotspot_11.0.24_8.tar.gz" | tar -zxf -
|
||||
}
|
||||
|
||||
# Resolves the branches we are going to track
|
||||
resolve_latest_branches() {
|
||||
source .buildkite/scripts/snyk/resolve_stack_version.sh
|
||||
for SNAPSHOT_VERSION in "${SNAPSHOT_VERSIONS[@]}"
|
||||
do
|
||||
IFS='.'
|
||||
read -a versions <<< "$SNAPSHOT_VERSION"
|
||||
version=${versions[0]}.${versions[1]}
|
||||
TARGET_BRANCHES+=("$version")
|
||||
done
|
||||
}
|
||||
|
||||
# Build Logstash specific branch to generate Gemlock file where Snyk scans
|
||||
|
@ -42,7 +33,7 @@ download_auth_snyk() {
|
|||
report() {
|
||||
REMOTE_REPO_URL=$1
|
||||
echo "Reporting $REMOTE_REPO_URL branch."
|
||||
if [ "$REMOTE_REPO_URL" != "main" ]; then
|
||||
if [ "$REMOTE_REPO_URL" != "main" ] && [ "$REMOTE_REPO_URL" != "8.x" ]; then
|
||||
MAJOR_VERSION=$(echo "$REMOTE_REPO_URL"| cut -d'.' -f 1)
|
||||
REMOTE_REPO_URL="$MAJOR_VERSION".latest
|
||||
echo "Using '$REMOTE_REPO_URL' remote repo url."
|
||||
|
@ -55,13 +46,18 @@ report() {
|
|||
./snyk monitor --prune-repeated-subdependencies --all-projects --org=logstash --remote-repo-url="$REMOTE_REPO_URL" --target-reference="$REMOTE_REPO_URL" --detection-depth=6 --exclude=qa,tools,devtools,requirements.txt --project-tags=branch="$TARGET_BRANCH",git_head="$GIT_HEAD" || :
|
||||
}
|
||||
|
||||
install_java
|
||||
resolve_latest_branches
|
||||
download_auth_snyk
|
||||
|
||||
# clone Logstash repo, build and report
|
||||
for TARGET_BRANCH in "${TARGET_BRANCHES[@]}"
|
||||
do
|
||||
if [ "$TARGET_BRANCH" == "7.17" ]; then
|
||||
echo "Installing and configuring JDK11."
|
||||
export OLD_PATH=$PATH
|
||||
install_java_11
|
||||
export PATH=$PWD/jdk-11.0.24+8/bin:$PATH
|
||||
fi
|
||||
git reset --hard HEAD # reset if any generated files appeared
|
||||
# check if target branch exists
|
||||
echo "Checking out $TARGET_BRANCH branch."
|
||||
|
@ -71,70 +67,10 @@ do
|
|||
else
|
||||
echo "$TARGET_BRANCH branch doesn't exist."
|
||||
fi
|
||||
done
|
||||
|
||||
# Scan Logstash docker images and report
|
||||
REPOSITORY_BASE_URL="docker.elastic.co/logstash/"
|
||||
|
||||
report_docker_image() {
|
||||
image=$1
|
||||
project_name=$2
|
||||
platform=$3
|
||||
echo "Reporting $image to Snyk started..."
|
||||
docker pull "$image"
|
||||
if [[ $platform != null ]]; then
|
||||
./snyk container monitor "$image" --org=logstash --platform="$platform" --project-name="$project_name" --project-tags=version="$version" || :
|
||||
else
|
||||
./snyk container monitor "$image" --org=logstash --project-name="$project_name" --project-tags=version="$version" || :
|
||||
if [ "$TARGET_BRANCH" == "7.17" ]; then
|
||||
# reset state
|
||||
echo "Removing JDK11 installation."
|
||||
rm -rf jdk-11.0.24+8
|
||||
export PATH=$OLD_PATH
|
||||
fi
|
||||
}
|
||||
|
||||
report_docker_images() {
|
||||
version=$1
|
||||
echo "Version value: $version"
|
||||
|
||||
image=$REPOSITORY_BASE_URL"logstash:$version-SNAPSHOT"
|
||||
snyk_project_name="logstash-$version-SNAPSHOT"
|
||||
report_docker_image "$image" "$snyk_project_name"
|
||||
|
||||
image=$REPOSITORY_BASE_URL"logstash-oss:$version-SNAPSHOT"
|
||||
snyk_project_name="logstash-oss-$version-SNAPSHOT"
|
||||
report_docker_image "$image" "$snyk_project_name"
|
||||
|
||||
image=$REPOSITORY_BASE_URL"logstash:$version-SNAPSHOT-arm64"
|
||||
snyk_project_name="logstash-$version-SNAPSHOT-arm64"
|
||||
report_docker_image "$image" "$snyk_project_name" "linux/arm64"
|
||||
|
||||
image=$REPOSITORY_BASE_URL"logstash:$version-SNAPSHOT-amd64"
|
||||
snyk_project_name="logstash-$version-SNAPSHOT-amd64"
|
||||
report_docker_image "$image" "$snyk_project_name" "linux/amd64"
|
||||
|
||||
image=$REPOSITORY_BASE_URL"logstash-oss:$version-SNAPSHOT-arm64"
|
||||
snyk_project_name="logstash-oss-$version-SNAPSHOT-arm64"
|
||||
report_docker_image "$image" "$snyk_project_name" "linux/arm64"
|
||||
|
||||
image=$REPOSITORY_BASE_URL"logstash-oss:$version-SNAPSHOT-amd64"
|
||||
snyk_project_name="logstash-oss-$version-SNAPSHOT-amd64"
|
||||
report_docker_image "$image" "$snyk_project_name" "linux/amd64"
|
||||
}
|
||||
|
||||
resolve_version_and_report_docker_images() {
|
||||
git reset --hard HEAD # reset if any generated files appeared
|
||||
git checkout "$1"
|
||||
|
||||
# parse version (ex: 8.8.2 from 8.8 branch, or 8.9.0 from main branch)
|
||||
versions_file="$PWD/versions.yml"
|
||||
version=$(awk '/logstash:/ { print $2 }' "$versions_file")
|
||||
report_docker_images "$version"
|
||||
}
|
||||
|
||||
# resolve docker artifact and report
|
||||
#for TARGET_BRANCH in "${TARGET_BRANCHES[@]}"
|
||||
#do
|
||||
# if git show-ref --quiet refs/heads/"$TARGET_BRANCH"; then
|
||||
# echo "Using $TARGET_BRANCH branch for docker images."
|
||||
# resolve_version_and_report_docker_images "$TARGET_BRANCH"
|
||||
# else
|
||||
# echo "$TARGET_BRANCH branch doesn't exist."
|
||||
# fi
|
||||
#done
|
||||
done
|
||||
|
|
|
@ -6,14 +6,9 @@
|
|||
|
||||
set -e
|
||||
|
||||
VERSION_URL="https://raw.githubusercontent.com/elastic/logstash/main/ci/logstash_releases.json"
|
||||
VERSION_URL="https://storage.googleapis.com/artifacts-api/snapshots/branches.json"
|
||||
|
||||
echo "Fetching versions from $VERSION_URL"
|
||||
VERSIONS=$(curl --silent $VERSION_URL)
|
||||
SNAPSHOT_KEYS=$(echo "$VERSIONS" | jq -r '.snapshots | .[]')
|
||||
readarray -t TARGET_BRANCHES < <(curl --retry-all-errors --retry 5 --retry-delay 5 -fsSL $VERSION_URL | jq -r '.branches[]')
|
||||
echo "${TARGET_BRANCHES[@]}"
|
||||
|
||||
SNAPSHOT_VERSIONS=()
|
||||
while IFS= read -r line; do
|
||||
SNAPSHOT_VERSIONS+=("$line")
|
||||
echo "Resolved snapshot version: $line"
|
||||
done <<< "$SNAPSHOT_KEYS"
|
|
@ -1,9 +1,8 @@
|
|||
agents:
|
||||
provider: gcp
|
||||
imageProject: elastic-images-prod
|
||||
image: family/platform-ingest-logstash-ubuntu-2204
|
||||
machineType: "n2-standard-4"
|
||||
diskSizeGb: 120
|
||||
image: "docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-logstash-ci"
|
||||
cpu: "2"
|
||||
memory: "4Gi"
|
||||
ephemeralStorage: "64Gi"
|
||||
|
||||
steps:
|
||||
# reports main, previous (ex: 7.latest) and current (ex: 8.latest) release branches to Snyk
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
env:
|
||||
DEFAULT_MATRIX_OS: "windows-2022"
|
||||
DEFAULT_MATRIX_JDK: "adoptiumjdk_17"
|
||||
DEFAULT_MATRIX_JDK: "adoptiumjdk_21"
|
||||
|
||||
steps:
|
||||
- input: "Test Parameters"
|
||||
|
@ -35,20 +35,14 @@ steps:
|
|||
value: "adoptiumjdk_21"
|
||||
- label: "Adoptium JDK 17 (Eclipse Temurin)"
|
||||
value: "adoptiumjdk_17"
|
||||
- label: "Adoptium JDK 11 (Eclipse Temurin)"
|
||||
value: "adoptiumjdk_11"
|
||||
- label: "OpenJDK 21"
|
||||
value: "openjdk_21"
|
||||
- label: "OpenJDK 17"
|
||||
value: "openjdk_17"
|
||||
- label: "OpenJDK 11"
|
||||
value: "openjdk_11"
|
||||
- label: "Zulu 21"
|
||||
value: "zulu_21"
|
||||
- label: "Zulu 17"
|
||||
value: "zulu_17"
|
||||
- label: "Zulu 11"
|
||||
value: "zulu_11"
|
||||
|
||||
- wait: ~
|
||||
if: build.source != "schedule" && build.source != "trigger_job"
|
||||
|
|
42  .ci/Makefile  Normal file
|
@ -0,0 +1,42 @@
|
|||
.SILENT:
|
||||
MAKEFLAGS += --no-print-directory
|
||||
.SHELLFLAGS = -euc
|
||||
SHELL = /bin/bash
|
||||
|
||||
#######################
|
||||
## Templates
|
||||
#######################
|
||||
## Mergify template
|
||||
define MERGIFY_TMPL
|
||||
|
||||
- name: backport patches to $(BRANCH) branch
|
||||
conditions:
|
||||
- merged
|
||||
- base=main
|
||||
- label=$(BACKPORT_LABEL)
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "$(BRANCH)"
|
||||
endef
|
||||
|
||||
# Add mergify entry for the new backport label
|
||||
.PHONY: mergify
|
||||
export MERGIFY_TMPL
|
||||
mergify: BACKPORT_LABEL=$${BACKPORT_LABEL} BRANCH=$${BRANCH} PUSH_BRANCH=$${PUSH_BRANCH}
|
||||
mergify:
|
||||
@echo ">> mergify"
|
||||
echo "$$MERGIFY_TMPL" >> ../.mergify.yml
|
||||
git add ../.mergify.yml
|
||||
git status
|
||||
if [ ! -z "$$(git status --porcelain)" ]; then \
|
||||
git commit -m "mergify: add $(BACKPORT_LABEL) rule"; \
|
||||
git push origin $(PUSH_BRANCH) ; \
|
||||
fi
|
||||
|
||||
# Create GitHub backport label
|
||||
.PHONY: backport-label
|
||||
backport-label: BACKPORT_LABEL=$${BACKPORT_LABEL}
|
||||
backport-label:
|
||||
@echo ">> backport-label"
|
||||
gh label create $(BACKPORT_LABEL) --description "Automated backport with mergify" --color 0052cc --force
|
|
@ -1,2 +1,2 @@
|
|||
LS_BUILD_JAVA=adoptiumjdk_17
|
||||
LS_RUNTIME_JAVA=adoptiumjdk_17
|
||||
LS_BUILD_JAVA=adoptiumjdk_21
|
||||
LS_RUNTIME_JAVA=adoptiumjdk_21
|
||||
|
|
|
@ -21,10 +21,6 @@ analyze:
|
|||
type: gradle
|
||||
target: 'dependencies-report:'
|
||||
path: .
|
||||
- name: ingest-converter
|
||||
type: gradle
|
||||
target: 'ingest-converter:'
|
||||
path: .
|
||||
- name: logstash-core
|
||||
type: gradle
|
||||
target: 'logstash-core:'
|
||||
|
|
1  .github/ISSUE_TEMPLATE/test-failure.md  vendored
|
@ -21,6 +21,5 @@ to reproduce locally
|
|||
**Failure history**:
|
||||
<!--
|
||||
Link to build stats and possible indication of when this started failing and how often it fails
|
||||
<https://build-stats.elastic.co/app/kibana>
|
||||
-->
|
||||
**Failure excerpt**:
|
||||
|
|
18  .github/dependabot.yml  vendored  Normal file
|
@ -0,0 +1,18 @@
|
|||
---
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directories:
|
||||
- '/'
|
||||
- '/.github/actions/*'
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "sunday"
|
||||
time: "22:00"
|
||||
reviewers:
|
||||
- "elastic/observablt-ci"
|
||||
- "elastic/observablt-ci-contractors"
|
||||
groups:
|
||||
github-actions:
|
||||
patterns:
|
||||
- "*"
|
33  .github/workflows/add-docs-preview-link.yml  vendored
|
@ -1,33 +0,0 @@
|
|||
name: Docs Preview Link
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- docs/**
|
||||
- docsk8s/**
|
||||
jobs:
|
||||
docs-preview-link:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- id: wait-for-status
|
||||
uses: autotelic/action-wait-for-status-check@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
owner: elastic
|
||||
# when running with on: pull_request_target we get the PR base ref by default
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
statusName: "buildkite/docs-build-pr"
|
||||
# https://elasticsearch-ci.elastic.co/job/elastic+logstash+pull-request+build-docs
|
||||
# usually finishes in ~ 20 minutes
|
||||
timeoutSeconds: 900
|
||||
intervalSeconds: 30
|
||||
- name: Add Docs Preview link in PR Comment
|
||||
if: steps.wait-for-status.outputs.state == 'success'
|
||||
uses: thollander/actions-comment-pull-request@v1
|
||||
with:
|
||||
message: |
|
||||
:page_with_curl: **DOCS PREVIEW** :sparkles: https://logstash_bk_${{ github.event.number }}.docs-preview.app.elstc.co/diff
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
22  .github/workflows/backport-active.yml  vendored  Normal file
|
@ -0,0 +1,22 @@
|
|||
name: Backport to active branches
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
backport:
|
||||
# Only run if the PR was merged (not just closed) and has one of the backport labels
|
||||
if: |
|
||||
github.event.pull_request.merged == true &&
|
||||
contains(toJSON(github.event.pull_request.labels.*.name), 'backport-active-')
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: elastic/oblt-actions/github/backport-active@v1
|
19  .github/workflows/docs-build.yml  vendored  Normal file
|
@ -0,0 +1,19 @@
|
|||
name: docs-build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request_target: ~
|
||||
merge_group: ~
|
||||
|
||||
jobs:
|
||||
docs-preview:
|
||||
uses: elastic/docs-builder/.github/workflows/preview-build.yml@main
|
||||
with:
|
||||
path-pattern: docs/**
|
||||
permissions:
|
||||
deployments: write
|
||||
id-token: write
|
||||
contents: read
|
||||
pull-requests: read
|
14  .github/workflows/docs-cleanup.yml  vendored  Normal file
|
@ -0,0 +1,14 @@
|
|||
name: docs-cleanup
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- closed
|
||||
|
||||
jobs:
|
||||
docs-preview:
|
||||
uses: elastic/docs-builder/.github/workflows/preview-cleanup.yml@main
|
||||
permissions:
|
||||
contents: none
|
||||
id-token: write
|
||||
deployments: write
|
23  .github/workflows/mergify-labels-copier.yml  vendored  Normal file
|
@ -0,0 +1,23 @@
|
|||
name: mergify backport labels copier
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
mergify-backport-labels-copier:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.head_ref, 'mergify/bp/')
|
||||
permissions:
|
||||
# Add GH labels
|
||||
pull-requests: write
|
||||
# See https://github.com/cli/cli/issues/6274
|
||||
repository-projects: read
|
||||
steps:
|
||||
- uses: elastic/oblt-actions/mergify/labels-copier@v1
|
||||
with:
|
||||
excluded-labels-regex: "^backport-*"
|
49  .github/workflows/pr_backporter.yml  vendored
|
@ -1,49 +0,0 @@
|
|||
name: Backport PR to another branch
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
pr_commented:
|
||||
name: PR comment
|
||||
if: github.event.issue.pull_request
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-regex-match@v2
|
||||
id: regex-match
|
||||
with:
|
||||
text: ${{ github.event.comment.body }}
|
||||
regex: '^@logstashmachine backport (main|[x0-9\.]+)$'
|
||||
- if: ${{ steps.regex-match.outputs.group1 == '' }}
|
||||
run: exit 1
|
||||
- name: Fetch logstash-core team member list
|
||||
uses: tspascoal/get-user-teams-membership@v1
|
||||
id: checkUserMember
|
||||
with:
|
||||
username: ${{ github.actor }}
|
||||
organization: elastic
|
||||
team: logstash
|
||||
GITHUB_TOKEN: ${{ secrets.READ_ORG_SECRET_JSVD }}
|
||||
- name: Is user not a core team member?
|
||||
if: ${{ steps.checkUserMember.outputs.isTeamMember == 'false' }}
|
||||
run: exit 1
|
||||
- name: checkout repo content
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: 'main'
|
||||
- run: git config --global user.email "43502315+logstashmachine@users.noreply.github.com"
|
||||
- run: git config --global user.name "logstashmachine"
|
||||
- name: setup python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.8
|
||||
- run: |
|
||||
mkdir ~/.elastic && echo ${{ github.token }} >> ~/.elastic/github.token
|
||||
- run: pip install requests
|
||||
- name: run backport
|
||||
run: python devtools/backport ${{ steps.regex-match.outputs.group1 }} ${{ github.event.issue.number }} --remote=origin --yes
|
18  .github/workflows/pre-commit.yml  vendored  Normal file
|
@ -0,0 +1,18 @@
|
|||
name: pre-commit
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- 8.*
|
||||
- 9.*
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
pre-commit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: elastic/oblt-actions/pre-commit@v1
|
29  .github/workflows/version_bumps.yml  vendored
|
@ -25,9 +25,13 @@ jobs:
|
|||
version_bumper:
|
||||
name: Bump versions
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
INPUTS_BRANCH: "${{ inputs.branch }}"
|
||||
INPUTS_BUMP: "${{ inputs.bump }}"
|
||||
BACKPORT_LABEL: "backport-${{ inputs.branch }}"
|
||||
steps:
|
||||
- name: Fetch logstash-core team member list
|
||||
uses: tspascoal/get-user-teams-membership@v1
|
||||
uses: tspascoal/get-user-teams-membership@57e9f42acd78f4d0f496b3be4368fc5f62696662 #v3.0.0
|
||||
with:
|
||||
username: ${{ github.actor }}
|
||||
organization: elastic
|
||||
|
@ -37,14 +41,14 @@ jobs:
|
|||
if: ${{ steps.checkUserMember.outputs.isTeamMember == 'false' }}
|
||||
run: exit 1
|
||||
- name: checkout repo content
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event.inputs.branch }}
|
||||
ref: ${{ env.INPUTS_BRANCH }}
|
||||
- run: git config --global user.email "43502315+logstashmachine@users.noreply.github.com"
|
||||
- run: git config --global user.name "logstashmachine"
|
||||
- run: ./gradlew clean installDefaultGems
|
||||
- run: ./vendor/jruby/bin/jruby -S bundle update --all --${{ github.event.inputs.bump }} --strict
|
||||
- run: ./vendor/jruby/bin/jruby -S bundle update --all --${{ env.INPUTS_BUMP }} --strict
|
||||
- run: mv Gemfile.lock Gemfile.jruby-*.lock.release
|
||||
- run: echo "T=$(date +%s)" >> $GITHUB_ENV
|
||||
- run: echo "BRANCH=update_lock_${T}" >> $GITHUB_ENV
|
||||
|
@ -53,8 +57,21 @@ jobs:
|
|||
git add .
|
||||
git status
|
||||
if [[ -z $(git status --porcelain) ]]; then echo "No changes. We're done."; exit 0; fi
|
||||
git commit -m "Update ${{ github.event.inputs.bump }} plugin versions in gemfile lock" -a
|
||||
git commit -m "Update ${{ env.INPUTS_BUMP }} plugin versions in gemfile lock" -a
|
||||
git push origin $BRANCH
|
||||
|
||||
- name: Update mergify (minor only)
|
||||
if: ${{ inputs.bump == 'minor' }}
|
||||
continue-on-error: true
|
||||
run: make -C .ci mergify BACKPORT_LABEL=$BACKPORT_LABEL BRANCH=$INPUTS_BRANCH PUSH_BRANCH=$BRANCH
|
||||
|
||||
- name: Create Pull Request
|
||||
run: |
|
||||
curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -X POST -d "{\"title\": \"bump lock file for ${{ github.event.inputs.branch }}\",\"head\": \"${BRANCH}\",\"base\": \"${{ github.event.inputs.branch }}\"}" https://api.github.com/repos/elastic/logstash/pulls
|
||||
curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -X POST -d "{\"title\": \"bump lock file for ${{ env.INPUTS_BRANCH }}\",\"head\": \"${BRANCH}\",\"base\": \"${{ env.INPUTS_BRANCH }}\"}" https://api.github.com/repos/elastic/logstash/pulls
|
||||
|
||||
- name: Create GitHub backport label (Mergify) (minor only)
|
||||
if: ${{ inputs.bump == 'minor' }}
|
||||
continue-on-error: true
|
||||
run: make -C .ci backport-label BACKPORT_LABEL=$BACKPORT_LABEL
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
132  .mergify.yml  Normal file
|
@ -0,0 +1,132 @@
|
|||
commands_restrictions:
|
||||
backport:
|
||||
conditions:
|
||||
- or:
|
||||
- sender-permission>=write
|
||||
- sender=github-actions[bot]
|
||||
defaults:
|
||||
actions:
|
||||
backport:
|
||||
title: "[{{ destination_branch }}] (backport #{{ number }}) {{ title }}"
|
||||
assignees:
|
||||
- "{{ author }}"
|
||||
labels:
|
||||
- "backport"
|
||||
pull_request_rules:
|
||||
# - name: ask to resolve conflict
|
||||
# conditions:
|
||||
# - conflict
|
||||
# actions:
|
||||
# comment:
|
||||
# message: |
|
||||
# This pull request is now in conflicts. Could you fix it @{{author}}? 🙏
|
||||
# To fixup this pull request, you can check out it locally. See documentation: https://help.github.com/articles/checking-out-pull-requests-locally/
|
||||
# ```
|
||||
# git fetch upstream
|
||||
# git checkout -b {{head}} upstream/{{head}}
|
||||
# git merge upstream/{{base}}
|
||||
# git push upstream {{head}}
|
||||
# ```
|
||||
|
||||
- name: notify the backport policy
|
||||
conditions:
|
||||
- -label~=^backport
|
||||
- base=main
|
||||
actions:
|
||||
comment:
|
||||
message: |
|
||||
This pull request does not have a backport label. Could you fix it @{{author}}? 🙏
|
||||
To fixup this pull request, you need to add the backport labels for the needed
|
||||
branches, such as:
|
||||
* `backport-8./d` is the label to automatically backport to the `8./d` branch. `/d` is the digit.
|
||||
* If no backport is necessary, please add the `backport-skip` label
|
||||
|
||||
- name: remove backport-skip label
|
||||
conditions:
|
||||
- label~=^backport-\d
|
||||
actions:
|
||||
label:
|
||||
remove:
|
||||
- backport-skip
|
||||
|
||||
- name: notify the backport has not been merged yet
|
||||
conditions:
|
||||
- -merged
|
||||
- -closed
|
||||
- author=mergify[bot]
|
||||
- "#check-success>0"
|
||||
- schedule=Mon-Mon 06:00-10:00[Europe/Paris]
|
||||
actions:
|
||||
comment:
|
||||
message: |
|
||||
This pull request has not been merged yet. Could you please review and merge it @{{ assignee | join(', @') }}? 🙏
|
||||
|
||||
- name: backport patches to 8.16 branch
|
||||
conditions:
|
||||
- merged
|
||||
- base=main
|
||||
- label=backport-8.16
|
||||
actions:
|
||||
backport:
|
||||
assignees:
|
||||
- "{{ author }}"
|
||||
branches:
|
||||
- "8.16"
|
||||
labels:
|
||||
- "backport"
|
||||
title: "[{{ destination_branch }}] {{ title }} (backport #{{ number }})"
|
||||
|
||||
- name: backport patches to 8.17 branch
|
||||
conditions:
|
||||
- merged
|
||||
- base=main
|
||||
- label=backport-8.17
|
||||
actions:
|
||||
backport:
|
||||
assignees:
|
||||
- "{{ author }}"
|
||||
branches:
|
||||
- "8.17"
|
||||
labels:
|
||||
- "backport"
|
||||
title: "[{{ destination_branch }}] {{ title }} (backport #{{ number }})"
|
||||
|
||||
- name: backport patches to 8.18 branch
|
||||
conditions:
|
||||
- merged
|
||||
- base=main
|
||||
- label=backport-8.18
|
||||
actions:
|
||||
backport:
|
||||
assignees:
|
||||
- "{{ author }}"
|
||||
branches:
|
||||
- "8.18"
|
||||
labels:
|
||||
- "backport"
|
||||
title: "[{{ destination_branch }}] {{ title }} (backport #{{ number }})"
|
||||
|
||||
- name: backport patches to 8.19 branch
|
||||
conditions:
|
||||
- merged
|
||||
- base=main
|
||||
- label=backport-8.19
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "8.19"
|
||||
|
||||
- name: backport patches to 9.0 branch
|
||||
conditions:
|
||||
- merged
|
||||
- base=main
|
||||
- label=backport-9.0
|
||||
actions:
|
||||
backport:
|
||||
assignees:
|
||||
- "{{ author }}"
|
||||
branches:
|
||||
- "9.0"
|
||||
labels:
|
||||
- "backport"
|
||||
title: "[{{ destination_branch }}] {{ title }} (backport #{{ number }})"
|
6  .pre-commit-config.yaml  Normal file
|
@ -0,0 +1,6 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
args: ['--assume-in-merge']
|
|
@ -1,998 +0,0 @@
|
|||
PATH
|
||||
remote: logstash-core-plugin-api
|
||||
specs:
|
||||
logstash-core-plugin-api (2.1.16-java)
|
||||
logstash-core (= 8.17.4)
|
||||
|
||||
PATH
|
||||
remote: logstash-core
|
||||
specs:
|
||||
logstash-core (8.17.4-java)
|
||||
clamp (~> 1)
|
||||
concurrent-ruby (~> 1, < 1.1.10)
|
||||
down (~> 5.2.0)
|
||||
elasticsearch (~> 8)
|
||||
filesize (~> 0.2)
|
||||
gems (~> 1)
|
||||
i18n (~> 1)
|
||||
jar-dependencies (= 0.4.1)
|
||||
jrjackson (= 0.4.20)
|
||||
manticore (~> 0.6)
|
||||
minitar (~> 1)
|
||||
pry (~> 0.12)
|
||||
puma (~> 6.3, >= 6.4.2)
|
||||
rack (~> 3)
|
||||
ruby-maven-libs (~> 3, >= 3.8.9)
|
||||
rubyzip (~> 1)
|
||||
sinatra (~> 4)
|
||||
stud (~> 0.0.19)
|
||||
thread_safe (~> 0.3.6)
|
||||
thwait
|
||||
treetop (~> 1)
|
||||
tzinfo-data
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
addressable (2.8.7)
|
||||
public_suffix (>= 2.0.2, < 7.0)
|
||||
amazing_print (1.6.0)
|
||||
arr-pm (0.0.12)
|
||||
ast (2.4.2)
|
||||
atomic (1.1.101-java)
|
||||
avl_tree (1.2.1)
|
||||
atomic (~> 1.1)
|
||||
avro (1.10.2)
|
||||
multi_json (~> 1)
|
||||
aws-eventstream (1.3.2)
|
||||
aws-partitions (1.1020.0)
|
||||
aws-sdk-cloudfront (1.107.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-cloudwatch (1.108.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-core (3.214.1)
|
||||
aws-eventstream (~> 1, >= 1.3.0)
|
||||
aws-partitions (~> 1, >= 1.992.0)
|
||||
aws-sigv4 (~> 1.9)
|
||||
jmespath (~> 1, >= 1.6.1)
|
||||
aws-sdk-kms (1.96.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-resourcegroups (1.76.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-s3 (1.176.1)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sdk-kms (~> 1)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-sns (1.92.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-sqs (1.89.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sigv4 (1.10.1)
|
||||
aws-eventstream (~> 1, >= 1.0.2)
|
||||
back_pressure (1.0.0)
|
||||
backports (3.25.0)
|
||||
base64 (0.2.0)
|
||||
belzebuth (0.2.3)
|
||||
childprocess
|
||||
benchmark-ips (2.14.0)
|
||||
bigdecimal (3.1.9-java)
|
||||
bindata (2.5.0)
|
||||
buftok (0.2.0)
|
||||
builder (3.3.0)
|
||||
cabin (0.9.0)
|
||||
childprocess (4.1.0)
|
||||
ci_reporter (2.1.0)
|
||||
builder (>= 2.1.2)
|
||||
rexml
|
||||
ci_reporter_rspec (1.0.0)
|
||||
ci_reporter (~> 2.0)
|
||||
rspec (>= 2.14, < 4)
|
||||
clamp (1.3.2)
|
||||
coderay (1.1.3)
|
||||
concurrent-ruby (1.1.9)
|
||||
crack (1.0.0)
|
||||
bigdecimal
|
||||
rexml
|
||||
dalli (3.2.8)
|
||||
date (3.3.3-java)
|
||||
diff-lcs (1.5.1)
|
||||
docile (1.4.1)
|
||||
domain_name (0.6.20240107)
|
||||
dotenv (3.1.7)
|
||||
down (5.2.4)
|
||||
addressable (~> 2.8)
|
||||
e2mmap (0.1.0)
|
||||
edn (1.1.1)
|
||||
elastic-enterprise-search (8.9.0)
|
||||
elastic-transport (~> 8.1)
|
||||
jwt (>= 1.5, < 3.0)
|
||||
elastic-transport (8.4.0)
|
||||
faraday (< 3)
|
||||
multi_json
|
||||
elasticsearch (8.17.1)
|
||||
elastic-transport (~> 8.3)
|
||||
elasticsearch-api (= 8.17.1)
|
||||
elasticsearch-api (8.17.1)
|
||||
multi_json
|
||||
equalizer (0.0.11)
|
||||
et-orbi (1.2.11)
|
||||
tzinfo
|
||||
faraday (2.12.2)
|
||||
faraday-net_http (>= 2.0, < 3.5)
|
||||
json
|
||||
logger
|
||||
faraday-net_http (3.4.0)
|
||||
net-http (>= 0.5.0)
|
||||
ffi (1.17.1-java)
|
||||
filesize (0.2.0)
|
||||
fileutils (1.7.3)
|
||||
fivemat (1.3.7)
|
||||
flores (0.0.8)
|
||||
fpm (1.16.0)
|
||||
arr-pm (~> 0.0.11)
|
||||
backports (>= 2.6.2)
|
||||
cabin (>= 0.6.0)
|
||||
clamp (>= 1.0.0)
|
||||
pleaserun (~> 0.0.29)
|
||||
rexml
|
||||
stud
|
||||
fugit (1.11.1)
|
||||
et-orbi (~> 1, >= 1.2.11)
|
||||
raabro (~> 1.4)
|
||||
gelfd2 (0.4.1)
|
||||
gem_publisher (1.5.0)
|
||||
gems (1.3.0)
|
||||
gene_pool (1.5.0)
|
||||
concurrent-ruby (>= 1.0)
|
||||
hashdiff (1.1.2)
|
||||
hitimes (1.3.1-java)
|
||||
http (3.3.0)
|
||||
addressable (~> 2.3)
|
||||
http-cookie (~> 1.0)
|
||||
http-form_data (~> 2.0)
|
||||
http_parser.rb (~> 0.6.0)
|
||||
http-cookie (1.0.8)
|
||||
domain_name (~> 0.5)
|
||||
http-form_data (2.3.0)
|
||||
http_parser.rb (0.6.0-java)
|
||||
i18n (1.14.7)
|
||||
concurrent-ruby (~> 1.0)
|
||||
insist (1.0.0)
|
||||
jar-dependencies (0.4.1)
|
||||
jls-grok (0.11.5)
|
||||
cabin (>= 0.6.0)
|
||||
jls-lumberjack (0.0.26)
|
||||
concurrent-ruby
|
||||
jmespath (1.6.2)
|
||||
jrjackson (0.4.20-java)
|
||||
jruby-jms (1.3.0-java)
|
||||
gene_pool
|
||||
semantic_logger
|
||||
jruby-openssl (0.15.3-java)
|
||||
jruby-stdin-channel (0.2.0-java)
|
||||
json (2.9.1-java)
|
||||
json-schema (2.8.1)
|
||||
addressable (>= 2.4)
|
||||
jwt (2.9.3)
|
||||
base64
|
||||
kramdown (2.5.1)
|
||||
rexml (>= 3.3.9)
|
||||
language_server-protocol (3.17.0.4)
|
||||
logger (1.6.6)
|
||||
logstash-codec-avro (3.4.1-java)
|
||||
avro (~> 1.10.2)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-cef (6.2.8-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-codec-collectd (3.1.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-dots (3.0.6)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-codec-edn (3.1.0)
|
||||
edn
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-edn_lines (3.1.0)
|
||||
edn
|
||||
logstash-codec-line
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-es_bulk (3.1.0)
|
||||
logstash-codec-line
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-fluent (3.4.3-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
msgpack (~> 1.1)
|
||||
logstash-codec-graphite (3.0.6)
|
||||
logstash-codec-line
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-codec-json (3.1.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0, >= 1.0.1)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-json_lines (3.2.2)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0, >= 1.0.1)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-codec-line (3.1.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-codec-msgpack (3.1.0-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
msgpack (~> 1.1)
|
||||
logstash-codec-multiline (3.1.2)
|
||||
concurrent-ruby
|
||||
jls-grok (~> 0.11.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-patterns-core
|
||||
logstash-codec-netflow (4.3.2)
|
||||
bindata (>= 1.5.0)
|
||||
logstash-core-plugin-api (~> 2.0)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-codec-plain (3.1.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-codec-rubydebug (3.1.0)
|
||||
amazing_print (~> 1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-devutils (2.6.2-java)
|
||||
fivemat
|
||||
gem_publisher
|
||||
kramdown (~> 2)
|
||||
logstash-codec-plain
|
||||
logstash-core (>= 6.3)
|
||||
minitar
|
||||
rake
|
||||
rspec (~> 3.0)
|
||||
rspec-wait
|
||||
stud (>= 0.0.20)
|
||||
logstash-filter-aggregate (2.10.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-anonymize (3.0.7)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
murmurhash3 (= 0.1.6)
|
||||
logstash-filter-cidr (3.1.3-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-clone (4.2.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.1)
|
||||
logstash-filter-csv (3.1.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-filter-date (3.1.15)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-de_dot (1.1.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-dissect (1.2.5)
|
||||
jar-dependencies
|
||||
logstash-core-plugin-api (>= 2.1.1, <= 2.99)
|
||||
logstash-filter-dns (3.2.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
lru_redux (~> 1.1.0)
|
||||
logstash-filter-drop (3.0.5)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-elastic_integration (8.17.1-java)
|
||||
logstash-core (>= 8.7.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-elasticsearch (3.17.0)
|
||||
elasticsearch (>= 7.14.9)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ca_trusted_fingerprint_support (~> 1.0)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
manticore (>= 0.7.1)
|
||||
logstash-filter-fingerprint (3.4.4)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
murmurhash3 (= 0.1.6)
|
||||
logstash-filter-geoip (7.3.1-java)
|
||||
logstash-core (>= 7.14.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-filter-grok (4.4.3)
|
||||
jls-grok (~> 0.11.3)
|
||||
logstash-core (>= 5.6.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.0)
|
||||
logstash-patterns-core (>= 4.3.0, < 5)
|
||||
stud (~> 0.0.22)
|
||||
logstash-filter-http (1.6.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-mixin-http_client (>= 7.4.0, < 8.0.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-filter-json (3.2.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-filter-kv (4.7.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-filter-memcached (1.2.0)
|
||||
dalli (~> 3)
|
||||
logstash-core-plugin-api (~> 2.0)
|
||||
logstash-filter-metrics (4.0.7)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
metriks
|
||||
thread_safe
|
||||
logstash-filter-mutate (3.5.8)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-prune (3.0.4)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-ruby (3.1.8)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-sleep (3.0.7)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-split (3.1.8)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-syslog_pri (3.2.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-filter-throttle (4.0.4)
|
||||
atomic
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
thread_safe
|
||||
logstash-filter-translate (3.4.2)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-deprecation_logger_support (~> 1.0)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-mixin-scheduler (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
psych (>= 5.1.0)
|
||||
logstash-filter-truncate (1.0.6)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-urldecode (3.0.6)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-useragent (3.3.5-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-filter-uuid (3.0.5)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-xml (4.2.1)
|
||||
logstash-core (>= 8.4.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
nokogiri (>= 1.13.8)
|
||||
xml-simple
|
||||
logstash-input-azure_event_hubs (1.5.1)
|
||||
logstash-codec-json
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (~> 2.0)
|
||||
stud (>= 0.0.22)
|
||||
logstash-input-beats (6.9.3-java)
|
||||
concurrent-ruby (~> 1.0)
|
||||
logstash-codec-multiline (>= 2.0.5)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
logstash-mixin-plugin_factory_support (~> 1.0)
|
||||
thread_safe (~> 0.3.5)
|
||||
logstash-input-couchdb_changes (3.1.6)
|
||||
json
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
stud (>= 0.0.22)
|
||||
logstash-input-dead_letter_queue (2.0.1)
|
||||
logstash-codec-plain
|
||||
logstash-core (>= 8.4.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-input-elastic_serverless_forwarder (0.1.5-java)
|
||||
logstash-codec-json_lines
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-input-http (>= 3.7.2)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
logstash-mixin-plugin_factory_support
|
||||
logstash-input-elasticsearch (4.21.1)
|
||||
elasticsearch (>= 7.17.9)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ca_trusted_fingerprint_support (~> 1.0)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
logstash-mixin-scheduler (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
manticore (>= 0.7.1)
|
||||
tzinfo
|
||||
tzinfo-data
|
||||
logstash-input-exec (3.6.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-scheduler (~> 1.0)
|
||||
stud (~> 0.0.22)
|
||||
logstash-input-file (4.4.6)
|
||||
addressable
|
||||
concurrent-ruby (~> 1.0)
|
||||
logstash-codec-multiline (~> 3.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-input-ganglia (3.1.4)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
stud (~> 0.0.22)
|
||||
logstash-input-gelf (3.3.2)
|
||||
gelfd2 (= 0.4.1)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
stud (>= 0.0.22, < 0.1.0)
|
||||
logstash-input-generator (3.1.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-input-graphite (3.0.6)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-input-tcp
|
||||
logstash-input-heartbeat (3.1.1)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-deprecation_logger_support (~> 1.0)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
stud
|
||||
logstash-input-http (3.10.2-java)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
logstash-input-http_poller (5.6.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0, >= 1.0.1)
|
||||
logstash-mixin-http_client (>= 7.4.0, < 8.0.0)
|
||||
logstash-mixin-scheduler (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-input-jms (3.2.2-java)
|
||||
jruby-jms (>= 1.2.0)
|
||||
logstash-codec-json (~> 3.0)
|
||||
logstash-codec-plain (~> 3.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
semantic_logger (< 4.0.0)
|
||||
logstash-input-pipe (3.1.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
stud (~> 0.0.22)
|
||||
logstash-input-redis (3.7.1)
|
||||
logstash-codec-json
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
redis (>= 4.0.1, < 5)
|
||||
logstash-input-stdin (3.4.0)
|
||||
jruby-stdin-channel
|
||||
logstash-codec-line
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-input-syslog (3.7.1)
|
||||
concurrent-ruby
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-filter-date
|
||||
logstash-filter-grok (>= 4.4.1)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
stud (>= 0.0.22, < 0.1.0)
|
||||
logstash-input-tcp (6.4.6-java)
|
||||
jruby-openssl (>= 0.12.2)
|
||||
logstash-codec-json
|
||||
logstash-codec-json_lines
|
||||
logstash-codec-line
|
||||
logstash-codec-multiline
|
||||
logstash-codec-plain
|
||||
logstash-core (>= 8.1.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
logstash-input-twitter (4.1.1)
|
||||
http-form_data (~> 2)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
public_suffix (> 4, < 6)
|
||||
stud (>= 0.0.22, < 0.1)
|
||||
twitter (= 6.2.0)
|
||||
logstash-input-udp (3.5.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.2)
|
||||
stud (~> 0.0.22)
|
||||
logstash-input-unix (3.1.2)
|
||||
logstash-codec-line
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-integration-aws (7.1.8-java)
|
||||
aws-sdk-cloudfront
|
||||
aws-sdk-cloudwatch
|
||||
aws-sdk-core (~> 3)
|
||||
aws-sdk-resourcegroups
|
||||
aws-sdk-s3
|
||||
aws-sdk-sns
|
||||
aws-sdk-sqs
|
||||
concurrent-ruby
|
||||
logstash-codec-json
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 2.1.12, <= 2.99)
|
||||
rexml
|
||||
rufus-scheduler (>= 3.0.9)
|
||||
stud (~> 0.0.22)
|
||||
logstash-integration-elastic_enterprise_search (3.0.1)
|
||||
elastic-enterprise-search (>= 7.16, < 9)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (~> 2.0)
|
||||
logstash-mixin-deprecation_logger_support (~> 1.0)
|
||||
manticore (~> 0.8)
|
||||
logstash-integration-jdbc (5.5.3)
|
||||
logstash-codec-plain
|
||||
logstash-core (>= 6.5.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-scheduler (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
lru_redux
|
||||
sequel (>= 5.74.0)
|
||||
tzinfo
|
||||
tzinfo-data
|
||||
logstash-integration-kafka (11.5.4-java)
|
||||
logstash-codec-json
|
||||
logstash-codec-plain
|
||||
logstash-core (>= 8.3.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-deprecation_logger_support (~> 1.0)
|
||||
manticore (>= 0.5.4, < 1.0.0)
|
||||
stud (>= 0.0.22, < 0.1.0)
|
||||
logstash-integration-logstash (1.0.4-java)
|
||||
logstash-codec-json_lines (~> 3.1)
|
||||
logstash-core-plugin-api (>= 2.1.12, <= 2.99)
|
||||
logstash-input-http (>= 3.7.0)
|
||||
logstash-mixin-http_client (~> 7.3)
|
||||
logstash-mixin-plugin_factory_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.1)
|
||||
stud
|
||||
logstash-integration-rabbitmq (7.4.0-java)
|
||||
back_pressure (~> 1.0)
|
||||
logstash-codec-json
|
||||
logstash-core (>= 6.5.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
march_hare (~> 4.0)
|
||||
stud (~> 0.0.22)
|
||||
logstash-integration-snmp (4.0.6-java)
|
||||
logstash-codec-plain
|
||||
logstash-core (>= 6.5.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.3)
|
||||
logstash-mixin-event_support (~> 1.0)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
logstash-mixin-validator_support (~> 1.0)
|
||||
logstash-mixin-ca_trusted_fingerprint_support (1.0.1-java)
|
||||
logstash-core (>= 6.8.0)
|
||||
logstash-mixin-deprecation_logger_support (1.0.0-java)
|
||||
logstash-core (>= 5.0.0)
|
||||
logstash-mixin-ecs_compatibility_support (1.3.0-java)
|
||||
logstash-core (>= 6.0.0)
|
||||
logstash-mixin-event_support (1.0.1-java)
|
||||
logstash-core (>= 6.8)
|
||||
logstash-mixin-http_client (7.5.0)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
manticore (>= 0.8.0, < 1.0.0)
|
||||
logstash-mixin-normalize_config_support (1.0.0-java)
|
||||
logstash-core (>= 6.8.0)
|
||||
logstash-mixin-plugin_factory_support (1.0.0-java)
|
||||
logstash-core (>= 7.13.0)
|
||||
logstash-mixin-scheduler (1.0.1-java)
|
||||
logstash-core (>= 7.16)
|
||||
rufus-scheduler (>= 3.0.9)
|
||||
logstash-mixin-validator_support (1.1.1-java)
|
||||
logstash-core (>= 6.8)
|
||||
logstash-output-csv (3.0.10)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-output-file
|
||||
logstash-output-elasticsearch (11.22.12-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-ca_trusted_fingerprint_support (~> 1.0)
|
||||
logstash-mixin-deprecation_logger_support (~> 1.0)
|
||||
logstash-mixin-ecs_compatibility_support (~> 1.0)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
manticore (>= 0.8.0, < 1.0.0)
|
||||
stud (~> 0.0, >= 0.0.17)
|
||||
logstash-output-email (4.1.3)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
mail (~> 2.8)
|
||||
mustache (>= 0.99.8)
|
||||
logstash-output-file (4.3.0)
|
||||
logstash-codec-json_lines
|
||||
logstash-codec-line
|
||||
logstash-core-plugin-api (>= 2.0.0, < 2.99)
|
||||
logstash-output-graphite (3.1.6)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-output-http (5.7.1)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-http_client (>= 7.4.0, < 8.0.0)
|
||||
logstash-output-lumberjack (3.1.9)
|
||||
jls-lumberjack (>= 0.0.26)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
stud
|
||||
logstash-output-nagios (3.0.6)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-output-null (3.0.5)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-output-pipe (3.0.6)
|
||||
logstash-codec-plain
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-output-redis (5.2.0)
|
||||
logstash-core (>= 6.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
redis (~> 4)
|
||||
stud
|
||||
logstash-output-stdout (3.1.4)
|
||||
logstash-codec-rubydebug
|
||||
logstash-core-plugin-api (>= 1.60.1, < 2.99)
|
||||
logstash-output-tcp (6.2.1)
|
||||
jruby-openssl (>= 0.12.2)
|
||||
logstash-codec-json
|
||||
logstash-core (>= 8.1.0)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-mixin-normalize_config_support (~> 1.0)
|
||||
stud
|
||||
logstash-output-udp (3.2.0)
|
||||
logstash-codec-json
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
logstash-output-webhdfs (3.1.0-java)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
webhdfs
|
||||
logstash-patterns-core (4.3.4)
|
||||
logstash-core-plugin-api (>= 1.60, <= 2.99)
|
||||
lru_redux (1.1.0)
|
||||
mail (2.8.1)
|
||||
mini_mime (>= 0.1.1)
|
||||
net-imap
|
||||
net-pop
|
||||
net-smtp
|
||||
manticore (0.9.1-java)
|
||||
openssl_pkcs8_pure
|
||||
march_hare (4.6.0-java)
|
||||
memoizable (0.4.2)
|
||||
thread_safe (~> 0.3, >= 0.3.1)
|
||||
method_source (1.1.0)
|
||||
metriks (0.9.9.8)
|
||||
atomic (~> 1.0)
|
||||
avl_tree (~> 1.2.0)
|
||||
hitimes (~> 1.1)
|
||||
mini_mime (1.1.5)
|
||||
minitar (1.0.2)
|
||||
msgpack (1.7.5-java)
|
||||
multi_json (1.15.0)
|
||||
multipart-post (2.4.1)
|
||||
murmurhash3 (0.1.6-java)
|
||||
mustache (0.99.8)
|
||||
mustermann (3.0.3)
|
||||
ruby2_keywords (~> 0.0.1)
|
||||
naught (1.1.0)
|
||||
net-http (0.6.0)
|
||||
uri
|
||||
net-imap (0.5.6)
|
||||
date
|
||||
net-protocol
|
||||
net-pop (0.1.2)
|
||||
net-protocol
|
||||
net-protocol (0.2.2)
|
||||
timeout
|
||||
net-smtp (0.5.1)
|
||||
net-protocol
|
||||
nio4r (2.7.4-java)
|
||||
nokogiri (1.17.2-java)
|
||||
racc (~> 1.4)
|
||||
octokit (4.25.1)
|
||||
faraday (>= 1, < 3)
|
||||
sawyer (~> 0.9)
|
||||
openssl_pkcs8_pure (0.0.0.2)
|
||||
paquet (0.2.1)
|
||||
parallel (1.26.3)
|
||||
parser (3.3.7.1)
|
||||
ast (~> 2.4.1)
|
||||
racc
|
||||
pleaserun (0.0.32)
|
||||
cabin (> 0)
|
||||
clamp
|
||||
dotenv
|
||||
insist
|
||||
mustache (= 0.99.8)
|
||||
stud
|
||||
polyglot (0.3.5)
|
||||
pry (0.15.2-java)
|
||||
coderay (~> 1.1)
|
||||
method_source (~> 1.0)
|
||||
spoon (~> 0.0)
|
||||
psych (5.2.2-java)
|
||||
date
|
||||
jar-dependencies (>= 0.1.7)
|
||||
public_suffix (5.1.1)
|
||||
puma (6.5.0-java)
|
||||
nio4r (~> 2.0)
|
||||
raabro (1.4.0)
|
||||
racc (1.8.1-java)
|
||||
rack (3.1.12)
|
||||
rack-protection (4.1.1)
|
||||
base64 (>= 0.1.0)
|
||||
logger (>= 1.6.0)
|
||||
rack (>= 3.0.0, < 4)
|
||||
rack-session (2.0.0)
|
||||
rack (>= 3.0.0)
|
||||
rack-test (2.1.0)
|
||||
rack (>= 1.3)
|
||||
rainbow (3.1.1)
|
||||
rake (13.2.1)
|
||||
redis (4.8.1)
|
||||
regexp_parser (2.9.3)
|
||||
rexml (3.3.9)
|
||||
rspec (3.13.0)
|
||||
rspec-core (~> 3.13.0)
|
||||
rspec-expectations (~> 3.13.0)
|
||||
rspec-mocks (~> 3.13.0)
|
||||
rspec-core (3.13.3)
|
||||
rspec-support (~> 3.13.0)
|
||||
rspec-expectations (3.13.3)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.13.0)
|
||||
rspec-mocks (3.13.2)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.13.0)
|
||||
rspec-support (3.13.2)
|
||||
rspec-wait (1.0.1)
|
||||
rspec (>= 3.4)
|
||||
rubocop (1.69.2)
|
||||
json (~> 2.3)
|
||||
language_server-protocol (>= 3.17.0)
|
||||
parallel (~> 1.10)
|
||||
parser (>= 3.3.0.2)
|
||||
rainbow (>= 2.2.2, < 4.0)
|
||||
regexp_parser (>= 2.9.3, < 3.0)
|
||||
rubocop-ast (>= 1.36.2, < 2.0)
|
||||
ruby-progressbar (~> 1.7)
|
||||
unicode-display_width (>= 2.4.0, < 4.0)
|
||||
rubocop-ast (1.36.2)
|
||||
parser (>= 3.3.1.0)
|
||||
ruby-maven-libs (3.9.9)
|
||||
ruby-progressbar (1.13.0)
|
||||
ruby2_keywords (0.0.5)
|
||||
rubyzip (1.3.0)
|
||||
rufus-scheduler (3.9.2)
|
||||
fugit (~> 1.1, >= 1.11.1)
|
||||
sawyer (0.9.2)
|
||||
addressable (>= 2.3.5)
|
||||
faraday (>= 0.17.3, < 3)
|
||||
semantic_logger (3.4.1)
|
||||
concurrent-ruby (~> 1.0)
|
||||
sequel (5.87.0)
|
||||
bigdecimal
|
||||
simple_oauth (0.3.1)
|
||||
simplecov (0.22.0)
|
||||
docile (~> 1.1)
|
||||
simplecov-html (~> 0.11)
|
||||
simplecov_json_formatter (~> 0.1)
|
||||
simplecov-html (0.13.1)
|
||||
simplecov-json (0.2.3)
|
||||
json
|
||||
simplecov
|
||||
simplecov_json_formatter (0.1.4)
|
||||
sinatra (4.1.1)
|
||||
logger (>= 1.6.0)
|
||||
mustermann (~> 3.0)
|
||||
rack (>= 3.0.0, < 4)
|
||||
rack-protection (= 4.1.1)
|
||||
rack-session (>= 2.0.0, < 3)
|
||||
tilt (~> 2.0)
|
||||
spoon (0.0.6)
|
||||
ffi
|
||||
stud (0.0.23)
|
||||
thread_safe (0.3.6-java)
|
||||
thwait (0.2.0)
|
||||
e2mmap
|
||||
tilt (2.4.0)
|
||||
timeout (0.4.3)
|
||||
treetop (1.6.14)
|
||||
polyglot (~> 0.3)
|
||||
twitter (6.2.0)
|
||||
addressable (~> 2.3)
|
||||
buftok (~> 0.2.0)
|
||||
equalizer (~> 0.0.11)
|
||||
http (~> 3.0)
|
||||
http-form_data (~> 2.0)
|
||||
http_parser.rb (~> 0.6.0)
|
||||
memoizable (~> 0.4.0)
|
||||
multipart-post (~> 2.0)
|
||||
naught (~> 1.0)
|
||||
simple_oauth (~> 0.3.0)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
tzinfo-data (1.2024.2)
|
||||
tzinfo (>= 1.0.0)
|
||||
unicode-display_width (2.6.0)
|
||||
uri (1.0.3)
|
||||
webhdfs (0.11.0)
|
||||
addressable
|
||||
webmock (3.24.0)
|
||||
addressable (>= 2.8.0)
|
||||
crack (>= 0.3.2)
|
||||
hashdiff (>= 0.4.0, < 2.0.0)
|
||||
xml-simple (1.1.9)
|
||||
rexml
|
||||
|
||||
PLATFORMS
|
||||
java
|
||||
universal-java-11
|
||||
|
||||
DEPENDENCIES
|
||||
belzebuth
|
||||
benchmark-ips
|
||||
bigdecimal (~> 3.1)
|
||||
childprocess (~> 4)
|
||||
ci_reporter_rspec (~> 1)
|
||||
date (= 3.3.3)
|
||||
fileutils (~> 1.7)
|
||||
flores (~> 0.0.8)
|
||||
fpm (~> 1, >= 1.14.1)
|
||||
gems (~> 1)
|
||||
jar-dependencies (= 0.4.1)
|
||||
json-schema (~> 2)
|
||||
logstash-codec-avro
|
||||
logstash-codec-cef
|
||||
logstash-codec-collectd
|
||||
logstash-codec-dots
|
||||
logstash-codec-edn
|
||||
logstash-codec-edn_lines
|
||||
logstash-codec-es_bulk
|
||||
logstash-codec-fluent
|
||||
logstash-codec-graphite
|
||||
logstash-codec-json
|
||||
logstash-codec-json_lines
|
||||
logstash-codec-line
|
||||
logstash-codec-msgpack
|
||||
logstash-codec-multiline
|
||||
logstash-codec-netflow
|
||||
logstash-codec-plain
|
||||
logstash-codec-rubydebug
|
||||
logstash-core!
|
||||
logstash-core-plugin-api!
|
||||
logstash-devutils (~> 2.6.0)
|
||||
logstash-filter-aggregate
|
||||
logstash-filter-anonymize
|
||||
logstash-filter-cidr
|
||||
logstash-filter-clone
|
||||
logstash-filter-csv
|
||||
logstash-filter-date
|
||||
logstash-filter-de_dot
|
||||
logstash-filter-dissect
|
||||
logstash-filter-dns
|
||||
logstash-filter-drop
|
||||
logstash-filter-elastic_integration
|
||||
logstash-filter-elasticsearch
|
||||
logstash-filter-fingerprint
|
||||
logstash-filter-geoip
|
||||
logstash-filter-grok
|
||||
logstash-filter-http
|
||||
logstash-filter-json
|
||||
logstash-filter-kv
|
||||
logstash-filter-memcached
|
||||
logstash-filter-metrics
|
||||
logstash-filter-mutate
|
||||
logstash-filter-prune
|
||||
logstash-filter-ruby
|
||||
logstash-filter-sleep
|
||||
logstash-filter-split
|
||||
logstash-filter-syslog_pri
|
||||
logstash-filter-throttle
|
||||
logstash-filter-translate
|
||||
logstash-filter-truncate
|
||||
logstash-filter-urldecode
|
||||
logstash-filter-useragent
|
||||
logstash-filter-uuid
|
||||
logstash-filter-xml
|
||||
logstash-input-azure_event_hubs
|
||||
logstash-input-beats
|
||||
logstash-input-couchdb_changes
|
||||
logstash-input-dead_letter_queue
|
||||
logstash-input-elastic_serverless_forwarder
|
||||
logstash-input-elasticsearch
|
||||
logstash-input-exec
|
||||
logstash-input-file
|
||||
logstash-input-ganglia
|
||||
logstash-input-gelf
|
||||
logstash-input-generator
|
||||
logstash-input-graphite
|
||||
logstash-input-heartbeat
|
||||
logstash-input-http
|
||||
logstash-input-http_poller
|
||||
logstash-input-jms
|
||||
logstash-input-pipe
|
||||
logstash-input-redis
|
||||
logstash-input-stdin
|
||||
logstash-input-syslog
|
||||
logstash-input-tcp
|
||||
logstash-input-twitter
|
||||
logstash-input-udp
|
||||
logstash-input-unix
|
||||
logstash-integration-aws
|
||||
logstash-integration-elastic_enterprise_search
|
||||
logstash-integration-jdbc
|
||||
logstash-integration-kafka
|
||||
logstash-integration-logstash
|
||||
logstash-integration-rabbitmq
|
||||
logstash-integration-snmp
|
||||
logstash-output-csv
|
||||
logstash-output-elasticsearch (>= 11.14.0)
|
||||
logstash-output-email
|
||||
logstash-output-file
|
||||
logstash-output-graphite
|
||||
logstash-output-http
|
||||
logstash-output-lumberjack
|
||||
logstash-output-nagios
|
||||
logstash-output-null
|
||||
logstash-output-pipe
|
||||
logstash-output-redis
|
||||
logstash-output-stdout
|
||||
logstash-output-tcp
|
||||
logstash-output-udp
|
||||
logstash-output-webhdfs
|
||||
minitar (~> 1)
|
||||
murmurhash3 (= 0.1.6)
|
||||
octokit (~> 4.25)
|
||||
paquet (~> 0.2)
|
||||
pleaserun (~> 0.0.28)
|
||||
polyglot
|
||||
psych (= 5.2.2)
|
||||
rack-test
|
||||
rake (~> 13)
|
||||
rspec (~> 3.5)
|
||||
rubocop
|
||||
ruby-maven-libs (~> 3, >= 3.9.6.1)
|
||||
ruby-progressbar (~> 1)
|
||||
rubyzip (~> 1)
|
||||
simplecov (~> 0.22.0)
|
||||
simplecov-json
|
||||
stud (~> 0.0.22)
|
||||
thwait
|
||||
treetop
|
||||
webmock (~> 3)
|
||||
|
||||
BUNDLED WITH
|
||||
2.3.26
|
|
@ -26,6 +26,8 @@ gem "stud", "~> 0.0.22", :group => :build
|
|||
gem "fileutils", "~> 1.7"
|
||||
|
||||
gem "rubocop", :group => :development
|
||||
# rubocop-ast 1.43.0 carries a dep on `prism` which requires native c extensions
|
||||
gem 'rubocop-ast', '= 1.42.0', :group => :development
|
||||
gem "belzebuth", :group => :development
|
||||
gem "benchmark-ips", :group => :development
|
||||
gem "ci_reporter_rspec", "~> 1", :group => :development
|
||||
|
@ -43,3 +45,5 @@ gem "date", "= 3.3.3"
|
|||
gem "thwait"
|
||||
gem "bigdecimal", "~> 3.1"
|
||||
gem "psych", "5.2.2"
|
||||
gem "cgi", "0.3.7" # Pins until a new jruby version with updated cgi is released
|
||||
gem "uri", "0.12.3" # Pins until a new jruby version with updated cgi is released
|
||||
|
|
NOTICE.TXT: 997 changes (diff suppressed because it is too large)
|
@ -20,7 +20,6 @@ supported platforms, from [downloads page](https://www.elastic.co/downloads/logs
|
|||
|
||||
- [Logstash Forum](https://discuss.elastic.co/c/logstash)
|
||||
- [Logstash Documentation](https://www.elastic.co/guide/en/logstash/current/index.html)
|
||||
- [#logstash on freenode IRC](https://webchat.freenode.net/?channels=logstash)
|
||||
- [Logstash Product Information](https://www.elastic.co/products/logstash)
|
||||
- [Elastic Support](https://www.elastic.co/subscriptions)
|
||||
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
@echo off
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
cd /d "%~dp0\.."
|
||||
for /f %%i in ('cd') do set RESULT=%%i
|
||||
|
||||
"%JAVACMD%" -cp "!RESULT!\tools\ingest-converter\build\libs\ingest-converter.jar;*" ^
|
||||
org.logstash.ingest.Pipeline %*
|
||||
|
||||
endlocal
|
|
@ -1,4 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
java -cp "$(cd `dirname $0`/..; pwd)"'/tools/ingest-converter/build/libs/ingest-converter.jar:*' \
|
||||
org.logstash.ingest.Pipeline "$@"
|
|
@ -6,7 +6,7 @@ set params='%*'
|
|||
if "%1" == "-V" goto version
|
||||
if "%1" == "--version" goto version
|
||||
|
||||
call "%~dp0setup.bat" || exit /b 1
|
||||
1>&2 (call "%~dp0setup.bat") || exit /b 1
|
||||
if errorlevel 1 (
|
||||
if not defined nopauseonerror (
|
||||
pause
|
||||
|
|
|
@ -186,8 +186,8 @@ setup_vendored_jruby() {
|
|||
}
|
||||
|
||||
setup() {
|
||||
setup_java
|
||||
setup_vendored_jruby
|
||||
>&2 setup_java
|
||||
>&2 setup_vendored_jruby
|
||||
}
|
||||
|
||||
ruby_exec() {
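
The `>&2 setup_java` / `>&2 setup_vendored_jruby` calls above redirect everything those helpers print to stderr, keeping stdout free for the launcher's real output. A minimal, self-contained sketch of the same idiom; the function names here are illustrative, not taken from the launcher:

```bash
#!/usr/bin/env bash
noisy_setup() {
  echo "probing java..."   # would normally land on stdout
}

# Redirect the function's stdout to stderr at the call site,
# so the value below is the only thing emitted on stdout.
>&2 noisy_setup
echo "important-value"
```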
|
||||
|
|
build.gradle: 19 changes
|
@ -328,7 +328,6 @@ tasks.register("assembleTarDistribution") {
|
|||
inputs.files fileTree("${projectDir}/bin")
|
||||
inputs.files fileTree("${projectDir}/config")
|
||||
inputs.files fileTree("${projectDir}/lib")
|
||||
inputs.files fileTree("${projectDir}/modules")
|
||||
inputs.files fileTree("${projectDir}/logstash-core-plugin-api")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/lib")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/src")
|
||||
|
@ -345,7 +344,6 @@ tasks.register("assembleOssTarDistribution") {
|
|||
inputs.files fileTree("${projectDir}/bin")
|
||||
inputs.files fileTree("${projectDir}/config")
|
||||
inputs.files fileTree("${projectDir}/lib")
|
||||
inputs.files fileTree("${projectDir}/modules")
|
||||
inputs.files fileTree("${projectDir}/logstash-core-plugin-api")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/lib")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/src")
|
||||
|
@ -360,7 +358,6 @@ tasks.register("assembleZipDistribution") {
|
|||
inputs.files fileTree("${projectDir}/bin")
|
||||
inputs.files fileTree("${projectDir}/config")
|
||||
inputs.files fileTree("${projectDir}/lib")
|
||||
inputs.files fileTree("${projectDir}/modules")
|
||||
inputs.files fileTree("${projectDir}/logstash-core-plugin-api")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/lib")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/src")
|
||||
|
@ -377,7 +374,6 @@ tasks.register("assembleOssZipDistribution") {
|
|||
inputs.files fileTree("${projectDir}/bin")
|
||||
inputs.files fileTree("${projectDir}/config")
|
||||
inputs.files fileTree("${projectDir}/lib")
|
||||
inputs.files fileTree("${projectDir}/modules")
|
||||
inputs.files fileTree("${projectDir}/logstash-core-plugin-api")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/lib")
|
||||
inputs.files fileTree("${projectDir}/logstash-core/src")
|
||||
|
@ -412,7 +408,7 @@ def qaBuildPath = "${buildDir}/qa/integration"
|
|||
def qaVendorPath = "${qaBuildPath}/vendor"
|
||||
|
||||
tasks.register("installIntegrationTestGems") {
|
||||
dependsOn unpackTarDistribution
|
||||
dependsOn assembleTarDistribution
|
||||
def gemfilePath = file("${projectDir}/qa/integration/Gemfile")
|
||||
inputs.files gemfilePath
|
||||
inputs.files file("${projectDir}/qa/integration/integration_tests.gemspec")
|
||||
|
@ -563,7 +559,8 @@ project(":logstash-integration-tests") {
|
|||
systemProperty 'org.logstash.integration.specs', rubyIntegrationSpecs
|
||||
environment "FEATURE_FLAG", System.getenv('FEATURE_FLAG')
|
||||
workingDir integrationTestPwd
|
||||
dependsOn = [installIntegrationTestGems, copyProductionLog4jConfiguration]
|
||||
dependsOn installIntegrationTestGems
|
||||
dependsOn copyProductionLog4jConfiguration
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -783,10 +780,16 @@ class JDKDetails {
|
|||
}
|
||||
|
||||
tasks.register("lint") {
|
||||
// Calls rake's 'lint' task
|
||||
description = "Lint Ruby source files. Use -PrubySource=file1.rb,file2.rb to specify files"
|
||||
dependsOn installDevelopmentGems
|
||||
doLast {
|
||||
rake(projectDir, buildDir, 'lint:report')
|
||||
if (project.hasProperty("rubySource")) {
|
||||
// Split the comma-separated files and pass them as separate arguments
|
||||
def files = project.property("rubySource").split(",")
|
||||
rake(projectDir, buildDir, "lint:report", *files)
|
||||
} else {
|
||||
rake(projectDir, buildDir, "lint:report")
|
||||
}
|
||||
}
|
||||
}
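
A usage sketch of the reworked `lint` task; the Ruby file paths are illustrative placeholders rather than paths the build is known to contain:

```bash
# Lint only specific files (comma-separated, as the task description says):
./gradlew lint -PrubySource=logstash-core/lib/logstash/agent.rb,lib/bootstrap/bundler.rb

# Without -PrubySource the task falls back to linting everything, as before.
./gradlew lint
```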
|
||||
|
||||
|
|
|
@ -142,7 +142,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -185,7 +185,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -236,7 +236,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -294,7 +294,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -345,7 +345,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -388,7 +388,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -407,6 +407,7 @@ spec:
|
|||
ELASTIC_SLACK_NOTIFICATIONS_ENABLED: 'true'
|
||||
SLACK_NOTIFICATIONS_CHANNEL: '#logstash-build'
|
||||
SLACK_NOTIFICATIONS_ON_SUCCESS: 'false'
|
||||
SLACK_NOTIFICATIONS_SKIP_FOR_RETRIES: 'true'
|
||||
teams:
|
||||
ingest-fp:
|
||||
access_level: MANAGE_BUILD_AND_READ
|
||||
|
@ -438,7 +439,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -454,7 +455,8 @@ spec:
|
|||
build_pull_requests: false
|
||||
build_tags: false
|
||||
trigger_mode: code
|
||||
filter_condition: 'build.branch !~ /^backport.*$/'
|
||||
filter_condition: >-
|
||||
build.branch !~ /^backport.*$/ && build.branch !~ /^mergify\/bp\/.*$/
|
||||
filter_enabled: true
|
||||
cancel_intermediate_builds: false
|
||||
skip_intermediate_builds: false
|
||||
|
@ -493,7 +495,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -535,7 +537,7 @@ metadata:
|
|||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: buildkite
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
|
@ -615,7 +617,7 @@ spec:
|
|||
kind: Pipeline
|
||||
metadata:
|
||||
name: logstash-benchmark-pipeline
|
||||
description: ':logstash: The Benchmark pipeline'
|
||||
description: ':running: The Benchmark pipeline for snapshot version'
|
||||
spec:
|
||||
repository: elastic/logstash
|
||||
pipeline_file: ".buildkite/benchmark_pipeline.yml"
|
||||
|
@ -646,6 +648,54 @@ spec:
|
|||
# SECTION END: Benchmark pipeline
|
||||
# *******************************
|
||||
|
||||
# ***********************************
|
||||
# SECTION START: Benchmark Marathon
|
||||
# ***********************************
|
||||
---
|
||||
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/rre.schema.json
|
||||
apiVersion: backstage.io/v1alpha1
|
||||
kind: Resource
|
||||
metadata:
|
||||
name: logstash-benchmark-marathon-pipeline
|
||||
description: Buildkite pipeline for benchmarking multi-version
|
||||
links:
|
||||
- title: 'Logstash Benchmark Marathon'
|
||||
url: https://buildkite.com/elastic/logstash-benchmark-marathon-pipeline
|
||||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:logstash
|
||||
system: platform-ingest
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
metadata:
|
||||
name: logstash-benchmark-marathon-pipeline
|
||||
description: ':running: The Benchmark Marathon for multi-version'
|
||||
spec:
|
||||
repository: elastic/logstash
|
||||
pipeline_file: ".buildkite/benchmark_marathon_pipeline.yml"
|
||||
maximum_timeout_in_minutes: 480
|
||||
provider_settings:
|
||||
trigger_mode: none # don't trigger jobs from github activity
|
||||
env:
|
||||
ELASTIC_SLACK_NOTIFICATIONS_ENABLED: 'false'
|
||||
SLACK_NOTIFICATIONS_CHANNEL: '#logstash-build'
|
||||
SLACK_NOTIFICATIONS_ON_SUCCESS: 'false'
|
||||
SLACK_NOTIFICATIONS_SKIP_FOR_RETRIES: 'true'
|
||||
teams:
|
||||
ingest-fp:
|
||||
access_level: MANAGE_BUILD_AND_READ
|
||||
logstash:
|
||||
access_level: MANAGE_BUILD_AND_READ
|
||||
ingest-eng-prod:
|
||||
access_level: MANAGE_BUILD_AND_READ
|
||||
everyone:
|
||||
access_level: READ_ONLY
|
||||
|
||||
# *******************************
|
||||
# SECTION END: Benchmark Marathon
|
||||
# *******************************
|
||||
|
||||
# ***********************************
|
||||
# Declare Health Report Tests pipeline
|
||||
# ***********************************
|
||||
|
@ -672,7 +722,7 @@ spec:
|
|||
spec:
|
||||
repository: elastic/logstash
|
||||
pipeline_file: ".buildkite/health_report_tests_pipeline.yml"
|
||||
maximum_timeout_in_minutes: 60
|
||||
maximum_timeout_in_minutes: 30 # usually tests last max ~17mins
|
||||
provider_settings:
|
||||
trigger_mode: none # don't trigger jobs from github activity
|
||||
env:
|
||||
|
@ -743,8 +793,8 @@ spec:
|
|||
message: Weekly trigger of JDK update availability pipeline per branch
|
||||
env:
|
||||
PIPELINES_TO_TRIGGER: 'logstash-jdk-availability-check-pipeline'
|
||||
Weekly JDK availability check (8.x):
|
||||
branch: 8.x
|
||||
Weekly JDK availability check (8.19):
|
||||
branch: "8.19"
|
||||
cronline: 0 2 * * 1 # every Monday@2AM UTC
|
||||
message: Weekly trigger of JDK update availability pipeline per branch
|
||||
env:
|
||||
|
@ -752,4 +802,4 @@ spec:
|
|||
|
||||
# *******************************
|
||||
# SECTION END: JDK check pipeline
|
||||
# *******************************
|
||||
# *******************************
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
{
|
||||
"notice": "This file is not maintained outside of the main branch and should only be used for tooling.",
|
||||
"branches": [
|
||||
{
|
||||
"branch": "main"
|
||||
},
|
||||
{
|
||||
"branch": "8.15"
|
||||
},
|
||||
{
|
||||
"branch": "7.17"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -15,7 +15,6 @@ fi
|
|||
# Can run either a specific flavor, or all flavors -
|
||||
# eg `ci/acceptance_tests.sh oss` will run tests for open source container
|
||||
# `ci/acceptance_tests.sh full` will run tests for the default container
|
||||
# `ci/acceptance_tests.sh ubi8` will run tests for the ubi8 based container
|
||||
# `ci/acceptance_tests.sh wolfi` will run tests for the wolfi based container
|
||||
# `ci/acceptance_tests.sh` will run tests for all containers
|
||||
SELECTED_TEST_SUITE=$1
|
||||
|
@ -49,23 +48,13 @@ if [[ $SELECTED_TEST_SUITE == "oss" ]]; then
|
|||
elif [[ $SELECTED_TEST_SUITE == "full" ]]; then
|
||||
echo "--- Building $SELECTED_TEST_SUITE docker images"
|
||||
cd $LS_HOME
|
||||
rake artifact:docker
|
||||
rake artifact:build_docker_full
|
||||
echo "--- Acceptance: Installing dependencies"
|
||||
cd $QA_DIR
|
||||
bundle install
|
||||
|
||||
echo "--- Acceptance: Running the tests"
|
||||
bundle exec rspec docker/spec/full/*_spec.rb
|
||||
elif [[ $SELECTED_TEST_SUITE == "ubi8" ]]; then
|
||||
echo "--- Building $SELECTED_TEST_SUITE docker images"
|
||||
cd $LS_HOME
|
||||
rake artifact:docker_ubi8
|
||||
echo "--- Acceptance: Installing dependencies"
|
||||
cd $QA_DIR
|
||||
bundle install
|
||||
|
||||
echo "--- Acceptance: Running the tests"
|
||||
bundle exec rspec docker/spec/ubi8/*_spec.rb
|
||||
elif [[ $SELECTED_TEST_SUITE == "wolfi" ]]; then
|
||||
echo "--- Building $SELECTED_TEST_SUITE docker images"
|
||||
cd $LS_HOME
|
||||
|
|
|
@ -1,27 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
# get_test_half returns either the first or second half of integration tests
|
||||
# Usage: get_test_half <half_number>
|
||||
# half_number: 0 for first half, 1 for second half
|
||||
get_test_half() {
|
||||
local half_number=$1
|
||||
# Ensure only spec files go to stdout
|
||||
pushd qa/integration >/dev/null 2>&1
|
||||
|
||||
# Collect all spec files
|
||||
local glob1=(specs/*spec.rb)
|
||||
local glob2=(specs/**/*spec.rb)
|
||||
local all_specs=("${glob1[@]}" "${glob2[@]}")
|
||||
|
||||
# Calculate the split point
|
||||
local split_point=$((${#all_specs[@]} / 2))
|
||||
|
||||
# Get the requested half (:: is "up to", : is "from")
|
||||
if [[ $half_number -eq 0 ]]; then
|
||||
local specs="${all_specs[@]::$split_point}"
|
||||
else
|
||||
local specs="${all_specs[@]:$split_point}"
|
||||
fi
|
||||
popd >/dev/null 2>&1
|
||||
echo "$specs"
|
||||
}
|
|
@ -10,9 +10,6 @@ export GRADLE_OPTS="-Xmx2g -Dorg.gradle.jvmargs=-Xmx2g -Dorg.gradle.daemon=false
|
|||
export SPEC_OPTS="--order rand --format documentation"
|
||||
export CI=true
|
||||
|
||||
# Source shared function for splitting integration tests
|
||||
source "$(dirname "${BASH_SOURCE[0]}")/get-test-half.sh"
|
||||
|
||||
if [ -n "$BUILD_JAVA_HOME" ]; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS -Dorg.gradle.java.home=$BUILD_JAVA_HOME"
|
||||
fi
|
||||
|
@ -22,14 +19,15 @@ if [[ $1 = "setup" ]]; then
|
|||
exit 0
|
||||
|
||||
elif [[ $1 == "split" ]]; then
|
||||
if [[ $2 =~ ^[01]$ ]]; then
|
||||
specs=$(get_test_half "$2")
|
||||
echo "Running half $2 of integration specs: $specs"
|
||||
./gradlew runIntegrationTests -PrubyIntegrationSpecs="$specs" --console=plain
|
||||
else
|
||||
echo "Error, must specify 0 or 1 after the split. For example ci/integration_tests.sh split 0"
|
||||
exit 1
|
||||
fi
|
||||
# Source shared function for splitting integration tests
|
||||
source "$(dirname "${BASH_SOURCE[0]}")/partition-files.lib.sh"
|
||||
|
||||
index="${2:?index}"
|
||||
count="${3:-2}"
|
||||
specs=($(cd qa/integration; partition_files "${index}" "${count}" < <(find specs -name '*_spec.rb') ))
|
||||
|
||||
echo "Running integration tests partition[${index}] of ${count}: ${specs[*]}"
|
||||
./gradlew runIntegrationTests -PrubyIntegrationSpecs="${specs[*]}" --console=plain
|
||||
|
||||
elif [[ ! -z $@ ]]; then
|
||||
echo "Running integration tests 'rspec $@'"
|
||||
|
|
|
@ -1,13 +1,15 @@
|
|||
{
|
||||
"releases": {
|
||||
"5.x": "5.6.16",
|
||||
"6.x": "6.8.23",
|
||||
"7.x": "7.17.23",
|
||||
"8.x": "8.15.1"
|
||||
"7.current": "7.17.28",
|
||||
"8.previous": "8.17.5",
|
||||
"8.current": "8.18.0"
|
||||
},
|
||||
"snapshots": {
|
||||
"7.x": "7.17.24-SNAPSHOT",
|
||||
"8.x": "8.15.2-SNAPSHOT",
|
||||
"main": "8.16.0-SNAPSHOT"
|
||||
"7.current": "7.17.29-SNAPSHOT",
|
||||
"8.previous": "8.17.6-SNAPSHOT",
|
||||
"8.current": "8.18.1-SNAPSHOT",
|
||||
"8.next": "8.19.0-SNAPSHOT",
|
||||
"9.next": "9.0.1-SNAPSHOT",
|
||||
"main": "9.1.0-SNAPSHOT"
|
||||
}
|
||||
}
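
A hedged sketch of how a CI script could consume the renamed alias-style keys; the file path `ci/logstash_releases.json` and the consuming snippet are assumptions, not something the diff confirms:

```bash
# Resolve the current 8.x snapshot build to test against (file path is assumed).
SNAPSHOT_VERSION="$(jq -r '.snapshots["8.current"]' ci/logstash_releases.json)"
echo "Testing against ${SNAPSHOT_VERSION}"   # e.g. 8.18.1-SNAPSHOT
```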
|
||||
|
|
ci/partition-files.lib.sh: new executable file, 78 lines
|
@ -0,0 +1,78 @@
|
|||
#!/bin/bash
|
||||
|
||||
# partition_files returns a consistent partition of the filenames given on stdin
|
||||
# Usage: partition_files <partition_index> <partition_count=2> < <(ls files)
|
||||
# partition_index: the zero-based index of the partition to select `[0,partition_count)`
|
||||
# partition_count: the number of partitions `[2,#files]`
|
||||
partition_files() (
|
||||
set -e
|
||||
|
||||
local files
|
||||
# ensure files is consistently sorted and distinct
|
||||
IFS=$'\n' read -ra files -d '' <<<"$(cat - | sort | uniq)" || true
|
||||
|
||||
local partition_index="${1:?}"
|
||||
local partition_count="${2:?}"
|
||||
|
||||
_error () { >&2 echo "ERROR: ${1:-UNSPECIFIED}"; exit 1; }
|
||||
|
||||
# safeguard against nonsense invocations
|
||||
if (( ${#files[@]} < 2 )); then
|
||||
_error "#files(${#files[@]}) must be at least 2 in order to partition"
|
||||
elif ( ! [[ "${partition_count}" =~ ^[0-9]+$ ]] ) || (( partition_count < 2 )) || (( partition_count > ${#files[@]})); then
|
||||
_error "partition_count(${partition_count}) must be a number that is at least 2 and not greater than #files(${#files[@]})"
|
||||
elif ( ! [[ "${partition_index}" =~ ^[0-9]+$ ]] ) || (( partition_index < 0 )) || (( partition_index >= $partition_count )) ; then
|
||||
_error "partition_index(${partition_index}) must be a number that is greater 0 and less than partition_count(${partition_count})"
|
||||
fi
|
||||
|
||||
# round-robin: emit the entries that fall into our selected partition
|
||||
for index in "${!files[@]}"; do
|
||||
partition="$(( index % partition_count ))"
|
||||
if (( partition == partition_index )); then
|
||||
echo "${files[$index]}"
|
||||
fi
|
||||
done
|
||||
)
|
||||
|
||||
if [[ "$0" == "${BASH_SOURCE[0]}" ]]; then
|
||||
if [[ "$1" == "test" ]]; then
|
||||
status=0
|
||||
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
file_list="$( cd "${SCRIPT_DIR}"; find . -type f )"
|
||||
|
||||
# for any legal partitioning into N partitions, we ensure that
|
||||
# the combined output of `partition_files I N` where `I` is all numbers in
|
||||
# the range `[0,N)` produces no repeats and no omissions, even if the
|
||||
# input list is not consistently ordered.
|
||||
for n in $(seq 2 $(wc -l <<<"${file_list}")); do
|
||||
result=""
|
||||
for i in $(seq 0 $(( n - 1 ))); do
|
||||
for file in $(partition_files $i $n <<<"$( shuf <<<"${file_list}" )"); do
|
||||
result+="${file}"$'\n'
|
||||
done
|
||||
done
|
||||
|
||||
repeated="$( uniq --repeated <<<"$( sort <<<"${result}" )" )"
|
||||
if (( $(printf "${repeated}" | wc -l) > 0 )); then
|
||||
status=1
|
||||
echo "[n=${n}]FAIL(repeated):"$'\n'"${repeated}"
|
||||
fi
|
||||
|
||||
missing=$( comm -23 <(sort <<<"${file_list}") <( sort <<<"${result}" ) )
|
||||
if (( $(printf "${missing}" | wc -l) > 0 )); then
|
||||
status=1
|
||||
echo "[n=${n}]FAIL(omitted):"$'\n'"${missing}"
|
||||
fi
|
||||
done
|
||||
|
||||
if (( status > 0 )); then
|
||||
echo "There were failures. The input list was:"
|
||||
echo "${file_list}"
|
||||
fi
|
||||
|
||||
exit "${status}"
|
||||
else
|
||||
partition_files $@
|
||||
fi
|
||||
fi
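
A quick usage sketch of the library above; the spec glob mirrors the integration-test caller and is illustrative:

```bash
source ci/partition-files.lib.sh

# Emit the files belonging to partition 0 of 2; input arrives on stdin.
partition_files 0 2 < <(find qa/integration/specs -name '*_spec.rb')

# Built-in self-test: verifies every partitioning covers each file exactly once.
bash ci/partition-files.lib.sh test
```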
|
|
@ -28,7 +28,9 @@ build_logstash() {
|
|||
}
|
||||
|
||||
index_test_data() {
|
||||
curl -X POST -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_bulk" -H 'Content-Type: application/json' --data-binary @"$CURRENT_DIR/test_data/book.json"
|
||||
curl -X POST -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_bulk" \
|
||||
-H 'x-elastic-product-origin: logstash' \
|
||||
-H 'Content-Type: application/json' --data-binary @"$CURRENT_DIR/test_data/book.json"
|
||||
}
|
||||
|
||||
# $1: check function
|
||||
|
|
|
@ -7,7 +7,8 @@ export PIPELINE_NAME='gen_es'
|
|||
|
||||
# update pipeline and check response code
|
||||
index_pipeline() {
|
||||
RESP_CODE=$(curl -s -w "%{http_code}" -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_logstash/pipeline/$1" -H 'Content-Type: application/json' -d "$2")
|
||||
RESP_CODE=$(curl -s -w "%{http_code}" -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_logstash/pipeline/$1" \
|
||||
-H 'x-elastic-product-origin: logstash' -H 'Content-Type: application/json' -d "$2")
|
||||
if [[ $RESP_CODE -ge '400' ]]; then
|
||||
echo "failed to update pipeline for Central Pipeline Management. Got $RESP_CODE from Elasticsearch"
|
||||
exit 1
|
||||
|
@ -34,7 +35,7 @@ check_plugin() {
|
|||
}
|
||||
|
||||
delete_pipeline() {
|
||||
curl -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -X DELETE "$ES_ENDPOINT/_logstash/pipeline/$PIPELINE_NAME" -H 'Content-Type: application/json';
|
||||
curl -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' -X DELETE "$ES_ENDPOINT/_logstash/pipeline/$PIPELINE_NAME" -H 'Content-Type: application/json';
|
||||
}
|
||||
|
||||
cpm_clean_up_and_get_result() {
|
||||
|
|
|
@ -6,10 +6,12 @@ source ./$(dirname "$0")/common.sh
|
|||
deploy_ingest_pipeline() {
|
||||
PIPELINE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_ingest/pipeline/integration-logstash_test.events-default" \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'x-elastic-product-origin: logstash' \
|
||||
--data-binary @"$CURRENT_DIR/test_data/ingest_pipeline.json")
|
||||
|
||||
TEMPLATE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/_index_template/logs-serverless-default-template" \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'x-elastic-product-origin: logstash' \
|
||||
--data-binary @"$CURRENT_DIR/test_data/index_template.json")
|
||||
|
||||
# ingest pipeline is likely to be there from the last run
|
||||
|
@ -29,7 +31,7 @@ check_integration_filter() {
|
|||
}
|
||||
|
||||
get_doc_msg_length() {
|
||||
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.004-default/_search?size=1" | jq '.hits.hits[0]._source.message | length'
|
||||
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.004-default/_search?size=1" -H 'x-elastic-product-origin: logstash' | jq '.hits.hits[0]._source.message | length'
|
||||
}
|
||||
|
||||
# ensure no double run of ingest pipeline
|
||||
|
|
|
@ -9,7 +9,7 @@ check_named_index() {
|
|||
}
|
||||
|
||||
get_data_stream_count() {
|
||||
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/logs-$INDEX_NAME.001-default/_count" | jq '.count // 0'
|
||||
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' "$ES_ENDPOINT/logs-$INDEX_NAME.001-default/_count" | jq '.count // 0'
|
||||
}
|
||||
|
||||
compare_data_stream_count() {
|
||||
|
|
|
@ -10,7 +10,7 @@ export EXIT_CODE="0"
|
|||
|
||||
create_pipeline() {
|
||||
RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
|
||||
-H 'Content-Type: application/json' -H 'kbn-xsrf: logstash' \
|
||||
-H 'Content-Type: application/json' -H 'kbn-xsrf: logstash' -H 'x-elastic-product-origin: logstash' \
|
||||
--data-binary @"$CURRENT_DIR/test_data/$PIPELINE_NAME.json")
|
||||
|
||||
if [[ RESP_CODE -ge '400' ]]; then
|
||||
|
@ -20,7 +20,8 @@ create_pipeline() {
|
|||
}
|
||||
|
||||
get_pipeline() {
|
||||
RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME")
|
||||
RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' \
|
||||
"$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME") \
|
||||
SOURCE_BODY=$(cat "$CURRENT_DIR/test_data/$PIPELINE_NAME.json")
|
||||
|
||||
RESP_PIPELINE_NAME=$(echo "$RESP_BODY" | jq -r '.id')
|
||||
|
@ -41,7 +42,8 @@ get_pipeline() {
|
|||
}
|
||||
|
||||
list_pipeline() {
|
||||
RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipelines" | jq --arg name "$PIPELINE_NAME" '.pipelines[] | select(.id==$name)' )
|
||||
RESP_BODY=$(curl -s -X GET -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' \
|
||||
"$KB_ENDPOINT/api/logstash/pipelines" | jq --arg name "$PIPELINE_NAME" '.pipelines[] | select(.id==$name)' )
|
||||
if [[ -z "$RESP_BODY" ]]; then
|
||||
EXIT_CODE=$(( EXIT_CODE + 1 ))
|
||||
echo "Fail to list pipeline."
|
||||
|
@ -49,7 +51,8 @@ list_pipeline() {
|
|||
}
|
||||
|
||||
delete_pipeline() {
|
||||
RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X DELETE -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
|
||||
RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X DELETE -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' \
|
||||
"$KB_ENDPOINT/api/logstash/pipeline/$PIPELINE_NAME" \
|
||||
-H 'Content-Type: application/json' -H 'kbn-xsrf: logstash' \
|
||||
--data-binary @"$CURRENT_DIR/test_data/$PIPELINE_NAME.json")
|
||||
|
||||
|
|
|
@ -40,7 +40,7 @@ stop_metricbeat() {
|
|||
}
|
||||
|
||||
get_monitor_count() {
|
||||
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" "$ES_ENDPOINT/$INDEX_NAME/_count" | jq '.count // 0'
|
||||
curl -s -H "Authorization: ApiKey $TESTER_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' "$ES_ENDPOINT/$INDEX_NAME/_count" | jq '.count // 0'
|
||||
}
|
||||
|
||||
compare_monitor_count() {
|
||||
|
|
|
@ -6,7 +6,7 @@ set -ex
|
|||
source ./$(dirname "$0")/common.sh
|
||||
|
||||
get_monitor_count() {
|
||||
curl -s -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" "$ES_ENDPOINT/.monitoring-logstash-7-*/_count" | jq '.count'
|
||||
curl -s -H "Authorization: ApiKey $LS_ROLE_API_KEY_ENCODED" -H 'x-elastic-product-origin: logstash' "$ES_ENDPOINT/.monitoring-logstash-7-*/_count" | jq '.count'
|
||||
}
|
||||
|
||||
compare_monitor_count() {
|
||||
|
|
|
@ -16,10 +16,6 @@
|
|||
##
|
||||
################################################################
|
||||
|
||||
## GC configuration
|
||||
11-13:-XX:+UseConcMarkSweepGC
|
||||
11-13:-XX:CMSInitiatingOccupancyFraction=75
|
||||
11-13:-XX:+UseCMSInitiatingOccupancyOnly
|
||||
|
||||
## Locale
|
||||
# Set the locale language
|
||||
|
@ -59,11 +55,7 @@
|
|||
#-XX:HeapDumpPath=${LOGSTASH_HOME}/heapdump.hprof
|
||||
|
||||
## GC logging
|
||||
#-Xlog:gc*,gc+age=trace,safepoint:file=@loggc@:utctime,pid,tags:filecount=32,filesize=64m
|
||||
|
||||
# log GC status to a file with time stamps
|
||||
# ensure the directory exists
|
||||
#-Xloggc:${LS_GC_LOG_FILE}
|
||||
#-Xlog:gc*,gc+age=trace,safepoint:file=${LS_GC_LOG_FILE}:utctime,pid,tags:filecount=32,filesize=64m
|
||||
|
||||
# Entropy source for randomness
|
||||
-Djava.security.egd=file:/dev/urandom
|
||||
|
@ -79,11 +71,11 @@
|
|||
# text values with sizes less than or equal to this limit will be treated as invalid.
|
||||
# This value should be higher than `logstash.jackson.stream-read-constraints.max-number-length`.
|
||||
# The jackson library defaults to 20000000 or 20MB, whereas Logstash defaults to 200MB or 200000000 characters.
|
||||
-Dlogstash.jackson.stream-read-constraints.max-string-length=200000000
|
||||
#-Dlogstash.jackson.stream-read-constraints.max-string-length=200000000
|
||||
#
|
||||
# Sets the maximum number length (in chars or bytes, depending on input context).
|
||||
# The jackson library defaults to 1000, whereas Logstash defaults to 10000.
|
||||
-Dlogstash.jackson.stream-read-constraints.max-number-length=10000
|
||||
#-Dlogstash.jackson.stream-read-constraints.max-number-length=10000
|
||||
#
|
||||
# Sets the maximum nesting depth. The depth is a count of objects and arrays that have not
|
||||
# been closed, `{` and `[` respectively.
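
Commenting the two flags out leaves Logstash on its built-in defaults, which per the comments above are the same values; a deployment that wants to pin them explicitly without editing `jvm.options` could pass them through `LS_JAVA_OPTS`, assuming the usual environment pass-through, with a placeholder config path:

```bash
export LS_JAVA_OPTS="-Dlogstash.jackson.stream-read-constraints.max-string-length=200000000 -Dlogstash.jackson.stream-read-constraints.max-number-length=10000"
bin/logstash -f pipeline.conf   # pipeline.conf is a placeholder
```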
|
||||
|
|
|
@ -181,38 +181,6 @@
|
|||
#
|
||||
# api.auth.basic.password_policy.mode: WARN
|
||||
#
|
||||
# ------------ Module Settings ---------------
|
||||
# Define modules here. Modules definitions must be defined as an array.
|
||||
# The simple way to see this is to prepend each `name` with a `-`, and keep
|
||||
# all associated variables under the `name` they are associated with, and
|
||||
# above the next, like this:
|
||||
#
|
||||
# modules:
|
||||
# - name: MODULE_NAME
|
||||
# var.PLUGINTYPE1.PLUGINNAME1.KEY1: VALUE
|
||||
# var.PLUGINTYPE1.PLUGINNAME1.KEY2: VALUE
|
||||
# var.PLUGINTYPE2.PLUGINNAME1.KEY1: VALUE
|
||||
# var.PLUGINTYPE3.PLUGINNAME3.KEY1: VALUE
|
||||
#
|
||||
# Module variable names must be in the format of
|
||||
#
|
||||
# var.PLUGIN_TYPE.PLUGIN_NAME.KEY
|
||||
#
|
||||
# modules:
|
||||
#
|
||||
# ------------ Cloud Settings ---------------
|
||||
# Define Elastic Cloud settings here.
|
||||
# Format of cloud.id is a base64 value e.g. dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy
|
||||
# and it may have an label prefix e.g. staging:dXMtZ...
|
||||
# This will overwrite 'var.elasticsearch.hosts' and 'var.kibana.host'
|
||||
# cloud.id: <identifier>
|
||||
#
|
||||
# Format of cloud.auth is: <user>:<pass>
|
||||
# This is optional
|
||||
# If supplied this will overwrite 'var.elasticsearch.username' and 'var.elasticsearch.password'
|
||||
# If supplied this will overwrite 'var.kibana.username' and 'var.kibana.password'
|
||||
# cloud.auth: elastic:<password>
|
||||
#
|
||||
# ------------ Queuing Settings --------------
|
||||
#
|
||||
# Internal queuing model, "memory" for legacy in-memory based queuing and
|
||||
|
@ -314,13 +282,13 @@
|
|||
# * json
|
||||
#
|
||||
# log.format: plain
|
||||
# log.format.json.fix_duplicate_message_fields: false
|
||||
# log.format.json.fix_duplicate_message_fields: true
|
||||
#
|
||||
# path.logs:
|
||||
#
|
||||
# ------------ Other Settings --------------
|
||||
#
|
||||
# Allow or block running Logstash as superuser (default: true)
|
||||
# Allow or block running Logstash as superuser (default: true). Windows is excluded from this check.
|
||||
# allow_superuser: false
|
||||
#
|
||||
# Where to find custom plugins
|
||||
|
@ -331,13 +299,15 @@
|
|||
# pipeline.separate_logs: false
|
||||
#
|
||||
# Determine where to allocate memory buffers, for plugins that leverage them.
|
||||
# Default to direct, optionally can be switched to heap to select Java heap space.
|
||||
# Defaults to heap, but can be switched to direct if you prefer using direct memory space instead.
|
||||
# pipeline.buffer.type: heap
|
||||
#
|
||||
# ------------ X-Pack Settings (not applicable for OSS build)--------------
|
||||
#
|
||||
# X-Pack Monitoring
|
||||
# https://www.elastic.co/guide/en/logstash/current/monitoring-logstash.html
|
||||
# Flag to allow the legacy internal monitoring (default: false)
|
||||
#xpack.monitoring.allow_legacy_collection: false
|
||||
#xpack.monitoring.enabled: false
|
||||
#xpack.monitoring.elasticsearch.username: logstash_system
|
||||
#xpack.monitoring.elasticsearch.password: password
|
||||
|
|
|
@ -1,232 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Cherry pick and backport a PR"""
|
||||
from __future__ import print_function
|
||||
|
||||
from builtins import input
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
from os.path import expanduser
|
||||
import re
|
||||
from subprocess import check_call, call, check_output
|
||||
import requests
|
||||
import json
|
||||
|
||||
usage = """
|
||||
Example usage:
|
||||
./devtools/backport 7.16 2565 6490604aa0cf7fa61932a90700e6ca988fc8a527
|
||||
|
||||
In case of backporting errors, fix them, then run
|
||||
git cherry-pick --continue
|
||||
./devtools/backport 7.16 2565 6490604aa0cf7fa61932a90700e6ca988fc8a527 --continue
|
||||
|
||||
This script does the following:
|
||||
* cleanups both from_branch and to_branch (warning: drops local changes)
|
||||
* creates a temporary branch named something like "branch_2565"
|
||||
* calls the git cherry-pick command in this branch
|
||||
* after fixing the merge errors (if needed), pushes the branch to your
|
||||
remote
|
||||
* it will attempt to create a PR for you using the GitHub API, but requires
|
||||
the GitHub token, with the public_repo scope, available in `~/.elastic/github.token`.
|
||||
Keep in mind this token has to also be authorized to the Elastic organization as
|
||||
well as to work with SSO.
|
||||
(see https://help.github.com/en/articles/authorizing-a-personal-access-token-for-use-with-saml-single-sign-on)
|
||||
|
||||
Note that you need to take the commit hashes from `git log` on the
|
||||
from_branch, copying the IDs from Github doesn't work in case we squashed the
|
||||
PR.
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
"""Main"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Creates a PR for cherry-picking commits",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog=usage)
|
||||
parser.add_argument("to_branch",
|
||||
help="To branch (e.g 7.x)")
|
||||
parser.add_argument("pr_number",
|
||||
help="The PR number being merged (e.g. 2345)")
|
||||
parser.add_argument("commit_hashes", metavar="hash", nargs="*",
|
||||
help="The commit hashes to cherry pick." +
|
||||
" You can specify multiple.")
|
||||
parser.add_argument("--yes", action="store_true",
|
||||
help="Assume yes. Warning: discards local changes.")
|
||||
parser.add_argument("--continue", action="store_true",
|
||||
help="Continue after fixing merging errors.")
|
||||
parser.add_argument("--from_branch", default="main",
|
||||
help="From branch")
|
||||
parser.add_argument("--diff", action="store_true",
|
||||
help="Display the diff before pushing the PR")
|
||||
parser.add_argument("--remote", default="",
|
||||
help="Which remote to push the backport branch to")
|
||||
#parser.add_argument("--zube-team", default="",
|
||||
# help="Team the PR belongs to")
|
||||
#parser.add_argument("--keep-backport-label", action="store_true",
|
||||
# help="Preserve label needs_backport in original PR")
|
||||
args = parser.parse_args()
|
||||
|
||||
print(args)
|
||||
|
||||
create_pr(parser, args)
|
||||
|
||||
def create_pr(parser, args):
|
||||
info("Checking if GitHub API token is available in `~/.elastic/github.token`")
|
||||
token = get_github_token()
|
||||
|
||||
tmp_branch = "backport_{}_{}".format(args.pr_number, args.to_branch)
|
||||
|
||||
if not vars(args)["continue"]:
|
||||
if not args.yes and input("This will destroy all local changes. " +
|
||||
"Continue? [y/n]: ") != "y":
|
||||
return 1
|
||||
info("Destroying local changes...")
|
||||
check_call("git reset --hard", shell=True)
|
||||
check_call("git clean -df", shell=True)
|
||||
check_call("git fetch", shell=True)
|
||||
|
||||
info("Checkout of {} to backport from....".format(args.from_branch))
|
||||
check_call("git checkout {}".format(args.from_branch), shell=True)
|
||||
check_call("git pull", shell=True)
|
||||
|
||||
info("Checkout of {} to backport to...".format(args.to_branch))
|
||||
check_call("git checkout {}".format(args.to_branch), shell=True)
|
||||
check_call("git pull", shell=True)
|
||||
|
||||
info("Creating backport branch {}...".format(tmp_branch))
|
||||
call("git branch -D {} > /dev/null".format(tmp_branch), shell=True)
|
||||
check_call("git checkout -b {}".format(tmp_branch), shell=True)
|
||||
|
||||
if len(args.commit_hashes) == 0:
|
||||
if token:
|
||||
session = github_session(token)
|
||||
base = "https://api.github.com/repos/elastic/logstash"
|
||||
original_pr = session.get(base + "/pulls/" + args.pr_number).json()
|
||||
merge_commit = original_pr['merge_commit_sha']
|
||||
if not merge_commit:
|
||||
info("Could not auto resolve merge commit - PR isn't merged yet")
|
||||
return 1
|
||||
info("Merge commit detected from PR: {}".format(merge_commit))
|
||||
commit_hashes = merge_commit
|
||||
else:
|
||||
info("GitHub API token not available. " +
|
||||
"Please manually specify commit hash(es) argument(s)\n")
|
||||
parser.print_help()
|
||||
return 1
|
||||
else:
|
||||
commit_hashes = "{}".format(" ").join(args.commit_hashes)
|
||||
|
||||
info("Cherry-picking {}".format(commit_hashes))
|
||||
if call("git cherry-pick -x {}".format(commit_hashes), shell=True) != 0:
|
||||
info("Looks like you have cherry-pick errors.")
|
||||
info("Fix them, then run: ")
|
||||
info(" git cherry-pick --continue")
|
||||
info(" {} --continue".format(" ".join(sys.argv)))
|
||||
return 1
|
||||
|
||||
if len(check_output("git status -s", shell=True).strip()) > 0:
|
||||
info("Looks like you have uncommitted changes." +
|
||||
" Please execute first: git cherry-pick --continue")
|
||||
return 1
|
||||
|
||||
if len(check_output("git log HEAD...{}".format(args.to_branch),
|
||||
shell=True).strip()) == 0:
|
||||
info("No commit to push")
|
||||
return 1
|
||||
|
||||
if args.diff:
|
||||
call("git diff {}".format(args.to_branch), shell=True)
|
||||
if input("Continue? [y/n]: ") != "y":
|
||||
info("Aborting cherry-pick.")
|
||||
return 1
|
||||
|
||||
info("Ready to push branch.")
|
||||
|
||||
remote = args.remote
|
||||
if not remote:
|
||||
remote = input("To which remote should I push? (your fork): ")
|
||||
|
||||
info("Pushing branch {} to remote {}".format(tmp_branch, remote))
|
||||
call("git push {} :{} > /dev/null".format(remote, tmp_branch), shell=True)
|
||||
check_call("git push --set-upstream {} {}".format(remote, tmp_branch), shell=True)
|
||||
|
||||
if not token:
|
||||
info("GitHub API token not available.\n" +
|
||||
"Manually create a PR by following this URL: \n\t" +
|
||||
"https://github.com/elastic/logstash/compare/{}...{}:{}?expand=1"
|
||||
.format(args.to_branch, remote, tmp_branch))
|
||||
else:
|
||||
info("Automatically creating a PR for you...")
|
||||
|
||||
session = github_session(token)
|
||||
base = "https://api.github.com/repos/elastic/logstash"
|
||||
original_pr = session.get(base + "/pulls/" + args.pr_number).json()
|
||||
|
||||
# get the github username from the remote where we pushed
|
||||
remote_url = check_output("git remote get-url {}".format(remote), shell=True)
|
||||
remote_user = re.search("github.com[:/](.+)/logstash", str(remote_url)).group(1)
|
||||
|
||||
# create PR
|
||||
request = session.post(base + "/pulls", json=dict(
|
||||
title="Backport PR #{} to {}: {}".format(args.pr_number, args.to_branch, original_pr["title"]),
|
||||
head=remote_user + ":" + tmp_branch,
|
||||
base=args.to_branch,
|
||||
body="**Backport PR #{} to {} branch, original message:**\n\n---\n\n{}"
|
||||
.format(args.pr_number, args.to_branch, original_pr["body"])
|
||||
))
|
||||
if request.status_code > 299:
|
||||
info("Creating PR failed: {}".format(request.json()))
|
||||
sys.exit(1)
|
||||
new_pr = request.json()
|
||||
|
||||
# add labels
|
||||
labels = ["backport"]
|
||||
# get the version (vX.Y.Z) we are backporting to
|
||||
version = get_version(os.getcwd())
|
||||
if version:
|
||||
labels.append(version)
|
||||
|
||||
session.post(
|
||||
base + "/issues/{}/labels".format(new_pr["number"]), json=labels)
|
||||
|
||||
"""
|
||||
if not args.keep_backport_label:
|
||||
# remove needs backport label from the original PR
|
||||
session.delete(base + "/issues/{}/labels/needs_backport".format(args.pr_number))
|
||||
"""
|
||||
# Set a version label on the original PR
|
||||
if version:
|
||||
session.post(
|
||||
base + "/issues/{}/labels".format(args.pr_number), json=[version])
|
||||
|
||||
info("Done. PR created: {}".format(new_pr["html_url"]))
|
||||
info("Please go and check it and add the review tags")
|
||||
|
||||
def get_version(base_dir):
|
||||
#pattern = re.compile(r'(const\s|)\w*(v|V)ersion\s=\s"(?P<version>.*)"')
|
||||
with open(os.path.join(base_dir, "versions.yml"), "r") as f:
|
||||
for line in f:
|
||||
if line.startswith('logstash:'):
|
||||
return "v" + line.split(':')[-1].strip()
|
||||
#match = pattern.match(line)
|
||||
#if match:
|
||||
# return match.group('version')
|
||||
|
||||
def get_github_token():
|
||||
try:
|
||||
token = open(expanduser("~/.elastic/github.token"), "r").read().strip()
|
||||
except:
|
||||
token = False
|
||||
return token
|
||||
|
||||
def github_session(token):
|
||||
session = requests.Session()
|
||||
session.headers.update({"Authorization": "token " + token})
|
||||
return session
|
||||
|
||||
def info(msg):
|
||||
print("\nINFO: {}".format(msg))
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
|
@ -1,163 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Cherry pick and backport a PR"""
|
||||
from __future__ import print_function
|
||||
|
||||
from builtins import input
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
from os.path import expanduser
|
||||
import re
|
||||
from subprocess import check_call, call, check_output
|
||||
import requests
|
||||
|
||||
usage = """
|
||||
Example usage:
|
||||
./dev-tools/create local_branch
|
||||
|
||||
This script does the following:
|
||||
* cleans up local_branch (warning: drops local changes)
|
||||
* rebases the branch against main
|
||||
* it will attempt to create a PR for you using the GitHub API, but requires
|
||||
the GitHub token, with the public_repo scope, available in `~/.elastic/github.token`.
|
||||
Keep in mind this token has to also be authorized to the Elastic organization as
|
||||
well as to work with SSO.
|
||||
(see https://help.github.com/en/articles/authorizing-a-personal-access-token-for-use-with-saml-single-sign-on)
|
||||
|
||||
Note that you need to take the commit hashes from `git log` on the
from_branch; copying the IDs from GitHub doesn't work in case we squashed the
PR.
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
"""Main"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Creates a new PR from a branch",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog=usage)
|
||||
parser.add_argument("local_branch",
|
||||
help="Branch to Create a PR for")
|
||||
parser.add_argument("--to_branch", default="main",
|
||||
help="Which remote to push the backport branch to")
|
||||
parser.add_argument("--yes", action="store_true",
|
||||
help="Assume yes. Warning: discards local changes.")
|
||||
parser.add_argument("--continue", action="store_true",
|
||||
help="Continue after fixing merging errors.")
|
||||
parser.add_argument("--diff", action="store_true",
|
||||
help="Display the diff before pushing the PR")
|
||||
parser.add_argument("--remote", default="",
|
||||
help="Which remote to push the backport branch to")
|
||||
args = parser.parse_args()
|
||||
|
||||
print(args)
|
||||
|
||||
create_pr(args)
|
||||
|
||||
def create_pr(args):
|
||||
|
||||
if not vars(args)["continue"]:
|
||||
if not args.yes and input("This will destroy all local changes. " +
|
||||
"Continue? [y/n]: ") != "y":
|
||||
return 1
|
||||
info("Destroying local changess...")
|
||||
check_call("git reset --hard", shell=True)
|
||||
check_call("git clean -df", shell=True)
|
||||
#check_call("git fetch", shell=True)
|
||||
|
||||
info("Checkout of {} to create a PR....".format(args.local_branch))
|
||||
check_call("git checkout {}".format(args.local_branch), shell=True)
|
||||
check_call("git rebase {}".format(args.to_branch), shell=True)
|
||||
|
||||
if args.diff:
|
||||
call("git diff {}".format(args.to_branch), shell=True)
|
||||
if input("Continue? [y/n]: ") != "y":
|
||||
info("Aborting PR creation...")
|
||||
return 1
|
||||
|
||||
info("Ready to push branch and create PR...")
|
||||
|
||||
remote = args.remote
|
||||
if not remote:
|
||||
remote = input("To which remote should I push? (your fork): ")
|
||||
|
||||
info("Pushing branch {} to remote {}".format(args.local_branch, remote))
|
||||
call("git push {} :{} > /dev/null".format(remote, args.local_branch),
|
||||
shell=True)
|
||||
check_call("git push --set-upstream {} {}"
|
||||
.format(remote, args.local_branch), shell=True)
|
||||
|
||||
info("Checking if GitHub API token is available in `~/.elastic/github.token`")
|
||||
try:
|
||||
token = open(expanduser("~/.elastic/github.token"), "r").read().strip()
|
||||
except:
|
||||
token = False
|
||||
|
||||
if not token:
|
||||
info("GitHub API token not available.\n" +
|
||||
"Manually create a PR by following this URL: \n\t" +
|
||||
"https://github.com/elastic/logstash/compare/{}...{}:{}?expand=1"
|
||||
.format(args.to_branch, remote, args.local_branch))
|
||||
else:
|
||||
info("Automatically creating a PR for you...")
|
||||
|
||||
base = "https://api.github.com/repos/elastic/logstash"
|
||||
session = requests.Session()
|
||||
session.headers.update({"Authorization": "token " + token})
|
||||
|
||||
# get the github username from the remote where we pushed
|
||||
remote_url = check_output("git remote get-url {}".format(remote),
|
||||
shell=True)
|
||||
remote_user = re.search("github.com[:/](.+)/logstash", str(remote_url)).group(1)
|
||||
|
||||
### TODO:
|
||||
title = input("Title: ")
|
||||
body = input("Description: ")
|
||||
|
||||
# create PR
|
||||
request = session.post(base + "/pulls", json=dict(
|
||||
title=title,
|
||||
head=remote_user + ":" + args.local_branch,
|
||||
base=args.to_branch,
|
||||
body=body
|
||||
))
|
||||
if request.status_code > 299:
|
||||
info("Creating PR failed: {}".format(request.json()))
|
||||
sys.exit(1)
|
||||
new_pr = request.json()
|
||||
|
||||
"""
|
||||
# add labels
|
||||
labels = ["backport"]
|
||||
# get the version we are backported to
|
||||
version = get_version(os.getcwd())
|
||||
if version:
|
||||
labels.append("v" + version)
|
||||
|
||||
session.post(
|
||||
base + "/issues/{}/labels".format(new_pr["number"]), json=labels)
|
||||
|
||||
# Set a version label on the original PR
|
||||
if version:
|
||||
session.post(
|
||||
base + "/issues/{}/labels".format(args.pr_number), json=[version])
|
||||
"""
|
||||
|
||||
info("Done. PR created: {}".format(new_pr["html_url"]))
|
||||
info("Please go and check it and add the review tags")
|
||||
|
||||
def get_version(base_dir):
|
||||
#pattern = re.compile(r'(const\s|)\w*(v|V)ersion\s=\s"(?P<version>.*)"')
|
||||
with open(os.path.join(base_dir, "versions.yml"), "r") as f:
|
||||
for line in f:
|
||||
if line.startswith('logstash:'):
|
||||
return line.split(':')[-1].strip()
|
||||
#match = pattern.match(line)
|
||||
#if match:
|
||||
# return match.group('version')
|
||||
|
||||
def info(msg):
|
||||
print("\nINFO: {}".format(msg))
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
|
@ -2,7 +2,7 @@ SHELL=/bin/bash
|
|||
ELASTIC_REGISTRY ?= docker.elastic.co
|
||||
|
||||
# Determine the version to build.
|
||||
ELASTIC_VERSION := $(shell ../vendor/jruby/bin/jruby bin/elastic-version)
|
||||
ELASTIC_VERSION ?= $(shell ../vendor/jruby/bin/jruby bin/elastic-version)
|
||||
|
||||
ifdef STAGING_BUILD_NUM
|
||||
VERSION_TAG := $(ELASTIC_VERSION)-$(STAGING_BUILD_NUM)
|
||||
|
@ -14,9 +14,13 @@ ifdef DOCKER_ARCHITECTURE
|
|||
ARCHITECTURE := $(DOCKER_ARCHITECTURE)
|
||||
else
|
||||
ARCHITECTURE := $(shell uname -m)
|
||||
# For MacOS
|
||||
ifeq ($(ARCHITECTURE), arm64)
|
||||
ARCHITECTURE := aarch64
|
||||
endif
|
||||
endif
|
||||
|
||||
IMAGE_FLAVORS ?= oss full ubi8 wolfi
|
||||
IMAGE_FLAVORS ?= oss full wolfi
|
||||
DEFAULT_IMAGE_FLAVOR ?= full
|
||||
|
||||
IMAGE_TAG := $(ELASTIC_REGISTRY)/logstash/logstash
|
||||
|
@ -26,7 +30,7 @@ all: build-from-local-artifacts build-from-local-oss-artifacts public-dockerfile
|
|||
|
||||
# Build from artifacts on the local filesystem, using an http server (running
|
||||
# in a container) to provide the artifacts to the Dockerfile.
|
||||
build-from-local-full-artifacts: dockerfile env2yaml
|
||||
build-from-local-full-artifacts: dockerfile
|
||||
docker run --rm -d --name=$(HTTPD) \
|
||||
-p 8000:8000 --expose=8000 -v $(ARTIFACTS_DIR):/mnt \
|
||||
python:3 bash -c 'cd /mnt && python3 -m http.server'
|
||||
|
@ -36,7 +40,7 @@ build-from-local-full-artifacts: dockerfile env2yaml
|
|||
docker tag $(IMAGE_TAG)-full:$(VERSION_TAG) $(IMAGE_TAG):$(VERSION_TAG);
|
||||
docker kill $(HTTPD)
|
||||
|
||||
build-from-local-oss-artifacts: dockerfile env2yaml
|
||||
build-from-local-oss-artifacts: dockerfile
|
||||
docker run --rm -d --name=$(HTTPD) \
|
||||
-p 8000:8000 --expose=8000 -v $(ARTIFACTS_DIR):/mnt \
|
||||
python:3 bash -c 'cd /mnt && python3 -m http.server'
|
||||
|
@ -45,15 +49,6 @@ build-from-local-oss-artifacts: dockerfile env2yaml
|
|||
(docker kill $(HTTPD); false);
|
||||
-docker kill $(HTTPD)
|
||||
|
||||
build-from-local-ubi8-artifacts: dockerfile env2yaml
|
||||
docker run --rm -d --name=$(HTTPD) \
|
||||
-p 8000:8000 --expose=8000 -v $(ARTIFACTS_DIR):/mnt \
|
||||
python:3 bash -c 'cd /mnt && python3 -m http.server'
|
||||
timeout 120 bash -c 'until curl -s localhost:8000 > /dev/null; do sleep 1; done'
|
||||
docker build --progress=plain --network=host -t $(IMAGE_TAG)-ubi8:$(VERSION_TAG) -f $(ARTIFACTS_DIR)/Dockerfile-ubi8 data/logstash || \
|
||||
(docker kill $(HTTPD); false);
|
||||
-docker kill $(HTTPD)
|
||||
|
||||
build-from-local-wolfi-artifacts: dockerfile
|
||||
docker run --rm -d --name=$(HTTPD) \
|
||||
-p 8000:8000 --expose=8000 -v $(ARTIFACTS_DIR):/mnt \
|
||||
|
@ -65,9 +60,8 @@ build-from-local-wolfi-artifacts: dockerfile
|
|||
|
||||
COPY_FILES := $(ARTIFACTS_DIR)/docker/config/pipelines.yml $(ARTIFACTS_DIR)/docker/config/logstash-oss.yml $(ARTIFACTS_DIR)/docker/config/logstash-full.yml
|
||||
COPY_FILES += $(ARTIFACTS_DIR)/docker/config/log4j2.file.properties $(ARTIFACTS_DIR)/docker/config/log4j2.properties
|
||||
COPY_FILES += $(ARTIFACTS_DIR)/docker/env2yaml/env2yaml.go $(ARTIFACTS_DIR)/docker/env2yaml/go.mod $(ARTIFACTS_DIR)/docker/env2yaml/go.sum
|
||||
COPY_FILES += $(ARTIFACTS_DIR)/docker/pipeline/default.conf $(ARTIFACTS_DIR)/docker/bin/docker-entrypoint
|
||||
COPY_FILES += $(ARTIFACTS_DIR)/docker/env2yaml/env2yaml-arm64
|
||||
COPY_FILES += $(ARTIFACTS_DIR)/docker/env2yaml/env2yaml-amd64
|
||||
|
||||
$(ARTIFACTS_DIR)/docker/config/pipelines.yml: data/logstash/config/pipelines.yml
|
||||
$(ARTIFACTS_DIR)/docker/config/logstash-oss.yml: data/logstash/config/logstash-oss.yml
|
||||
|
@ -76,8 +70,9 @@ $(ARTIFACTS_DIR)/docker/config/log4j2.file.properties: data/logstash/config/log4
|
|||
$(ARTIFACTS_DIR)/docker/config/log4j2.properties: data/logstash/config/log4j2.properties
|
||||
$(ARTIFACTS_DIR)/docker/pipeline/default.conf: data/logstash/pipeline/default.conf
|
||||
$(ARTIFACTS_DIR)/docker/bin/docker-entrypoint: data/logstash/bin/docker-entrypoint
|
||||
$(ARTIFACTS_DIR)/docker/env2yaml/env2yaml-arm64: data/logstash/env2yaml/env2yaml-arm64
|
||||
$(ARTIFACTS_DIR)/docker/env2yaml/env2yaml-amd64: data/logstash/env2yaml/env2yaml-amd64
|
||||
$(ARTIFACTS_DIR)/docker/env2yaml/env2yaml.go: data/logstash/env2yaml/env2yaml.go
|
||||
$(ARTIFACTS_DIR)/docker/env2yaml/go.mod: data/logstash/env2yaml/go.mod
|
||||
$(ARTIFACTS_DIR)/docker/env2yaml/go.sum: data/logstash/env2yaml/go.sum
|
||||
|
||||
$(ARTIFACTS_DIR)/docker/%:
|
||||
cp -f $< $@
|
||||
|
@ -118,7 +113,7 @@ ironbank_docker_paths:
|
|||
mkdir -p $(ARTIFACTS_DIR)/ironbank/scripts/go/src/env2yaml/vendor
|
||||
mkdir -p $(ARTIFACTS_DIR)/ironbank/scripts/pipeline
|
||||
|
||||
public-dockerfiles: public-dockerfiles_oss public-dockerfiles_full public-dockerfiles_ubi8 public-dockerfiles_wolfi public-dockerfiles_ironbank
|
||||
public-dockerfiles: public-dockerfiles_oss public-dockerfiles_full public-dockerfiles_wolfi public-dockerfiles_ironbank
|
||||
|
||||
public-dockerfiles_full: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
||||
../vendor/jruby/bin/jruby -S erb -T "-"\
|
||||
|
@ -134,6 +129,13 @@ public-dockerfiles_full: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
|||
cp $(ARTIFACTS_DIR)/Dockerfile-full Dockerfile && \
|
||||
tar -zcf ../logstash-$(VERSION_TAG)-docker-build-context.tar.gz Dockerfile bin config env2yaml pipeline
|
||||
|
||||
build-from-dockerfiles_full: public-dockerfiles_full
|
||||
cd $(ARTIFACTS_DIR)/docker && \
|
||||
mkdir -p dockerfile_build_full && cd dockerfile_build_full && \
|
||||
tar -zxf ../../logstash-$(VERSION_TAG)-docker-build-context.tar.gz && \
|
||||
sed 's/artifacts/snapshots/g' Dockerfile > Dockerfile.tmp && mv Dockerfile.tmp Dockerfile && \
|
||||
docker build --progress=plain --network=host -t $(IMAGE_TAG)-dockerfile-full:$(VERSION_TAG) .
|
||||
|
||||
public-dockerfiles_oss: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
||||
../vendor/jruby/bin/jruby -S erb -T "-"\
|
||||
created_date="${BUILD_DATE}" \
|
||||
|
@ -148,19 +150,12 @@ public-dockerfiles_oss: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
|||
cp $(ARTIFACTS_DIR)/Dockerfile-oss Dockerfile && \
|
||||
tar -zcf ../logstash-oss-$(VERSION_TAG)-docker-build-context.tar.gz Dockerfile bin config env2yaml pipeline
|
||||
|
||||
public-dockerfiles_ubi8: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
||||
../vendor/jruby/bin/jruby -S erb -T "-"\
|
||||
created_date="${BUILD_DATE}" \
|
||||
elastic_version="${ELASTIC_VERSION}" \
|
||||
arch="${ARCHITECTURE}" \
|
||||
version_tag="${VERSION_TAG}" \
|
||||
release="${RELEASE}" \
|
||||
image_flavor="ubi8" \
|
||||
local_artifacts="false" \
|
||||
templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-ubi8" && \
|
||||
build-from-dockerfiles_oss: public-dockerfiles_oss
|
||||
cd $(ARTIFACTS_DIR)/docker && \
|
||||
cp $(ARTIFACTS_DIR)/Dockerfile-ubi8 Dockerfile && \
|
||||
tar -zcf ../logstash-ubi8-$(VERSION_TAG)-docker-build-context.tar.gz Dockerfile bin config env2yaml pipeline
|
||||
mkdir -p dockerfile_build_oss && cd dockerfile_build_oss && \
|
||||
tar -zxf ../../logstash-$(VERSION_TAG)-docker-build-context.tar.gz && \
|
||||
sed 's/artifacts/snapshots/g' Dockerfile > Dockerfile.tmp && mv Dockerfile.tmp Dockerfile && \
|
||||
docker build --progress=plain --network=host -t $(IMAGE_TAG)-dockerfile-oss:$(VERSION_TAG) .
|
||||
|
||||
public-dockerfiles_wolfi: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
||||
../vendor/jruby/bin/jruby -S erb -T "-"\
|
||||
|
@ -176,7 +171,14 @@ public-dockerfiles_wolfi: templates/Dockerfile.erb docker_paths $(COPY_FILES)
|
|||
cp $(ARTIFACTS_DIR)/Dockerfile-wolfi Dockerfile && \
|
||||
tar -zcf ../logstash-wolfi-$(VERSION_TAG)-docker-build-context.tar.gz Dockerfile bin config env2yaml pipeline
|
||||
|
||||
public-dockerfiles_ironbank: templates/hardening_manifest.yaml.erb templates/Dockerfile.erb ironbank_docker_paths $(COPY_IRONBANK_FILES)
|
||||
build-from-dockerfiles_wolfi: public-dockerfiles_wolfi
|
||||
cd $(ARTIFACTS_DIR)/docker && \
|
||||
mkdir -p dockerfile_build_wolfi && cd dockerfile_build_wolfi && \
|
||||
tar -zxf ../../logstash-$(VERSION_TAG)-docker-build-context.tar.gz && \
|
||||
sed 's/artifacts/snapshots/g' Dockerfile > Dockerfile.tmp && mv Dockerfile.tmp Dockerfile && \
|
||||
docker build --progress=plain --network=host -t $(IMAGE_TAG)-dockerfile-wolfi:$(VERSION_TAG) .
|
||||
|
||||
public-dockerfiles_ironbank: templates/hardening_manifest.yaml.erb templates/IronbankDockerfile.erb ironbank_docker_paths $(COPY_IRONBANK_FILES)
|
||||
../vendor/jruby/bin/jruby -S erb -T "-"\
|
||||
elastic_version="${ELASTIC_VERSION}" \
|
||||
templates/hardening_manifest.yaml.erb > $(ARTIFACTS_DIR)/ironbank/hardening_manifest.yaml && \
|
||||
|
@ -188,35 +190,11 @@ public-dockerfiles_ironbank: templates/hardening_manifest.yaml.erb templates/Doc
|
|||
release="${RELEASE}" \
|
||||
image_flavor="ironbank" \
|
||||
local_artifacts="false" \
|
||||
templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-ironbank" && \
|
||||
templates/IronbankDockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-ironbank" && \
|
||||
cd $(ARTIFACTS_DIR)/ironbank && \
|
||||
cp $(ARTIFACTS_DIR)/Dockerfile-ironbank Dockerfile && \
|
||||
tar -zcf ../logstash-ironbank-$(VERSION_TAG)-docker-build-context.tar.gz scripts Dockerfile hardening_manifest.yaml LICENSE README.md
|
||||
|
||||
# Push the image to the dedicated push endpoint at "push.docker.elastic.co"
|
||||
push:
|
||||
$(foreach FLAVOR, $(IMAGE_FLAVORS), \
|
||||
docker tag $(IMAGE_TAG)-$(FLAVOR):$(VERSION_TAG) push.$(IMAGE_TAG)-$(FLAVOR):$(VERSION_TAG); \
|
||||
docker push push.$(IMAGE_TAG)-$(FLAVOR):$(VERSION_TAG); \
|
||||
docker rmi push.$(IMAGE_TAG)-$(FLAVOR):$(VERSION_TAG); \
|
||||
)
|
||||
# Also push the default version, with no suffix like '-oss' or '-full'
|
||||
docker tag $(IMAGE_TAG):$(VERSION_TAG) push.$(IMAGE_TAG):$(VERSION_TAG);
|
||||
docker push push.$(IMAGE_TAG):$(VERSION_TAG);
|
||||
docker rmi push.$(IMAGE_TAG):$(VERSION_TAG);
|
||||
|
||||
# Compile "env2yaml", the helper for configuring logstash.yml via environment
|
||||
# variables.
|
||||
env2yaml:
|
||||
docker run --rm \
|
||||
-v "$(PWD)/data/logstash/env2yaml:/usr/src/env2yaml" \
|
||||
-e GOARCH=arm64 -e GOOS=linux \
|
||||
-w /usr/src/env2yaml golang:1 go build -o /usr/src/env2yaml/env2yaml-arm64
|
||||
docker run --rm \
|
||||
-v "$(PWD)/data/logstash/env2yaml:/usr/src/env2yaml" \
|
||||
-e GOARCH=amd64 -e GOOS=linux \
|
||||
-w /usr/src/env2yaml golang:1 go build -o /usr/src/env2yaml/env2yaml-amd64
|
||||
|
||||
# Generate the Dockerfiles from ERB templates.
|
||||
dockerfile: templates/Dockerfile.erb
|
||||
$(foreach FLAVOR, $(IMAGE_FLAVORS), \
|
||||
|
@ -226,7 +204,7 @@ dockerfile: templates/Dockerfile.erb
|
|||
arch="${ARCHITECTURE}" \
|
||||
version_tag="${VERSION_TAG}" \
|
||||
image_flavor="${FLAVOR}" \
|
||||
local_artifacts="true" \
|
||||
local_artifacts="${LOCAL_ARTIFACTS}" \
|
||||
templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-${FLAVOR}" ; \
|
||||
)
|
||||
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
http.host: "0.0.0.0"
|
||||
api.http.host: "0.0.0.0"
|
||||
xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ]
|
||||
|
|
|
@ -1 +1 @@
|
|||
http.host: "0.0.0.0"
|
||||
api.http.host: "0.0.0.0"
|
||||
|
|
|
@ -16,12 +16,13 @@ package main
|
|||
|
||||
import (
|
||||
"errors"
|
||||
"gopkg.in/yaml.v2"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
"fmt"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var validSettings = []string{
|
||||
|
@ -49,7 +50,6 @@ var validSettings = []string{
|
|||
"config.debug",
|
||||
"config.support_escapes",
|
||||
"config.field_reference.escape_style",
|
||||
"event_api.tags.illegal",
|
||||
"queue.type",
|
||||
"path.queue",
|
||||
"queue.page_capacity",
|
||||
|
@ -65,14 +65,9 @@ var validSettings = []string{
|
|||
"dead_letter_queue.storage_policy",
|
||||
"dead_letter_queue.retain.age",
|
||||
"path.dead_letter_queue",
|
||||
"http.enabled", // DEPRECATED: prefer `api.enabled`
|
||||
"http.environment", // DEPRECATED: prefer `api.environment`
|
||||
"http.host", // DEPRECATED: prefer `api.http.host`
|
||||
"http.port", // DEPRECATED: prefer `api.http.port`
|
||||
"log.level",
|
||||
"log.format",
|
||||
"log.format.json.fix_duplicate_message_fields",
|
||||
"modules",
|
||||
"metric.collect",
|
||||
"path.logs",
|
||||
"path.plugins",
|
||||
|
@ -87,6 +82,7 @@ var validSettings = []string{
|
|||
"api.auth.basic.password_policy.include.symbol",
|
||||
"allow_superuser",
|
||||
"monitoring.cluster_uuid",
|
||||
"xpack.monitoring.allow_legacy_collection",
|
||||
"xpack.monitoring.enabled",
|
||||
"xpack.monitoring.collection.interval",
|
||||
"xpack.monitoring.elasticsearch.hosts",
|
||||
|
@ -130,8 +126,6 @@ var validSettings = []string{
|
|||
"xpack.management.elasticsearch.ssl.cipher_suites",
|
||||
"xpack.geoip.download.endpoint",
|
||||
"xpack.geoip.downloader.enabled",
|
||||
"cloud.id",
|
||||
"cloud.auth",
|
||||
}
|
||||
|
||||
// Given a setting name, return a downcased version with delimiters removed.
|
||||
|
|
|
@ -1,147 +1,73 @@
|
|||
# This Dockerfile was generated from templates/Dockerfile.erb
|
||||
<% if image_flavor == 'wolfi' -%>
|
||||
FROM docker.elastic.co/wolfi/go:1-dev as builder-env2yaml
|
||||
<%# image_flavor 'full', oss', 'wolfi' -%>
|
||||
<% if local_artifacts == 'false' -%>
|
||||
<% url_root = 'https://artifacts.elastic.co/downloads/logstash' -%>
|
||||
<% else -%>
|
||||
<% url_root = 'http://localhost:8000' -%>
|
||||
<% end -%>
|
||||
<% if image_flavor == 'oss' -%>
|
||||
<% tarball = "logstash-oss-#{elastic_version}-linux-#{arch}.tar.gz" -%>
|
||||
<% license = 'Apache 2.0' -%>
|
||||
<% else -%>
|
||||
<% tarball = "logstash-#{elastic_version}-linux-#{arch}.tar.gz" -%>
|
||||
<% license = 'Elastic License' -%>
|
||||
<% end -%>
|
||||
<% if image_flavor == 'full' || image_flavor == 'oss' -%>
|
||||
<% base_image = 'redhat/ubi9-minimal:latest' -%>
|
||||
<% go_image = 'golang:1.23' -%>
|
||||
<% package_manager = 'microdnf' -%>
|
||||
<% else -%>
|
||||
<% base_image = 'docker.elastic.co/wolfi/chainguard-base' -%>
|
||||
<% go_image = 'docker.elastic.co/wolfi/go:1.23' -%>
|
||||
<% package_manager = 'apk' -%>
|
||||
<% end -%>
|
||||
<% locale = 'C.UTF-8' -%>
|
||||
|
||||
COPY env2yaml/env2yaml.go /tmp/go/src/env2yaml/env2yaml.go
|
||||
COPY env2yaml/go.mod /tmp/go/src/env2yaml/go.mod
|
||||
COPY env2yaml/go.sum /tmp/go/src/env2yaml/go.sum
|
||||
# Build env2yaml
|
||||
FROM <%= go_image %> AS builder-env2yaml
|
||||
|
||||
COPY env2yaml/env2yaml.go env2yaml/go.mod env2yaml/go.sum /tmp/go/src/env2yaml/
|
||||
|
||||
WORKDIR /tmp/go/src/env2yaml
|
||||
|
||||
RUN go build
|
||||
<% end -%>
|
||||
RUN go build -trimpath
|
||||
|
||||
<% if image_flavor == 'ironbank' -%>
|
||||
<%# Start image_flavor 'ironbank' %>
|
||||
ARG BASE_REGISTRY=registry1.dso.mil
|
||||
ARG BASE_IMAGE=ironbank/redhat/ubi/ubi9
|
||||
ARG BASE_TAG=9.5
|
||||
ARG LOGSTASH_VERSION=<%= elastic_version %>
|
||||
ARG GOLANG_VERSION=1.21.8
|
||||
# Build main image
|
||||
# Minimal distributions do not ship with en language packs.
|
||||
FROM <%= base_image %>
|
||||
|
||||
# stage 1: build env2yaml
|
||||
FROM ${BASE_REGISTRY}/google/golang/ubi9/golang-1.21:${GOLANG_VERSION} AS env2yaml
|
||||
|
||||
ENV GOPATH=/go
|
||||
|
||||
COPY scripts/go /go
|
||||
|
||||
USER root
|
||||
|
||||
RUN dnf-3 -y upgrade && dnf-3 install -y git && \
|
||||
cd /go/src/env2yaml && \
|
||||
go build
|
||||
|
||||
# Final stage
|
||||
FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}
|
||||
|
||||
ARG LOGSTASH_VERSION
|
||||
|
||||
ENV ELASTIC_CONTAINER true
|
||||
ENV ELASTIC_CONTAINER=true
|
||||
ENV PATH=/usr/share/logstash/bin:$PATH
|
||||
ENV LANG=<%= locale %> LC_ALL=<%= locale %>
|
||||
|
||||
WORKDIR /usr/share
|
||||
|
||||
COPY --from=env2yaml /go/src/env2yaml/env2yaml /usr/local/bin/env2yaml
|
||||
COPY scripts/config/* config/
|
||||
COPY scripts/pipeline/default.conf pipeline/logstash.conf
|
||||
COPY scripts/bin/docker-entrypoint /usr/local/bin/
|
||||
COPY logstash-${LOGSTASH_VERSION}-linux-x86_64.tar.gz /tmp/logstash.tar.gz
|
||||
|
||||
RUN dnf -y upgrade && \
|
||||
dnf install -y procps findutils tar gzip which shadow-utils && \
|
||||
dnf clean all && \
|
||||
groupadd --gid 1000 logstash && \
|
||||
adduser --uid 1000 --gid 1000 --home-dir /usr/share/logstash --no-create-home logstash && \
|
||||
tar -zxf /tmp/logstash.tar.gz -C /usr/share/ && \
|
||||
mv /usr/share/logstash-${LOGSTASH_VERSION} /usr/share/logstash && \
|
||||
chown -R 1000:0 /usr/share/logstash && \
|
||||
chown --recursive logstash:logstash /usr/share/logstash/ && \
|
||||
chown -R logstash:root /usr/share/logstash config/ pipeline/ && \
|
||||
chmod -R g=u /usr/share/logstash && \
|
||||
mv config/* /usr/share/logstash/config && \
|
||||
mv pipeline /usr/share/logstash/pipeline && \
|
||||
mkdir /licenses/ && \
|
||||
mv /usr/share/logstash/NOTICE.TXT /licenses/NOTICE.TXT && \
|
||||
mv /usr/share/logstash/LICENSE.txt /licenses/LICENSE.txt && \
|
||||
ln -s /usr/share/logstash /opt/logstash && \
|
||||
chmod 0755 /usr/local/bin/docker-entrypoint && \
|
||||
rmdir config && \
|
||||
rm /tmp/logstash.tar.gz
|
||||
<%# End image_flavor 'ironbank' %>
|
||||
<% else -%>
|
||||
<%# Start image_flavor 'full', oss', 'ubi8', 'wolfi' %>
|
||||
<% if local_artifacts == 'false' -%>
|
||||
<% url_root = 'https://artifacts.elastic.co/downloads/logstash' -%>
|
||||
<% else -%>
|
||||
<% url_root = 'http://localhost:8000' -%>
|
||||
<% end -%>
|
||||
<% if image_flavor == 'oss' -%>
|
||||
<% tarball = "logstash-oss-#{elastic_version}-linux-$(arch).tar.gz" -%>
|
||||
<% license = 'Apache 2.0' -%>
|
||||
<% else -%>
|
||||
<% tarball = "logstash-#{elastic_version}-linux-$(arch).tar.gz" -%>
|
||||
<% license = 'Elastic License' -%>
|
||||
<% end -%>
|
||||
<% if image_flavor == 'ubi8' %>
|
||||
<% base_image = 'docker.elastic.co/ubi8/ubi-minimal' -%>
|
||||
<% package_manager = 'microdnf' -%>
|
||||
<% arch_command = 'uname -m' -%>
|
||||
# Minimal distributions do not ship with en language packs.
|
||||
<% locale = 'C.UTF-8' -%>
|
||||
<% elsif image_flavor == 'wolfi' %>
|
||||
<% base_image = 'docker.elastic.co/wolfi/chainguard-base' -%>
|
||||
<% package_manager = 'apk' -%>
|
||||
<% arch_command = 'uname -m' -%>
|
||||
# Minimal distributions do not ship with en language packs.
|
||||
<% locale = 'C.UTF-8' -%>
|
||||
<% else -%>
|
||||
<% base_image = 'ubuntu:20.04' -%>
|
||||
<% package_manager = 'apt-get' -%>
|
||||
<% locale = 'en_US.UTF-8' -%>
|
||||
<% arch_command = 'dpkg --print-architecture' -%>
|
||||
<% end -%>
|
||||
|
||||
FROM <%= base_image %>
|
||||
|
||||
# Install packages
|
||||
RUN for iter in {1..10}; do \
|
||||
<% if image_flavor == 'wolfi' %>
|
||||
<%= package_manager %> add --no-cache curl bash openssl && \
|
||||
<% else -%>
|
||||
<% if image_flavor == 'full' || image_flavor == 'oss' -%>
|
||||
export DEBIAN_FRONTEND=noninteractive && \
|
||||
<% end -%>
|
||||
<%= package_manager %> update -y && \
|
||||
<%= package_manager %> upgrade -y && \
|
||||
<%= package_manager %> install -y procps findutils tar gzip && \
|
||||
<% if image_flavor == 'ubi8' -%>
|
||||
<%= package_manager %> install -y openssl && \
|
||||
<% end -%>
|
||||
<% if image_flavor == 'ubi8' -%>
|
||||
<%= package_manager %> install -y which shadow-utils && \
|
||||
<% else -%>
|
||||
<%= package_manager %> install -y locales && \
|
||||
<% end -%>
|
||||
<% if image_flavor != 'ubi9' -%>
|
||||
<%= package_manager %> install -y curl && \
|
||||
<% end -%>
|
||||
<%= package_manager %> clean all && \
|
||||
<% if image_flavor == 'full' || image_flavor == 'oss' -%>
|
||||
locale-gen 'en_US.UTF-8' && \
|
||||
<%= package_manager %> clean metadata && \
|
||||
<% end -%>
|
||||
<% end -%>
|
||||
exit_code=0 && break || exit_code=$? && \
|
||||
echo "packaging error: retry $iter in 10s" && \
|
||||
<%= package_manager %> clean all && \
|
||||
<% if image_flavor == 'full' || image_flavor == 'oss' -%>
|
||||
<%= package_manager %> clean metadata && \
|
||||
<%= package_manager %> update -y && \
|
||||
<%= package_manager %> install -y procps findutils tar gzip && \
|
||||
<%= package_manager %> install -y openssl && \
|
||||
<%= package_manager %> install -y which shadow-utils && \
|
||||
<%= package_manager %> clean all && \
|
||||
<% else -%><%# 'wolfi' -%>
|
||||
<%= package_manager %> add --no-cache curl bash openssl && \
|
||||
<% end -%>
|
||||
sleep 10; done; \
|
||||
(exit $exit_code)
|
||||
exit_code=0 && break || \
|
||||
exit_code=$? && echo "packaging error: retry $iter in 10s" && \
|
||||
<%= package_manager %> clean all && sleep 10; \
|
||||
done; \
|
||||
(exit $exit_code)
|
||||
|
||||
# Provide a non-root user to run the process.
|
||||
<% if image_flavor == 'wolfi' -%>
|
||||
# Provide a non-root user to run the process
|
||||
# Add Logstash itself and set permissions
|
||||
<% if image_flavor == 'full' || image_flavor == 'oss' -%>
|
||||
RUN groupadd --gid 1000 logstash && \
|
||||
adduser --uid 1000 --gid 1000 \
|
||||
--home "/usr/share/logstash" \
|
||||
--no-create-home \
|
||||
logstash && \
|
||||
<% else -%><%# 'wolfi' -%>
|
||||
RUN addgroup -g 1000 logstash && \
|
||||
adduser -u 1000 -G logstash \
|
||||
--disabled-password \
|
||||
|
@ -149,95 +75,54 @@ RUN addgroup -g 1000 logstash && \
|
|||
--home "/usr/share/logstash" \
|
||||
--shell "/sbin/nologin" \
|
||||
--no-create-home \
|
||||
logstash
|
||||
<% else -%>
|
||||
RUN groupadd --gid 1000 logstash && \
|
||||
adduser --uid 1000 --gid 1000 --home /usr/share/logstash --no-create-home logstash
|
||||
logstash && \
|
||||
<% end -%>
|
||||
|
||||
# Add Logstash itself.
|
||||
RUN curl -Lo - <%= url_root %>/<%= tarball %> | \
|
||||
curl -Lo - <%= url_root %>/<%= tarball %> | \
|
||||
tar zxf - -C /usr/share && \
|
||||
mv /usr/share/logstash-<%= elastic_version %> /usr/share/logstash && \
|
||||
chown --recursive logstash:logstash /usr/share/logstash/ && \
|
||||
chown -R logstash:root /usr/share/logstash && \
|
||||
chmod -R g=u /usr/share/logstash && \
|
||||
mkdir /licenses/ && \
|
||||
mkdir /licenses && \
|
||||
mv /usr/share/logstash/NOTICE.TXT /licenses/NOTICE.TXT && \
|
||||
mv /usr/share/logstash/LICENSE.txt /licenses/LICENSE.txt && \
|
||||
find /usr/share/logstash -type d -exec chmod g+s {} \; && \
|
||||
ln -s /usr/share/logstash /opt/logstash
|
||||
|
||||
WORKDIR /usr/share/logstash
|
||||
ENV ELASTIC_CONTAINER true
|
||||
ENV PATH=/usr/share/logstash/bin:$PATH
|
||||
|
||||
# Provide a minimal configuration, so that simple invocations will provide
|
||||
# a good experience.
|
||||
<% if image_flavor == 'oss' -%>
|
||||
COPY config/logstash-oss.yml config/logstash.yml
|
||||
<% else -%>
|
||||
COPY config/logstash-full.yml config/logstash.yml
|
||||
<% end -%>
|
||||
COPY config/pipelines.yml config/log4j2.properties config/log4j2.file.properties config/
|
||||
COPY pipeline/default.conf pipeline/logstash.conf
|
||||
|
||||
RUN chown --recursive logstash:root config/ pipeline/
|
||||
# Ensure Logstash gets the correct locale by default.
|
||||
ENV LANG=<%= locale %> LC_ALL=<%= locale %>
|
||||
|
||||
<% if image_flavor == 'wolfi' -%>
|
||||
COPY --from=builder-env2yaml /tmp/go/src/env2yaml/env2yaml /usr/local/bin/env2yaml
|
||||
<% else -%>
|
||||
COPY env2yaml/env2yaml-amd64 env2yaml/env2yaml-arm64 env2yaml/
|
||||
# Copy over the appropriate env2yaml artifact
|
||||
RUN set -eux; env2yamlarch="$(<%= arch_command %>)"; \
|
||||
case "${env2yamlarch}" in \
|
||||
'x86_64'|'amd64') \
|
||||
env2yamlarch=amd64; \
|
||||
;; \
|
||||
'aarch64'|'arm64') \
|
||||
env2yamlarch=arm64; \
|
||||
;; \
|
||||
*) echo >&2 "error: unsupported architecture '$env2yamlarch'"; exit 1 ;; \
|
||||
esac; \
|
||||
mkdir -p /usr/local/bin; \
|
||||
cp env2yaml/env2yaml-${env2yamlarch} /usr/local/bin/env2yaml; \
|
||||
rm -rf env2yaml
|
||||
COPY --chown=logstash:root config/pipelines.yml config/log4j2.properties config/log4j2.file.properties /usr/share/logstash/config/
|
||||
<% if image_flavor == 'oss' -%>
|
||||
COPY --chown=logstash:root config/logstash-oss.yml /usr/share/logstash/config/logstash.yml
|
||||
<% else -%><%# 'full', 'wolfi' -%>
|
||||
COPY --chown=logstash:root config/logstash-full.yml /usr/share/logstash/config/logstash.yml
|
||||
<% end -%>
|
||||
# Place the startup wrapper script.
|
||||
COPY bin/docker-entrypoint /usr/local/bin/
|
||||
COPY --chown=logstash:root pipeline/default.conf /usr/share/logstash/pipeline/logstash.conf
|
||||
COPY --chmod=0755 bin/docker-entrypoint /usr/local/bin/
|
||||
|
||||
RUN chmod 0755 /usr/local/bin/docker-entrypoint
|
||||
<%# End image_flavor 'full', oss', 'ubi8', 'wolfi' %>
|
||||
<% end -%>
|
||||
WORKDIR /usr/share/logstash
|
||||
|
||||
USER 1000
|
||||
|
||||
EXPOSE 9600 5044
|
||||
|
||||
<% if image_flavor != 'ironbank' -%>
|
||||
LABEL org.label-schema.schema-version="1.0" \
|
||||
org.label-schema.vendor="Elastic" \
|
||||
org.opencontainers.image.vendor="Elastic" \
|
||||
LABEL org.label-schema.build-date=<%= created_date %> \
|
||||
org.label-schema.license="<%= license %>" \
|
||||
org.label-schema.name="logstash" \
|
||||
org.opencontainers.image.title="logstash" \
|
||||
org.label-schema.version="<%= elastic_version %>" \
|
||||
org.opencontainers.image.version="<%= elastic_version %>" \
|
||||
org.label-schema.schema-version="1.0" \
|
||||
org.label-schema.url="https://www.elastic.co/products/logstash" \
|
||||
org.label-schema.vcs-url="https://github.com/elastic/logstash" \
|
||||
org.label-schema.license="<%= license %>" \
|
||||
org.opencontainers.image.licenses="<%= license %>" \
|
||||
org.label-schema.vendor="Elastic" \
|
||||
org.label-schema.version="<%= elastic_version %>" \
|
||||
org.opencontainers.image.created=<%= created_date %> \
|
||||
org.opencontainers.image.description="Logstash is a free and open server-side data processing pipeline that ingests data from a multitude of sources, transforms it, and then sends it to your favorite 'stash.'" \
|
||||
org.label-schema.build-date=<%= created_date %> \
|
||||
<% if image_flavor == 'ubi8' -%> license="<%= license %>" \
|
||||
org.opencontainers.image.licenses="<%= license %>" \
|
||||
org.opencontainers.image.title="logstash" \
|
||||
org.opencontainers.image.vendor="Elastic" \
|
||||
org.opencontainers.image.version="<%= elastic_version %>" \
|
||||
description="Logstash is a free and open server-side data processing pipeline that ingests data from a multitude of sources, transforms it, and then sends it to your favorite 'stash.'" \
|
||||
name="logstash" \
|
||||
license="<%= license %>" \
|
||||
maintainer="info@elastic.co" \
|
||||
name="logstash" \
|
||||
summary="Logstash is a free and open server-side data processing pipeline that ingests data from a multitude of sources, transforms it, and then sends it to your favorite 'stash.'" \
|
||||
vendor="Elastic" \
|
||||
<% end -%>
|
||||
org.opencontainers.image.created=<%= created_date %>
|
||||
<% end -%>
|
||||
vendor="Elastic"
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint"]
|
||||
|
|
docker/templates/IronbankDockerfile.erb (new file, 65 lines)
|
@ -0,0 +1,65 @@
|
|||
# This Dockerfile was generated from templates/IronbankDockerfile.erb
|
||||
|
||||
ARG BASE_REGISTRY=registry1.dso.mil
|
||||
ARG BASE_IMAGE=ironbank/redhat/ubi/ubi9
|
||||
ARG BASE_TAG=9.5
|
||||
ARG LOGSTASH_VERSION=<%= elastic_version %>
|
||||
ARG GOLANG_VERSION=1.21.8
|
||||
|
||||
# stage 1: build env2yaml
|
||||
FROM ${BASE_REGISTRY}/google/golang/ubi9/golang-1.21:${GOLANG_VERSION} AS env2yaml
|
||||
|
||||
ENV GOPATH=/go
|
||||
|
||||
COPY scripts/go /go
|
||||
|
||||
USER root
|
||||
|
||||
RUN dnf-3 -y upgrade && dnf-3 install -y git && \
|
||||
cd /go/src/env2yaml && \
|
||||
go build
|
||||
|
||||
# Final stage
|
||||
FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}
|
||||
|
||||
ARG LOGSTASH_VERSION
|
||||
|
||||
ENV ELASTIC_CONTAINER true
|
||||
ENV PATH=/usr/share/logstash/bin:$PATH
|
||||
|
||||
WORKDIR /usr/share
|
||||
|
||||
COPY --from=env2yaml /go/src/env2yaml/env2yaml /usr/local/bin/env2yaml
|
||||
COPY scripts/config/* config/
|
||||
COPY scripts/pipeline/default.conf pipeline/logstash.conf
|
||||
COPY scripts/bin/docker-entrypoint /usr/local/bin/
|
||||
COPY logstash-${LOGSTASH_VERSION}-linux-x86_64.tar.gz /tmp/logstash.tar.gz
|
||||
|
||||
RUN dnf -y upgrade && \
|
||||
dnf install -y procps findutils tar gzip which shadow-utils && \
|
||||
dnf clean all && \
|
||||
groupadd --gid 1000 logstash && \
|
||||
adduser --uid 1000 --gid 1000 --home-dir /usr/share/logstash --no-create-home logstash && \
|
||||
tar -zxf /tmp/logstash.tar.gz -C /usr/share/ && \
|
||||
mv /usr/share/logstash-${LOGSTASH_VERSION} /usr/share/logstash && \
|
||||
chown -R 1000:0 /usr/share/logstash && \
|
||||
chown --recursive logstash:logstash /usr/share/logstash/ && \
|
||||
chown -R logstash:root /usr/share/logstash config/ pipeline/ && \
|
||||
chmod -R g=u /usr/share/logstash && \
|
||||
mv config/* /usr/share/logstash/config && \
|
||||
mv pipeline /usr/share/logstash/pipeline && \
|
||||
mkdir /licenses/ && \
|
||||
mv /usr/share/logstash/NOTICE.TXT /licenses/NOTICE.TXT && \
|
||||
mv /usr/share/logstash/LICENSE.txt /licenses/LICENSE.txt && \
|
||||
ln -s /usr/share/logstash /opt/logstash && \
|
||||
chmod 0755 /usr/local/bin/docker-entrypoint && \
|
||||
rmdir config && \
|
||||
rm /tmp/logstash.tar.gz
|
||||
|
||||
WORKDIR /usr/share/logstash
|
||||
|
||||
USER 1000
|
||||
|
||||
EXPOSE 9600 5044
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint"]
|
docs/docset.yml (new file, 46 lines)
|
@ -0,0 +1,46 @@
|
|||
project: 'Logstash'
|
||||
cross_links:
|
||||
- beats
|
||||
- docs-content
|
||||
- ecs
|
||||
- elasticsearch
|
||||
- integration-docs
|
||||
- logstash-docs-md
|
||||
- search-ui
|
||||
toc:
|
||||
- toc: reference
|
||||
- toc: release-notes
|
||||
- toc: extend
|
||||
subs:
|
||||
logstash-ref: "https://www.elastic.co/guide/en/logstash/current"
|
||||
ecloud: "Elastic Cloud"
|
||||
esf: "Elastic Serverless Forwarder"
|
||||
ess: "Elasticsearch Service"
|
||||
serverless-full: "Elastic Cloud Serverless"
|
||||
serverless-short: "Serverless"
|
||||
es-serverless: "Elasticsearch Serverless"
|
||||
agent: "Elastic Agent"
|
||||
fleet: "Fleet"
|
||||
integrations: "Integrations"
|
||||
stack: "Elastic Stack"
|
||||
xpack: "X-Pack"
|
||||
es: "Elasticsearch"
|
||||
kib: "Kibana"
|
||||
ls: "Logstash"
|
||||
beats: "Beats"
|
||||
filebeat: "Filebeat"
|
||||
metricbeat: "Metricbeat"
|
||||
winlogbeat: "Winlogbeat"
|
||||
security: "X-Pack security"
|
||||
security-features: "security features"
|
||||
monitoring: "X-Pack monitoring"
|
||||
monitor-features: "monitoring features"
|
||||
stack-monitor-features: "Elastic Stack monitoring features"
|
||||
ilm: "index lifecycle management"
|
||||
ilm-cap: "Index lifecycle management"
|
||||
ilm-init: "ILM"
|
||||
dlm: "data lifecycle management"
|
||||
dlm-init: "DLM"
|
||||
stack-version: "9.0.0"
|
||||
major-version: "9.x"
|
||||
docker-repo: "docker.elastic.co/logstash/logstash"
|
docs/extend/codec-new-plugin.md (new file, 636 lines)
|
@ -0,0 +1,636 @@
|
|||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/codec-new-plugin.html
|
||||
---
|
||||
|
||||
# How to write a Logstash codec plugin [codec-new-plugin]
|
||||
|
||||
To develop a new codec for Logstash, build a self-contained Ruby gem whose source code lives in its own GitHub repository. The Ruby gem can then be hosted and shared on RubyGems.org. You can use the example codec implementation as a starting point. (If you’re unfamiliar with Ruby, you can find an excellent quickstart guide at [https://www.ruby-lang.org/en/documentation/quickstart/](https://www.ruby-lang.org/en/documentation/quickstart/).)
|
||||
|
||||
## Get started [_get_started_2]
|
||||
|
||||
Let’s step through creating a codec plugin using the [example codec plugin](https://github.com/logstash-plugins/logstash-codec-example/).
|
||||
|
||||
### Create a GitHub repo for your new plugin [_create_a_github_repo_for_your_new_plugin_2]
|
||||
|
||||
Each Logstash plugin lives in its own GitHub repository. To create a new repository for your plugin:
|
||||
|
||||
1. Log in to GitHub.
|
||||
2. Click the **Repositories** tab. You’ll see a list of other repositories you’ve forked or contributed to.
|
||||
3. Click the green **New** button in the upper right.
|
||||
4. Specify the following settings for your new repo:
|
||||
|
||||
* **Repository name** — a unique name of the form `logstash-codec-pluginname`.
|
||||
* **Public or Private** — your choice, but the repository must be Public if you want to submit it as an official plugin.
|
||||
* **Initialize this repository with a README** — enables you to immediately clone the repository to your computer.
|
||||
|
||||
5. Click **Create Repository**.
|
||||
|
||||
|
||||
### Use the plugin generator tool [_use_the_plugin_generator_tool_2]
|
||||
|
||||
You can create your own Logstash plugin in seconds! The `generate` subcommand of `bin/logstash-plugin` creates the foundation for a new Logstash plugin with templatized files. It creates the correct directory structure, gemspec files, and dependencies so you can start adding custom code to process data with Logstash.
|
||||
|
||||
For more information, see [Generating plugins](/reference/plugin-generator.md)
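If the generator in your Logstash installation supports the codec type, an invocation along the following lines should scaffold the plugin for you (the flag names follow the plugin generator reference; the path is only an example):

```sh
# Hypothetical invocation -- check `bin/logstash-plugin generate --help`
# for the plugin types and flags supported by your Logstash version.
bin/logstash-plugin generate --type codec --name mypluginname --path ~/ws/plugins
```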
|
||||
|
||||
|
||||
### Copy the codec code [_copy_the_codec_code]
|
||||
|
||||
Alternatively, you can start from the example codec repository we host on GitHub.
|
||||
|
||||
1. **Clone your plugin.** Replace `GITUSERNAME` with your GitHub username, and `MYPLUGINNAME` with your plugin name.

* `git clone https://github.com/GITUSERNAME/logstash-codec-MYPLUGINNAME.git`

* alternatively, via ssh: `git clone git@github.com:GITUSERNAME/logstash-codec-MYPLUGINNAME.git`
|
||||
|
||||
* `cd logstash-codec-MYPLUGINNAME`
|
||||
|
||||
2. **Clone the codec plugin example and copy it to your plugin branch.**
|
||||
|
||||
You don’t want to include the example .git directory or its contents, so delete it before you copy the example.
|
||||
|
||||
* `cd /tmp`
|
||||
* `git clone https://github.com/logstash-plugins/logstash-codec-example.git`
|
||||
* `cd logstash-codec-example`
|
||||
* `rm -rf .git`
|
||||
* `cp -R * /path/to/logstash-codec-mypluginname/`
|
||||
|
||||
3. **Rename the following files to match the name of your plugin.**
|
||||
|
||||
* `logstash-codec-example.gemspec`
|
||||
* `example.rb`
|
||||
* `example_spec.rb`
|
||||
|
||||
```txt
|
||||
cd /path/to/logstash-codec-mypluginname
|
||||
mv logstash-codec-example.gemspec logstash-codec-mypluginname.gemspec
|
||||
mv lib/logstash/codecs/example.rb lib/logstash/codecs/mypluginname.rb
|
||||
mv spec/codecs/example_spec.rb spec/codecs/mypluginname_spec.rb
|
||||
```
|
||||
|
||||
|
||||
Your file structure should look like this:
|
||||
|
||||
```txt
|
||||
$ tree logstash-codec-mypluginname
|
||||
├── Gemfile
|
||||
├── LICENSE
|
||||
├── README.md
|
||||
├── Rakefile
|
||||
├── lib
|
||||
│ └── logstash
|
||||
│ └── codecs
|
||||
│ └── mypluginname.rb
|
||||
├── logstash-codec-mypluginname.gemspec
|
||||
└── spec
|
||||
└── codecs
|
||||
└── mypluginname_spec.rb
|
||||
```
|
||||
|
||||
For more information about the Ruby gem file structure and an excellent walkthrough of the Ruby gem creation process, see [http://timelessrepo.com/making-ruby-gems](http://timelessrepo.com/making-ruby-gems)
|
||||
|
||||
|
||||
### See what your plugin looks like [_see_what_your_plugin_looks_like_2]
|
||||
|
||||
Before we dive into the details, open up the plugin file in your favorite text editor and take a look.
|
||||
|
||||
```ruby
|
||||
require "logstash/codecs/base"
|
||||
require "logstash/codecs/line"
|
||||
|
||||
# Add any asciidoc formatted documentation here
|
||||
class LogStash::Codecs::Example < LogStash::Codecs::Base
|
||||
|
||||
# This example codec will append a string to the message field
|
||||
# of an event, either in the decoding or encoding methods
|
||||
#
|
||||
# This is only intended to be used as an example.
|
||||
#
|
||||
# input {
|
||||
# stdin { codec => example }
|
||||
# }
|
||||
#
|
||||
# or
|
||||
#
|
||||
# output {
|
||||
# stdout { codec => example }
|
||||
# }
|
||||
config_name "example"
|
||||
|
||||
# Append a string to the message
|
||||
config :append, :validate => :string, :default => ', Hello World!'
|
||||
|
||||
public
|
||||
def register
|
||||
@lines = LogStash::Codecs::Line.new
|
||||
@lines.charset = "UTF-8"
|
||||
end
|
||||
|
||||
public
|
||||
def decode(data)
|
||||
@lines.decode(data) do |line|
|
||||
replace = { "message" => line["message"].to_s + @append }
|
||||
yield LogStash::Event.new(replace)
|
||||
end
|
||||
end # def decode
|
||||
|
||||
public
|
||||
def encode(event)
|
||||
@on_event.call(event, event.get("message").to_s + @append + NL)
|
||||
end # def encode
|
||||
|
||||
end # class LogStash::Codecs::Example
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Coding codec plugins [_coding_codec_plugins]
|
||||
|
||||
Now let’s take a line-by-line look at the example plugin.
|
||||
|
||||
### `require` Statements [_require_statements_2]
|
||||
|
||||
Logstash codec plugins require parent classes defined in `logstash/codecs/base` and `logstash/namespace`:
|
||||
|
||||
```ruby
|
||||
require "logstash/codecs/base"
|
||||
require "logstash/namespace"
|
||||
```
|
||||
|
||||
Of course, the plugin you build may depend on other code, or even gems. Just put them here along with these Logstash dependencies.
|
||||
|
||||
|
||||
|
||||
## Plugin Body [_plugin_body_2]
|
||||
|
||||
Let’s go through the various elements of the plugin itself.
|
||||
|
||||
### `class` Declaration [_class_declaration_2]
|
||||
|
||||
The codec plugin class should be a subclass of `LogStash::Codecs::Base`:
|
||||
|
||||
```ruby
|
||||
class LogStash::Codecs::Example < LogStash::Codecs::Base
|
||||
```
|
||||
|
||||
The class name should closely mirror the plugin name, for example:
|
||||
|
||||
```ruby
|
||||
LogStash::Codecs::Example
|
||||
```
|
||||
|
||||
|
||||
### `config_name` [_config_name_2]
|
||||
|
||||
```ruby
|
||||
config_name "example"
|
||||
```
|
||||
|
||||
This is the name your plugin will call inside the codec configuration block.
|
||||
|
||||
If you set `config_name "example"` in your plugin code, the corresponding Logstash configuration block would need to look like this:
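Based on the usage sketched in the example codec's class comment above, a minimal configuration would be something like this (the codec attached to an `stdin` input):

```ruby
input {
  stdin { codec => example }
}
```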
|
||||
|
||||
|
||||
|
||||
## Configuration Parameters [_configuration_parameters_2]
|
||||
|
||||
```ruby
|
||||
config :variable_name, :validate => :variable_type, :default => "Default value", :required => boolean, :deprecated => boolean, :obsolete => string
|
||||
```
|
||||
|
||||
The configuration section, or `config` section, allows you to define as many (or as few) parameters as are needed to enable Logstash to process events; a short combined example follows the attribute list below.
|
||||
|
||||
There are several configuration attributes:
|
||||
|
||||
* `:validate` - allows you to enforce passing a particular data type to Logstash for this configuration option, such as `:string`, `:password`, `:boolean`, `:number`, `:array`, `:hash`, `:path` (a file-system path), `:uri`, `:codec` (since 1.2.0), `:bytes`. Note that this also works as a coercion in that if you specify "true" for a boolean (even though technically a string), it will become a valid boolean in the config. This coercion works for the `:number` type as well, where "1.2" becomes a float and "22" is an integer.
|
||||
* `:default` - lets you specify a default value for a parameter
|
||||
* `:required` - whether or not this parameter is mandatory (a Boolean `true` or `false`)
* `:list` - whether or not this value should be a list of values. Will typecheck the list members, and convert scalars to one element lists. Note that this mostly obviates the array type, though if you need lists of complex objects that will be more suitable.
|
||||
* `:deprecated` - informational (also a Boolean `true` or `false`)
|
||||
* `:obsolete` - used to declare that a given setting has been removed and is no longer functioning. The idea is to provide an informed upgrade path to users who are still using a now-removed setting.
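For a combined illustration, here is a sketch (hypothetical option names, reusing the style of the `append` option from the example codec) of declarations that use several of these attributes at once:

```ruby
# A string option with a default that users may override.
config :append, :validate => :string, :default => ', Hello World!', :required => false
# A deprecated option kept only to point users at its replacement.
config :suffix, :validate => :string, :deprecated => "Use 'append' instead"
```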
|
||||
|
||||
|
||||
## Plugin Methods [_plugin_methods_2]
|
||||
|
||||
Logstash codecs must implement the `register` method, and the `decode` method or the `encode` method (or both).
|
||||
|
||||
### `register` Method [_register_method_2]
|
||||
|
||||
```ruby
|
||||
public
|
||||
def register
|
||||
end # def register
|
||||
```
|
||||
|
||||
The Logstash `register` method is like an `initialize` method. It was originally created to enforce having `super` called, preventing headaches for newbies. (Note: It may go away in favor of `initialize`, in conjunction with some enforced testing to ensure `super` is called.)
|
||||
|
||||
`public` means the method can be called anywhere, not just within the class. This is the default behavior for methods in Ruby, but it is specified explicitly here anyway.
|
||||
|
||||
You can also assign instance variables here (variables prepended by `@`). Configuration variables are now in scope as instance variables, like `@message`.
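As a small, hypothetical sketch mirroring the example codec above, `register` is a natural place to turn configuration options into derived instance state:

```ruby
public
def register
  # `@append` is provided by the `config :append, ...` declaration;
  # derive the helper line codec and a frozen suffix once, up front.
  @lines = LogStash::Codecs::Line.new
  @suffix = @append.to_s.freeze
end
```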
|
||||
|
||||
|
||||
### `decode` Method [_decode_method]
|
||||
|
||||
```ruby
|
||||
public
|
||||
def decode(data)
|
||||
@lines.decode(data) do |line|
|
||||
replace = { "message" => line["message"].to_s + @append }
|
||||
yield LogStash::Event.new(replace)
|
||||
end
|
||||
end # def decode
|
||||
```
|
||||
|
||||
The codec’s `decode` method is where data coming in from an input is transformed into an event. There are complex examples like the [collectd](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/lib/logstash/codecs/collectd.rb#L386-L484) codec, and simpler examples like the [spool](https://github.com/logstash-plugins/logstash-codec-spool/blob/main/lib/logstash/codecs/spool.rb#L11-L16) codec.
|
||||
|
||||
There must be a `yield` statement as part of the `decode` method which will return decoded events to the pipeline.
|
||||
|
||||
|
||||
### `encode` Method [_encode_method]
|
||||
|
||||
```ruby
|
||||
public
|
||||
def encode(event)
|
||||
@on_event.call(event, event.get("message").to_s + @append + NL)
|
||||
end # def encode
|
||||
```
|
||||
|
||||
The `encode` method takes an event and serializes it (*encodes*) into another format. Good examples of `encode` methods include the simple [plain](https://github.com/logstash-plugins/logstash-codec-plain/blob/main/lib/logstash/codecs/plain.rb#L39-L46) codec, the slightly more involved [msgpack](https://github.com/logstash-plugins/logstash-codec-msgpack/blob/main/lib/logstash/codecs/msgpack.rb#L38-L46) codec, and even an [avro](https://github.com/logstash-plugins/logstash-codec-avro/blob/main/lib/logstash/codecs/avro.rb#L38-L45) codec.
|
||||
|
||||
In most cases, your `encode` method should have an `@on_event.call()` statement. This call will output data per event in the described way.
|
||||
|
||||
|
||||
|
||||
## Building the Plugin [_building_the_plugin_2]
|
||||
|
||||
At this point in the process you have coded your plugin and are ready to build a Ruby Gem from it. The following information will help you complete the process.
|
||||
|
||||
### External dependencies [_external_dependencies_2]
|
||||
|
||||
A `require` statement in Ruby is used to include necessary code. In some cases your plugin may require additional files. For example, the collectd plugin [uses](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/lib/logstash/codecs/collectd.rb#L148) the `types.db` file provided by collectd. In the main directory of your plugin, a file called `vendor.json` is where these files are described.
|
||||
|
||||
The `vendor.json` file contains an array of JSON objects, each describing a file dependency. This example comes from the [collectd](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/vendor.json) codec plugin:
|
||||
|
||||
```txt
|
||||
[{
|
||||
"sha1": "a90fe6cc53b76b7bdd56dc57950d90787cb9c96e",
|
||||
"url": "http://collectd.org/files/collectd-5.4.0.tar.gz",
|
||||
"files": [ "/src/types.db" ]
|
||||
}]
|
||||
```
|
||||
|
||||
* `sha1` is the sha1 signature used to verify the integrity of the file referenced by `url`.
|
||||
* `url` is the address from where Logstash will download the file.
|
||||
* `files` is an optional array of files to extract from the downloaded file. Note that while tar archives can use absolute or relative paths, treat them as absolute in this array. If `files` is not present, all files will be uncompressed and extracted into the vendor directory.
|
||||
|
||||
Another example of the `vendor.json` file is the [`geoip` filter](https://github.com/logstash-plugins/logstash-filter-geoip/blob/main/vendor.json)
|
||||
|
||||
The process used to download these dependencies is to call `rake vendor`. This will be discussed further in the testing section of this document.
|
||||
|
||||
Another kind of external dependency is on jar files. This will be described in the "Add a `gemspec` file" section.
|
||||
|
||||
|
||||
### Deprecated features [_deprecated_features_2]
|
||||
|
||||
As a plugin evolves, an option or feature may no longer serve the intended purpose, and the developer may want to *deprecate* its usage. Deprecation warns users about the option’s status, so they aren’t caught by surprise when it is removed in a later release.
|
||||
|
||||
{{ls}} 7.6 introduced a *deprecation logger* to make handling those situations easier. You can use the [adapter](https://github.com/logstash-plugins/logstash-mixin-deprecation_logger_support) to ensure that your plugin can use the deprecation logger while still supporting older versions of {{ls}}. See the [readme](https://github.com/logstash-plugins/logstash-mixin-deprecation_logger_support/blob/main/README.md) for more information and for instructions on using the adapter.
|
||||
|
||||
Deprecations are noted in the `logstash-deprecation.log` file in the `log` directory.
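As a rough sketch, and assuming the adapter works as its README describes (the mixin exposing a `deprecation_logger` object with a `deprecated` method), usage inside a codec might look like this:

```ruby
require "logstash/plugin_mixins/deprecation_logger_support"

class LogStash::Codecs::Example < LogStash::Codecs::Base
  include LogStash::PluginMixins::DeprecationLoggerSupport

  def register
    # Warn once at startup if a soon-to-be-removed option is still in use;
    # the option name here is purely illustrative.
    deprecation_logger.deprecated("'old_append' is deprecated; use 'append' instead") if @old_append
  end
end
```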
|
||||
|
||||
|
||||
### Add a Gemfile [_add_a_gemfile_2]
|
||||
|
||||
Gemfiles allow Ruby’s Bundler to maintain the dependencies for your plugin. Currently, all we’ll need is the Logstash gem, for testing, but if you require other gems, you should add them in here.
|
||||
|
||||
::::{tip}
|
||||
See [Bundler’s Gemfile page](http://bundler.io/gemfile.html) for more details.
|
||||
::::
|
||||
|
||||
|
||||
```ruby
|
||||
source 'https://rubygems.org'
|
||||
gemspec
|
||||
gem "logstash", :github => "elastic/logstash", :branch => "master"
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Add a `gemspec` file [_add_a_gemspec_file_2]
|
||||
|
||||
Gemspecs define the Ruby gem which will be built and contain your plugin.
|
||||
|
||||
::::{tip}
|
||||
More information can be found on the [Rubygems Specification page](http://guides.rubygems.org/specification-reference/).
|
||||
::::
|
||||
|
||||
|
||||
```ruby
|
||||
Gem::Specification.new do |s|
|
||||
s.name = 'logstash-codec-example'
|
||||
s.version = '0.1.0'
|
||||
s.licenses = ['Apache License (2.0)']
|
||||
s.summary = "This codec does x, y, z in Logstash"
|
||||
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
||||
s.authors = ["Elastic"]
|
||||
s.email = 'info@elastic.co'
|
||||
s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
|
||||
s.require_paths = ["lib"]
|
||||
|
||||
# Files
|
||||
s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
|
||||
# Tests
|
||||
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
||||
|
||||
# Special flag to let us know this is actually a logstash plugin
|
||||
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "codec" }
|
||||
|
||||
# Gem dependencies
|
||||
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
||||
s.add_development_dependency 'logstash-devutils'
|
||||
end
|
||||
```
|
||||
|
||||
It is appropriate to change these values to fit your plugin. In particular, `s.name` and `s.summary` should reflect your plugin’s name and behavior.
|
||||
|
||||
`s.licenses` and `s.version` are also important and will come into play when you are ready to publish your plugin.
|
||||
|
||||
Logstash and all its plugins are licensed under [Apache License, version 2 ("ALv2")](https://github.com/elastic/logstash/blob/main/LICENSE.txt). If you make your plugin publicly available via [RubyGems.org](http://rubygems.org), please make sure to have this line in your gemspec:
|
||||
|
||||
* `s.licenses = ['Apache License (2.0)']`
|
||||
|
||||
The gem version, designated by `s.version`, helps track changes to plugins over time. You should use [semver versioning](http://semver.org/) strategy for version numbers.
|
||||
|
||||
### Runtime and Development Dependencies [_runtime_and_development_dependencies_2]
|
||||
|
||||
At the bottom of the `gemspec` file is a section with a comment: `Gem dependencies`. This is where any other needed gems must be mentioned. If a gem is necessary for your plugin to function, it is a runtime dependency. If a gem is only used for testing, then it is a development dependency.
|
||||
|
||||
::::{note}
|
||||
You can also have versioning requirements for your dependencies—including other Logstash plugins:
|
||||
|
||||
```ruby
|
||||
# Gem dependencies
|
||||
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
||||
s.add_development_dependency 'logstash-devutils'
|
||||
```
|
||||
|
||||
This gemspec has a runtime dependency on the logstash-core-plugin-api and requires that it have a version number greater than or equal to version 1.60 and less than or equal to version 2.99.
|
||||
|
||||
::::
|
||||
|
||||
|
||||
::::{important}
|
||||
All plugins have a runtime dependency on the `logstash-core-plugin-api` gem, and a development dependency on `logstash-devutils`.
|
||||
::::
|
||||
|
||||
|
||||
|
||||
### Jar dependencies [_jar_dependencies_2]
|
||||
|
||||
In some cases, such as the [Elasticsearch output plugin](https://github.com/logstash-plugins/logstash-output-elasticsearch/blob/main/logstash-output-elasticsearch.gemspec#L22-L23), your code may depend on a jar file. In cases such as this, the dependency is added in the gemspec file in this manner:
|
||||
|
||||
```ruby
|
||||
# Jar dependencies
|
||||
s.requirements << "jar 'org.elasticsearch:elasticsearch', '5.0.0'"
|
||||
s.add_runtime_dependency 'jar-dependencies'
|
||||
```
|
||||
|
||||
With these both defined, the install process will search for the required jar file at [http://mvnrepository.com](http://mvnrepository.com) and download the specified version.
|
||||
|
||||
|
||||
|
||||
## Document your plugin [_document_your_plugin_2]
|
||||
|
||||
Documentation is an important part of your plugin. All plugin documentation is rendered and placed in the [Logstash Reference](/reference/index.md) and the [Versioned plugin docs](logstash-docs-md://vpr/integration-plugins.md).
|
||||
|
||||
See [Document your plugin](/extend/plugin-doc.md) for tips and guidelines.
|
||||
|
||||
|
||||
## Add Tests [_add_tests_2]
|
||||
|
||||
Logstash loves tests. Lots of tests. If you’re using your new codec plugin in a production environment, you’ll want to have some tests to ensure you are not breaking any existing functionality.
|
||||
|
||||
::::{note}
|
||||
A full exposition on RSpec is outside the scope of this document. Learn more about RSpec at [http://rspec.info](http://rspec.info)
|
||||
::::
|
||||
|
||||
|
||||
For help learning about tests and testing, look in the `spec/codecs/` directory of several other similar plugins.
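As a concrete starting point, a minimal, hypothetical spec for the example codec might look like the following; the file path and the expected string assume the default `append` value (", Hello World!") shown later in this guide:

```ruby
# spec/codecs/example_spec.rb -- a minimal, illustrative spec
require "logstash/devutils/rspec/spec_helper"
require "logstash/codecs/example"

describe LogStash::Codecs::Example do
  context "#decode" do
    it "appends the configured suffix to the incoming message" do
      subject.decode("Random output string") do |event|
        expect(event.get("message")).to eq("Random output string, Hello World!")
      end
    end
  end
end
```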
|
||||
|
||||
|
||||
## Clone and test! [_clone_and_test_2]
|
||||
|
||||
Now let’s start with a fresh clone of the plugin, build it and run the tests.
|
||||
|
||||
* **Clone your plugin into a temporary location.** Replace `GITUSERNAME` with your github username, and `MYPLUGINNAME` with your plugin name.
|
||||
|
||||
* `git clone https://github.com/GITUSERNAME/logstash-codec-MYPLUGINNAME.git`
|
||||
|
||||
* alternately, via ssh: `git clone git@github.com:GITUSERNAME/logstash-codec-MYPLUGINNAME.git`
|
||||
|
||||
* `cd logstash-codec-MYPLUGINNAME`
|
||||
|
||||
|
||||
Then, you’ll need to install your plugin’s dependencies with bundler:
|
||||
|
||||
```
|
||||
bundle install
|
||||
```
|
||||
|
||||
::::{important}
|
||||
If your plugin has an external file dependency described in `vendor.json`, you must download that dependency before running or testing. You can do this by running:
|
||||
|
||||
```
|
||||
rake vendor
|
||||
```
|
||||
|
||||
::::
|
||||
|
||||
|
||||
And finally, run the tests:
|
||||
|
||||
```
|
||||
bundle exec rspec
|
||||
```
|
||||
|
||||
You should see a success message, which looks something like this:
|
||||
|
||||
```
|
||||
Finished in 0.034 seconds
|
||||
1 example, 0 failures
|
||||
```
|
||||
|
||||
Hooray! You’re almost there! (Unless you saw failures… you should fix those first).
|
||||
|
||||
|
||||
## Building and Testing [_building_and_testing_2]
|
||||
|
||||
Now you’re ready to build your (well-tested) plugin into a Ruby gem.
|
||||
|
||||
### Build [_build_2]
|
||||
|
||||
You already have all the necessary ingredients, so let’s go ahead and run the build command:
|
||||
|
||||
```sh
|
||||
gem build logstash-codec-example.gemspec
|
||||
```
|
||||
|
||||
That’s it! Your gem should be built and be in the same path with the name
|
||||
|
||||
```sh
|
||||
logstash-codec-mypluginname-0.1.0.gem
|
||||
```
|
||||
|
||||
The `s.version` number from your gemspec file will provide the gem version, in this case, `0.1.0`.
|
||||
|
||||
|
||||
### Test installation [_test_installation_2]
|
||||
|
||||
You should test install your plugin into a clean installation of Logstash. Download the latest version from the [Logstash downloads page](https://www.elastic.co/downloads/logstash/).
|
||||
|
||||
1. Untar and cd in to the directory:
|
||||
|
||||
```sh
|
||||
curl -O https://download.elastic.co/logstash/logstash/logstash-9.0.0.tar.gz
|
||||
tar xzvf logstash-9.0.0.tar.gz
|
||||
cd logstash-9.0.0
|
||||
```
|
||||
|
||||
2. Using the plugin tool, we can install the gem we just built.
|
||||
|
||||
* Replace `/my/logstash/plugins` with the correct path to the gem for your environment, and `0.1.0` with the correct version number from the gemspec file.
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin install /my/logstash/plugins/logstash-codec-example/logstash-codec-example-0.1.0.gem
|
||||
```
|
||||
|
||||
* After running this, you should see feedback from Logstash that it was successfully installed:
|
||||
|
||||
```sh
|
||||
validating /my/logstash/plugins/logstash-codec-example/logstash-codec-example-0.1.0.gem >= 0
|
||||
Valid logstash plugin. Continuing...
|
||||
Successfully installed 'logstash-codec-example' with version '0.1.0'
|
||||
```
|
||||
|
||||
::::{tip}
|
||||
You can also use the Logstash plugin tool to determine which plugins are currently available:
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin list
|
||||
```
|
||||
|
||||
Depending on what you have installed, you might see a short or long list of plugins: inputs, codecs, filters and outputs.
|
||||
|
||||
::::
|
||||
|
||||
3. Now try running Logstash with a simple configuration passed in via the command-line, using the `-e` flag.
|
||||
|
||||
::::{note}
|
||||
Your results will depend on what your codec plugin is designed to do.
|
||||
::::
|
||||
|
||||
|
||||
```sh
|
||||
bin/logstash -e 'input { stdin{ codec => example{}} } output {stdout { codec => rubydebug }}'
|
||||
```
|
||||
|
||||
The example codec plugin appends the contents of the `append` setting to each message (by default, it appends ", Hello World!").
|
||||
|
||||
After starting Logstash, type something, for example "Random output string". The resulting output message field contents should be, "Random output string, Hello World!":
|
||||
|
||||
```sh
|
||||
Random output string
|
||||
{
|
||||
"message" => "Random output string, Hello World!",
|
||||
"@version" => "1",
|
||||
"@timestamp" => "2015-01-27T19:17:18.932Z",
|
||||
"host" => "cadenza"
|
||||
}
|
||||
```
|
||||
|
||||
Feel free to experiment and test this by changing the `append` parameter:
|
||||
|
||||
```sh
|
||||
bin/logstash -e 'input { stdin{ codec => example{ append => ", I am appending this!" }} } output {stdout { codec => rubydebug }}'
|
||||
```
|
||||
|
||||
Congratulations! You’ve built, deployed and successfully run a Logstash codec.
|
||||
|
||||
|
||||
|
||||
## Submitting your plugin to [RubyGems.org](http://rubygems.org) and [logstash-plugins](https://github.com/logstash-plugins) [_submitting_your_plugin_to_rubygems_orghttprubygems_org_and_logstash_pluginshttpsgithub_comlogstash_plugins_2]
|
||||
|
||||
Logstash uses [RubyGems.org](http://rubygems.org) as its repository for all plugin artifacts. Once you have developed your new plugin, you can make it available to Logstash users by simply publishing it to RubyGems.org.
|
||||
|
||||
### Licensing [_licensing_2]
|
||||
|
||||
Logstash and all its plugins are licensed under [Apache License, version 2 ("ALv2")](https://github.com/elastic/logstash/blob/main/LICENSE.txt). If you make your plugin publicly available via [RubyGems.org](http://rubygems.org), please make sure to have this line in your gemspec:
|
||||
|
||||
* `s.licenses = ['Apache License (2.0)']`
|
||||
|
||||
|
||||
### Publishing to [RubyGems.org](http://rubygems.org) [_publishing_to_rubygems_orghttprubygems_org_2]
|
||||
|
||||
To begin, you’ll need an account on RubyGems.org
|
||||
|
||||
* [Sign-up for a RubyGems account](https://rubygems.org/sign_up).
|
||||
|
||||
After creating an account, [obtain](http://guides.rubygems.org/rubygems-org-api/#api-authorization) an API key from RubyGems.org. By default, RubyGems uses the file `~/.gem/credentials` to store your API key. These credentials will be used to publish the gem. Replace `username` and `password` with the credentials you created at RubyGems.org:
|
||||
|
||||
```sh
|
||||
curl -u username:password https://rubygems.org/api/v1/api_key.yaml > ~/.gem/credentials
|
||||
chmod 0600 ~/.gem/credentials
|
||||
```
|
||||
|
||||
Before proceeding, make sure you have the right version in your gemspec file and commit your changes.
|
||||
|
||||
* `s.version = '0.1.0'`
|
||||
|
||||
To publish version 0.1.0 of your new logstash gem:
|
||||
|
||||
```sh
|
||||
bundle install
|
||||
bundle exec rake vendor
|
||||
bundle exec rspec
|
||||
bundle exec rake publish_gem
|
||||
```
|
||||
|
||||
::::{note}
|
||||
Executing `rake publish_gem`:
|
||||
|
||||
1. Reads the version from the gemspec file (`s.version = '0.1.0'`)
|
||||
2. Checks in your local repository if a tag exists for that version. If the tag already exists, it aborts the process. Otherwise, it creates a new version tag in your local repository.
|
||||
3. Builds the gem
|
||||
4. Publishes the gem to RubyGems.org
|
||||
|
||||
::::
|
||||
|
||||
|
||||
That’s it! Your plugin is published! Logstash users can now install your plugin by running:
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin install logstash-codec-mypluginname
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Contributing your source code to [logstash-plugins](https://github.com/logstash-plugins) [_contributing_your_source_code_to_logstash_pluginshttpsgithub_comlogstash_plugins_2]
|
||||
|
||||
It is not required to contribute your source code to the [logstash-plugins](https://github.com/logstash-plugins) GitHub organization, but we always welcome new plugins!
|
||||
|
||||
### Benefits [_benefits_2]
|
||||
|
||||
Some of the many benefits of having your plugin in the logstash-plugins repository are:
|
||||
|
||||
* **Discovery.** Your plugin will appear in the [Logstash Reference](/reference/index.md), where Logstash users look first for plugins and documentation.
|
||||
* **Documentation.** Your plugin documentation will automatically be added to the [Logstash Reference](/reference/index.md).
|
||||
* **Testing.** With our testing infrastructure, your plugin will be continuously tested against current and future releases of Logstash. As a result, users will have the assurance that if incompatibilities arise, they will be quickly discovered and corrected.
|
||||
|
||||
|
||||
### Acceptance Guidelines [_acceptance_guidelines_2]
|
||||
|
||||
* **Code Review.** Your plugin must be reviewed by members of the community for coherence, quality, readability, stability and security.
|
||||
* **Tests.** Your plugin must contain tests to be accepted. These tests are also subject to code review for scope and completeness. It’s ok if you don’t know how to write tests — we will guide you. We are working on publishing a guide to creating tests for Logstash which will make it easier. In the meantime, you can refer to [http://betterspecs.org/](http://betterspecs.org/) for examples.
|
||||
|
||||
To begin migrating your plugin to logstash-plugins, simply create a new [issue](https://github.com/elastic/logstash/issues) in the Logstash repository. When the acceptance guidelines are completed, we will facilitate the move to the logstash-plugins organization using the recommended [github process](https://help.github.com/articles/transferring-a-repository/#transferring-from-a-user-to-an-organization).
|
docs/extend/community-maintainer.md (new file, 193 lines)
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/community-maintainer.html
|
||||
---
|
||||
|
||||
# Logstash Plugins Community Maintainer Guide [community-maintainer]
|
||||
|
||||
This document, to be read by new Maintainers, should explain their responsibilities. It was inspired by the [C4](http://rfc.zeromq.org/spec:22) document from the ZeroMQ project. This document is subject to change and suggestions through Pull Requests and issues are strongly encouraged.
|
||||
|
||||
|
||||
## Contribution Guidelines [_contribution_guidelines]
|
||||
|
||||
For general guidance around contributing to Logstash Plugins, see the [*Contributing to Logstash*](/extend/index.md) section.
|
||||
|
||||
|
||||
## Document Goals [_document_goals]
|
||||
|
||||
To help make the Logstash plugins community participation easy with positive feedback.
|
||||
|
||||
To increase diversity.
|
||||
|
||||
To reduce code review, merge and release dependencies on the core team by providing support and tools to the Community and Maintainers.
|
||||
|
||||
To support the natural life cycle of a plugin.
|
||||
|
||||
To codify the roles and responsibilities of: Maintainers and Contributors with specific focus on patch testing, code review, merging and release.
|
||||
|
||||
|
||||
## Development Workflow [_development_workflow]
|
||||
|
||||
All Issues and Pull Requests must be tracked using the Github issue tracker.
|
||||
|
||||
The plugin uses the [Apache 2.0 license](http://www.apache.org/licenses/LICENSE-2.0). Maintainers should check whether a patch introduces code which has an incompatible license. Patch ownership and copyright is defined in the Elastic [Contributor License Agreement](https://www.elastic.co/contributor-agreement) (CLA).
|
||||
|
||||
|
||||
### Terminology [_terminology_2]
|
||||
|
||||
A "Contributor" is a role a person assumes when providing a patch. Contributors will not have commit access to the repository. They need to sign the Elastic [Contributor License Agreement](https://www.elastic.co/contributor-agreement) before a patch can be reviewed. Contributors can add themselves to the plugin Contributor list.
|
||||
|
||||
A "Maintainer" is a role a person assumes when maintaining a plugin and keeping it healthy, including triaging issues, and reviewing and merging patches.
|
||||
|
||||
|
||||
### Patch Requirements [_patch_requirements]
|
||||
|
||||
A patch is a minimal and accurate answer to exactly one identified and agreed upon problem. It must conform to the [code style guidelines](https://github.com/elastic/logstash/blob/main/STYLE.md) and must include RSpec tests that verify the fitness of the solution.
|
||||
|
||||
A patch will be automatically tested by a CI system that will report on the Pull Request status.
|
||||
|
||||
A patch CLA will be automatically verified and reported on the Pull Request status.
|
||||
|
||||
A patch commit message has a single short (fewer than 50 characters) first line summarizing the change, a blank second line, and any additional lines as necessary for change explanation and rationale.
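For example, a commit message following this format might look like the following (the change it describes is hypothetical):

```
Fix handling of empty payloads in decode

Empty payloads raised a NoMethodError when the codec tried to split
the data. Guard against nil and empty input, and add a spec that
reproduces the failure.
```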
|
||||
|
||||
A patch is mergeable when it satisfies the above requirements and has been reviewed positively by at least one other person.
|
||||
|
||||
|
||||
### Development Process [_development_process]
|
||||
|
||||
A user will log an issue on the issue tracker describing the problem they face or observe with as much detail as possible.
|
||||
|
||||
To work on an issue, a Contributor forks the plugin repository and then works on their forked repository and submits a patch by creating a pull request back to the plugin.
|
||||
|
||||
Maintainers must not merge patches where the author has not signed the CLA.
|
||||
|
||||
Before a patch can be accepted it should be reviewed. Maintainers should merge accepted patches without delay.
|
||||
|
||||
Maintainers should not merge their own patches except in exceptional cases, such as non-responsiveness from other Maintainers or core team for an extended period (more than 2 weeks).
|
||||
|
||||
Reviewer’s comments should not be based on personal preferences.
|
||||
|
||||
The Maintainers should label Issues and Pull Requests.
|
||||
|
||||
Maintainers should involve the core team if help is needed to reach consensus.
|
||||
|
||||
Review non-source changes such as documentation in the same way as source code changes.
|
||||
|
||||
|
||||
### Branch Management [_branch_management]
|
||||
|
||||
The plugin has a main branch that always holds the latest in-progress version and should always build. Topic branches should be kept to a minimum.
|
||||
|
||||
|
||||
### Changelog Management [_changelog_management]
|
||||
|
||||
Every plugin should have a changelog (`CHANGELOG.md`). If not, please create one. When changes are made to a plugin, make sure to include a changelog entry under the respective version to document the change. The changelog should be easily understood from a user point of view. As we iterate and release plugins rapidly, users use the changelog as a mechanism for deciding whether to update.
|
||||
|
||||
Changes that are not user facing should be tagged as `internal:`. For example:
|
||||
|
||||
```markdown
|
||||
- internal: Refactored specs for better testing
|
||||
- config: Default timeout configuration changed from 10s to 5s
|
||||
```
|
||||
|
||||
|
||||
#### Detailed format of `CHANGELOG.md` [_detailed_format_of_changelog_md]
|
||||
|
||||
Sharing a similar `CHANGELOG.md` format across plugins eases readability for users. Please see the following annotated example, and see a concrete example in [logstash-filter-date](https://raw.githubusercontent.com/logstash-plugins/logstash-filter-date/main/CHANGELOG.md).
|
||||
|
||||
```markdown
|
||||
## 1.0.x <1>
|
||||
- change description <2>
|
||||
- tag: change description <3>
|
||||
- tag1,tag2: change description <4>
|
||||
- tag: Multi-line description <5>
|
||||
must be indented and can use
|
||||
additional markdown syntax
|
||||
<6>
|
||||
## 1.0.0 <7>
|
||||
[...]
|
||||
```
|
||||
|
||||
1. The latest version is the first line of `CHANGELOG.md`. Each version identifier should be a level-2 header using `##`
|
||||
2. One change description is described as a list item using a dash `-` aligned under the version identifier
|
||||
3. One change can be tagged by a word and suffixed by `:`. Common tags are `bugfix`, `feature`, `doc`, `test` or `internal`.
|
||||
4. One change can have multiple tags separated by a comma and suffixed by `:`
|
||||
5. A multi-line change description must be properly indented
|
||||
6. Please take care to **separate versions with an empty line**
|
||||
7. Previous version identifier
|
||||
|
||||
|
||||
|
||||
### Continuous Integration [_continuous_integration]
|
||||
|
||||
Plugins are set up with automated continuous integration (CI) environments, and there should be a corresponding badge on each Github page. If it’s missing, please contact the Logstash core team.
|
||||
|
||||
Every Pull Request opened automatically triggers a CI run. To conduct a manual run, comment “Jenkins, please test this.” on the Pull Request.
|
||||
|
||||
|
||||
## Versioning Plugins [_versioning_plugins]
|
||||
|
||||
Logstash core and its plugins have separate product development lifecycles. Hence the versioning and release strategy for the core and plugins do not have to be aligned. In fact, this was one of our goals during the great separation of plugins work in Logstash 1.5.
|
||||
|
||||
At times, there will be changes in core API in Logstash, which will require mass update of plugins to reflect the changes in core. However, this does not happen frequently.
|
||||
|
||||
For plugins, we would like to adhere to a versioning and release strategy that can better inform our users about any breaking changes to the Logstash configuration formats and functionality.
|
||||
|
||||
Plugin releases follow a three-place numbering scheme X.Y.Z, where X denotes a major release version that may break compatibility with existing configuration or functionality, Y denotes releases that include backward-compatible features, and Z denotes releases that include bug fixes and patches.
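For example, under this scheme a hypothetical plugin might move through versions as follows:

```
1.2.3 -> 1.2.4   bug fix only (Z)
1.2.4 -> 1.3.0   backward-compatible feature (Y)
1.3.0 -> 2.0.0   breaking change to configuration or behavior (X)
```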
|
||||
|
||||
|
||||
### Changing the version [_changing_the_version]
|
||||
|
||||
The version can be changed in the gemspec, which needs to be associated with a changelog entry. Following this, we can publish the gem to RubyGems.org manually. At this point, only the core developers can publish a gem.
|
||||
|
||||
|
||||
### Labeling [_labeling]
|
||||
|
||||
Labeling is a critical aspect of maintaining plugins. All issues in GitHub should be labeled correctly so that labels can:
|
||||
|
||||
* Provide good feedback to users/developers
|
||||
* Help prioritize changes
|
||||
* Be used in release notes
|
||||
|
||||
Most labels are self explanatory, but here’s a quick recap of few important labels:
|
||||
|
||||
* `bug`: Labels an issue as an unintentional defect
|
||||
* `needs details`: If the issue reporter has provided incomplete details, please ask them for more info and label the issue as `needs details`.
|
||||
* `missing cla`: Contributor License Agreement is missing and patch cannot be accepted without it
|
||||
* `adopt me`: Ask for help from the community to take over this issue
|
||||
* `enhancement`: New feature, not a bug fix
|
||||
* `needs tests`: Patch has no tests, and cannot be accepted without unit/integration tests
|
||||
* `docs`: Documentation related issue/PR
|
||||
|
||||
|
||||
## Logging [_logging]
|
||||
|
||||
Although it’s important not to bog down performance with excessive logging, debug level logs can be immensely helpful when diagnosing and troubleshooting issues with Logstash. Please remember to liberally add debug logs wherever it makes sense as users will be forever gracious.
|
||||
|
||||
```shell
|
||||
@logger.debug("Logstash loves debug logs!", :actions => actions)
|
||||
```
|
||||
|
||||
|
||||
## Contributor License Agreement (CLA) Guidance [_contributor_license_agreement_cla_guidance]
|
||||
|
||||
Why is a [CLA](https://www.elastic.co/contributor-agreement) required?
|
||||
: We ask this of all Contributors in order to assure our users of the origin and continuing existence of the code. We are not asking Contributors to assign copyright to us, but to give us the right to distribute a Contributor’s code without restriction.
|
||||
|
||||
Please make sure the CLA is signed by every Contributor prior to reviewing PRs and commits.
|
||||
: Contributors only need to sign the CLA once and should sign with the same email as used in Github. If a Contributor signs the CLA after a PR is submitted, they can refresh the automated CLA checker by pushing another comment on the PR after 5 minutes of signing.
|
||||
|
||||
|
||||
## Need Help? [_need_help]
|
||||
|
||||
Ping @logstash-core on Github to get the attention of the Logstash core team.
|
||||
|
||||
|
||||
## Community Administration [_community_administration]
|
||||
|
||||
The core team is there to support the plugin Maintainers and overall ecosystem.
|
||||
|
||||
Maintainers should propose Contributors to become a Maintainer.
|
||||
|
||||
Contributors and Maintainers should follow the Elastic Community [Code of Conduct](https://www.elastic.co/community/codeofconduct). The core team should block or ban "bad actors".
|
||||
|
docs/extend/contribute-to-core.md (new file, 11 lines)
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/contribute-to-core.html
|
||||
---
|
||||
|
||||
# Extending Logstash core [contribute-to-core]
|
||||
|
||||
We also welcome contributions and bug fixes to the Logstash core feature set.
|
||||
|
||||
Please read through our [contribution](https://github.com/elastic/logstash/blob/main/CONTRIBUTING.md) guide, and the Logstash [readme](https://github.com/elastic/logstash/blob/main/README.md) document.
|
||||
|
docs/extend/contributing-patch-plugin.md (new file, 386 lines)
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/contributing-patch-plugin.html
|
||||
---
|
||||
|
||||
# Contributing a patch to a Logstash plugin [contributing-patch-plugin]
|
||||
|
||||
This section discusses the information you need to know to successfully contribute a patch to a Logstash plugin.
|
||||
|
||||
Each plugin defines its own configuration options. These control the behavior of the plugin to some degree. Configuration option definitions commonly include:
|
||||
|
||||
* Data validation
|
||||
* Default value
|
||||
* Any required flags
|
||||
|
||||
Plugins are subclasses of a Logstash base class. A plugin’s base class defines common configuration and methods.
|
||||
|
||||
## Input plugins [contrib-patch-input]
|
||||
|
||||
Input plugins ingest data from an external source. An input plugin always has an associated codec plugin. Input and codec plugins operate in conjunction to create a Logstash event and add that event to the processing queue. An input plugin is a subclass of the `LogStash::Inputs::Base` class.
|
||||
|
||||
### Input API [input-api]
|
||||
|
||||
`#register() -> nil`
|
||||
: Required. This API sets up resources for the plugin, typically the connection to the external source.
|
||||
|
||||
`#run(queue) -> nil`
|
||||
: Required. This API fetches or listens for source data, typically looping until stopped. Must handle errors inside the loop. Pushes any created events to the queue object specified in the method argument. Some inputs may receive batched data to minimize the external call overhead.
|
||||
|
||||
`#stop() -> nil`
|
||||
: Optional. Stops external connections and cleans up.
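To make this API concrete, here is a minimal sketch of an input plugin that exercises these methods. The plugin name, settings, and fixed message are illustrative only:

```ruby
require "logstash/inputs/base"
require "logstash/namespace"
require "stud/interval"
require "socket"

class LogStash::Inputs::Example < LogStash::Inputs::Base
  config_name "example"
  default :codec, "plain"

  config :message,  :validate => :string, :default => "Hello World!"
  config :interval, :validate => :number, :default => 1

  def register
    # Set up resources here, e.g. a connection to the external source.
    @host = Socket.gethostname
  end

  def run(queue)
    # Loop until Logstash asks the plugin to stop, pushing events onto the queue.
    while !stop?
      event = LogStash::Event.new("message" => @message, "host" => @host)
      decorate(event)
      queue << event
      Stud.stoppable_sleep(@interval) { stop? }
    end
  end

  def stop
    # Close external connections and clean up here.
  end
end
```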
|
||||
|
||||
|
||||
|
||||
## Codec plugins [contrib-patch-codec]
|
||||
|
||||
Codec plugins decode input data that has a specific structure, such as JSON input data. A codec plugin is a subclass of `LogStash::Codecs::Base`.
|
||||
|
||||
### Codec API [codec-api]
|
||||
|
||||
`#register() -> nil`
|
||||
: Identical to the API of the same name for input plugins.
|
||||
|
||||
`#decode(data){|event| block} -> nil`
|
||||
: Must be implemented. Used to create an Event from the raw data given in the method argument. Must handle errors. The caller must provide a Ruby block. The block is called with the created Event.
|
||||
|
||||
`#encode(event) -> nil`
|
||||
: Required. Used to create a structured data object from the given Event. May handle errors. This method calls a block that was previously stored as @on_event with two arguments: the original event and the data object.
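The example codec discussed elsewhere in this guide is a good illustration of this API; a minimal sketch along those lines looks like this (the `append` option mirrors the example codec's default behavior):

```ruby
require "logstash/codecs/base"
require "logstash/namespace"

class LogStash::Codecs::Example < LogStash::Codecs::Base
  config_name "example"

  config :append, :validate => :string, :default => ", Hello World!"

  def register
  end

  def decode(data)
    # Build an event from the raw data and hand it to the caller's block.
    yield LogStash::Event.new("message" => data + @append)
  end

  def encode(event)
    # Hand the serialized form back via the block stored as @on_event.
    @on_event.call(event, event.get("message").to_s + @append)
  end
end
```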
|
||||
|
||||
|
||||
|
||||
## Filter plugins [contrib-patch-filter]
|
||||
|
||||
A mechanism to change, mutate or merge one or more Events. A filter plugin is a subclass of the `LogStash::Filters::Base` class.
|
||||
|
||||
### Filter API [filter-api]
|
||||
|
||||
`#register() -> nil`
|
||||
: Identical to the API of the same name for input plugins.
|
||||
|
||||
`#filter(event) -> nil`
|
||||
: Required. May handle errors. Used to apply a mutation function to the given event.
|
||||
|
||||
|
||||
|
||||
## Output plugins [contrib-patch-output]
|
||||
|
||||
A mechanism to send an event to an external destination. This process may require serialization. An output plugin is a subclass of the `LogStash::Outputs::Base` class.
|
||||
|
||||
### Output API [output-api]
|
||||
|
||||
`#register() -> nil`
|
||||
: Identical to the API of the same name for input plugins.
|
||||
|
||||
`#receive(event) -> nil`
|
||||
: Required. Must handle errors. Used to prepare the given event for transmission to the external destination. Some outputs may buffer the prepared events to batch transmit to the destination.
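A minimal sketch of an output plugin exercising this API might look like the following. The plugin name and the file destination are illustrative only:

```ruby
require "logstash/outputs/base"
require "logstash/namespace"

class LogStash::Outputs::Example < LogStash::Outputs::Base
  config_name "example"

  config :path, :validate => :string, :default => "/tmp/example.log"

  def register
    # Open the destination once, when the pipeline starts.
    @file = File.open(@path, "a")
  end

  def receive(event)
    # Prepare the event for transmission; here it is simply serialized to JSON.
    @file.puts(event.to_json)
  rescue => e
    @logger.error("Failed to write event", :exception => e, :path => @path)
  end

  def close
    @file.close
  end
end
```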
|
||||
|
||||
|
||||
|
||||
## Process [patch-process]
|
||||
|
||||
A bug or feature is identified. An issue is created in the plugin repository. A patch is created and a pull request (PR) is submitted. After review and possible rework the PR is merged and the plugin is published.
|
||||
|
||||
The [Community Maintainer Guide](/extend/community-maintainer.md) explains, in more detail, the process of getting a patch accepted, merged and published. The Community Maintainer Guide also details the roles that contributors and maintainers are expected to perform.
|
||||
|
||||
|
||||
## Testing methodologies [test-methods]
|
||||
|
||||
### Test driven development [tdd]
|
||||
|
||||
Test driven development (TDD) describes a methodology for using tests to guide evolution of source code. For our purposes, we use only a part of it. Before writing the fix, we create tests that illustrate the bug by failing. We stop when we have written enough code to make the tests pass and submit the fix and tests as a patch. It is not necessary to write the tests before the fix, but it is very easy to write a passing test afterwards that may not actually verify that the fault is really fixed, especially if the fault can be triggered via multiple execution paths or varying input data.
|
||||
|
||||
|
||||
### RSpec framework [rspec]
|
||||
|
||||
Logstash uses RSpec, a Ruby testing framework, to define and run the test suite. What follows is a summary drawn from various sources.
|
||||
|
||||
```ruby
|
||||
2 require "logstash/devutils/rspec/spec_helper"
|
||||
3 require "logstash/plugin"
|
||||
4
|
||||
5 describe "outputs/riemann" do
|
||||
6 describe "#register" do
|
||||
7 let(:output) do
|
||||
8 LogStash::Plugin.lookup("output", "riemann").new(configuration)
|
||||
9 end
|
||||
10
|
||||
11 context "when no protocol is specified" do
|
||||
12 let(:configuration) { Hash.new }
|
||||
13
|
||||
14 it "the method completes without error" do
|
||||
15 expect {output.register}.not_to raise_error
|
||||
16 end
|
||||
17 end
|
||||
18
|
||||
19 context "when a bad protocol is specified" do
|
||||
20 let(:configuration) { {"protocol" => "fake"} }
|
||||
21
|
||||
22 it "the method fails with error" do
|
||||
23 expect {output.register}.to raise_error
|
||||
24 end
|
||||
25 end
|
||||
26
|
||||
27 context "when the tcp protocol is specified" do
|
||||
28 let(:configuration) { {"protocol" => "tcp"} }
|
||||
29
|
||||
30 it "the method completes without error" do
|
||||
31 expect {output.register}.not_to raise_error
|
||||
32 end
|
||||
33 end
|
||||
34 end
|
||||
35
|
||||
36 describe "#receive" do
|
||||
37 let(:output) do
|
||||
38 LogStash::Plugin.lookup("output", "riemann").new(configuration)
|
||||
39 end
|
||||
40
|
||||
41 context "when operating normally" do
|
||||
42 let(:configuration) { Hash.new }
|
||||
43 let(:event) do
|
||||
44 data = {"message"=>"hello", "@version"=>"1",
|
||||
45 "@timestamp"=>"2015-06-03T23:34:54.076Z",
|
||||
46 "host"=>"vagrant-ubuntu-trusty-64"}
|
||||
47 LogStash::Event.new(data)
|
||||
48 end
|
||||
49
|
||||
50 before(:example) do
|
||||
51 output.register
|
||||
52 end
|
||||
53
|
||||
54 it "should accept the event" do
|
||||
55 expect { output.receive event }.not_to raise_error
|
||||
56 end
|
||||
57 end
|
||||
58 end
|
||||
59 end
|
||||
```
|
||||
|
||||
```ruby
|
||||
describe(string){block} -> nil
|
||||
describe(Class){block} -> nil
|
||||
```
|
||||
|
||||
With RSpec, we are always describing the plugin method behavior. The describe block is added in logical sections and can accept either an existing class name or a string. The string used in line 5 is the plugin name. Line 6 is the register method, line 36 is the receive method. It is an RSpec convention to prefix instance methods with one hash and class methods with one dot.
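For instance, that naming convention can be sketched like this (the class and method names are illustrative only):

```ruby
describe LogStash::Outputs::Riemann do
  describe "#register" do   # instance method: prefixed with a hash
  end

  describe ".lookup" do     # class method: prefixed with a dot
  end
end
```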
|
||||
|
||||
```ruby
|
||||
context(string){block} -> nil
|
||||
```
|
||||
|
||||
In RSpec, context blocks define sections that group tests by a variation. The string should start with the word `when` and then detail the variation. See line 11. The tests in the context block should only be for that variation.
|
||||
|
||||
```ruby
|
||||
let(symbol){block} -> nil
|
||||
```
|
||||
|
||||
In RSpec, `let` blocks define resources for use in the test blocks. These resources are reinitialized for every test block. They are available as method calls inside the test block. Define `let` blocks in `describe` and `context` blocks, which scope the `let` block and any other nested blocks. You can use other `let` methods defined later within the `let` block body. See lines 7-9, which define the output resource and use the configuration method, defined with different variations in lines 12, 20 and 28.
|
||||
|
||||
```ruby
|
||||
before(symbol){block} -> nil - symbol is one of :suite, :context, :example; :all and :each are synonyms for :context and :example respectively.
|
||||
```
|
||||
|
||||
In RSpec, `before` blocks are used to further set up any resources that would have been initialized in a `let` block. You cannot define `let` blocks inside `before` blocks.
|
||||
|
||||
You can also define `after` blocks, which are typically used to clean up any setup activity performed by a `before` block.
|
||||
|
||||
```ruby
|
||||
it(string){block} -> nil
|
||||
```
|
||||
|
||||
In RSpec, `it` blocks set the expectations that verify the behavior of the tested code. The string should not start with *it* or *should*, but needs to express the outcome of the expectation. When put together, the texts from the enclosing `describe`, `context` and `it` blocks should form a fairly readable sentence, as in lines 5, 6, 11 and 14:
|
||||
|
||||
```ruby
|
||||
outputs/riemann
|
||||
#register when no protocol is specified the method completes without error
|
||||
```
|
||||
|
||||
Readable code like this makes the goals of tests easy to understand.
|
||||
|
||||
```ruby
|
||||
expect(object){block} -> nil
|
||||
```
|
||||
|
||||
In RSpec, the expect method verifies a statement that compares an actual result to an expected result. The `expect` method is usually paired with a call to the `to` or `not_to` methods. Use the block form when expecting errors or observing for changes. The `to` or `not_to` methods require a `matcher` object that encapsulates the expected value. The argument form of the `expect` method encapsulates the actual value. When put together the whole line tests the actual against the expected value.
|
||||
|
||||
```ruby
|
||||
raise_error(error class|nil) -> matcher instance
|
||||
be(object) -> matcher instance
|
||||
eq(object) -> matcher instance
|
||||
eql(object) -> matcher instance
|
||||
for more see http://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
|
||||
```
|
||||
|
||||
In RSpec, a matcher is an object generated by the equivalent method call (be, eq) that will be used to evaluate the expected against the actual values.
|
||||
|
||||
|
||||
|
||||
## Putting it all together [all-together]
|
||||
|
||||
This example fixes an [issue](https://github.com/logstash-plugins/logstash-output-zeromq/issues/9) in the ZeroMQ output plugin. The issue does not require knowledge of ZeroMQ.
|
||||
|
||||
The activities in this example have the following prerequisites:
|
||||
|
||||
* A minimal knowledge of Git and Github. See the [Github boot camp](https://help.github.com/categories/bootcamp/).
|
||||
* A text editor.
|
||||
* A JRuby [runtime](https://www.ruby-lang.org/en/documentation/installation/#managers) [environment](https://howistart.org/posts/ruby/1). The `chruby` tool manages Ruby versions.
|
||||
* JRuby 1.7.22 or later.
|
||||
* The `bundler` and `rake` gems installed.
|
||||
* ZeroMQ [installed](http://zeromq.org/intro:get-the-software).
|
||||
|
||||
1. In Github, fork the ZeroMQ [output plugin repository](https://github.com/logstash-plugins/logstash-output-zeromq).
|
||||
2. On your local machine, [clone](https://help.github.com/articles/fork-a-repo/) the fork to a known folder such as `logstash/`.
|
||||
3. Open the following files in a text editor:
|
||||
|
||||
* `logstash-output-zeromq/lib/logstash/outputs/zeromq.rb`
|
||||
* `logstash-output-zeromq/lib/logstash/util/zeromq.rb`
|
||||
* `logstash-output-zeromq/spec/outputs/zeromq_spec.rb`
|
||||
|
||||
4. According to the issue, log output in server mode must indicate `bound`. Furthermore, the test file contains no tests.
|
||||
|
||||
::::{note}
|
||||
Line 21 of `util/zeromq.rb` reads `@logger.info("0mq: #{server? ? 'connected' : 'bound'}", :address => address)`
|
||||
::::
|
||||
|
||||
5. In the text editor, require `zeromq.rb` for the file `zeromq_spec.rb` by adding the following lines:
|
||||
|
||||
```ruby
|
||||
require "logstash/outputs/zeromq"
|
||||
require "logstash/devutils/rspec/spec_helper"
|
||||
```
|
||||
|
||||
6. The desired error message should read:
|
||||
|
||||
```ruby
|
||||
LogStash::Outputs::ZeroMQ when in server mode a 'bound' info line is logged
|
||||
```
|
||||
|
||||
To properly generate this message, add a `describe` block with the fully qualified class name as the argument, a context block, and an `it` block.
|
||||
|
||||
```ruby
|
||||
describe LogStash::Outputs::ZeroMQ do
|
||||
context "when in server mode" do
|
||||
it "a 'bound' info line is logged" do
|
||||
end
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
7. To add the missing test, use an instance of the ZeroMQ output and a substitute logger. This example uses an RSpec feature called *test doubles* as the substitute logger.
|
||||
|
||||
Add the following lines to `zeromq_spec.rb`, after `describe LogStash::Outputs::ZeroMQ do` and before `context "when in server mode" do`:
|
||||
|
||||
```ruby
|
||||
let(:output) { described_class.new("mode" => "server", "topology" => "pushpull") }
|
||||
let(:tracer) { double("logger") }
|
||||
```
|
||||
|
||||
8. Add the body to the `it` block. Add the following five lines after the line `context "when in server mode" do`:
|
||||
|
||||
```ruby
|
||||
allow(tracer).to receive(:debug)<1>
|
||||
output.logger = tracer<2>
|
||||
expect(tracer).to receive(:info).with("0mq: bound", {:address=>"tcp://127.0.0.1:2120"})<3>
|
||||
output.register<4>
|
||||
output.do_close<5>
|
||||
```
|
||||
|
||||
|
||||
1. Allow the double to receive `debug` method calls.
|
||||
2. Make the output use the test double.
|
||||
3. Set an expectation on the test to receive an `info` method call.
|
||||
4. Call `register` on the output.
|
||||
5. Call `do_close` on the output so the test does not hang.
|
||||
|
||||
|
||||
At the end of the modifications, the relevant code section reads:
|
||||
|
||||
```ruby
|
||||
require "logstash/outputs/zeromq"
|
||||
require "logstash/devutils/rspec/spec_helper"
|
||||
|
||||
describe LogStash::Outputs::ZeroMQ do
|
||||
let(:output) { described_class.new("mode" => "server", "topology" => "pushpull") }
|
||||
let(:tracer) { double("logger") }
|
||||
|
||||
context "when in server mode" do
|
||||
it "a ‘bound’ info line is logged" do
|
||||
allow(tracer).to receive(:debug)
|
||||
output.logger = tracer
|
||||
expect(tracer).to receive(:info).with("0mq: bound", {:address=>"tcp://127.0.0.1:2120"})
|
||||
output.register
|
||||
output.do_close
|
||||
end
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
To run this test:
|
||||
|
||||
1. Open a terminal window
|
||||
2. Navigate to the cloned plugin folder
|
||||
3. The first time you run the test, run the command `bundle install`
|
||||
4. Run the command `bundle exec rspec`
|
||||
|
||||
Assuming all prerequisites were installed correctly, the test fails with output similar to:
|
||||
|
||||
```shell
|
||||
Using Accessor#strict_set for specs
|
||||
Run options: exclude {:redis=>true, :socket=>true, :performance=>true, :couchdb=>true, :elasticsearch=>true,
|
||||
:elasticsearch_secure=>true, :export_cypher=>true, :integration=>true, :windows=>true}
|
||||
|
||||
LogStash::Outputs::ZeroMQ
|
||||
when in server mode
|
||||
a ‘bound’ info line is logged (FAILED - 1)
|
||||
|
||||
Failures:
|
||||
|
||||
1) LogStash::Outputs::ZeroMQ when in server mode a ‘bound’ info line is logged
|
||||
Failure/Error: output.register
|
||||
Double "logger" received :info with unexpected arguments
|
||||
expected: ("0mq: bound", {:address=>"tcp://127.0.0.1:2120"})
|
||||
got: ("0mq: connected", {:address=>"tcp://127.0.0.1:2120"})
|
||||
# ./lib/logstash/util/zeromq.rb:21:in `setup'
|
||||
# ./lib/logstash/outputs/zeromq.rb:92:in `register'
|
||||
# ./lib/logstash/outputs/zeromq.rb:91:in `register'
|
||||
# ./spec/outputs/zeromq_spec.rb:13:in `(root)'
|
||||
# /Users/guy/.gem/jruby/1.9.3/gems/rspec-wait-0.0.7/lib/rspec/wait.rb:46:in `(root)'
|
||||
|
||||
Finished in 0.133 seconds (files took 1.28 seconds to load)
|
||||
1 example, 1 failure
|
||||
|
||||
Failed examples:
|
||||
|
||||
rspec ./spec/outputs/zeromq_spec.rb:10 # LogStash::Outputs::ZeroMQ when in server mode a ‘bound’ info line is logged
|
||||
|
||||
Randomized with seed 2568
|
||||
```
|
||||
|
||||
To correct the error, open the `util/zeromq.rb` file in your text editor and swap the positions of the words `connected` and `bound` on line 21. Line 21 now reads:
|
||||
|
||||
```ruby
|
||||
@logger.info("0mq: #{server? ? 'bound' : 'connected'}", :address => address)
|
||||
```
|
||||
|
||||
Run the test again with the `bundle exec rspec` command.
|
||||
|
||||
The test passes with output similar to:
|
||||
|
||||
```shell
|
||||
Using Accessor#strict_set for specs
|
||||
Run options: exclude {:redis=>true, :socket=>true, :performance=>true, :couchdb=>true, :elasticsearch=>true, :elasticsearch_secure=>true, :export_cypher=>true, :integration=>true, :windows=>true}
|
||||
|
||||
LogStash::Outputs::ZeroMQ
|
||||
when in server mode
|
||||
a ‘bound’ info line is logged
|
||||
|
||||
Finished in 0.114 seconds (files took 1.22 seconds to load)
|
||||
1 example, 0 failures
|
||||
|
||||
Randomized with seed 45887
|
||||
```
|
||||
|
||||
[Commit](https://help.github.com/articles/fork-a-repo/#next-steps) the changes to git and Github.
|
||||
|
||||
Your pull request is visible from the [Pull Requests](https://github.com/logstash-plugins/logstash-output-zeromq/pulls) section of the original Github repository. The plugin maintainers review your work, suggest changes if necessary, and merge and publish a new version of the plugin.
|
||||
|
||||
|
docs/extend/create-logstash-plugins.md (new file, 47 lines)
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/contributing-java-plugin.html
|
||||
---
|
||||
|
||||
# Create Logstash plugins [contributing-java-plugin]
|
||||
|
||||
Now you can write your own Java plugin for use with {{ls}}. We have provided instructions and GitHub examples to give you a head start.
|
||||
|
||||
Native support for Java plugins in {{ls}} consists of several components:
|
||||
|
||||
* Extensions to the Java execution engine to support running Java plugins in Logstash pipelines
|
||||
* APIs for developing Java plugins. The APIs are in the `co.elastic.logstash.api` package. A Java plugin might break if it references classes or specific concrete implementations of API interfaces outside that package. The implementation of classes outside of the API package may change at any time.
|
||||
* Tooling to automate the packaging and deployment of Java plugins in Logstash.
|
||||
|
||||
|
||||
## Process overview [_process_overview]
|
||||
|
||||
Here are the steps:
|
||||
|
||||
1. Choose the type of plugin you want to create: input, codec, filter, or output.
|
||||
2. Set up your environment.
|
||||
3. Code the plugin.
|
||||
4. Package and deploy the plugin.
|
||||
5. Run Logstash with your new plugin.
|
||||
|
||||
|
||||
### Let’s get started [_lets_get_started]
|
||||
|
||||
Here are the example repos:
|
||||
|
||||
* [Input plugin example](https://github.com/logstash-plugins/logstash-input-java_input_example)
|
||||
* [Codec plugin example](https://github.com/logstash-plugins/logstash-codec-java_codec_example)
|
||||
* [Filter plugin example](https://github.com/logstash-plugins/logstash-filter-java_filter_example)
|
||||
* [Output plugin example](https://github.com/logstash-plugins/logstash-output-java_output_example)
|
||||
|
||||
Here are the instructions:
|
||||
|
||||
* [How to write a Java input plugin](/extend/java-input-plugin.md)
|
||||
* [How to write a Java codec plugin](/extend/java-codec-plugin.md)
|
||||
* [How to write a Java filter plugin](/extend/java-filter-plugin.md)
|
||||
* [How to write a Java output plugin](/extend/java-output-plugin.md)
|
||||
|
||||
|
||||
|
||||
|
||||
|
docs/extend/filter-new-plugin.md (new file, 637 lines)
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/filter-new-plugin.html
|
||||
---
|
||||
|
||||
# How to write a Logstash filter plugin [filter-new-plugin]
|
||||
|
||||
To develop a new filter for Logstash, build a self-contained Ruby gem whose source code lives in its own GitHub repository. The Ruby gem can then be hosted and shared on RubyGems.org. You can use the example filter implementation as a starting point. (If you’re unfamiliar with Ruby, you can find an excellent quickstart guide at [https://www.ruby-lang.org/en/documentation/quickstart/](https://www.ruby-lang.org/en/documentation/quickstart/).)
|
||||
|
||||
## Get started [_get_started_3]
|
||||
|
||||
Let’s step through creating a filter plugin using the [example filter plugin](https://github.com/logstash-plugins/logstash-filter-example/).
|
||||
|
||||
### Create a GitHub repo for your new plugin [_create_a_github_repo_for_your_new_plugin_3]
|
||||
|
||||
Each Logstash plugin lives in its own GitHub repository. To create a new repository for your plugin:
|
||||
|
||||
1. Log in to GitHub.
|
||||
2. Click the **Repositories** tab. You’ll see a list of other repositories you’ve forked or contributed to.
|
||||
3. Click the green **New** button in the upper right.
|
||||
4. Specify the following settings for your new repo:
|
||||
|
||||
* **Repository name** — a unique name of the form `logstash-filter-pluginname`.
|
||||
* **Public or Private** — your choice, but the repository must be Public if you want to submit it as an official plugin.
|
||||
* **Initialize this repository with a README** — enables you to immediately clone the repository to your computer.
|
||||
|
||||
5. Click **Create Repository**.
|
||||
|
||||
|
||||
### Use the plugin generator tool [_use_the_plugin_generator_tool_3]
|
||||
|
||||
You can create your own Logstash plugin in seconds! The `generate` subcommand of `bin/logstash-plugin` creates the foundation for a new Logstash plugin with templatized files. It creates the correct directory structure, gemspec files, and dependencies so you can start adding custom code to process data with Logstash.
|
||||
|
||||
For more information, see [Generating plugins](/reference/plugin-generator.md)
|
||||
|
||||
|
||||
### Copy the filter code [_copy_the_filter_code]
|
||||
|
||||
Alternatively, you can use the examples repo we host on github.com.
|
||||
|
||||
1. **Clone your plugin.** Replace `GITUSERNAME` with your github username, and `MYPLUGINNAME` with your plugin name.
|
||||
|
||||
* `git clone https://github.com/GITUSERNAME/logstash-filter-MYPLUGINNAME.git`
|
||||
|
||||
* alternately, via ssh: `git clone git@github.com:GITUSERNAME/logstash-filter-MYPLUGINNAME.git`
|
||||
|
||||
* `cd logstash-filter-MYPLUGINNAME`
|
||||
|
||||
2. **Clone the filter plugin example and copy it to your plugin branch.**
|
||||
|
||||
You don’t want to include the example .git directory or its contents, so delete it before you copy the example.
|
||||
|
||||
* `cd /tmp`
|
||||
* `git clone https://github.com/logstash-plugins/logstash-filter-example.git`
|
||||
* `cd logstash-filter-example`
|
||||
* `rm -rf .git`
|
||||
* `cp -R * /path/to/logstash-filter-mypluginname/`
|
||||
|
||||
3. **Rename the following files to match the name of your plugin.**
|
||||
|
||||
* `logstash-filter-example.gemspec`
|
||||
* `example.rb`
|
||||
* `example_spec.rb`
|
||||
|
||||
```txt
|
||||
cd /path/to/logstash-filter-mypluginname
|
||||
mv logstash-filter-example.gemspec logstash-filter-mypluginname.gemspec
|
||||
mv lib/logstash/filters/example.rb lib/logstash/filters/mypluginname.rb
|
||||
mv spec/filters/example_spec.rb spec/filters/mypluginname_spec.rb
|
||||
```
|
||||
|
||||
|
||||
Your file structure should look like this:
|
||||
|
||||
```txt
|
||||
$ tree logstash-filter-mypluginname
|
||||
├── Gemfile
|
||||
├── LICENSE
|
||||
├── README.md
|
||||
├── Rakefile
|
||||
├── lib
|
||||
│ └── logstash
|
||||
│ └── filters
|
||||
│ └── mypluginname.rb
|
||||
├── logstash-filter-mypluginname.gemspec
|
||||
└── spec
|
||||
└── filters
|
||||
└── mypluginname_spec.rb
|
||||
```
|
||||
|
||||
For more information about the Ruby gem file structure and an excellent walkthrough of the Ruby gem creation process, see [http://timelessrepo.com/making-ruby-gems](http://timelessrepo.com/making-ruby-gems)
|
||||
|
||||
|
||||
### See what your plugin looks like [_see_what_your_plugin_looks_like_3]
|
||||
|
||||
Before we dive into the details, open up the plugin file in your favorite text editor and take a look.
|
||||
|
||||
```ruby
|
||||
require "logstash/filters/base"
|
||||
require "logstash/namespace"
|
||||
|
||||
# Add any asciidoc formatted documentation here
|
||||
# This example filter will replace the contents of the default
|
||||
# message field with whatever you specify in the configuration.
|
||||
#
|
||||
# It is only intended to be used as an example.
|
||||
class LogStash::Filters::Example < LogStash::Filters::Base
|
||||
|
||||
# Setting the config_name here is required. This is how you
|
||||
# configure this filter from your Logstash config.
|
||||
#
|
||||
# filter {
|
||||
# example { message => "My message..." }
|
||||
# }
|
||||
config_name "example"
|
||||
|
||||
# Replace the message with this value.
|
||||
config :message, :validate => :string, :default => "Hello World!"
|
||||
|
||||
|
||||
public
|
||||
def register
|
||||
# Add instance variables
|
||||
end # def register
|
||||
|
||||
public
|
||||
def filter(event)
|
||||
|
||||
if @message
|
||||
# Replace the event message with our message as configured in the
|
||||
# config file.
|
||||
event.set("message", @message)
|
||||
end
|
||||
|
||||
# filter_matched should go in the last line of our successful code
|
||||
filter_matched(event)
|
||||
end # def filter
|
||||
|
||||
end # class LogStash::Filters::Example
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Coding filter plugins [_coding_filter_plugins]
|
||||
|
||||
Now let’s take a line-by-line look at the example plugin.
|
||||
|
||||
### `require` Statements [_require_statements_3]
|
||||
|
||||
Logstash filter plugins require parent classes defined in `logstash/filters/base` and `logstash/namespace`:
|
||||
|
||||
```ruby
|
||||
require "logstash/filters/base"
|
||||
require "logstash/namespace"
|
||||
```
|
||||
|
||||
Of course, the plugin you build may depend on other code, or even gems. Just put them here along with these Logstash dependencies.
|
||||
|
||||
|
||||
|
||||
## Plugin Body [_plugin_body_3]
|
||||
|
||||
Let’s go through the various elements of the plugin itself.
|
||||
|
||||
### `class` Declaration [_class_declaration_3]
|
||||
|
||||
The filter plugin class should be a subclass of `LogStash::Filters::Base`:
|
||||
|
||||
```ruby
|
||||
class LogStash::Filters::Example < LogStash::Filters::Base
|
||||
```
|
||||
|
||||
The class name should closely mirror the plugin name, for example:
|
||||
|
||||
```ruby
|
||||
LogStash::Filters::Example
|
||||
```
|
||||
|
||||
|
||||
### `config_name` [_config_name_3]
|
||||
|
||||
```ruby
|
||||
config_name "example"
|
||||
```
|
||||
|
||||
This is the name used to call your plugin from inside the filter configuration block.
|
||||
|
||||
If you set `config_name "example"` in your plugin code, the corresponding Logstash configuration block would need to look like this:
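```ruby
filter {
  example {
    message => "My message..."
  }
}
```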
|
||||
|
||||
|
||||
|
||||
## Configuration Parameters [_configuration_parameters_3]
|
||||
|
||||
```ruby
|
||||
config :variable_name, :validate => :variable_type, :default => "Default value", :required => boolean, :deprecated => boolean, :obsolete => string
|
||||
```
|
||||
|
||||
The configuration, or `config` section allows you to define as many (or as few) parameters as are needed to enable Logstash to process events.
|
||||
|
||||
There are several configuration attributes:
|
||||
|
||||
* `:validate` - allows you to enforce passing a particular data type to Logstash for this configuration option, such as `:string`, `:password`, `:boolean`, `:number`, `:array`, `:hash`, `:path` (a file-system path), `:uri`, `:codec` (since 1.2.0), `:bytes`. Note that this also works as a coercion: if you specify "true" for a boolean (even though technically a string), it will become a valid boolean in the config. This coercion works for the `:number` type as well, where "1.2" becomes a float and "22" is an integer.
|
||||
* `:default` - lets you specify a default value for a parameter
|
||||
* `:required` - whether or not this parameter is mandatory (a Boolean `true` or `false`)
* `:list` - whether or not this value should be a list of values. Will typecheck the list members, and convert scalars to one element lists. Note that this mostly obviates the array type, though if you need lists of complex objects that will be more suitable.
|
||||
* `:deprecated` - informational (also a Boolean `true` or `false`)
|
||||
* `:obsolete` - used to declare that a given setting has been removed and is no longer functioning. The idea is to provide an informed upgrade path to users who are still using a now-removed setting.
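For illustration, here are a few hypothetical `config` declarations combining the attributes above (the option names are made up):

```ruby
config :host,     :validate => :string,  :required => true
config :port,     :validate => :number,  :default  => 5555
config :tags,     :validate => :array,   :default  => []
config :ssl,      :validate => :boolean, :default  => false
config :old_name, :validate => :string,  :deprecated => "Use 'host' instead."
config :dead_opt, :obsolete => "This setting was removed; use 'host' instead."
```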
|
||||
|
||||
|
||||
## Plugin Methods [_plugin_methods_3]
|
||||
|
||||
Logstash filters must implement the `register` and `filter` methods.
|
||||
|
||||
### `register` Method [_register_method_3]
|
||||
|
||||
```ruby
|
||||
public
|
||||
def register
|
||||
end # def register
|
||||
```
|
||||
|
||||
The Logstash `register` method is like an `initialize` method. It was originally created to enforce having `super` called, preventing headaches for newbies. (Note: It may go away in favor of `initialize`, in conjunction with some enforced testing to ensure `super` is called.)
|
||||
|
||||
`public` means the method can be called anywhere, not just within the class. This is the default behavior for methods in Ruby, but it is specified explicitly here anyway.
|
||||
|
||||
You can also assign instance variables here (variables prepended by `@`). Configuration variables are now in scope as instance variables, like `@message`.
|
||||
|
||||
|
||||
### `filter` Method [_filter_method]
|
||||
|
||||
```ruby
|
||||
public
|
||||
def filter(event)
|
||||
|
||||
if @message
|
||||
# Replace the event message with our message as configured in the
|
||||
# config file.
|
||||
event.set("message", @message)
|
||||
end
|
||||
|
||||
# filter_matched should go in the last line of our successful code
|
||||
filter_matched(event)
|
||||
end # def filter
|
||||
```
|
||||
|
||||
The plugin’s `filter` method is where the actual filtering work takes place! Inside the `filter` method you can refer to the event data using the `Event` object. Event is the main object that encapsulates data flow internally in Logstash and provides an [API](/reference/event-api.md) for the plugin developers to interact with the event’s content.
|
||||
|
||||
The `filter` method should also handle any [event dependent configuration](/reference/event-dependent-configuration.md) by explicitly calling the `sprintf` method available in Event class. For example:
|
||||
|
||||
```ruby
|
||||
field_foo = event.sprintf(field)
|
||||
```
|
||||
|
||||
Note that configuration variables are now in scope as instance variables, like `@message`.
|
||||
|
||||
```ruby
|
||||
filter_matched(event)
|
||||
```
|
||||
|
||||
Calling the `filter_matched` method upon successful execution of the plugin will ensure that any fields or tags added through the Logstash configuration for this filter will be handled correctly. For example, any `add_field`, `remove_field`, `add_tag` and/or `remove_tag` actions will be performed at this time.
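For example, a pipeline configuration like this hypothetical one relies on `filter_matched` to apply the `add_tag` action once the filter has run successfully:

```ruby
filter {
  example {
    message => "My message..."
    add_tag => ["example_processed"]
  }
}
```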
|
||||
|
||||
Event methods such as `event.cancel` are now available to control the workflow of the event being processed.
|
||||
|
||||
|
||||
|
||||
## Building the Plugin [_building_the_plugin_3]
|
||||
|
||||
At this point in the process you have coded your plugin and are ready to build a Ruby Gem from it. The following information will help you complete the process.
|
||||
|
||||
### External dependencies [_external_dependencies_3]
|
||||
|
||||
A `require` statement in Ruby is used to include necessary code. In some cases your plugin may require additional files. For example, the collectd plugin [uses](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/lib/logstash/codecs/collectd.rb#L148) the `types.db` file provided by collectd. In the main directory of your plugin, a file called `vendor.json` is where these files are described.
|
||||
|
||||
The `vendor.json` file contains an array of JSON objects, each describing a file dependency. This example comes from the [collectd](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/vendor.json) codec plugin:
|
||||
|
||||
```txt
|
||||
[{
|
||||
"sha1": "a90fe6cc53b76b7bdd56dc57950d90787cb9c96e",
|
||||
"url": "http://collectd.org/files/collectd-5.4.0.tar.gz",
|
||||
"files": [ "/src/types.db" ]
|
||||
}]
|
||||
```
|
||||
|
||||
* `sha1` is the sha1 signature used to verify the integrity of the file referenced by `url`.
|
||||
* `url` is the address from where Logstash will download the file.
|
||||
* `files` is an optional array of files to extract from the downloaded file. Note that while tar archives can use absolute or relative paths, treat them as absolute in this array. If `files` is not present, all files will be uncompressed and extracted into the vendor directory.
|
||||
|
||||
Another example of the `vendor.json` file is the [`geoip` filter](https://github.com/logstash-plugins/logstash-filter-geoip/blob/main/vendor.json)
|
||||
|
||||
The process used to download these dependencies is to call `rake vendor`. This will be discussed further in the testing section of this document.
|
||||
|
||||
Another kind of external dependency is on jar files. This will be described in the "Add a `gemspec` file" section.
|
||||
|
||||
|
||||
### Deprecated features [_deprecated_features_3]
|
||||
|
||||
As a plugin evolves, an option or feature may no longer serve the intended purpose, and the developer may want to *deprecate* its usage. Deprecation warns users about the option’s status, so they aren’t caught by surprise when it is removed in a later release.
|
||||
|
||||
{{ls}} 7.6 introduced a *deprecation logger* to make handling those situations easier. You can use the [adapter](https://github.com/logstash-plugins/logstash-mixin-deprecation_logger_support) to ensure that your plugin can use the deprecation logger while still supporting older versions of {{ls}}. See the [readme](https://github.com/logstash-plugins/logstash-mixin-deprecation_logger_support/blob/main/README.md) for more information and for instructions on using the adapter.
|
||||
|
||||
Deprecations are noted in the `logstash-deprecation.log` file in the `log` directory.
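As a sketch, assuming the mixin provided by that adapter is included in your plugin class (see its readme for the authoritative usage), emitting a deprecation warning typically looks like this; the `old_option` setting is hypothetical:

```ruby
require 'logstash/plugin_mixins/deprecation_logger_support'

class LogStash::Filters::Example < LogStash::Filters::Base
  include LogStash::PluginMixins::DeprecationLoggerSupport

  def register
    # Writes a warning to logstash-deprecation.log when the legacy option is set.
    if @old_option
      deprecation_logger.deprecated("The `old_option` setting is deprecated; use `new_option` instead.")
    end
  end
end
```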
|
||||
|
||||
|
||||
### Add a Gemfile [_add_a_gemfile_3]
|
||||
|
||||
Gemfiles allow Ruby’s Bundler to maintain the dependencies for your plugin. Currently, all we’ll need is the Logstash gem, for testing, but if you require other gems, you should add them in here.
|
||||
|
||||
::::{tip}
|
||||
See [Bundler’s Gemfile page](http://bundler.io/gemfile.html) for more details.
|
||||
::::
|
||||
|
||||
|
||||
```ruby
|
||||
source 'https://rubygems.org'
|
||||
gemspec
|
||||
gem "logstash", :github => "elastic/logstash", :branch => "master"
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Add a `gemspec` file [_add_a_gemspec_file_3]
|
||||
|
||||
Gemspecs define the Ruby gem that will be built to contain your plugin.
|
||||
|
||||
::::{tip}
|
||||
More information can be found on the [Rubygems Specification page](http://guides.rubygems.org/specification-reference/).
|
||||
::::
|
||||
|
||||
|
||||
```ruby
|
||||
Gem::Specification.new do |s|
|
||||
s.name = 'logstash-filter-example'
|
||||
s.version = '0.1.0'
|
||||
s.licenses = ['Apache License (2.0)']
|
||||
s.summary = "This filter does x, y, z in Logstash"
|
||||
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
||||
s.authors = ["Elastic"]
|
||||
s.email = 'info@elastic.co'
|
||||
s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
|
||||
s.require_paths = ["lib"]
|
||||
|
||||
# Files
|
||||
s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
|
||||
# Tests
|
||||
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
||||
|
||||
# Special flag to let us know this is actually a logstash plugin
|
||||
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
|
||||
|
||||
# Gem dependencies
|
||||
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
||||
s.add_development_dependency 'logstash-devutils'
|
||||
end
|
||||
```
|
||||
|
||||
It is appropriate to change these values to fit your plugin. In particular, `s.name` and `s.summary` should reflect your plugin’s name and behavior.
|
||||
|
||||
`s.licenses` and `s.version` are also important and will come into play when you are ready to publish your plugin.
|
||||
|
||||
Logstash and all its plugins are licensed under [Apache License, version 2 ("ALv2")](https://github.com/elastic/logstash/blob/main/LICENSE.txt). If you make your plugin publicly available via [RubyGems.org](http://rubygems.org), please make sure to have this line in your gemspec:
|
||||
|
||||
* `s.licenses = ['Apache License (2.0)']`
|
||||
|
||||
The gem version, designated by `s.version`, helps track changes to plugins over time. You should use the [semver versioning](http://semver.org/) strategy for version numbers.
|
||||
|
||||
### Runtime and Development Dependencies [_runtime_and_development_dependencies_3]
|
||||
|
||||
At the bottom of the `gemspec` file is a section with a comment: `Gem dependencies`. This is where any other needed gems must be mentioned. If a gem is necessary for your plugin to function, it is a runtime dependency. If a gem is only used for testing, then it is a development dependency.
|
||||
|
||||
::::{note}
|
||||
You can also have versioning requirements for your dependencies—including other Logstash plugins:
|
||||
|
||||
```ruby
|
||||
# Gem dependencies
|
||||
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
||||
s.add_development_dependency 'logstash-devutils'
|
||||
```
|
||||
|
||||
This gemspec has a runtime dependency on the logstash-core-plugin-api and requires that it have a version number greater than or equal to version 1.60 and less than or equal to version 2.99.
|
||||
|
||||
::::
|
||||
|
||||
|
||||
::::{important}
|
||||
All plugins have a runtime dependency on the `logstash-core-plugin-api` gem, and a development dependency on `logstash-devutils`.
|
||||
::::
|
||||
|
||||
|
||||
|
||||
### Jar dependencies [_jar_dependencies_3]
|
||||
|
||||
In some cases, such as the [Elasticsearch output plugin](https://github.com/logstash-plugins/logstash-output-elasticsearch/blob/main/logstash-output-elasticsearch.gemspec#L22-L23), your code may depend on a jar file. In cases such as this, the dependency is added in the gemspec file in this manner:
|
||||
|
||||
```ruby
|
||||
# Jar dependencies
|
||||
s.requirements << "jar 'org.elasticsearch:elasticsearch', '5.0.0'"
|
||||
s.add_runtime_dependency 'jar-dependencies'
|
||||
```
|
||||
|
||||
With these both defined, the install process will search for the required jar file at [http://mvnrepository.com](http://mvnrepository.com) and download the specified version.
|
||||
|
||||
|
||||
|
||||
## Document your plugin [_document_your_plugin_3]
|
||||
|
||||
Documentation is an important part of your plugin. All plugin documentation is rendered and placed in the [Logstash Reference](/reference/index.md) and the [Versioned plugin docs](logstash-docs-md://vpr/integration-plugins.md).
|
||||
|
||||
See [Document your plugin](/extend/plugin-doc.md) for tips and guidelines.
|
||||
|
||||
|
||||
## Add Tests [_add_tests_3]
|
||||
|
||||
Logstash loves tests. Lots of tests. If you’re using your new filter plugin in a production environment, you’ll want to have some tests to ensure you are not breaking any existing functionality.
|
||||
|
||||
::::{note}
|
||||
A full exposition on RSpec is outside the scope of this document. Learn more about RSpec at [http://rspec.info](http://rspec.info)
|
||||
::::
|
||||
|
||||
|
||||
For help learning about tests and testing, look in the `spec/filters/` directory of several other similar plugins.
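As a starting point, a minimal spec for the example filter might look like the sketch below; it assumes the `sample` helper from `logstash-devutils` and the message-replacement behavior described earlier:

```ruby
# spec/filters/example_spec.rb
require "logstash/devutils/rspec/spec_helper"
require "logstash/filters/example"

describe LogStash::Filters::Example do
  describe "replacing the message" do
    config <<-CONFIG
      filter {
        example {
          message => "Hello from the spec!"
        }
      }
    CONFIG

    sample("message" => "some text") do
      # The example filter replaces the incoming message with the configured one.
      expect(subject.get("message")).to eq("Hello from the spec!")
    end
  end
end
```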
|
||||
|
||||
|
||||
## Clone and test! [_clone_and_test_3]
|
||||
|
||||
Now let’s start with a fresh clone of the plugin, build it and run the tests.
|
||||
|
||||
* **Clone your plugin into a temporary location.** Replace `GITUSERNAME` with your github username, and `MYPLUGINNAME` with your plugin name.
|
||||
|
||||
* `git clone https://github.com/GITUSERNAME/logstash-filter-MYPLUGINNAME.git`
|
||||
|
||||
* alternatively, via ssh: `git clone git@github.com:GITUSERNAME/logstash-filter-MYPLUGINNAME.git`
|
||||
|
||||
* `cd logstash-filter-MYPLUGINNAME`
|
||||
|
||||
|
||||
Then, you’ll need to install your plugin’s dependencies with bundler:
|
||||
|
||||
```
|
||||
bundle install
|
||||
```
|
||||
|
||||
::::{important}
|
||||
If your plugin has an external file dependency described in `vendor.json`, you must download that dependency before running or testing. You can do this by running:
|
||||
|
||||
```
|
||||
rake vendor
|
||||
```
|
||||
|
||||
::::
|
||||
|
||||
|
||||
And finally, run the tests:
|
||||
|
||||
```
|
||||
bundle exec rspec
|
||||
```
|
||||
|
||||
You should see a success message, which looks something like this:
|
||||
|
||||
```
|
||||
Finished in 0.034 seconds
|
||||
1 example, 0 failures
|
||||
```
|
||||
|
||||
Hooray! You’re almost there! (Unless you saw failures… you should fix those first).
|
||||
|
||||
|
||||
## Building and Testing [_building_and_testing_3]
|
||||
|
||||
Now you’re ready to build your (well-tested) plugin into a Ruby gem.
|
||||
|
||||
### Build [_build_3]
|
||||
|
||||
You already have all the necessary ingredients, so let’s go ahead and run the build command:
|
||||
|
||||
```sh
|
||||
gem build logstash-filter-example.gemspec
|
||||
```
|
||||
|
||||
That’s it! Your gem should now be built and located in the same directory, with the name
|
||||
|
||||
```sh
|
||||
logstash-filter-mypluginname-0.1.0.gem
|
||||
```
|
||||
|
||||
The `s.version` number from your gemspec file will provide the gem version, in this case, `0.1.0`.
|
||||
|
||||
|
||||
### Test installation [_test_installation_3]
|
||||
|
||||
You should test install your plugin into a clean installation of Logstash. Download the latest version from the [Logstash downloads page](https://www.elastic.co/downloads/logstash/).
|
||||
|
||||
1. Untar and cd into the directory:
|
||||
|
||||
```sh
|
||||
curl -O https://download.elastic.co/logstash/logstash/logstash-9.0.0.tar.gz
|
||||
tar xzvf logstash-9.0.0.tar.gz
|
||||
cd logstash-9.0.0
|
||||
```
|
||||
|
||||
2. Using the plugin tool, we can install the gem we just built.
|
||||
|
||||
* Replace `/my/logstash/plugins` with the correct path to the gem for your environment, and `0.1.0` with the correct version number from the gemspec file.
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin install /my/logstash/plugins/logstash-filter-example/logstash-filter-example-0.1.0.gem
|
||||
```
|
||||
|
||||
* After running this, you should see feedback from Logstash that it was successfully installed:
|
||||
|
||||
```sh
|
||||
validating /my/logstash/plugins/logstash-filter-example/logstash-filter-example-0.1.0.gem >= 0
|
||||
Valid logstash plugin. Continuing...
|
||||
Successfully installed 'logstash-filter-example' with version '0.1.0'
|
||||
```
|
||||
|
||||
::::{tip}
|
||||
You can also use the Logstash plugin tool to determine which plugins are currently available:
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin list
|
||||
```
|
||||
|
||||
Depending on what you have installed, you might see a short or long list of plugins: inputs, codecs, filters and outputs.
|
||||
|
||||
::::
|
||||
|
||||
3. Now try running Logstash with a simple configuration passed in via the command-line, using the `-e` flag.
|
||||
|
||||
::::{note}
|
||||
Your results will depend on what your filter plugin is designed to do.
|
||||
::::
|
||||
|
||||
|
||||
```sh
|
||||
bin/logstash -e 'input { stdin{} } filter { example {} } output {stdout { codec => rubydebug }}'
|
||||
```
|
||||
|
||||
Test your filter by sending input through `stdin` and output (after filtering) through `stdout` with the `rubydebug` codec, which enhances readability.
|
||||
|
||||
In the case of the example filter plugin, any text you send will be replaced by the contents of the `message` configuration parameter, the default value being "Hello World!":
|
||||
|
||||
```sh
|
||||
Testing 1, 2, 3
|
||||
{
|
||||
"message" => "Hello World!",
|
||||
"@version" => "1",
|
||||
"@timestamp" => "2015-01-27T19:17:18.932Z",
|
||||
"host" => "cadenza"
|
||||
}
|
||||
```
|
||||
|
||||
Feel free to experiment and test this by changing the `message` parameter:
|
||||
|
||||
```sh
|
||||
bin/logstash -e 'input { stdin{} } filter { example { message => "This is a new message!"} } output {stdout { codec => rubydebug }}'
|
||||
```
|
||||
|
||||
Congratulations! You’ve built, deployed and successfully run a Logstash filter.
|
||||
|
||||
|
||||
|
||||
## Submitting your plugin to [RubyGems.org](http://rubygems.org) and [logstash-plugins](https://github.com/logstash-plugins) [_submitting_your_plugin_to_rubygems_orghttprubygems_org_and_logstash_pluginshttpsgithub_comlogstash_plugins_3]
|
||||
|
||||
Logstash uses [RubyGems.org](http://rubygems.org) as its repository for all plugin artifacts. Once you have developed your new plugin, you can make it available to Logstash users by simply publishing it to RubyGems.org.
|
||||
|
||||
### Licensing [_licensing_3]
|
||||
|
||||
Logstash and all its plugins are licensed under [Apache License, version 2 ("ALv2")](https://github.com/elasticsearch/logstash/blob/main/LICENSE). If you make your plugin publicly available via [RubyGems.org](http://rubygems.org), please make sure to have this line in your gemspec:
|
||||
|
||||
* `s.licenses = ['Apache License (2.0)']`
|
||||
|
||||
|
||||
### Publishing to [RubyGems.org](http://rubygems.org) [_publishing_to_rubygems_orghttprubygems_org_3]
|
||||
|
||||
To begin, you’ll need an account on RubyGems.org
|
||||
|
||||
* [Sign-up for a RubyGems account](https://rubygems.org/sign_up).
|
||||
|
||||
After creating an account, [obtain](http://guides.rubygems.org/rubygems-org-api/#api-authorization) an API key from RubyGems.org. By default, RubyGems uses the file `~/.gem/credentials` to store your API key. These credentials will be used to publish the gem. Replace `username` and `password` with the credentials you created at RubyGems.org:
|
||||
|
||||
```sh
|
||||
curl -u username:password https://rubygems.org/api/v1/api_key.yaml > ~/.gem/credentials
|
||||
chmod 0600 ~/.gem/credentials
|
||||
```
|
||||
|
||||
Before proceeding, make sure you have the right version in your gemspec file and commit your changes.
|
||||
|
||||
* `s.version = '0.1.0'`
|
||||
|
||||
To publish version 0.1.0 of your new logstash gem:
|
||||
|
||||
```sh
|
||||
bundle install
|
||||
bundle exec rake vendor
|
||||
bundle exec rspec
|
||||
bundle exec rake publish_gem
|
||||
```
|
||||
|
||||
::::{note}
|
||||
Executing `rake publish_gem`:
|
||||
|
||||
1. Reads the version from the gemspec file (`s.version = '0.1.0'`)
|
||||
2. Checks in your local repository if a tag exists for that version. If the tag already exists, it aborts the process. Otherwise, it creates a new version tag in your local repository.
|
||||
3. Builds the gem
|
||||
4. Publishes the gem to RubyGems.org
|
||||
|
||||
::::
|
||||
|
||||
|
||||
That’s it! Your plugin is published! Logstash users can now install your plugin by running:
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin install logstash-filter-mypluginname
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Contributing your source code to [logstash-plugins](https://github.com/logstash-plugins) [_contributing_your_source_code_to_logstash_pluginshttpsgithub_comlogstash_plugins_3]
|
||||
|
||||
It is not required to contribute your source code to the [logstash-plugins](https://github.com/logstash-plugins) github organization, but we always welcome new plugins!
|
||||
|
||||
### Benefits [_benefits_3]
|
||||
|
||||
Some of the many benefits of having your plugin in the logstash-plugins repository are:
|
||||
|
||||
* **Discovery.** Your plugin will appear in the [Logstash Reference](/reference/index.md), where Logstash users look first for plugins and documentation.
|
||||
* **Documentation.** Your plugin documentation will automatically be added to the [Logstash Reference](/reference/index.md).
|
||||
* **Testing.** With our testing infrastructure, your plugin will be continuously tested against current and future releases of Logstash. As a result, users will have the assurance that if incompatibilities arise, they will be quickly discovered and corrected.
|
||||
|
||||
|
||||
### Acceptance Guidelines [_acceptance_guidelines_3]
|
||||
|
||||
* **Code Review.** Your plugin must be reviewed by members of the community for coherence, quality, readability, stability and security.
|
||||
* **Tests.** Your plugin must contain tests to be accepted. These tests are also subject to code review for scope and completeness. It’s ok if you don’t know how to write tests — we will guide you. We are working on publishing a guide to creating tests for Logstash which will make it easier. In the meantime, you can refer to [http://betterspecs.org/](http://betterspecs.org/) for examples.
|
||||
|
||||
To begin migrating your plugin to logstash-plugins, simply create a new [issue](https://github.com/elasticsearch/logstash/issues) in the Logstash repository. When the acceptance guidelines are completed, we will facilitate the move to the logstash-plugins organization using the recommended [github process](https://help.github.com/articles/transferring-a-repository/#transferring-from-a-user-to-an-organization).
|
docs/extend/index.md (new file, 58 lines)
|
|||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/contributing-to-logstash.html
|
||||
---
|
||||
|
||||
# Contribute to Logstash [contributing-to-logstash]
|
||||
|
||||
You can add your own input, codec, filter, or output plugins to Logstash.
|
||||
|
||||
|
||||
### Acceptance guidelines [plugin-acceptance]
|
||||
|
||||
Start with the end in mind. These guidelines and best practices can help you build a better plugin, even if you choose not to share it with the world.
|
||||
|
||||
* **Consistency.** Your plugin must be consistent in quality and naming conventions used by other plugins. The plugin name must be unique and in this format: `logstash-plugintype-pluginname`. If the plugin name is more than one word, separate words after plugin type with underscores. Example: *logstash-output-elastic_app_search*
|
||||
* **Documentation.** Documentation is a required component of your plugin. If we list your plugin in the Logstash Reference, we point to your documentation—a readme.md, docs/index.asciidoc, or both—in your plugin repo.
|
||||
* **Code Review.** Your plugin must be reviewed by members of the community for coherence, quality, readability, stability and security.
|
||||
* **Tests.** Your plugin must contain tests to be accepted. You can refer to [http://betterspecs.org/](http://betterspecs.org/) for examples.
|
||||
|
||||
* Step 1. Enable travis on your account
|
||||
* Step 2. Import our standard travis.yml [https://github.com/logstash-plugins/.ci/blob/1.x/travis/travis.yml](https://github.com/logstash-plugins/.ci/blob/1.x/travis/travis.yml), as shown in the [fingerprint filter example](https://github.com/logstash-plugins/logstash-filter-fingerprint/blob/main/.travis.yml).
|
||||
* Step 3. Have specs in the spec folder.
|
||||
|
||||
|
||||
|
||||
## Add a plugin [add-plugin]
|
||||
|
||||
Plugins can be developed and deployed independently of the Logstash core. Here are some documents to guide you through the process of coding, deploying, and sharing your plugin:
|
||||
|
||||
* Write a new plugin
|
||||
|
||||
* [How to write a Logstash input plugin](/extend/input-new-plugin.md)
|
||||
* [How to write a Logstash codec plugin](/extend/codec-new-plugin.md)
|
||||
* [How to write a Logstash filter plugin](/extend/filter-new-plugin.md)
|
||||
* [How to write a Logstash output plugin](/extend/output-new-plugin.md)
|
||||
* [Community Maintainer’s Guide](/extend/community-maintainer.md)
|
||||
|
||||
* [Document your plugin](/extend/plugin-doc.md)
|
||||
* [Publish your plugin to RubyGems.org](/extend/publish-plugin.md)
|
||||
* [List your plugin](/extend/plugin-listing.md)
|
||||
* Contribute a patch
|
||||
|
||||
* [Contributing a patch to a Logstash plugin](/extend/contributing-patch-plugin.md)
|
||||
* [Extending Logstash core](/extend/contribute-to-core.md)
|
||||
|
||||
|
||||
|
||||
#### Plugin Shutdown APIs [shutdown-apis]
|
||||
|
||||
You have three options for shutting down a plugin: `stop`, `stop?`, and `close`.
|
||||
|
||||
* Call the `stop` method from outside the plugin thread. This method signals the plugin to stop.
|
||||
* The `stop?` method returns `true` when the `stop` method has already been called for that plugin.
|
||||
* The `close` method performs final bookkeeping and cleanup after the plugin’s `run` method and the plugin’s thread both exit. The `close` method is a new name for the method known as `teardown` in previous versions of Logstash.
|
||||
|
||||
The `shutdown`, `finished`, `finished?`, `running?`, and `terminating?` methods are redundant and no longer present in the Plugin Base class.
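As a rough sketch (not the authoritative implementation), an input’s `run` loop commonly cooperates with these APIs like this:

```ruby
require "stud/interval"

def run(queue)
  # Keep producing events until Logstash asks the plugin to stop.
  until stop?
    event = LogStash::Event.new("message" => @message)
    decorate(event)
    queue << event
    # Sleep in an interruptible way so shutdown isn't delayed by the interval.
    Stud.stoppable_sleep(@interval) { stop? }
  end
end # def run

def stop
  # Called from another thread; release anything that blocks `run`
  # (for example, close a socket or connection) so it can exit promptly.
end

def close
  # Final cleanup after `run` has returned and the plugin thread has exited.
end
```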
|
||||
|
||||
Sample code for the plugin shutdown APIs is [available](https://github.com/logstash-plugins/logstash-input-example/blob/main/lib/logstash/inputs/example.rb).
|
docs/extend/input-new-plugin.md (new file, 674 lines)
|
|||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/input-new-plugin.html
|
||||
---
|
||||
|
||||
# How to write a Logstash input plugin [input-new-plugin]
|
||||
|
||||
To develop a new input for Logstash, build a self-contained Ruby gem whose source code lives in its own GitHub repository. The Ruby gem can then be hosted and shared on RubyGems.org. You can use the example input implementation as a starting point. (If you’re unfamiliar with Ruby, you can find an excellent quickstart guide at [https://www.ruby-lang.org/en/documentation/quickstart/](https://www.ruby-lang.org/en/documentation/quickstart/).)
|
||||
|
||||
## Get started [_get_started]
|
||||
|
||||
Let’s step through creating an input plugin using the [example input plugin](https://github.com/logstash-plugins/logstash-input-example/).
|
||||
|
||||
### Create a GitHub repo for your new plugin [_create_a_github_repo_for_your_new_plugin]
|
||||
|
||||
Each Logstash plugin lives in its own GitHub repository. To create a new repository for your plugin:
|
||||
|
||||
1. Log in to GitHub.
|
||||
2. Click the **Repositories** tab. You’ll see a list of other repositories you’ve forked or contributed to.
|
||||
3. Click the green **New** button in the upper right.
|
||||
4. Specify the following settings for your new repo:
|
||||
|
||||
* **Repository name** — a unique name of the form `logstash-input-pluginname`.
|
||||
* **Public or Private** — your choice, but the repository must be Public if you want to submit it as an official plugin.
|
||||
* **Initialize this repository with a README** — enables you to immediately clone the repository to your computer.
|
||||
|
||||
5. Click **Create Repository**.
|
||||
|
||||
|
||||
### Use the plugin generator tool [_use_the_plugin_generator_tool]
|
||||
|
||||
You can create your own Logstash plugin in seconds! The `generate` subcommand of `bin/logstash-plugin` creates the foundation for a new Logstash plugin with templatized files. It creates the correct directory structure, gemspec files, and dependencies so you can start adding custom code to process data with Logstash.
|
||||
|
||||
For more information, see [Generating plugins](/reference/plugin-generator.md)
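For example, generating the skeleton for a new input plugin typically looks something like this (adjust the type, name, and path for your plugin; see the page linked above for the authoritative list of options):

```sh
bin/logstash-plugin generate --type input --name mypluginname --path ~/ws/plugins
```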
|
||||
|
||||
|
||||
### Copy the input code [_copy_the_input_code]
|
||||
|
||||
Alternatively, you can use the examples repo we host on github.com
|
||||
|
||||
1. **Clone your plugin.** Replace `GITUSERNAME` with your github username, and `MYPLUGINNAME` with your plugin name.
|
||||
|
||||
* `git clone https://github.com/GITUSERNAME/logstash-input-MYPLUGINNAME.git`
|
||||
|
||||
* alternatively, via ssh: `git clone git@github.com:GITUSERNAME/logstash-input-MYPLUGINNAME.git`
|
||||
|
||||
* `cd logstash-input-MYPLUGINNAME`
|
||||
|
||||
2. **Clone the input plugin example and copy it to your plugin branch.**
|
||||
|
||||
You don’t want to include the example .git directory or its contents, so delete it before you copy the example.
|
||||
|
||||
* `cd /tmp`
|
||||
* `git clone https://github.com/logstash-plugins/logstash-input-example.git`
|
||||
* `cd logstash-input-example`
|
||||
* `rm -rf .git`
|
||||
* `cp -R * /path/to/logstash-input-mypluginname/`
|
||||
|
||||
3. **Rename the following files to match the name of your plugin.**
|
||||
|
||||
* `logstash-input-example.gemspec`
|
||||
* `example.rb`
|
||||
* `example_spec.rb`
|
||||
|
||||
```txt
|
||||
cd /path/to/logstash-input-mypluginname
|
||||
mv logstash-input-example.gemspec logstash-input-mypluginname.gemspec
|
||||
mv lib/logstash/inputs/example.rb lib/logstash/inputs/mypluginname.rb
|
||||
mv spec/inputs/example_spec.rb spec/inputs/mypluginname_spec.rb
|
||||
```
|
||||
|
||||
|
||||
Your file structure should look like this:
|
||||
|
||||
```txt
|
||||
$ tree logstash-input-mypluginname
|
||||
├── Gemfile
|
||||
├── LICENSE
|
||||
├── README.md
|
||||
├── Rakefile
|
||||
├── lib
|
||||
│ └── logstash
|
||||
│ └── inputs
|
||||
│ └── mypluginname.rb
|
||||
├── logstash-input-mypluginname.gemspec
|
||||
└── spec
|
||||
└── inputs
|
||||
└── mypluginname_spec.rb
|
||||
```
|
||||
|
||||
For more information about the Ruby gem file structure and an excellent walkthrough of the Ruby gem creation process, see [http://timelessrepo.com/making-ruby-gems](http://timelessrepo.com/making-ruby-gems)
|
||||
|
||||
|
||||
### See what your plugin looks like [_see_what_your_plugin_looks_like]
|
||||
|
||||
Before we dive into the details, open up the plugin file in your favorite text editor and take a look.
|
||||
|
||||
```ruby
|
||||
require "logstash/inputs/base"
|
||||
require "logstash/namespace"
|
||||
require "stud/interval"
|
||||
require "socket" # for Socket.gethostname
|
||||
|
||||
# Add any asciidoc formatted documentation here
|
||||
# Generate a repeating message.
|
||||
#
|
||||
# This plugin is intended only as an example.
|
||||
|
||||
class LogStash::Inputs::Example < LogStash::Inputs::Base
|
||||
config_name "example"
|
||||
|
||||
# If undefined, Logstash will complain, even if codec is unused.
|
||||
default :codec, "plain"
|
||||
|
||||
# The message string to use in the event.
|
||||
config :message, :validate => :string, :default => "Hello World!"
|
||||
|
||||
# Set how frequently messages should be sent.
|
||||
#
|
||||
# The default, `1`, means send a message every second.
|
||||
config :interval, :validate => :number, :default => 1
|
||||
|
||||
public
|
||||
def register
|
||||
@host = Socket.gethostname
|
||||
end # def register
|
||||
|
||||
def run(queue)
|
||||
Stud.interval(@interval) do
|
||||
event = LogStash::Event.new("message" => @message, "host" => @host)
|
||||
decorate(event)
|
||||
queue << event
|
||||
end # loop
|
||||
end # def run
|
||||
|
||||
end # class LogStash::Inputs::Example
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Coding input plugins [_coding_input_plugins]
|
||||
|
||||
Now let’s take a line-by-line look at the example plugin.
|
||||
|
||||
### `require` Statements [_require_statements]
|
||||
|
||||
Logstash input plugins require parent classes defined in `logstash/inputs/base` and `logstash/namespace`:
|
||||
|
||||
```ruby
|
||||
require "logstash/inputs/base"
|
||||
require "logstash/namespace"
|
||||
```
|
||||
|
||||
Of course, the plugin you build may depend on other code, or even gems. Just put them here along with these Logstash dependencies.
|
||||
|
||||
|
||||
|
||||
## Plugin Body [_plugin_body]
|
||||
|
||||
Let’s go through the various elements of the plugin itself.
|
||||
|
||||
### `class` Declaration [_class_declaration]
|
||||
|
||||
The input plugin class should be a subclass of `LogStash::Inputs::Base`:
|
||||
|
||||
```ruby
|
||||
class LogStash::Inputs::Example < LogStash::Inputs::Base
|
||||
```
|
||||
|
||||
The class name should closely mirror the plugin name, for example:
|
||||
|
||||
```ruby
|
||||
LogStash::Inputs::Example
|
||||
```
|
||||
|
||||
|
||||
### `config_name` [_config_name]
|
||||
|
||||
```ruby
|
||||
config_name "example"
|
||||
```
|
||||
|
||||
This is the name your plugin will call inside the input configuration block.
|
||||
|
||||
If you set `config_name "example"` in your plugin code, the corresponding Logstash configuration block would need to look like this:
|
||||
|
||||
```js
|
||||
input {
|
||||
example {...}
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Configuration Parameters [_configuration_parameters]
|
||||
|
||||
```ruby
|
||||
config :variable_name, :validate => :variable_type, :default => "Default value", :required => boolean, :deprecated => boolean, :obsolete => string
|
||||
```
|
||||
|
||||
The configuration, or `config` section allows you to define as many (or as few) parameters as are needed to enable Logstash to process events.
|
||||
|
||||
There are several configuration attributes:
|
||||
|
||||
* `:validate` - allows you to enforce passing a particular data type to Logstash for this configuration option, such as `:string`, `:password`, `:boolean`, `:number`, `:array`, `:hash`, `:path` (a file-system path), `:uri`, `:codec` (since 1.2.0), `:bytes`. Note that this also works as a coercion: if you specify "true" for a boolean (even though technically a string), it will become a valid boolean in the config. This coercion works for the `:number` type as well, where "1.2" becomes a float and "22" is an integer.
|
||||
* `:default` - lets you specify a default value for a parameter
|
||||
* `:required` - whether or not this parameter is mandatory (a Boolean `true` or `false`)
|
||||
* `:list` - whether or not this value should be a list of values. Will typecheck the list members, and convert scalars to one-element lists. Note that this mostly obviates the array type, though if you need lists of complex objects that will be more suitable.
|
||||
* `:deprecated` - informational (also a Boolean `true` or `false`)
|
||||
* `:obsolete` - used to declare that a given setting has been removed and is no longer functioning. The idea is to provide an informed upgrade path to users who are still using a now-removed setting.
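A short sketch that pulls several of these attributes together (the option names are purely illustrative):

```ruby
# A required string option with no default.
config :path, :validate => :string, :required => true

# An optional numeric option with a default value.
config :interval, :validate => :number, :default => 60

# A list of strings; scalar values are converted to one-element lists.
config :tags_to_add, :validate => :string, :list => true, :default => []

# An option kept only for backwards compatibility; using it logs a warning.
config :old_option, :validate => :string, :deprecated => true
```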
|
||||
|
||||
|
||||
## Plugin Methods [_plugin_methods]
|
||||
|
||||
Logstash inputs must implement two main methods: `register` and `run`.
|
||||
|
||||
### `register` Method [_register_method]
|
||||
|
||||
```ruby
|
||||
public
|
||||
def register
|
||||
end # def register
|
||||
```
|
||||
|
||||
The Logstash `register` method is like an `initialize` method. It was originally created to enforce having `super` called, preventing headaches for newbies. (Note: It may go away in favor of `initialize`, in conjunction with some enforced testing to ensure `super` is called.)
|
||||
|
||||
`public` means the method can be called anywhere, not just within the class. This is the default behavior for methods in Ruby, but it is specified explicitly here anyway.
|
||||
|
||||
You can also assign instance variables here (variables prepended by `@`). Configuration variables are now in scope as instance variables, like `@message`
|
||||
|
||||
|
||||
### `run` Method [_run_method]
|
||||
|
||||
The example input plugin has the following `run` method:
|
||||
|
||||
```ruby
|
||||
def run(queue)
|
||||
Stud.interval(@interval) do
|
||||
event = LogStash::Event.new("message" => @message, "host" => @host)
|
||||
decorate(event)
|
||||
queue << event
|
||||
end # loop
|
||||
end # def run
|
||||
```
|
||||
|
||||
The `run` method is where a stream of data from an input becomes an event.
|
||||
|
||||
The stream can be plain or generated as with the [heartbeat](https://github.com/logstash-plugins/logstash-input-heartbeat/blob/main/lib/logstash/inputs/heartbeat.rb#L43-L61) input plugin. In these cases, though no codec is used, [a default codec](https://github.com/logstash-plugins/logstash-input-heartbeat/blob/main/lib/logstash/inputs/heartbeat.rb#L17) must be set in the code to avoid errors.
|
||||
|
||||
Here’s another example `run` method:
|
||||
|
||||
```ruby
|
||||
def run(queue)
|
||||
while true
|
||||
begin
|
||||
# Based on some testing, there is no way to interrupt an IO.sysread nor
|
||||
# IO.select call in JRuby.
|
||||
data = $stdin.sysread(16384)
|
||||
@codec.decode(data) do |event|
|
||||
decorate(event)
|
||||
event.set("host", @host) if !event.include?("host")
|
||||
queue << event
|
||||
end
|
||||
rescue IOError, EOFError, LogStash::ShutdownSignal
|
||||
# stdin closed or a requested shutdown
|
||||
break
|
||||
end
|
||||
end # while true
|
||||
finished
|
||||
end # def run
|
||||
```
|
||||
|
||||
In this example, the `data` is being sent to the codec defined in the configuration block to `decode` the data stream and return an event.
|
||||
|
||||
In both examples, the resulting `event` is passed to the `decorate` method:
|
||||
|
||||
```ruby
|
||||
decorate(event)
|
||||
```
|
||||
|
||||
This applies any tags you might have set in the input configuration block. For example, `tags => ["tag1", "tag2"]`.
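For example, with an input configured like the hypothetical snippet below, `decorate` is what applies the `tags` and `add_field` values to every event the plugin generates:

```js
input {
  example {
    message   => "Hello World!"
    tags      => ["tag1", "tag2"]
    add_field => { "environment" => "staging" }
  }
}
```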
|
||||
|
||||
Also in both examples, the `event`, after being "decorated," is appended to the queue:
|
||||
|
||||
```ruby
|
||||
queue << event
|
||||
```
|
||||
|
||||
This inserts the event into the pipeline.
|
||||
|
||||
::::{tip}
|
||||
Because input plugins can range from simple to complex, it is helpful to see more examples of how they have been created:
|
||||
|
||||
* [syslog](https://github.com/logstash-plugins/logstash-input-syslog/blob/main/lib/logstash/inputs/syslog.rb)
|
||||
* [zeromq](https://github.com/logstash-plugins/logstash-input-zeromq/blob/main/lib/logstash/inputs/zeromq.rb)
|
||||
* [stdin](https://github.com/logstash-plugins/logstash-input-stdin/blob/main/lib/logstash/inputs/stdin.rb)
|
||||
* [tcp](https://github.com/logstash-plugins/logstash-input-tcp/blob/main/lib/logstash/inputs/tcp.rb)
|
||||
|
||||
There are many more examples in the [logstash-plugins github repository](https://github.com/logstash-plugins?query=logstash-input).
|
||||
|
||||
::::
|
||||
|
||||
|
||||
|
||||
|
||||
## Building the Plugin [_building_the_plugin]
|
||||
|
||||
At this point in the process you have coded your plugin and are ready to build a Ruby Gem from it. The following information will help you complete the process.
|
||||
|
||||
### External dependencies [_external_dependencies]
|
||||
|
||||
A `require` statement in Ruby is used to include necessary code. In some cases your plugin may require additional files. For example, the collectd plugin [uses](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/lib/logstash/codecs/collectd.rb#L148) the `types.db` file provided by collectd. In the main directory of your plugin, a file called `vendor.json` is where these files are described.
|
||||
|
||||
The `vendor.json` file contains an array of JSON objects, each describing a file dependency. This example comes from the [collectd](https://github.com/logstash-plugins/logstash-codec-collectd/blob/main/vendor.json) codec plugin:
|
||||
|
||||
```txt
|
||||
[{
|
||||
"sha1": "a90fe6cc53b76b7bdd56dc57950d90787cb9c96e",
|
||||
"url": "http://collectd.org/files/collectd-5.4.0.tar.gz",
|
||||
"files": [ "/src/types.db" ]
|
||||
}]
|
||||
```
|
||||
|
||||
* `sha1` is the sha1 signature used to verify the integrity of the file referenced by `url`.
|
||||
* `url` is the address from where Logstash will download the file.
|
||||
* `files` is an optional array of files to extract from the downloaded file. Note that while tar archives can use absolute or relative paths, treat them as absolute in this array. If `files` is not present, all files will be uncompressed and extracted into the vendor directory.
|
||||
|
||||
Another example of the `vendor.json` file is the [`geoip` filter](https://github.com/logstash-plugins/logstash-filter-geoip/blob/main/vendor.json)
|
||||
|
||||
The process used to download these dependencies is to call `rake vendor`. This will be discussed further in the testing section of this document.
|
||||
|
||||
Another kind of external dependency is on jar files. This will be described in the "Add a `gemspec` file" section.
|
||||
|
||||
|
||||
### Deprecated features [_deprecated_features]
|
||||
|
||||
As a plugin evolves, an option or feature may no longer serve the intended purpose, and the developer may want to *deprecate* its usage. Deprecation warns users about the option’s status, so they aren’t caught by surprise when it is removed in a later release.
|
||||
|
||||
{{ls}} 7.6 introduced a *deprecation logger* to make handling those situations easier. You can use the [adapter](https://github.com/logstash-plugins/logstash-mixin-deprecation_logger_support) to ensure that your plugin can use the deprecation logger while still supporting older versions of {{ls}}. See the [readme](https://github.com/logstash-plugins/logstash-mixin-deprecation_logger_support/blob/main/README.md) for more information and for instructions on using the adapter.
|
||||
|
||||
Deprecations are noted in the `logstash-deprecation.log` file in the `log` directory.
|
||||
|
||||
|
||||
### Add a Gemfile [_add_a_gemfile]
|
||||
|
||||
Gemfiles allow Ruby’s Bundler to maintain the dependencies for your plugin. Currently, all we’ll need is the Logstash gem, for testing, but if you require other gems, you should add them in here.
|
||||
|
||||
::::{tip}
|
||||
See [Bundler’s Gemfile page](http://bundler.io/gemfile.html) for more details.
|
||||
::::
|
||||
|
||||
|
||||
```ruby
|
||||
source 'https://rubygems.org'
|
||||
gemspec
|
||||
gem "logstash", :github => "elastic/logstash", :branch => "master"
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Add a `gemspec` file [_add_a_gemspec_file]
|
||||
|
||||
Gemspecs define the Ruby gem that will be built to contain your plugin.
|
||||
|
||||
::::{tip}
|
||||
More information can be found on the [Rubygems Specification page](http://guides.rubygems.org/specification-reference/).
|
||||
::::
|
||||
|
||||
|
||||
```ruby
|
||||
Gem::Specification.new do |s|
|
||||
s.name = 'logstash-input-example'
|
||||
s.version = '0.1.0'
|
||||
s.licenses = ['Apache License (2.0)']
|
||||
s.summary = "This input does x, y, z in Logstash"
|
||||
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
||||
s.authors = ["Elastic"]
|
||||
s.email = 'info@elastic.co'
|
||||
s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
|
||||
s.require_paths = ["lib"]
|
||||
|
||||
# Files
|
||||
s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
|
||||
# Tests
|
||||
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
||||
|
||||
# Special flag to let us know this is actually a logstash plugin
|
||||
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
|
||||
|
||||
# Gem dependencies
|
||||
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
||||
s.add_development_dependency 'logstash-devutils'
|
||||
end
|
||||
```
|
||||
|
||||
It is appropriate to change these values to fit your plugin. In particular, `s.name` and `s.summary` should reflect your plugin’s name and behavior.
|
||||
|
||||
`s.licenses` and `s.version` are also important and will come into play when you are ready to publish your plugin.
|
||||
|
||||
Logstash and all its plugins are licensed under [Apache License, version 2 ("ALv2")](https://github.com/elastic/logstash/blob/main/LICENSE.txt). If you make your plugin publicly available via [RubyGems.org](http://rubygems.org), please make sure to have this line in your gemspec:
|
||||
|
||||
* `s.licenses = ['Apache License (2.0)']`
|
||||
|
||||
The gem version, designated by `s.version`, helps track changes to plugins over time. You should use the [semver versioning](http://semver.org/) strategy for version numbers.
|
||||
|
||||
### Runtime and Development Dependencies [_runtime_and_development_dependencies]
|
||||
|
||||
At the bottom of the `gemspec` file is a section with a comment: `Gem dependencies`. This is where any other needed gems must be mentioned. If a gem is necessary for your plugin to function, it is a runtime dependency. If a gem is only used for testing, then it is a development dependency.
|
||||
|
||||
::::{note}
|
||||
You can also have versioning requirements for your dependencies—including other Logstash plugins:
|
||||
|
||||
```ruby
|
||||
# Gem dependencies
|
||||
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
||||
s.add_development_dependency 'logstash-devutils'
|
||||
```
|
||||
|
||||
This gemspec has a runtime dependency on the logstash-core-plugin-api and requires that it have a version number greater than or equal to version 1.60 and less than or equal to version 2.99.
|
||||
|
||||
::::
|
||||
|
||||
|
||||
::::{important}
|
||||
All plugins have a runtime dependency on the `logstash-core-plugin-api` gem, and a development dependency on `logstash-devutils`.
|
||||
::::
|
||||
|
||||
|
||||
|
||||
### Jar dependencies [_jar_dependencies]
|
||||
|
||||
In some cases, such as the [Elasticsearch output plugin](https://github.com/logstash-plugins/logstash-output-elasticsearch/blob/main/logstash-output-elasticsearch.gemspec#L22-L23), your code may depend on a jar file. In cases such as this, the dependency is added in the gemspec file in this manner:
|
||||
|
||||
```ruby
|
||||
# Jar dependencies
|
||||
s.requirements << "jar 'org.elasticsearch:elasticsearch', '5.0.0'"
|
||||
s.add_runtime_dependency 'jar-dependencies'
|
||||
```
|
||||
|
||||
With these both defined, the install process will search for the required jar file at [http://mvnrepository.com](http://mvnrepository.com) and download the specified version.
|
||||
|
||||
|
||||
|
||||
## Document your plugin [_document_your_plugin]
|
||||
|
||||
Documentation is an important part of your plugin. All plugin documentation is rendered and placed in the [Logstash Reference](/reference/index.md) and the [Versioned plugin docs](logstash-docs-md://vpr/integration-plugins.md).
|
||||
|
||||
See [Document your plugin](/extend/plugin-doc.md) for tips and guidelines.
|
||||
|
||||
|
||||
## Add Tests [_add_tests]
|
||||
|
||||
Logstash loves tests. Lots of tests. If you’re using your new input plugin in a production environment, you’ll want to have some tests to ensure you are not breaking any existing functionality.
|
||||
|
||||
::::{note}
|
||||
A full exposition on RSpec is outside the scope of this document. Learn more about RSpec at [http://rspec.info](http://rspec.info)
|
||||
::::
|
||||
|
||||
|
||||
For help learning about tests and testing, look in the `spec/inputs/` directory of several other similar plugins.
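As a starting point, a minimal spec for the example input could lean on the shared examples shipped with `logstash-devutils` (assuming they are available in your version) to verify that the plugin shuts down cleanly:

```ruby
# spec/inputs/example_spec.rb
require "logstash/devutils/rspec/spec_helper"
require "logstash/devutils/rspec/shared_examples"
require "logstash/inputs/example"

describe LogStash::Inputs::Example do
  # Exercises register/run/stop to check the plugin can be interrupted.
  it_behaves_like "an interruptible input plugin" do
    let(:config) { { "interval" => 1 } }
  end
end
```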
|
||||
|
||||
|
||||
## Clone and test! [_clone_and_test]
|
||||
|
||||
Now let’s start with a fresh clone of the plugin, build it and run the tests.
|
||||
|
||||
* **Clone your plugin into a temporary location.** Replace `GITUSERNAME` with your github username, and `MYPLUGINNAME` with your plugin name.
|
||||
|
||||
* `git clone https://github.com/GITUSERNAME/logstash-input-MYPLUGINNAME.git`
|
||||
|
||||
* alternatively, via ssh: `git clone git@github.com:GITUSERNAME/logstash-input-MYPLUGINNAME.git`
|
||||
|
||||
* `cd logstash-input-MYPLUGINNAME`
|
||||
|
||||
|
||||
Then, you’ll need to install your plugin’s dependencies with bundler:
|
||||
|
||||
```
|
||||
bundle install
|
||||
```
|
||||
|
||||
::::{important}
|
||||
If your plugin has an external file dependency described in `vendor.json`, you must download that dependency before running or testing. You can do this by running:
|
||||
|
||||
```
|
||||
rake vendor
|
||||
```
|
||||
|
||||
::::
|
||||
|
||||
|
||||
And finally, run the tests:
|
||||
|
||||
```
|
||||
bundle exec rspec
|
||||
```
|
||||
|
||||
You should see a success message, which looks something like this:
|
||||
|
||||
```
|
||||
Finished in 0.034 seconds
|
||||
1 example, 0 failures
|
||||
```
|
||||
|
||||
Hooray! You’re almost there! (Unless you saw failures… you should fix those first).
|
||||
|
||||
|
||||
## Building and Testing [_building_and_testing]
|
||||
|
||||
Now you’re ready to build your (well-tested) plugin into a Ruby gem.
|
||||
|
||||
### Build [_build]
|
||||
|
||||
You already have all the necessary ingredients, so let’s go ahead and run the build command:
|
||||
|
||||
```sh
|
||||
gem build logstash-input-example.gemspec
|
||||
```
|
||||
|
||||
That’s it! Your gem should now be built and located in the same directory, with the name
|
||||
|
||||
```sh
|
||||
logstash-input-mypluginname-0.1.0.gem
|
||||
```
|
||||
|
||||
The `s.version` number from your gemspec file will provide the gem version, in this case, `0.1.0`.
|
||||
|
||||
|
||||
### Test installation [_test_installation]
|
||||
|
||||
You should test install your plugin into a clean installation of Logstash. Download the latest version from the [Logstash downloads page](https://www.elastic.co/downloads/logstash/).
|
||||
|
||||
1. Untar and cd into the directory:
|
||||
|
||||
```sh
|
||||
curl -O https://download.elastic.co/logstash/logstash/logstash-9.0.0.tar.gz
|
||||
tar xzvf logstash-9.0.0.tar.gz
|
||||
cd logstash-9.0.0
|
||||
```
|
||||
|
||||
2. Using the plugin tool, we can install the gem we just built.
|
||||
|
||||
* Replace `/my/logstash/plugins` with the correct path to the gem for your environment, and `0.1.0` with the correct version number from the gemspec file.
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin install /my/logstash/plugins/logstash-input-example/logstash-input-example-0.1.0.gem
|
||||
```
|
||||
|
||||
* After running this, you should see feedback from Logstash that it was successfully installed:
|
||||
|
||||
```sh
|
||||
validating /my/logstash/plugins/logstash-input-example/logstash-input-example-0.1.0.gem >= 0
|
||||
Valid logstash plugin. Continuing...
|
||||
Successfully installed 'logstash-input-example' with version '0.1.0'
|
||||
```
|
||||
|
||||
::::{tip}
|
||||
You can also use the Logstash plugin tool to determine which plugins are currently available:
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin list
|
||||
```
|
||||
|
||||
Depending on what you have installed, you might see a short or long list of plugins: inputs, codecs, filters and outputs.
|
||||
|
||||
::::
|
||||
|
||||
3. Now try running Logstash with a simple configuration passed in via the command-line, using the `-e` flag.
|
||||
|
||||
::::{note}
|
||||
Your results will depend on what your input plugin is designed to do.
|
||||
::::
|
||||
|
||||
|
||||
```sh
|
||||
bin/logstash -e 'input { example{} } output {stdout { codec => rubydebug }}'
|
||||
```
|
||||
|
||||
The example input plugin will send the contents of `message` (with a default message of "Hello World!") every second.
|
||||
|
||||
```sh
|
||||
{
|
||||
"message" => "Hello World!",
|
||||
"@version" => "1",
|
||||
"@timestamp" => "2015-01-27T19:17:18.932Z",
|
||||
"host" => "cadenza"
|
||||
}
|
||||
```
|
||||
|
||||
Feel free to experiment and test this by changing the `message` and `interval` parameters:
|
||||
|
||||
```sh
|
||||
bin/logstash -e 'input { example{ message => "A different message" interval => 5 } } output {stdout { codec => rubydebug }}'
|
||||
```
|
||||
|
||||
Congratulations! You’ve built, deployed and successfully run a Logstash input.
|
||||
|
||||
|
||||
|
||||
## Submitting your plugin to [RubyGems.org](http://rubygems.org) and [logstash-plugins](https://github.com/logstash-plugins) [_submitting_your_plugin_to_rubygems_orghttprubygems_org_and_logstash_pluginshttpsgithub_comlogstash_plugins]
|
||||
|
||||
Logstash uses [RubyGems.org](http://rubygems.org) as its repository for all plugin artifacts. Once you have developed your new plugin, you can make it available to Logstash users by simply publishing it to RubyGems.org.
|
||||
|
||||
### Licensing [_licensing]
|
||||
|
||||
Logstash and all its plugins are licensed under [Apache License, version 2 ("ALv2")](https://github.com/elasticsearch/logstash/blob/main/LICENSE). If you make your plugin publicly available via [RubyGems.org](http://rubygems.org), please make sure to have this line in your gemspec:
|
||||
|
||||
* `s.licenses = ['Apache License (2.0)']`
|
||||
|
||||
|
||||
### Publishing to [RubyGems.org](http://rubygems.org) [_publishing_to_rubygems_orghttprubygems_org]
|
||||
|
||||
To begin, you’ll need an account on RubyGems.org
|
||||
|
||||
* [Sign-up for a RubyGems account](https://rubygems.org/sign_up).
|
||||
|
||||
After creating an account, [obtain](http://guides.rubygems.org/rubygems-org-api/#api-authorization) an API key from RubyGems.org. By default, RubyGems uses the file `~/.gem/credentials` to store your API key. These credentials will be used to publish the gem. Replace `username` and `password` with the credentials you created at RubyGems.org:
|
||||
|
||||
```sh
|
||||
curl -u username:password https://rubygems.org/api/v1/api_key.yaml > ~/.gem/credentials
|
||||
chmod 0600 ~/.gem/credentials
|
||||
```
|
||||
|
||||
Before proceeding, make sure you have the right version in your gemspec file and commit your changes.
|
||||
|
||||
* `s.version = '0.1.0'`
|
||||
|
||||
To publish version 0.1.0 of your new logstash gem:
|
||||
|
||||
```sh
|
||||
bundle install
|
||||
bundle exec rake vendor
|
||||
bundle exec rspec
|
||||
bundle exec rake publish_gem
|
||||
```
|
||||
|
||||
::::{note}
|
||||
Executing `rake publish_gem`:
|
||||
|
||||
1. Reads the version from the gemspec file (`s.version = '0.1.0'`)
|
||||
2. Checks in your local repository if a tag exists for that version. If the tag already exists, it aborts the process. Otherwise, it creates a new version tag in your local repository.
|
||||
3. Builds the gem
|
||||
4. Publishes the gem to RubyGems.org
|
||||
|
||||
::::
|
||||
|
||||
|
||||
That’s it! Your plugin is published! Logstash users can now install your plugin by running:
|
||||
|
||||
```sh
|
||||
bin/logstash-plugin install logstash-input-mypluginname
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Contributing your source code to [logstash-plugins](https://github.com/logstash-plugins) [_contributing_your_source_code_to_logstash_pluginshttpsgithub_comlogstash_plugins]
|
||||
|
||||
It is not required to contribute your source code to the [logstash-plugins](https://github.com/logstash-plugins) github organization, but we always welcome new plugins!
|
||||
|
||||
### Benefits [_benefits]
|
||||
|
||||
Some of the many benefits of having your plugin in the logstash-plugins repository are:
|
||||
|
||||
* **Discovery.** Your plugin will appear in the [Logstash Reference](/reference/index.md), where Logstash users look first for plugins and documentation.
|
||||
* **Documentation.** Your plugin documentation will automatically be added to the [Logstash Reference](/reference/index.md).
|
||||
* **Testing.** With our testing infrastructure, your plugin will be continuously tested against current and future releases of Logstash. As a result, users will have the assurance that if incompatibilities arise, they will be quickly discovered and corrected.
|
||||
|
||||
|
||||
### Acceptance Guidelines [_acceptance_guidelines]
|
||||
|
||||
* **Code Review.** Your plugin must be reviewed by members of the community for coherence, quality, readability, stability and security.
|
||||
* **Tests.** Your plugin must contain tests to be accepted. These tests are also subject to code review for scope and completeness. It’s ok if you don’t know how to write tests — we will guide you. We are working on publishing a guide to creating tests for Logstash which will make it easier. In the meantime, you can refer to [http://betterspecs.org/](http://betterspecs.org/) for examples.
|
||||
|
||||
To begin migrating your plugin to logstash-plugins, simply create a new [issue](https://github.com/elasticsearch/logstash/issues) in the Logstash repository. When the acceptance guidelines are completed, we will facilitate the move to the logstash-plugins organization using the recommended [github process](https://help.github.com/articles/transferring-a-repository/#transferring-from-a-user-to-an-organization).
|
docs/extend/java-codec-plugin.md (new file, 348 lines)
|
|||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/java-codec-plugin.html
|
||||
---
|
||||
|
||||
# How to write a Java codec plugin [java-codec-plugin]
|
||||
|
||||
::::{note}
|
||||
Java codecs are currently supported only for Java input and output plugins. They will not work with Ruby input or output plugins.
|
||||
::::
|
||||
|
||||
|
||||
To develop a new Java codec for Logstash, you write a new Java class that conforms to the Logstash Java Codecs API, package it, and install it with the logstash-plugin utility. We’ll go through each of those steps.
|
||||
|
||||
|
||||
## Set up your environment [_set_up_your_environment_2]
|
||||
|
||||
|
||||
### Copy the example repo [_copy_the_example_repo_2]
|
||||
|
||||
Start by copying the [example codec plugin](https://github.com/logstash-plugins/logstash-codec-java_codec_example). The plugin API is currently part of the Logstash codebase so you must have a local copy of that available. You can obtain a copy of the Logstash codebase with the following `git` command:
|
||||
|
||||
```shell
|
||||
git clone --branch <branch_name> --single-branch https://github.com/elastic/logstash.git <target_folder>
|
||||
```
|
||||
|
||||
The `branch_name` should correspond to the version of Logstash containing the preferred revision of the Java plugin API.
|
||||
|
||||
::::{note}
|
||||
The GA version of the Java plugin API is available in the `7.2` and later branches of the Logstash codebase.
|
||||
::::
|
||||
|
||||
|
||||
Specify the `target_folder` for your local copy of the Logstash codebase. If you do not specify `target_folder`, it defaults to a new folder called `logstash` under your current folder.
|
||||
|
||||
|
||||
### Generate the .jar file [_generate_the_jar_file_2]
|
||||
|
||||
After you have obtained a copy of the appropriate revision of the Logstash codebase, you need to compile it to generate the .jar file containing the Java plugin API. From the root directory of your Logstash codebase ($LS_HOME), you can compile it with `./gradlew assemble` (or `gradlew.bat assemble` if you’re running on Windows). This should produce the `$LS_HOME/logstash-core/build/libs/logstash-core-x.y.z.jar` where `x`, `y`, and `z` refer to the version of Logstash.
|
||||
|
||||
After you have successfully compiled Logstash, you need to tell your Java plugin where to find the `logstash-core-x.y.z.jar` file. Create a new file named `gradle.properties` in the root folder of your plugin project. That file should have a single line:
|
||||
|
||||
```txt
|
||||
LOGSTASH_CORE_PATH=<target_folder>/logstash-core
|
||||
```
|
||||
|
||||
where `target_folder` is the root folder of your local copy of the Logstash codebase.
|
||||
|
||||
|
||||
## Code the plugin [_code_the_plugin_2]
|
||||
|
||||
The example codec plugin decodes messages separated by a configurable delimiter and encodes messages by writing their string representation separated by a delimiter. For example, if the codec were configured with `/` as the delimiter, the input text `event1/event2/` would be decoded into two separate events with `message` fields of `event1` and `event2`, respectively. Note that this is only an example codec and does not cover all the edge cases that a production-grade codec should cover.
|
||||
|
||||
Let’s look at the main class in that example codec:
|
||||
|
||||
```java
|
||||
@LogstashPlugin(name="java_codec_example")
|
||||
public class JavaCodecExample implements Codec {
|
||||
|
||||
public static final PluginConfigSpec<String> DELIMITER_CONFIG =
|
||||
PluginConfigSpec.stringSetting("delimiter", ",");
|
||||
|
||||
private final String id;
|
||||
private final String delimiter;
|
||||
|
||||
public JavaCodecExample(final Configuration config, final Context context) {
|
||||
this(config.get(DELIMITER_CONFIG));
|
||||
}
|
||||
|
||||
private JavaCodecExample(String delimiter) {
|
||||
this.id = UUID.randomUUID().toString();
|
||||
this.delimiter = delimiter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void decode(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
|
||||
// a not-production-grade delimiter decoder
|
||||
byte[] byteInput = new byte[byteBuffer.remaining()];
|
||||
byteBuffer.get(byteInput);
|
||||
if (byteInput.length > 0) {
|
||||
String input = new String(byteInput);
|
||||
String[] split = input.split(delimiter);
|
||||
for (String s : split) {
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("message", s);
|
||||
consumer.accept(map);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
|
||||
// if the codec maintains any internal state such as partially-decoded input, this
|
||||
// method should flush that state along with any additional input supplied in
|
||||
// the ByteBuffer
|
||||
|
||||
decode(byteBuffer, consumer); // this is a simplistic implementation
|
||||
}
|
||||
|
||||
@Override
|
||||
public void encode(Event event, OutputStream outputStream) throws IOException {
|
||||
outputStream.write((event.toString() + delimiter).getBytes(Charset.defaultCharset()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<PluginConfigSpec<?>> configSchema() {
|
||||
// should return a list of all configuration options for this plugin
|
||||
return Collections.singletonList(DELIMITER_CONFIG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Codec cloneCodec() {
|
||||
return new JavaCodecExample(this.delimiter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getId() {
|
||||
return this.id;
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
Let’s step through and examine each part of that class.
|
||||
|
||||
|
||||
### Class declaration [_class_declaration_6]
|
||||
|
||||
```java
|
||||
@LogstashPlugin(name="java_codec_example")
|
||||
public class JavaCodecExample implements Codec {
|
||||
```
|
||||
|
||||
Notes about the class declaration:
|
||||
|
||||
* All Java plugins must be annotated with the `@LogstashPlugin` annotation. Additionally:
|
||||
|
||||
* The `name` property of the annotation must be supplied and defines the name of the plugin as it will be used in the Logstash pipeline definition. For example, this codec would be referenced in the codec section of an appropriate input or output in the Logstash pipeline definition as `codec => java_codec_example { }`
|
||||
* The value of the `name` property must match the name of the class excluding casing and underscores.
|
||||
|
||||
* The class must implement the `co.elastic.logstash.api.Codec` interface.
|
||||
* Java plugins may not be created in the `org.logstash` or `co.elastic.logstash` packages to prevent potential clashes with classes in Logstash itself.
|
||||
|
||||
|
||||
#### Plugin settings [_plugin_settings_2]
|
||||
|
||||
The snippet below contains both the setting definition and the method referencing it:
|
||||
|
||||
```java
|
||||
public static final PluginConfigSpec<String> DELIMITER_CONFIG =
|
||||
PluginConfigSpec.stringSetting("delimiter", ",");
|
||||
|
||||
@Override
|
||||
public Collection<PluginConfigSpec<?>> configSchema() {
|
||||
return Collections.singletonList(DELIMITER_CONFIG);
|
||||
}
|
||||
```
|
||||
|
||||
The `PluginConfigSpec` class allows developers to specify the settings that a plugin supports complete with setting name, data type, deprecation status, required status, and default value. In this example, the `delimiter` setting defines the delimiter on which the codec will split events. It is not a required setting and if it is not explicitly set, its default value will be `,`.
|
||||
|
||||
The `configSchema` method must return a list of all settings that the plugin supports. The Logstash execution engine will validate that all required settings are present and that no unsupported settings are present.
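The snippet below sketches how a codec might declare more than one setting and return them all from `configSchema`. It is only an illustration: the `charset` setting, its `UTF-8` default, and the explicit type witness on `Arrays.asList` are assumptions, not part of the example plugin. A user would then configure it as, for example, `codec => java_codec_example { delimiter => "/" charset => "UTF-8" }`.

```java
// Hedged sketch: declaring a second, hypothetical "charset" setting and
// returning both settings so the execution engine can validate user input.
public static final PluginConfigSpec<String> DELIMITER_CONFIG =
        PluginConfigSpec.stringSetting("delimiter", ",");

public static final PluginConfigSpec<String> CHARSET_CONFIG =
        PluginConfigSpec.stringSetting("charset", "UTF-8");

@Override
public Collection<PluginConfigSpec<?>> configSchema() {
    // the explicit type witness keeps the wildcard collection type happy
    return Arrays.<PluginConfigSpec<?>>asList(DELIMITER_CONFIG, CHARSET_CONFIG);
}
```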
|
||||
|
||||
|
||||
#### Constructor and initialization [_constructor_and_initialization_2]
|
||||
|
||||
```java
|
||||
private final String id;
|
||||
private final String delimiter;
|
||||
|
||||
public JavaCodecExample(final Configuration config, final Context context) {
|
||||
this(config.get(DELIMITER_CONFIG));
|
||||
}
|
||||
|
||||
private JavaCodecExample(String delimiter) {
|
||||
this.id = UUID.randomUUID().toString();
|
||||
this.delimiter = delimiter;
|
||||
}
|
||||
```
|
||||
|
||||
All Java codec plugins must have a constructor taking `Configuration` and `Context` arguments. This is the constructor that will be used to instantiate them at runtime. The retrieval and validation of all plugin settings should occur in this constructor. In this example, the delimiter used to separate events is retrieved from its setting and stored in an instance variable so that it can be used later in the `decode` and `encode` methods. The codec’s ID is initialized to a random UUID, as should be done for most codecs.
|
||||
|
||||
Any additional initialization may occur in the constructor as well. If there are any unrecoverable errors encountered in the configuration or initialization of the codec plugin, a descriptive exception should be thrown. The exception will be logged and will prevent Logstash from starting.
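For example, a codec that cannot work with an empty delimiter could reject it up front. This is a minimal sketch of that kind of validation, not part of the example plugin:

```java
// Hedged sketch: validate settings in the constructor and throw a descriptive
// exception so an unusable configuration prevents Logstash from starting.
public JavaCodecExample(final Configuration config, final Context context) {
    this(config.get(DELIMITER_CONFIG));
    if (this.delimiter == null || this.delimiter.isEmpty()) {
        throw new IllegalArgumentException("delimiter must be a non-empty string");
    }
}
```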
|
||||
|
||||
|
||||
### Codec methods [_codec_methods]
|
||||
|
||||
```java
|
||||
@Override
|
||||
public void decode(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
|
||||
// a not-production-grade delimiter decoder
|
||||
byte[] byteInput = new byte[byteBuffer.remaining()];
|
||||
byteBuffer.get(byteInput);
|
||||
if (byteInput.length > 0) {
|
||||
String input = new String(byteInput);
|
||||
String[] split = input.split(delimiter);
|
||||
for (String s : split) {
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("message", s);
|
||||
consumer.accept(map);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
|
||||
// if the codec maintains any internal state such as partially-decoded input, this
|
||||
// method should flush that state along with any additional input supplied in
|
||||
// the ByteBuffer
|
||||
|
||||
decode(byteBuffer, consumer); // this is a simplistic implementation
|
||||
}
|
||||
|
||||
@Override
|
||||
public void encode(Event event, OutputStream outputStream) throws IOException {
|
||||
outputStream.write((event.toString() + delimiter).getBytes(Charset.defaultCharset()));
|
||||
}
|
||||
```
|
||||
|
||||
The `decode`, `flush`, and `encode` methods provide the core functionality of the codec. Codecs may be used by inputs to decode a sequence or stream of bytes into events or by outputs to encode events into a sequence of bytes.
|
||||
|
||||
The `decode` method decodes events from the specified `ByteBuffer` and passes them to the provided `Consumer`. The input must provide a `ByteBuffer` that is ready for reading with `byteBuffer.position()` indicating the next position to read and `byteBuffer.limit()` indicating the first byte in the buffer that is not safe to read. Codecs must ensure that `byteBuffer.position()` reflects the last-read position before returning control to the input. The input is then responsible for returning the buffer to write mode via either `byteBuffer.clear()` or `byteBuffer.compact()` before resuming writes. In the example above, the `decode` method simply splits the incoming byte stream on the specified delimiter. A production-grade codec such as [`java-line`](https://github.com/elastic/logstash/blob/main/logstash-core/src/main/java/org/logstash/plugins/codecs/Line.java) would not make the simplifying assumption that the end of the supplied byte stream corresponded with the end of an event.
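The sketch below illustrates that position/limit contract: it consumes only complete events and hands any partial tail back to the input by resetting `position()`. It assumes a single-byte character set and a single-character delimiter purely to keep the illustration short; it is not part of the example plugin.

```java
// Hedged sketch of the buffer contract: read complete events, then leave
// position() at the first unread byte so the input can compact() the buffer
// and supply the rest of the partial event on a later call.
@Override
public void decode(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
    int lastEventEnd = byteBuffer.position();
    StringBuilder current = new StringBuilder();
    while (byteBuffer.hasRemaining()) {
        char c = (char) byteBuffer.get();         // single-byte charset assumed
        if (c == delimiter.charAt(0)) {           // single-character delimiter assumed
            Map<String, Object> map = new HashMap<>();
            map.put("message", current.toString());
            consumer.accept(map);
            current.setLength(0);
            lastEventEnd = byteBuffer.position(); // end of the last complete event
        } else {
            current.append(c);
        }
    }
    byteBuffer.position(lastEventEnd);            // hand the partial tail back to the input
}
```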
|
||||
|
||||
Events should be constructed as instances of `Map<String, Object>` and pushed into the event pipeline via the `Consumer<Map<String, Object>>.accept()` method. To reduce allocations and GC pressure, codecs may reuse the same map instance by modifying its fields between calls to `Consumer<Map<String, Object>>.accept()` because the event pipeline will create events based on a copy of the map’s data.
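A minimal sketch of that reuse pattern, assuming (as described above) that the pipeline copies the map’s data when it creates each event:

```java
// Hedged sketch: reuse one map instance across accept() calls to reduce
// allocations; safe only because the pipeline copies the map's contents.
private final Map<String, Object> reusableMap = new HashMap<>();

@Override
public void decode(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
    byte[] byteInput = new byte[byteBuffer.remaining()];
    byteBuffer.get(byteInput);
    if (byteInput.length > 0) {
        for (String s : new String(byteInput).split(delimiter)) {
            reusableMap.clear();              // reset the shared instance
            reusableMap.put("message", s);
            consumer.accept(reusableMap);     // pipeline copies the data here
        }
    }
}
```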
|
||||
|
||||
The `flush` method works in coordination with the `decode` method to decode all remaining events from the specified `ByteBuffer` along with any internal state that may remain after previous calls to the `decode` method. As an example of internal state that a codec might maintain, consider an input stream of bytes `event1/event2/event3` with a delimiter of `/`. Due to buffering or other reasons, the input might supply a partial stream of bytes such as `event1/eve` to the codec’s `decode` method. In this case, the codec could save the beginning three characters `eve` of the second event rather than assuming that the supplied byte stream ends on an event boundary. If the next call to `decode` supplied the `nt2/ev` bytes, the codec would prepend the saved `eve` bytes to produce the full `event2` event and then save the remaining `ev` bytes for decoding when the remainder of the bytes for that event were supplied. A call to `flush` signals the codec that the supplied bytes represent the end of an event stream and all remaining bytes should be decoded to events. The `flush` example above is a simplistic implementation that does not maintain any state about partially-supplied byte streams across calls to `decode`.
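The sketch below shows one way a codec could keep that kind of internal state: `decode` buffers any trailing partial event in a `StringBuilder`, and `flush` drains whatever is left once the input signals the end of the stream. It is a hedged illustration, not the production `java-line` implementation.

```java
// Hedged sketch: keep the trailing partial event between decode calls and
// emit it on flush, when the input signals that the byte stream has ended.
private final StringBuilder remainder = new StringBuilder();

@Override
public void decode(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
    byte[] bytes = new byte[byteBuffer.remaining()];
    byteBuffer.get(bytes);
    remainder.append(new String(bytes));                  // prepend previously saved state
    int boundary;
    while ((boundary = remainder.indexOf(delimiter)) >= 0) {
        Map<String, Object> map = new HashMap<>();
        map.put("message", remainder.substring(0, boundary));
        consumer.accept(map);
        remainder.delete(0, boundary + delimiter.length());
    }
    // anything left in `remainder` is the beginning of a still-incomplete event
}

@Override
public void flush(ByteBuffer byteBuffer, Consumer<Map<String, Object>> consumer) {
    decode(byteBuffer, consumer);                         // decode any complete events first
    if (remainder.length() > 0) {                         // end of stream: emit the tail
        Map<String, Object> map = new HashMap<>();
        map.put("message", remainder.toString());
        consumer.accept(map);
        remainder.setLength(0);
    }
}
```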
|
||||
|
||||
The `encode` method encodes an event into a sequence of bytes and writes it into the specified `OutputStream`. Because a single codec instance is shared across all pipeline workers in the output stage of the Logstash pipeline, codecs should *not* retain state across calls to their `encode` methods.
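Because `Charset.defaultCharset()` differs between hosts, an encoder will often want to write with an explicit character set. A minimal sketch, with UTF-8 chosen purely for illustration:

```java
// Hedged sketch: stateless encode that uses an explicit charset instead of the
// platform default so the output is identical on every machine.
@Override
public void encode(Event event, OutputStream outputStream) throws IOException {
    outputStream.write((event.toString() + delimiter).getBytes(StandardCharsets.UTF_8));
}
```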
|
||||
|
||||
|
||||
### cloneCodec method [_clonecodec_method]
|
||||
|
||||
```java
|
||||
@Override
|
||||
public Codec cloneCodec() {
|
||||
return new JavaCodecExample(this.delimiter);
|
||||
}
|
||||
```
|
||||
|
||||
The `cloneCodec` method should return an identical instance of the codec with the exception of its ID. Because codecs may be stateful across calls to their `decode` methods, input plugins that are multi-threaded should use a separate instance of each codec via the `cloneCodec` method for each of their threads. Because a single codec instance is shared across all pipeline workers in the output stage of the Logstash pipeline, codecs should *not* retain state across calls to their `encode` methods. In the example above, the codec is cloned with the same delimiter but a different ID.
|
||||
|
||||
|
||||
### getId method [_getid_method_2]
|
||||
|
||||
```java
|
||||
@Override
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
```
|
||||
|
||||
For codec plugins, the `getId` method should always return the id that was set at instantiation time. This is typically a UUID.
|
||||
|
||||
|
||||
### Unit tests [_unit_tests_2]
|
||||
|
||||
Lastly, but certainly not least importantly, unit tests are strongly encouraged. The example codec plugin includes an [example unit test](https://github.com/logstash-plugins/logstash-codec-java_codec_example/blob/main/src/test/java/org/logstashplugins/JavaCodecExampleTest.java) that you can use as a template for your own.
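A test for this codec could look roughly like the sketch below. It assumes JUnit and the `ConfigurationImpl` helper from logstash-core that the example plugins use to build a `Configuration` from a map; check the example test for the exact imports and helper names.

```java
// Hedged sketch of a decode test: configure the codec with "/" as the delimiter
// and assert that two events are produced from "event1/event2/".
@Test
public void decodeSplitsOnConfiguredDelimiter() {
    Map<String, Object> settings = new HashMap<>();
    settings.put("delimiter", "/");
    JavaCodecExample codec = new JavaCodecExample(new ConfigurationImpl(settings), null);

    List<Map<String, Object>> decoded = new ArrayList<>();
    codec.decode(ByteBuffer.wrap("event1/event2/".getBytes()), decoded::add);

    assertEquals(2, decoded.size());
    assertEquals("event1", decoded.get(0).get("message"));
    assertEquals("event2", decoded.get(1).get("message"));
}
```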
|
||||
|
||||
|
||||
## Package and deploy [_package_and_deploy_2]
|
||||
|
||||
Java plugins are packaged as Ruby gems for dependency management and interoperability with Ruby plugins. Once they are packaged as gems, they may be installed with the `logstash-plugin` utility just as Ruby plugins are. Because no knowledge of Ruby or its toolchain should be required for Java plugin development, the procedure for packaging Java plugins as Ruby gems has been automated through a custom task in the Gradle build file provided with the example Java plugins. The following sections describe how to configure and execute that packaging task as well as how to install the packaged Java plugin in Logstash.
|
||||
|
||||
|
||||
### Configuring the Gradle packaging task [_configuring_the_gradle_packaging_task_2]
|
||||
|
||||
The following section appears near the top of the `build.gradle` file supplied with the example Java plugins:
|
||||
|
||||
```java
|
||||
// ===========================================================================
|
||||
// plugin info
|
||||
// ===========================================================================
|
||||
group 'org.logstashplugins' // must match the package of the main plugin class
|
||||
version "${file("VERSION").text.trim()}" // read from required VERSION file
|
||||
description = "Example Java filter implementation"
|
||||
pluginInfo.licenses = ['Apache-2.0'] // list of SPDX license IDs
|
||||
pluginInfo.longDescription = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using \$LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
||||
pluginInfo.authors = ['Elasticsearch']
|
||||
pluginInfo.email = ['info@elastic.co']
|
||||
pluginInfo.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
|
||||
pluginInfo.pluginType = "filter"
|
||||
pluginInfo.pluginClass = "JavaFilterExample"
|
||||
pluginInfo.pluginName = "java_filter_example"
|
||||
// ===========================================================================
|
||||
```
|
||||
|
||||
You should configure the values above for your plugin; a sketch of the corresponding values for this codec example follows the list below.
|
||||
|
||||
* The `version` value will be automatically read from the `VERSION` file in the root of your plugin’s codebase.
|
||||
* `pluginInfo.pluginType` should be set to one of `input`, `filter`, `codec`, or `output`.
|
||||
* `pluginInfo.pluginName` must match the name specified on the `@LogstashPlugin` annotation on the main plugin class. The Gradle packaging task will validate that and return an error if they do not match.
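Because the snippet above was copied from the filter example, the values for the codec plugin in this guide would presumably look more like the following sketch (only the lines that differ are shown):

```java
// Hedged sketch: plugin info values adjusted for the codec example in this guide.
description = "Example Java codec implementation"
pluginInfo.pluginType = "codec"               // this plugin is a codec, not a filter
pluginInfo.pluginClass = "JavaCodecExample"   // main plugin class
pluginInfo.pluginName = "java_codec_example"  // must match the @LogstashPlugin name
```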
|
||||
|
||||
|
||||
### Running the Gradle packaging task [_running_the_gradle_packaging_task_2]
|
||||
|
||||
Several Ruby source files along with a `gemspec` file and a `Gemfile` are required to package the plugin as a Ruby gem. These Ruby files are used only for defining the Ruby gem structure or at Logstash startup time to register the Java plugin. They are not used during runtime event processing. The Gradle packaging task automatically generates all of these files based on the values configured in the section above.
|
||||
|
||||
You run the Gradle packaging task with the following command:
|
||||
|
||||
```shell
|
||||
./gradlew gem
|
||||
```
|
||||
|
||||
For Windows platforms: Substitute `gradlew.bat` for `./gradlew` as appropriate in the command.
|
||||
|
||||
That task will produce a gem file in the root directory of your plugin’s codebase with the name `logstash-{{plugintype}}-<pluginName>-<version>.gem`
|
||||
|
||||
|
||||
### Installing the Java plugin in Logstash [_installing_the_java_plugin_in_logstash_2]
|
||||
|
||||
After you have packaged your Java plugin as a Ruby gem, you can install it in Logstash with this command:
|
||||
|
||||
```shell
|
||||
bin/logstash-plugin install --no-verify --local /path/to/javaPlugin.gem
|
||||
```
|
||||
|
||||
For Windows platforms: Substitute backslashes for forward slashes as appropriate in the command.
|
||||
|
||||
|
||||
## Run Logstash with the Java codec plugin [_run_logstash_with_the_java_codec_plugin]
|
||||
|
||||
To test the plugin, start Logstash with:
|
||||
|
||||
```shell
|
||||
echo "foo,bar" | bin/logstash -e 'input { java_stdin { codec => java_codec_example } }'
|
||||
```
|
||||
|
||||
The expected Logstash output (excluding initialization) with the configuration above is:
|
||||
|
||||
```txt
|
||||
{
|
||||
"@version" => "1",
|
||||
"message" => "foo",
|
||||
"@timestamp" => yyyy-MM-ddThh:mm:ss.SSSZ,
|
||||
"host" => "<yourHostName>"
|
||||
}
|
||||
{
|
||||
"@version" => "1",
|
||||
"message" => "bar\n",
|
||||
"@timestamp" => yyyy-MM-ddThh:mm:ss.SSSZ,
|
||||
"host" => "<yourHostName>"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Feedback [_feedback_2]
|
||||
|
||||
If you have any feedback on Java plugin support in Logstash, please comment on our [main Github issue](https://github.com/elastic/logstash/issues/9215) or post in the [Logstash forum](https://discuss.elastic.co/c/logstash).
|
||||
|
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/logstash/current/java-filter-plugin.html
|
||||
---
|
||||
|
||||
# How to write a Java filter plugin [java-filter-plugin]
|
||||
|
||||
To develop a new Java filter for Logstash, you write a new Java class that conforms to the Logstash Java Filters API, package it, and install it with the logstash-plugin utility. We’ll go through each of those steps.
|
||||
|
||||
|
||||
## Set up your environment [_set_up_your_environment_3]
|
||||
|
||||
|
||||
### Copy the example repo [_copy_the_example_repo_3]
|
||||
|
||||
Start by copying the [example filter plugin](https://github.com/logstash-plugins/logstash-filter-java_filter_example). The plugin API is currently part of the Logstash codebase so you must have a local copy of that available. You can obtain a copy of the Logstash codebase with the following `git` command:
|
||||
|
||||
```shell
|
||||
git clone --branch <branch_name> --single-branch https://github.com/elastic/logstash.git <target_folder>
|
||||
```
|
||||
|
||||
The `branch_name` should correspond to the version of Logstash containing the preferred revision of the Java plugin API.
|
||||
|
||||
::::{note}
|
||||
The GA version of the Java plugin API is available in the `7.2` and later branches of the Logstash codebase.
|
||||
::::
|
||||
|
||||
|
||||
Specify the `target_folder` for your local copy of the Logstash codebase. If you do not specify `target_folder`, it defaults to a new folder called `logstash` under your current folder.
|
||||
|
||||
|
||||
### Generate the .jar file [_generate_the_jar_file_3]
|
||||
|
||||
After you have obtained a copy of the appropriate revision of the Logstash codebase, you need to compile it to generate the .jar file containing the Java plugin API. From the root directory of your Logstash codebase ($LS_HOME), you can compile it with `./gradlew assemble` (or `gradlew.bat assemble` if you’re running on Windows). This should produce the `$LS_HOME/logstash-core/build/libs/logstash-core-x.y.z.jar` where `x`, `y`, and `z` refer to the version of Logstash.
|
||||
|
||||
After you have successfully compiled Logstash, you need to tell your Java plugin where to find the `logstash-core-x.y.z.jar` file. Create a new file named `gradle.properties` in the root folder of your plugin project. That file should have a single line:
|
||||
|
||||
```txt
|
||||
LOGSTASH_CORE_PATH=<target_folder>/logstash-core
|
||||
```
|
||||
|
||||
where `target_folder` is the root folder of your local copy of the Logstash codebase.
|
||||
|
||||
|
||||
## Code the plugin [_code_the_plugin_3]
|
||||
|
||||
The example filter plugin allows one to configure a field in each event that will be reversed. For example, if the filter were configured to reverse the `day_of_week` field, an event with `day_of_week: "Monday"` would be transformed to `day_of_week: "yadnoM"`. Let’s look at the main class in that example filter:
|
||||
|
||||
```java
|
||||
@LogstashPlugin(name = "java_filter_example")
|
||||
public class JavaFilterExample implements Filter {
|
||||
|
||||
public static final PluginConfigSpec<String> SOURCE_CONFIG =
|
||||
PluginConfigSpec.stringSetting("source", "message");
|
||||
|
||||
private String id;
|
||||
private String sourceField;
|
||||
|
||||
public JavaFilterExample(String id, Configuration config, Context context) {
|
||||
this.id = id;
|
||||
this.sourceField = config.get(SOURCE_CONFIG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Event> filter(Collection<Event> events, FilterMatchListener matchListener) {
|
||||
for (Event e : events) {
|
||||
Object f = e.getField(sourceField);
|
||||
if (f instanceof String) {
|
||||
e.setField(sourceField, StringUtils.reverse((String)f));
|
||||
matchListener.filterMatched(e);
|
||||
}
|
||||
}
|
||||
return events;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<PluginConfigSpec<?>> configSchema() {
|
||||
return Collections.singletonList(SOURCE_CONFIG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getId() {
|
||||
return this.id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
this.sourceField = null;
|
||||
return;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Let’s step through and examine each part of that class.
|
||||
|
||||
|
||||
### Class declaration [_class_declaration_7]
|
||||
|
||||
```java
|
||||
@LogstashPlugin(name = "java_filter_example")
|
||||
public class JavaFilterExample implements Filter {
|
||||
```
|
||||
|
||||
Notes about the class declaration:
|
||||
|
||||
* All Java plugins must be annotated with the `@LogstashPlugin` annotation. Additionally:
|
||||
|
||||
* The `name` property of the annotation must be supplied and defines the name of the plugin as it will be used in the Logstash pipeline definition. For example, this filter would be referenced in the filter section of the Logstash pipeline definition as `filter { java_filter_example { } }`
|
||||
* The value of the `name` property must match the name of the class excluding casing and underscores.
|
||||
|
||||
* The class must implement the `co.elastic.logstash.api.Filter` interface.
|
||||
* Java plugins may not be created in the `org.logstash` or `co.elastic.logstash` packages to prevent potential clashes with classes in Logstash itself.
|
||||
|
||||
|
||||
### Plugin settings [_plugin_settings_3]
|
||||
|
||||
The snippet below contains both the setting definition and the method referencing it:
|
||||
|
||||
```java
|
||||
public static final PluginConfigSpec<String> SOURCE_CONFIG =
|
||||
PluginConfigSpec.stringSetting("source", "message");
|
||||
|
||||
@Override
|
||||
public Collection<PluginConfigSpec<?>> configSchema() {
|
||||
return Collections.singletonList(SOURCE_CONFIG);
|
||||
}
|
||||
```
|
||||
|
||||
The `PluginConfigSpec` class allows developers to specify the settings that a plugin supports complete with setting name, data type, deprecation status, required status, and default value. In this example, the `source` setting defines the name of the field in each event that will be reversed. It is not a required setting and if it is not explicitly set, its default value will be `message`.
|
||||
|
||||
The `configSchema` method must return a list of all settings that the plugin supports. In a future phase of the Java plugin project, the Logstash execution engine will validate that all required settings are present and that no unsupported settings are present.
|
||||
|
||||
|
||||
### Constructor and initialization [_constructor_and_initialization_3]
|
||||
|
||||
```java
|
||||
private String id;
|
||||
private String sourceField;
|
||||
|
||||
public JavaFilterExample(String id, Configuration config, Context context) {
|
||||
this.id = id;
|
||||
this.sourceField = config.get(SOURCE_CONFIG);
|
||||
}
|
||||
```
|
||||
|
||||
All Java filter plugins must have a constructor taking `String` id, `Configuration`, and `Context` arguments. This is the constructor that will be used to instantiate them at runtime. The retrieval and validation of all plugin settings should occur in this constructor. In this example, the name of the field to be reversed in each event is retrieved from its setting and stored in a local variable so that it can be used later in the `filter` method.
|
||||
|
||||
Any additional initialization may occur in the constructor as well. If there are any unrecoverable errors encountered in the configuration or initialization of the filter plugin, a descriptive exception should be thrown. The exception will be logged and will prevent Logstash from starting.
|
||||
|
||||
|
||||
### Filter method [_filter_method_2]
|
||||
|
||||
```java
|
||||
@Override
|
||||
public Collection<Event> filter(Collection<Event> events, FilterMatchListener matchListener) {
|
||||
for (Event e : events) {
|
||||
Object f = e.getField(sourceField);
|
||||
if (f instanceof String) {
|
||||
e.setField(sourceField, StringUtils.reverse((String)f));
|
||||
matchListener.filterMatched(e);
|
||||
}
|
||||
}
|
||||
return events;
|
||||
}
```
|
||||
|
||||
Finally, we come to the `filter` method that is invoked by the Logstash execution engine on batches of events as they flow through the event processing pipeline. The events to be filtered are supplied in the `events` argument and the method should return a collection of filtered events. Filters may perform a variety of actions on events as they flow through the pipeline including:
|
||||
|
||||
* Mutation — Fields in events may be added, removed, or changed by a filter. This is the most common scenario for filters that perform various kinds of enrichment on events. In this scenario, the incoming `events` collection may be returned unmodified since the events in the collection are mutated in place.
|
||||
* Deletion — Events may be removed from the event pipeline by a filter so that subsequent filters and outputs do not receive them. In this scenario, the events to be deleted must be removed from the collection of filtered events before it is returned.
|
||||
* Creation — A filter may insert new events into the event pipeline that will be seen only by subsequent filters and outputs. In this scenario, the new events must be added to the collection of filtered events before it is returned.
|
||||
* Observation — Events may pass unchanged by a filter through the event pipeline. This may be useful in scenarios where a filter performs external actions (e.g., updating an external cache) based on the events observed in the event pipeline. In this scenario, the incoming `events` collection may be returned unmodified since no changes were made.
|
||||
|
||||
In the example above, the value of the `source` field is retrieved from each event and reversed if it is a string value. Because each event is mutated in place, the incoming `events` collection can be returned.
|
||||
|
||||
The `matchListener` is the mechanism by which filters indicate which events "match". The common actions for filters such as `add_field` and `add_tag` are applied only to events that are designated as "matching". Some filters such as the [grok filter](logstash-docs-md://lsr/plugins-filters-grok.md) have a clear definition for what constitutes a matching event and will notify the listener only for matching events. Other filters such as the [UUID filter](logstash-docs-md://lsr/plugins-filters-uuid.md) have no specific match criteria and should notify the listener for every event filtered. In this example, the filter notifies the match listener for any event that had a `String` value in its `source` field and was therefore able to be reversed.
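The example above only mutates events in place. As a sketch of the deletion case from the list above, a filter can simply return a smaller collection; the `status` field and the `"debug"` value here are hypothetical:

```java
// Hedged sketch: drop events whose hypothetical "status" field equals "debug";
// events omitted from the returned collection never reach later filters or outputs.
@Override
public Collection<Event> filter(Collection<Event> events, FilterMatchListener matchListener) {
    List<Event> kept = new ArrayList<>();
    for (Event e : events) {
        if (!"debug".equals(e.getField("status"))) {
            kept.add(e);
        }
        // whether a drop-style filter notifies the match listener is a design
        // choice left to the plugin author
    }
    return kept;
}
```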
|
||||
|
||||
|
||||
### getId method [_getid_method_3]
|
||||
|
||||
```java
|
||||
@Override
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
```
|
||||
|
||||
For filter plugins, the `getId` method should always return the id that was provided to the plugin through its constructor at instantiation time.
|
||||
|
||||
|
||||
### close method [_close_method]
|
||||
|
||||
```java
|
||||
@Override
|
||||
public void close() {
|
||||
// shutdown a resource that was instantiated during the filter initialization phase.
|
||||
this.sourceField = null;
|
||||
return;
|
||||
}
|
||||
```
|
||||
|
||||
Filter plugins can use additional resources to perform operations, such as creating new database connections. Implementing the `close` method will allow the plugins to free up those resources when shutting down the pipeline.
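A minimal sketch of that pattern: the filter opens a resource when it is constructed and releases it in `close`. The lookup-file path and the use of a `BufferedReader` are assumptions for illustration only.

```java
// Hedged sketch: acquire a resource at construction time and release it when
// the pipeline shuts down and close() is invoked.
private BufferedReader lookupReader;

public JavaFilterExample(String id, Configuration config, Context context) {
    this.id = id;
    this.sourceField = config.get(SOURCE_CONFIG);
    try {
        this.lookupReader = Files.newBufferedReader(Paths.get("/etc/logstash/lookup.txt"));
    } catch (IOException ex) {
        // unrecoverable initialization problem: prevent Logstash from starting
        throw new IllegalStateException("could not open lookup file", ex);
    }
}

@Override
public void close() {
    try {
        if (lookupReader != null) {
            lookupReader.close();
        }
    } catch (IOException ex) {
        // best effort during pipeline shutdown
    }
}
```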
|
||||
|
||||
|
||||
### Unit tests [_unit_tests_3]
|
||||
|
||||
Lastly, but certainly not least importantly, unit tests are strongly encouraged. The example filter plugin includes an [example unit test](https://github.com/logstash-plugins/logstash-filter-java_filter_example/blob/main/src/test/java/org/logstashplugins/JavaFilterExampleTest.java) that you can use as a template for your own.
|
||||
|
||||
|
||||
## Package and deploy [_package_and_deploy_3]
|
||||
|
||||
Java plugins are packaged as Ruby gems for dependency management and interoperability with Ruby plugins. Once they are packaged as gems, they may be installed with the `logstash-plugin` utility just as Ruby plugins are. Because no knowledge of Ruby or its toolchain should be required for Java plugin development, the procedure for packaging Java plugins as Ruby gems has been automated through a custom task in the Gradle build file provided with the example Java plugins. The following sections describe how to configure and execute that packaging task as well as how to install the packaged Java plugin in Logstash.
|
||||
|
||||
|
||||
### Configuring the Gradle packaging task [_configuring_the_gradle_packaging_task_3]
|
||||
|
||||
The following section appears near the top of the `build.gradle` file supplied with the example Java plugins:
|
||||
|
||||
```java
|
||||
// ===========================================================================
|
||||
// plugin info
|
||||
// ===========================================================================
|
||||
group 'org.logstashplugins' // must match the package of the main plugin class
|
||||
version "${file("VERSION").text.trim()}" // read from required VERSION file
|
||||
description = "Example Java filter implementation"
|
||||
pluginInfo.licenses = ['Apache-2.0'] // list of SPDX license IDs
|
||||
pluginInfo.longDescription = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using \$LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
||||
pluginInfo.authors = ['Elasticsearch']
|
||||
pluginInfo.email = ['info@elastic.co']
|
||||
pluginInfo.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
|
||||
pluginInfo.pluginType = "filter"
|
||||
pluginInfo.pluginClass = "JavaFilterExample"
|
||||
pluginInfo.pluginName = "java_filter_example"
|
||||
// ===========================================================================
|
||||
```
|
||||
|
||||
You should configure the values above for your plugin.
|
||||
|
||||
* The `version` value will be automatically read from the `VERSION` file in the root of your plugin’s codebase.
|
||||
* `pluginInfo.pluginType` should be set to one of `input`, `filter`, `codec`, or `output`.
|
||||
* `pluginInfo.pluginName` must match the name specified on the `@LogstashPlugin` annotation on the main plugin class. The Gradle packaging task will validate that and return an error if they do not match.
|
||||
|
||||
|
||||
### Running the Gradle packaging task [_running_the_gradle_packaging_task_3]
|
||||
|
||||
Several Ruby source files along with a `gemspec` file and a `Gemfile` are required to package the plugin as a Ruby gem. These Ruby files are used only for defining the Ruby gem structure or at Logstash startup time to register the Java plugin. They are not used during runtime event processing. The Gradle packaging task automatically generates all of these files based on the values configured in the section above.
|
||||
|
||||
You run the Gradle packaging task with the following command:
|
||||
|
||||
```shell
|
||||
./gradlew gem
|
||||
```
|
||||
|
||||
For Windows platforms: Substitute `gradlew.bat` for `./gradlew` as appropriate in the command.
|
||||
|
||||
That task will produce a gem file in the root directory of your plugin’s codebase with the name `logstash-{{plugintype}}-<pluginName>-<version>.gem`
|
||||
|
||||
|
||||
### Installing the Java plugin in Logstash [_installing_the_java_plugin_in_logstash_3]
|
||||
|
||||
After you have packaged your Java plugin as a Ruby gem, you can install it in Logstash with this command:
|
||||
|
||||
```shell
|
||||
bin/logstash-plugin install --no-verify --local /path/to/javaPlugin.gem
|
||||
```
|
||||
|
||||
For Windows platforms: Substitute backslashes for forward slashes as appropriate in the command.
|
||||
|
||||
|
||||
## Run Logstash with the Java filter plugin [_run_logstash_with_the_java_filter_plugin]
|
||||
|
||||
The following is a minimal Logstash configuration that can be used to test that the Java filter plugin is correctly installed and functioning.
|
||||
|
||||
```java
|
||||
input {
|
||||
generator { message => "Hello world!" count => 1 }
|
||||
}
|
||||
filter {
|
||||
java_filter_example {}
|
||||
}
|
||||
output {
|
||||
stdout { codec => rubydebug }
|
||||
}
|
||||
```
|
||||
|
||||
Copy the above Logstash configuration to a file such as `java_filter.conf`. Start Logstash with:
|
||||
|
||||
```shell
|
||||
bin/logstash -f /path/to/java_filter.conf
|
||||
```
|
||||
|
||||
The expected Logstash output (excluding initialization) with the configuration above is:
|
||||
|
||||
```txt
|
||||
{
|
||||
"sequence" => 0,
|
||||
"@version" => "1",
|
||||
"message" => "!dlrow olleH",
|
||||
"@timestamp" => yyyy-MM-ddThh:mm:ss.SSSZ,
|
||||
"host" => "<yourHostName>"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Feedback [_feedback_3]
|
||||
|
||||
If you have any feedback on Java plugin support in Logstash, please comment on our [main Github issue](https://github.com/elastic/logstash/issues/9215) or post in the [Logstash forum](https://discuss.elastic.co/c/logstash).