[Ops] Refactor env loading & fix agent-targeting rewrite (#178320)
## Summary

This PR refactors part of the pre-command env setup, splitting it into separate pieces so they can be skipped individually. It then removes the setup-avoidance logic based on agent types, since that won't be useful after the migration. It also fixes a missed bit in the agent-targeting rewrite used for the migration, where `provider: 'gcp'` was missing, and adds optional targeting of specific pipelines to the script.

- add `gcp` as provider to all rewritten agent targeting rules
- add an option to target specific pipelines
- refactor env-var loading into a separate file
- refactor the node install so it can be switched off by a flag
- skip the node install in (some) jobs that don't require it
parent 9613da5c5e
commit a89fb9b2fb
11 changed files with 247 additions and 192 deletions
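The opt-out flag introduced here works the same way in both lifecycle hooks changed below: a pipeline sets `SKIP_NODE_SETUP` in its `env` block, and the shared pre/post-command scripts test it with a bash regex before sourcing the Node.js setup. A minimal sketch of that guard, runnable outside of CI (the `echo` bodies are placeholders for the real `source` calls):

```bash
#!/usr/bin/env bash
# Minimal sketch of the SKIP_NODE_SETUP guard used in the hooks below.
# With default shell options the match is case-sensitive: only "1" or "true"
# skips the setup; anything else (including unset) falls through.
set -euo pipefail

if [[ "${SKIP_NODE_SETUP:-}" =~ ^(1|true)$ ]]; then
  echo "Skipping node setup (SKIP_NODE_SETUP=$SKIP_NODE_SETUP)"
else
  echo "Running full node setup"
  # source .buildkite/scripts/common/setup_node.sh   # what the real hook does
fi
```

For example, `SKIP_NODE_SETUP=true ./demo.sh` prints the skip message, while `SKIP_NODE_SETUP=TRUE ./demo.sh` still runs the full setup because the regex only matches the lowercase spelling.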
@@ -1,7 +1,3 @@
 #!/usr/bin/env bash
 
-if [[ "$BUILDKITE_AGENT_NAME" =~ ^bk-agent ]]; then
-  echo "Pipeline file triggered from outside the kibana executors, skipping post_command"
-else
-  .buildkite/scripts/lifecycle/post_command.sh
-fi
+source .buildkite/scripts/lifecycle/post_command.sh
@@ -1,8 +1,3 @@
 #!/usr/bin/env bash
 
-if [[ "$BUILDKITE_AGENT_NAME" =~ ^bk-agent ]]; then
-  echo "Pipeline file triggered from outside the kibana executors, skipping .buildkite/scripts/lifecycle/pre_command.sh"
-  export SONAR_LOGIN=$(vault read -field=token secret/ci/elastic-kibana/sonarqube)
-else
-  source .buildkite/scripts/lifecycle/pre_command.sh
-fi
+source .buildkite/scripts/lifecycle/pre_command.sh
@@ -1,4 +1,5 @@
 env:
+  SKIP_NODE_SETUP: true
   VERSION: ${BUILDKITE_COMMIT:0:12}
   EMERGENCY_RELEASE_REMOTE_SERVICE_CONFIG: https://raw.githubusercontent.com/elastic/serverless-gitops/main/gen/gpctl/kibana/emergency.yaml
 
@@ -16,6 +16,7 @@ agents:
   memory: "8G"
 
 env:
+  SKIP_NODE_SETUP: true
   TEAM_CHANNEL: "#kibana-mission-control"
   ENVIRONMENT: ${ENVIRONMENT?}
 
@@ -1,3 +1,7 @@
+env:
+  SKIP_NODE_SETUP: true
+
 steps:
   - label: ":sonarqube: Continuous Code Inspection"
     agents:
       image: docker.elastic.co/cloud-ci/sonarqube/buildkite-scanner:latest
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
 
+echo '--- Setup environment vars'
+
 export CI=true
 
 KIBANA_DIR=$(pwd)
.buildkite/scripts/common/setup_buildkite_deps.sh (new file, 22 lines)
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+echo '--- Install/build buildkite dependencies'
+
+if [[ "$(type -t retry)" != "function" ]]; then
+  source "$(dirname "${BASH_SOURCE[0]}")/util.sh"
+fi
+
+# `rm -rf <ts-node node_modules dir>; npm install -g ts-node` will cause ts-node bin files to be messed up
+# but literally just calling `npm install -g ts-node` a second time fixes it
+# this is only on newer versions of npm
+npm_install_global ts-node
+if ! ts-node --version; then
+  npm_install_global ts-node
+  ts-node --version;
+fi
+
+cd '.buildkite'
+retry 5 15 npm ci
+cd -
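`retry` and `npm_install_global` come from `util.sh`, which the script sources when they are not already defined. Their implementations are not part of this diff; judging from the call `retry 5 15 npm ci`, a helper roughly along these lines is assumed (attempt count, sleep seconds, then the command) — a sketch, not the real util.sh code:

```bash
#!/usr/bin/env bash
# Hypothetical stand-in for the retry helper sourced from util.sh.
# Assumed signature (inferred from the call `retry 5 15 npm ci`):
#   retry <max_attempts> <sleep_seconds> <command...>
retry() {
  local attempts=$1 sleep_seconds=$2
  shift 2
  local attempt=1
  while true; do
    "$@" && return 0
    if (( attempt >= attempts )); then
      echo "Command failed after $attempts attempts: $*" >&2
      return 1
    fi
    echo "Attempt $attempt failed, retrying in ${sleep_seconds}s..." >&2
    sleep "$sleep_seconds"
    (( attempt++ ))
  done
}

# Example: try `npm ci` up to 5 times, waiting 15 seconds between attempts.
retry 5 15 npm ci
```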
.buildkite/scripts/common/setup_job_env.sh (new file, 181 lines)
@@ -0,0 +1,181 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+echo '--- Job Environment Setup'
+
+if [[ "$(type -t vault_get)" != "function" ]]; then
+  source .buildkite/scripts/common/vault_fns.sh
+fi
+
+# Set up general-purpose tokens and credentials
+{
+  BUILDKITE_TOKEN="$(vault_get buildkite-ci buildkite_token_all_jobs)"
+  export BUILDKITE_TOKEN
+
+  GITHUB_TOKEN=$(vault_get kibanamachine github_token)
+  export GITHUB_TOKEN
+
+  KIBANA_CI_GITHUB_TOKEN=$(vault_get kibana-ci-github github_token)
+  export KIBANA_CI_GITHUB_TOKEN
+
+  KIBANA_DOCKER_USERNAME="$(vault_get container-registry username)"
+  export KIBANA_DOCKER_USERNAME
+
+  KIBANA_DOCKER_PASSWORD="$(vault_get container-registry password)"
+  export KIBANA_DOCKER_PASSWORD
+}
+
+# Set up a custom ES Snapshot Manifest if one has been specified for this build
+{
+  ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST --default '')}
+  export ES_SNAPSHOT_MANIFEST
+
+  if [[ "${ES_SNAPSHOT_MANIFEST:-}" ]]; then
+    cat << EOF | buildkite-agent annotate --style "info" --context es-snapshot-manifest
+This build is running using a custom Elasticsearch snapshot.
+
+ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST
+
+To use this locally, simply prefix your commands with:
+
+\`\`\`
+ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST"
+\`\`\`
+
+e.g.
+
+\`\`\`
+ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" node scripts/functional_tests_server.js
+\`\`\`
+EOF
+  fi
+}
+
+# If a custom manifest isn't specified, then use the default one that we resolve earlier in the build
+{
+  if [[ ! "${ES_SNAPSHOT_MANIFEST:-}" ]]; then
+    ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST_DEFAULT --default '')}
+    export ES_SNAPSHOT_MANIFEST
+    echo "Using default ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST"
+  fi
+}
+
+# Setup CI Stats
+{
+  CI_STATS_BUILD_ID="$(buildkite-agent meta-data get ci_stats_build_id --default '')"
+  export CI_STATS_BUILD_ID
+
+  CI_STATS_TOKEN="$(vault_get kibana_ci_stats api_token)"
+  export CI_STATS_TOKEN
+
+  CI_STATS_HOST="$(vault_get kibana_ci_stats api_host)"
+  export CI_STATS_HOST
+
+  if [[ "$CI_STATS_BUILD_ID" ]]; then
+    echo "CI Stats Build ID: $CI_STATS_BUILD_ID"
+
+    KIBANA_CI_STATS_CONFIG=$(jq -n \
+      --arg buildId "$CI_STATS_BUILD_ID" \
+      --arg apiUrl "https://$CI_STATS_HOST" \
+      --arg apiToken "$CI_STATS_TOKEN" \
+      '{buildId: $buildId, apiUrl: $apiUrl, apiToken: $apiToken}' \
+    )
+    export KIBANA_CI_STATS_CONFIG
+  fi
+}
+
+# Set up misc keys
+{
+  KIBANA_CI_REPORTER_KEY=$(vault_get kibanamachine-reporter value)
+  export KIBANA_CI_REPORTER_KEY
+
+  EC_API_KEY="$(vault_get kibana-ci-cloud-deploy pr_deploy_api_key)"
+  export EC_API_KEY
+
+  PROJECT_API_KEY="$(vault_get kibana-ci-project-deploy pr_deploy_api_key)"
+  export PROJECT_API_KEY
+
+  PROJECT_API_DOMAIN="$(vault_get kibana-ci-project-deploy pr_deploy_domain)"
+  export PROJECT_API_DOMAIN
+
+  SYNTHETICS_SERVICE_USERNAME="$(vault_get kibana-ci-synthetics-credentials username)"
+  export SYNTHETICS_SERVICE_USERNAME
+
+  SYNTHETICS_SERVICE_PASSWORD="$(vault_get kibana-ci-synthetics-credentials password)"
+  export SYNTHETICS_SERVICE_PASSWORD
+
+  SYNTHETICS_SERVICE_MANIFEST="$(vault_get kibana-ci-synthetics-credentials manifest)"
+  export SYNTHETICS_SERVICE_MANIFEST
+
+  SYNTHETICS_REMOTE_KIBANA_USERNAME="$(vault_get kibana-ci-synthetics-remote-credentials username)"
+  export SYNTHETICS_REMOTE_KIBANA_USERNAME
+
+  SYNTHETICS_REMOTE_KIBANA_PASSWORD="$(vault_get kibana-ci-synthetics-remote-credentials password)"
+  export SYNTHETICS_REMOTE_KIBANA_PASSWORD
+
+  SYNTHETICS_REMOTE_KIBANA_URL=${SYNTHETICS_REMOTE_KIBANA_URL-"$(vault_get kibana-ci-synthetics-remote-credentials url)"}
+  export SYNTHETICS_REMOTE_KIBANA_URL
+
+  DEPLOY_TAGGER_SLACK_WEBHOOK_URL=${DEPLOY_TAGGER_SLACK_WEBHOOK_URL:-"$(vault_get kibana-serverless-release-tools DEPLOY_TAGGER_SLACK_WEBHOOK_URL)"}
+  export DEPLOY_TAGGER_SLACK_WEBHOOK_URL
+
+  SONAR_LOGIN=$(vault_get sonarqube token)
+  export SONAR_LOGIN
+
+  ELASTIC_APM_SERVER_URL=$(vault_get project-kibana-ci-apm apm_server_url)
+  export ELASTIC_APM_SERVER_URL
+
+  ELASTIC_APM_API_KEY=$(vault_get project-kibana-ci-apm apm_server_api_key)
+  export ELASTIC_APM_API_KEY
+}
+
+# Set up GCS Service Account for CDN
+{
+  GCS_SA_CDN_KEY="$(vault_get gcs-sa-cdn-prod key)"
+  export GCS_SA_CDN_KEY
+
+  GCS_SA_CDN_EMAIL="$(vault_get gcs-sa-cdn-prod email)"
+  export GCS_SA_CDN_EMAIL
+
+  GCS_SA_CDN_BUCKET="$(vault_get gcs-sa-cdn-prod bucket)"
+  export GCS_SA_CDN_BUCKET
+
+  GCS_SA_CDN_URL="$(vault_get gcs-sa-cdn-prod cdn)"
+  export GCS_SA_CDN_URL
+}
+
+# Setup Failed Test Reporter Elasticsearch credentials
+{
+  TEST_FAILURES_ES_CLOUD_ID=$(vault_get failed_tests_reporter_es cloud_id)
+  export TEST_FAILURES_ES_CLOUD_ID
+
+  TEST_FAILURES_ES_USERNAME=$(vault_get failed_tests_reporter_es username)
+  export TEST_FAILURES_ES_USERNAME
+
+  TEST_FAILURES_ES_PASSWORD=$(vault_get failed_tests_reporter_es password)
+  export TEST_FAILURES_ES_PASSWORD
+}
+
+# Setup Bazel Remote/Local Cache Credentials
+{
+  BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-local-dev.json"
+  export BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE
+  vault_get kibana-ci-bazel-remote-cache-local-dev service_account_json > "$BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE"
+
+  BAZEL_REMOTE_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-gcs.json"
+  export BAZEL_REMOTE_CACHE_CREDENTIALS_FILE
+  vault_get kibana-ci-bazel-remote-cache-sa-key key | base64 -d > "$BAZEL_REMOTE_CACHE_CREDENTIALS_FILE"
+}
+
+# Setup GCS Service Account Proxy for CI
+{
+  KIBANA_SERVICE_ACCOUNT_PROXY_KEY="$(mktemp -d)/kibana-gcloud-service-account.json"
+  export KIBANA_SERVICE_ACCOUNT_PROXY_KEY
+  vault_get kibana-ci-sa-proxy-key key | base64 -d > "$KIBANA_SERVICE_ACCOUNT_PROXY_KEY"
+}
+
+PIPELINE_PRE_COMMAND=${PIPELINE_PRE_COMMAND:-".buildkite/scripts/lifecycle/pipelines/$BUILDKITE_PIPELINE_SLUG/pre_command.sh"}
+if [[ -f "$PIPELINE_PRE_COMMAND" ]]; then
+  source "$PIPELINE_PRE_COMMAND"
+fi
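The CI Stats block above assembles `KIBANA_CI_STATS_CONFIG` as a single JSON object via `jq -n`. For reference, this is the shape it produces, shown with placeholder values instead of the real vault secrets and meta-data:

```bash
#!/usr/bin/env bash
# Demonstrates the output shape of the jq call in setup_job_env.sh,
# using placeholder values (the real values come from buildkite-agent
# meta-data and vault_get).
KIBANA_CI_STATS_CONFIG=$(jq -n \
  --arg buildId "example-build-id" \
  --arg apiUrl "https://ci-stats.example.invalid" \
  --arg apiToken "example-token" \
  '{buildId: $buildId, apiUrl: $apiUrl, apiToken: $apiToken}')
echo "$KIBANA_CI_STATS_CONFIG"
# {
#   "buildId": "example-build-id",
#   "apiUrl": "https://ci-stats.example.invalid",
#   "apiToken": "example-token"
# }
```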
@@ -6,8 +6,12 @@ echo '--- Log out of gcloud'
 ./.buildkite/scripts/common/activate_service_account.sh --unset-impersonation || echo "Failed to unset impersonation"
 ./.buildkite/scripts/common/activate_service_account.sh --logout-gcloud || echo "Failed to log out of gcloud"
 
-echo '--- Agent Debug Info'
-ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true
+if [[ "${SKIP_NODE_SETUP:-}" =~ ^(1|true)$ ]]; then
+  echo '--- Skipping Agent Debug Info'
+else
+  echo '--- Agent Debug Info'
+  ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true
+fi
 
 IS_TEST_EXECUTION_STEP="$(buildkite-agent meta-data get "${BUILDKITE_JOB_ID}_is_test_execution_step" --default '')"
 
@@ -3,191 +3,22 @@
 set -euo pipefail
 
 source .buildkite/scripts/common/util.sh
 
-echo '--- Setup environment vars'
 source .buildkite/scripts/common/env.sh
-source .buildkite/scripts/common/setup_node.sh
+source .buildkite/scripts/common/setup_job_env.sh
 
-BUILDKITE_TOKEN="$(vault_get buildkite-ci buildkite_token_all_jobs)"
-export BUILDKITE_TOKEN
-
-echo '--- Install/build buildkite dependencies'
-
-# `rm -rf <ts-node node_modules dir>; npm install -g ts-node` will cause ts-node bin files to be messed up
-# but literally just calling `npm install -g ts-node` a second time fixes it
-# this is only on newer versions of npm
-npm_install_global ts-node
-if ! ts-node --version; then
-  npm_install_global ts-node
-  ts-node --version;
+if [[ "${SKIP_NODE_SETUP:-}" =~ ^(1|true)$ ]]; then
+  echo "Skipping node setup (SKIP_NODE_SETUP=$SKIP_NODE_SETUP)"
+else
+  source .buildkite/scripts/common/setup_node.sh
+  source .buildkite/scripts/common/setup_buildkite_deps.sh
+
+  echo '--- Agent Debug/SSH Info'
+  ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true
 fi
 
-cd '.buildkite'
-retry 5 15 npm ci
-cd ..
-
-echo '--- Agent Debug/SSH Info'
-ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true
-
 if [[ "$(curl -is metadata.google.internal || true)" ]]; then
   echo ""
   echo "To SSH into this agent, run:"
   echo "gcloud compute ssh --tunnel-through-iap --project elastic-kibana-ci --zone \"$(curl -sH Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/zone)\" \"$(curl -sH Metadata-Flavor:Google http://metadata.google.internal/computeMetadata/v1/instance/name)\""
   echo ""
 fi
-
-echo '--- Job Environment Setup'
-
-# Set up a custom ES Snapshot Manifest if one has been specified for this build
-{
-  ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST --default '')}
-  export ES_SNAPSHOT_MANIFEST
-
-  if [[ "${ES_SNAPSHOT_MANIFEST:-}" ]]; then
-    cat << EOF | buildkite-agent annotate --style "info" --context es-snapshot-manifest
-This build is running using a custom Elasticsearch snapshot.
-
-ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST
-
-To use this locally, simply prefix your commands with:
-
-\`\`\`
-ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST"
-\`\`\`
-
-e.g.
-
-\`\`\`
-ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" node scripts/functional_tests_server.js
-\`\`\`
-EOF
-  fi
-}
-
-# If a custom manifest isn't specified, then use the default one that we resolve earlier in the build
-{
-  if [[ ! "${ES_SNAPSHOT_MANIFEST:-}" ]]; then
-    ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST_DEFAULT --default '')}
-    export ES_SNAPSHOT_MANIFEST
-    echo "Using default ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST"
-  fi
-}
-
-# Setup CI Stats
-{
-  CI_STATS_BUILD_ID="$(buildkite-agent meta-data get ci_stats_build_id --default '')"
-  export CI_STATS_BUILD_ID
-
-  CI_STATS_TOKEN="$(vault_get kibana_ci_stats api_token)"
-  export CI_STATS_TOKEN
-
-  CI_STATS_HOST="$(vault_get kibana_ci_stats api_host)"
-  export CI_STATS_HOST
-
-  if [[ "$CI_STATS_BUILD_ID" ]]; then
-    echo "CI Stats Build ID: $CI_STATS_BUILD_ID"
-
-    KIBANA_CI_STATS_CONFIG=$(jq -n \
-      --arg buildId "$CI_STATS_BUILD_ID" \
-      --arg apiUrl "https://$CI_STATS_HOST" \
-      --arg apiToken "$CI_STATS_TOKEN" \
-      '{buildId: $buildId, apiUrl: $apiUrl, apiToken: $apiToken}' \
-    )
-    export KIBANA_CI_STATS_CONFIG
-  fi
-}
-
-GITHUB_TOKEN=$(vault_get kibanamachine github_token)
-export GITHUB_TOKEN
-
-KIBANA_CI_GITHUB_TOKEN=$(vault_get kibana-ci-github github_token)
-export KIBANA_CI_GITHUB_TOKEN
-
-KIBANA_CI_REPORTER_KEY=$(vault_get kibanamachine-reporter value)
-export KIBANA_CI_REPORTER_KEY
-
-KIBANA_DOCKER_USERNAME="$(vault_get container-registry username)"
-export KIBANA_DOCKER_USERNAME
-
-KIBANA_DOCKER_PASSWORD="$(vault_get container-registry password)"
-export KIBANA_DOCKER_PASSWORD
-
-EC_API_KEY="$(vault_get kibana-ci-cloud-deploy pr_deploy_api_key)"
-export EC_API_KEY
-
-PROJECT_API_KEY="$(vault_get kibana-ci-project-deploy pr_deploy_api_key)"
-export PROJECT_API_KEY
-
-PROJECT_API_DOMAIN="$(vault_get kibana-ci-project-deploy pr_deploy_domain)"
-export PROJECT_API_DOMAIN
-
-SYNTHETICS_SERVICE_USERNAME="$(vault_get kibana-ci-synthetics-credentials username)"
-export SYNTHETICS_SERVICE_USERNAME
-
-SYNTHETICS_SERVICE_PASSWORD="$(vault_get kibana-ci-synthetics-credentials password)"
-export SYNTHETICS_SERVICE_PASSWORD
-
-SYNTHETICS_SERVICE_MANIFEST="$(vault_get kibana-ci-synthetics-credentials manifest)"
-export SYNTHETICS_SERVICE_MANIFEST
-
-SYNTHETICS_REMOTE_KIBANA_USERNAME="$(vault_get kibana-ci-synthetics-remote-credentials username)"
-export SYNTHETICS_REMOTE_KIBANA_USERNAME
-
-SYNTHETICS_REMOTE_KIBANA_PASSWORD="$(vault_get kibana-ci-synthetics-remote-credentials password)"
-export SYNTHETICS_REMOTE_KIBANA_PASSWORD
-
-SYNTHETICS_REMOTE_KIBANA_URL=${SYNTHETICS_REMOTE_KIBANA_URL-"$(vault_get kibana-ci-synthetics-remote-credentials url)"}
-export SYNTHETICS_REMOTE_KIBANA_URL
-
-DEPLOY_TAGGER_SLACK_WEBHOOK_URL=${DEPLOY_TAGGER_SLACK_WEBHOOK_URL:-"$(vault_get kibana-serverless-release-tools DEPLOY_TAGGER_SLACK_WEBHOOK_URL)"}
-export DEPLOY_TAGGER_SLACK_WEBHOOK_URL
-
-GCS_SA_CDN_KEY="$(vault_get gcs-sa-cdn-prod key)"
-export GCS_SA_CDN_KEY
-
-GCS_SA_CDN_EMAIL="$(vault_get gcs-sa-cdn-prod email)"
-export GCS_SA_CDN_EMAIL
-
-GCS_SA_CDN_BUCKET="$(vault_get gcs-sa-cdn-prod bucket)"
-export GCS_SA_CDN_BUCKET
-
-GCS_SA_CDN_URL="$(vault_get gcs-sa-cdn-prod cdn)"
-export GCS_SA_CDN_URL
-
-ELASTIC_APM_SERVER_URL=$(vault_get project-kibana-ci-apm apm_server_url)
-export ELASTIC_APM_SERVER_URL
-
-ELASTIC_APM_API_KEY=$(vault_get project-kibana-ci-apm apm_server_api_key)
-export ELASTIC_APM_API_KEY
-
-# Setup Failed Test Reporter Elasticsearch credentials
-{
-  TEST_FAILURES_ES_CLOUD_ID=$(vault_get failed_tests_reporter_es cloud_id)
-  export TEST_FAILURES_ES_CLOUD_ID
-
-  TEST_FAILURES_ES_USERNAME=$(vault_get failed_tests_reporter_es username)
-  export TEST_FAILURES_ES_USERNAME
-
-  TEST_FAILURES_ES_PASSWORD=$(vault_get failed_tests_reporter_es password)
-  export TEST_FAILURES_ES_PASSWORD
-}
-
-BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-local-dev.json"
-export BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE
-vault_get kibana-ci-bazel-remote-cache-local-dev service_account_json > "$BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE"
-
-# Export key for accessing bazel remote cache's GCS bucket
-BAZEL_REMOTE_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-gcs.json"
-export BAZEL_REMOTE_CACHE_CREDENTIALS_FILE
-vault_get kibana-ci-bazel-remote-cache-sa-key key | base64 -d > "$BAZEL_REMOTE_CACHE_CREDENTIALS_FILE"
-
-# Setup GCS Service Account Proxy for CI
-KIBANA_SERVICE_ACCOUNT_PROXY_KEY="$(mktemp -d)/kibana-gcloud-service-account.json"
-export KIBANA_SERVICE_ACCOUNT_PROXY_KEY
-vault_get kibana-ci-sa-proxy-key key | base64 -d > "$KIBANA_SERVICE_ACCOUNT_PROXY_KEY"
-
-PIPELINE_PRE_COMMAND=${PIPELINE_PRE_COMMAND:-".buildkite/scripts/lifecycle/pipelines/$BUILDKITE_PIPELINE_SLUG/pre_command.sh"}
-if [[ -f "$PIPELINE_PRE_COMMAND" ]]; then
-  source "$PIPELINE_PRE_COMMAND"
-fi
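The trimmed-down `pre_command.sh` now delegates everything job-specific to `setup_job_env.sh`, which finishes by sourcing a per-pipeline hook at `.buildkite/scripts/lifecycle/pipelines/$BUILDKITE_PIPELINE_SLUG/pre_command.sh` when it exists. A sketch of what such a hook could look like — the slug and variable below are examples, not part of this PR:

```bash
#!/usr/bin/env bash
# Hypothetical .buildkite/scripts/lifecycle/pipelines/<pipeline-slug>/pre_command.sh
# Sourced automatically by setup_job_env.sh via:
#   PIPELINE_PRE_COMMAND=".buildkite/scripts/lifecycle/pipelines/$BUILDKITE_PIPELINE_SLUG/pre_command.sh"
echo '--- Pipeline-specific pre-command'

# Example of pipeline-only environment that no longer needs to live
# behind an agent-name check in the pipeline's own hook.
EXAMPLE_PIPELINE_SETTING="some-value"
export EXAMPLE_PIPELINE_SETTING
```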
@@ -50,6 +50,7 @@ interface GobldGCPConfig {
   enableSecureBoot?: boolean;
   enableNestedVirtualization?: boolean;
   image: string;
+  provider: 'gcp';
   localSsds?: number;
   localSsdInterface?: string;
   machineType: string;
@@ -75,16 +76,32 @@ if (!fs.existsSync('data/agents.json')) {
  * rewrites all agent targeting rules from the shorthands to the full targeting syntax
  */
 run(
-  async ({ log, flags }) => {
+  async ({ log, flags, flagsReader }) => {
+    const filterExpressions = flagsReader.getPositionals();
+
     const paths = await globby('.buildkite/**/*.yml', {
       cwd: REPO_ROOT,
       onlyFiles: true,
       gitignore: true,
     });
 
+    const pathsFiltered =
+      filterExpressions.length === 0
+        ? paths
+        : paths.filter((path) => {
+            return filterExpressions.some((expression) => path.includes(expression));
+          });
+
+    if (pathsFiltered.length === 0) {
+      log.warning('No .yml files found to rewrite after filtering.');
+      return;
+    }
+
+    log.info('Applying rewrite to the following paths: \n', pathsFiltered.join('\n'));
+
     const failedRewrites: Array<{ path: string; error: Error }> = [];
 
-    const rewritePromises: Array<Promise<void>> = paths.map((ymlPath) => {
+    const rewritePromises: Array<Promise<void>> = pathsFiltered.map((ymlPath) => {
       return rewriteFile(ymlPath, log).catch((e) => {
         // eslint-disable-next-line no-console
         console.error('Failed to rewrite: ' + ymlPath, e);
@@ -193,6 +210,7 @@ function getFullAgentTargetingRule(queue: string): GobldGCPConfig {
   return removeNullish({
     image: 'family/kibana-ubuntu-2004',
     imageProject: 'elastic-images-qa',
+    provider: 'gcp',
     assignExternalIP: agent.disableExternalIp === true ? false : undefined,
     diskSizeGb: agent.diskSizeGb,
     diskType: agent.diskType,