[Ops] Fix GCS bucket access for future buildkite agents (#174756)

## Summary
Once we move to the elastic-wide buildkite agents, away from the
kibana-buildkite-managed ones, we won't have default access to the GCS
buckets we've been using, as the assumed service account will differ.

**Note:** Although this will only be required in the new infra, this
change can be merged and is expected to work properly in the current
infra as well.

### Solution
We've set up a central service account with rights to impersonate other
service accounts, each of which has controlled access to individual
buckets; this minimizes the reach and influence of any single account. See:
https://github.com/elastic/kibana-operations/pull/51
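
In CI this boils down to two `gcloud` calls (a minimal sketch of the flow; the `activate_service_account.sh` helper added below wraps them, and the proxy key comes from vault):

```bash
# Authenticate once as the central proxy service account
gcloud auth activate-service-account --key-file="$KIBANA_SERVICE_ACCOUNT_PROXY_KEY"

# Impersonate a bucket-scoped account; subsequent gcloud/gsutil calls act as it
gcloud config set auth/impersonate_service_account "kibana-ci-access-so-snapshots@elastic-kibana-ci.iam.gserviceaccount.com"
```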

**Several of the changes weren't tested, as they're part of CI tasks
that run outside the PR build** - we'll merge with caution and monitor
stability afterwards.

TODO: _add access and assume the service account before other GCS bucket usages_ (each step follows the pattern sketched after this list)
- [x] storybook
- [x] coverage
(.buildkite/scripts/steps/code_coverage/reporting/uploadPrevSha.sh)
- [x] upload static site
(.buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh)
- [x] SO object migration
(.buildkite/scripts/steps/archive_so_migration_snapshot.sh)
- [x] ES Snapshot manifest upload
(.buildkite/scripts/steps/es_snapshots/create_manifest.ts)
- [x] Scalability?
(.buildkite/scripts/steps/functional/scalability_dataset_extraction.sh)
- [x] Benchmarking
(.buildkite/scripts/steps/scalability/benchmarking.sh)
- [x] Webpack bundle analyzer
(.buildkite/scripts/steps/webpack_bundle_analyzer/upload.ts)
- [x] ~Build chromium (x-pack/build_chromium/build.py)~ Not needed, as
it's manual, and not a CI task
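
All the checked steps share one pattern: call the helper with the bucket (or a service-account e-mail), then run `gsutil` unchanged, e.g. (from the scalability dataset extraction diff below):

```bash
.buildkite/scripts/common/activate_service_account.sh gs://kibana-performance
gsutil -m cp -r "${BUILD_ID}" "${GCS_BUCKET}"
```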

TODO: _others_
- [x] Remove manifest upload
(.buildkite/scripts/steps/es_serverless/promote_es_serverless_image.sh)
- [x] Decide if we should merge with the CDN access: no, SRE is managing
that account
- [x] Bazel remote cache also relies on GCS - roles PR:
https://github.com/elastic/kibana-operations/pull/56 (credential wiring sketched after this list)
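
The Bazel cache is the exception: rather than impersonation, the remote cache gets an explicit credentials file that pre_command.sh pulls from vault and setup_bazel.sh wires into `.bazelrc` (see those hunks below):

```
build --remote_cache=https://storage.googleapis.com/$BAZEL_BUCKET
build --google_credentials=$BAZEL_REMOTE_CACHE_CREDENTIALS_FILE
```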


Closes: https://github.com/elastic/kibana-operations/issues/29
Part of: https://github.com/elastic/kibana-operations/issues/15
Alex Szabo, 2024-02-07 09:34:55 +01:00 (commit 455ee0a450, parent 936e4e9a25), committed by GitHub
17 changed files with 200 additions and 85 deletions


@@ -0,0 +1,85 @@
#!/usr/bin/env bash
set -euo pipefail
source "$(dirname "${BASH_SOURCE[0]}")/vault_fns.sh"
BUCKET_OR_EMAIL="${1:-}"
GCLOUD_EMAIL_POSTFIX="elastic-kibana-ci.iam.gserviceaccount.com"
GCLOUD_SA_PROXY_EMAIL="kibana-ci-sa-proxy@$GCLOUD_EMAIL_POSTFIX"
if [[ -z "$BUCKET_OR_EMAIL" ]]; then
echo "Usage: $0 <bucket_name|email>"
exit 1
elif [[ "$BUCKET_OR_EMAIL" == "--unset-impersonation" ]]; then
echo "Unsetting impersonation"
gcloud config unset auth/impersonate_service_account
exit 0
elif [[ "$BUCKET_OR_EMAIL" == "--logout-gcloud" ]]; then
echo "Logging out of gcloud"
if [[ -x "$(command -v gcloud)" ]] && [[ "$(gcloud auth list 2>/dev/null | grep "$GCLOUD_SA_PROXY_EMAIL")" != "" ]]; then
gcloud auth revoke "$GCLOUD_SA_PROXY_EMAIL" --no-user-output-enabled
fi
exit 0
fi
CURRENT_GCLOUD_USER=$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")
# Verify that the service account proxy is activated
if [[ "$CURRENT_GCLOUD_USER" != "$GCLOUD_SA_PROXY_EMAIL" ]]; then
if [[ -x "$(command -v gcloud)" ]]; then
if [[ -z "${KIBANA_SERVICE_ACCOUNT_PROXY_KEY:-}" ]]; then
echo "KIBANA_SERVICE_ACCOUNT_PROXY_KEY is not set, cannot activate service account $GCLOUD_SA_PROXY_EMAIL."
exit 1
fi
AUTH_RESULT=$(gcloud auth activate-service-account --key-file="$KIBANA_SERVICE_ACCOUNT_PROXY_KEY" || echo "FAILURE")
if [[ "$AUTH_RESULT" == "FAILURE" ]]; then
echo "Failed to activate service account $GCLOUD_SA_PROXY_EMAIL."
exit 1
else
echo "Activated service account $GCLOUD_SA_PROXY_EMAIL"
fi
else
echo "gcloud is not installed, cannot activate service account $GCLOUD_SA_PROXY_EMAIL."
exit 1
fi
fi
# Check if the arg is a service account e-mail or a bucket name
EMAIL=""
if [[ "$BUCKET_OR_EMAIL" =~ ^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$ ]]; then
EMAIL="$BUCKET_OR_EMAIL"
elif [[ "$BUCKET_OR_EMAIL" =~ ^gs://* ]]; then
BUCKET_NAME="${BUCKET_OR_EMAIL:5}"
else
BUCKET_NAME="$BUCKET_OR_EMAIL"
fi
if [[ -z "$EMAIL" ]]; then
case "$BUCKET_NAME" in
"elastic-kibana-coverage-live")
EMAIL="kibana-ci-access-coverage@$GCLOUD_EMAIL_POSTFIX"
;;
"kibana-ci-es-snapshots-daily")
EMAIL="kibana-ci-access-es-snapshots@$GCLOUD_EMAIL_POSTFIX"
;;
"kibana-so-types-snapshots")
EMAIL="kibana-ci-access-so-snapshots@$GCLOUD_EMAIL_POSTFIX"
;;
"kibana-performance")
EMAIL="kibana-ci-access-perf-stats@$GCLOUD_EMAIL_POSTFIX"
;;
"ci-artifacts.kibana.dev")
EMAIL="kibana-ci-access-artifacts@$GCLOUD_EMAIL_POSTFIX"
;;
*)
EMAIL="$BUCKET_NAME@$GCLOUD_EMAIL_POSTFIX"
;;
esac
fi
# Activate the service account
echo "Impersonating $EMAIL"
gcloud config set auth/impersonate_service_account "$EMAIL"
echo "Activated service account $EMAIL"


@@ -2,6 +2,8 @@
source .buildkite/scripts/common/util.sh
echo '--- Setting up bazel'
echo "[bazel] writing .bazelrc"
cat <<EOF > $KIBANA_DIR/.bazelrc
# Generated by .buildkite/scripts/common/setup_bazel.sh
@@ -27,16 +29,16 @@ if [[ "$BAZEL_CACHE_MODE" == "gcs" ]]; then
echo "[bazel] using GCS bucket: $BAZEL_BUCKET"
cat <<EOF >> $KIBANA_DIR/.bazelrc
build --remote_cache=https://storage.googleapis.com/$BAZEL_BUCKET
build --google_default_credentials
build --google_credentials=$BAZEL_REMOTE_CACHE_CREDENTIALS_FILE
EOF
fi
if [[ "$BAZEL_CACHE_MODE" == "populate-local-gcs" ]]; then
echo "[bazel] enabling caching with GCS buckets for local dev"
cat <<EOF >> $KIBANA_DIR/.bazelrc
build --remote_cache=https://storage.googleapis.com/kibana-local-bazel-remote-cache
build --google_credentials=$BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE
EOF


@@ -1,5 +1,7 @@
#!/usr/bin/env bash
source "$(dirname "${BASH_SOURCE[0]}")/vault_fns.sh"
is_pr() {
[[ "${GITHUB_PR_NUMBER-}" ]] && return
false
@@ -170,48 +172,3 @@ npm_install_global() {
download_artifact() {
retry 3 1 timeout 3m buildkite-agent artifact download "$@"
}
# TODO: remove after https://github.com/elastic/kibana-operations/issues/15 is done
if [[ "${VAULT_ADDR:-}" == *"secrets.elastic.co"* ]]; then
VAULT_PATH_PREFIX="secret/kibana-issues/dev"
VAULT_KV_PREFIX="secret/kibana-issues/dev"
IS_LEGACY_VAULT_ADDR=true
else
VAULT_PATH_PREFIX="secret/ci/elastic-kibana"
VAULT_KV_PREFIX="kv/ci-shared/kibana-deployments"
IS_LEGACY_VAULT_ADDR=false
fi
export IS_LEGACY_VAULT_ADDR
vault_get() {
key_path=$1
field=$2
fullPath="$VAULT_PATH_PREFIX/$key_path"
if [[ -z "${2:-}" || "${2:-}" =~ ^-.* ]]; then
retry 5 5 vault read "$fullPath" "${@:2}"
else
retry 5 5 vault read -field="$field" "$fullPath" "${@:3}"
fi
}
vault_set() {
key_path=$1
shift
fields=("$@")
fullPath="$VAULT_PATH_PREFIX/$key_path"
# shellcheck disable=SC2068
retry 5 5 vault write "$fullPath" ${fields[@]}
}
vault_kv_set() {
kv_path=$1
shift
fields=("$@")
vault kv put "$VAULT_KV_PREFIX/$kv_path" "${fields[@]}"
}


@@ -0,0 +1,67 @@
#!/bin/bash
# TODO: remove after https://github.com/elastic/kibana-operations/issues/15 is done
if [[ "${VAULT_ADDR:-}" == *"secrets.elastic.co"* ]]; then
VAULT_PATH_PREFIX="secret/kibana-issues/dev"
VAULT_KV_PREFIX="secret/kibana-issues/dev"
IS_LEGACY_VAULT_ADDR=true
else
VAULT_PATH_PREFIX="secret/ci/elastic-kibana"
VAULT_KV_PREFIX="kv/ci-shared/kibana-deployments"
IS_LEGACY_VAULT_ADDR=false
fi
export IS_LEGACY_VAULT_ADDR
retry() {
local retries=$1; shift
local delay=$1; shift
local attempts=1
until "$@"; do
retry_exit_status=$?
echo "Exited with $retry_exit_status" >&2
if (( retries == 0 )); then
return $retry_exit_status
elif (( attempts == retries )); then
echo "Failed $attempts retries" >&2
return $retry_exit_status
else
echo "Retrying $((retries - attempts)) more times..." >&2
attempts=$((attempts + 1))
sleep "$delay"
fi
done
}
vault_get() {
key_path=${1:-}
field=${2:-}
fullPath="$VAULT_PATH_PREFIX/$key_path"
if [[ -z "$field" || "$field" =~ ^-.* ]]; then
retry 5 5 vault read "$fullPath" "${@:2}"
else
retry 5 5 vault read -field="$field" "$fullPath" "${@:3}"
fi
}
vault_set() {
key_path=$1
shift
fields=("$@")
fullPath="$VAULT_PATH_PREFIX/$key_path"
# shellcheck disable=SC2068
retry 5 5 vault write "$fullPath" ${fields[@]}
}
vault_kv_set() {
kv_path=$1
shift
fields=("$@")
vault kv put "$VAULT_KV_PREFIX/$kv_path" "${fields[@]}"
}


@@ -2,6 +2,10 @@
set -euo pipefail
echo '--- Log out of gcloud'
./.buildkite/scripts/common/activate_service_account.sh --unset-impersonation || echo "Failed to unset impersonation"
./.buildkite/scripts/common/activate_service_account.sh --logout-gcloud || echo "Failed to log out of gcloud"
echo '--- Agent Debug Info'
ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true


@@ -167,6 +167,16 @@ BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-loca
export BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE
vault_get kibana-ci-bazel-remote-cache-local-dev service_account_json > "$BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE"
# Export key for accessing bazel remote cache's GCS bucket
BAZEL_REMOTE_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-gcs.json"
export BAZEL_REMOTE_CACHE_CREDENTIALS_FILE
vault_get kibana-ci-bazel-remote-cache-sa-key key | base64 -d > "$BAZEL_REMOTE_CACHE_CREDENTIALS_FILE"
# Setup GCS Service Account Proxy for CI
KIBANA_SERVICE_ACCOUNT_PROXY_KEY="$(mktemp -d)/kibana-gcloud-service-account.json"
export KIBANA_SERVICE_ACCOUNT_PROXY_KEY
vault_get kibana-ci-sa-proxy-key key | base64 -d > "$KIBANA_SERVICE_ACCOUNT_PROXY_KEY"
PIPELINE_PRE_COMMAND=${PIPELINE_PRE_COMMAND:-".buildkite/scripts/lifecycle/pipelines/$BUILDKITE_PIPELINE_SLUG/pre_command.sh"}
if [[ -f "$PIPELINE_PRE_COMMAND" ]]; then
source "$PIPELINE_PRE_COMMAND"


@@ -3,15 +3,16 @@ set -euo pipefail
.buildkite/scripts/bootstrap.sh
SO_MIGRATIONS_SNAPSHOT_FOLDER=kibana-so-types-snapshots
SO_MIGRATIONS_SNAPSHOT_BUCKET="gs://kibana-so-types-snapshots"
SNAPSHOT_FILE_PATH="${1:-target/plugin_so_types_snapshot.json}"
echo "--- Creating snapshot of Saved Object migration info"
node scripts/snapshot_plugin_types snapshot --outputPath "$SNAPSHOT_FILE_PATH"
echo "--- Uploading as ${BUILDKITE_COMMIT}.json"
SNAPSHOT_PATH="${SO_MIGRATIONS_SNAPSHOT_FOLDER}/${BUILDKITE_COMMIT}.json"
gsutil cp "$SNAPSHOT_FILE_PATH" "gs://$SNAPSHOT_PATH"
SNAPSHOT_PATH="${SO_MIGRATIONS_SNAPSHOT_BUCKET}/${BUILDKITE_COMMIT}.json"
.buildkite/scripts/common/activate_service_account.sh "$SO_MIGRATIONS_SNAPSHOT_BUCKET"
gsutil cp "$SNAPSHOT_FILE_PATH" "$SNAPSHOT_PATH"
buildkite-agent annotate --context so_migration_snapshot --style success \
'Saved Object type snapshot is available at <a href="https://storage.cloud.google.com/'"$SNAPSHOT_PATH"'">'"$SNAPSHOT_PATH"'</a>'


@@ -6,6 +6,7 @@ set -euo pipefail
gsutil -m cp -r gs://elastic-bekitzur-kibana-coverage-live/previous_pointer/previous.txt . || echo "### Previous Pointer NOT FOUND?"
# TODO: Activate after the above is removed
#.buildkite/scripts/common/activate_service_account.sh gs://elastic-kibana-coverage-live
#gsutil -m cp -r gs://elastic-kibana-coverage-live/previous_pointer/previous.txt . || echo "### Previous Pointer NOT FOUND?"
if [ -e ./previous.txt ]; then


@@ -12,4 +12,5 @@ collectPrevious
# TODO: Safe to remove this after 2024-03-01 (https://github.com/elastic/kibana/issues/175904)
gsutil cp previous.txt gs://elastic-bekitzur-kibana-coverage-live/previous_pointer/
.buildkite/scripts/common/activate_service_account.sh gs://elastic-kibana-coverage-live
gsutil cp previous.txt gs://elastic-kibana-coverage-live/previous_pointer/


@@ -27,5 +27,6 @@ uploadRest() {
echo "--- Uploading static site"
.buildkite/scripts/common/activate_service_account.sh gs://elastic-kibana-coverage-live
uploadBase
uploadRest


@@ -7,9 +7,6 @@ source .buildkite/scripts/common/util.sh
BASE_ES_SERVERLESS_REPO=docker.elastic.co/elasticsearch-ci/elasticsearch-serverless
TARGET_IMAGE=docker.elastic.co/kibana-ci/elasticsearch-serverless:latest-verified
ES_SERVERLESS_BUCKET=kibana-ci-es-serverless-images
MANIFEST_FILE_NAME=latest-verified.json
SOURCE_IMAGE_OR_TAG=$1
if [[ $SOURCE_IMAGE_OR_TAG =~ :[a-zA-Z_-]+$ ]]; then
# $SOURCE_IMAGE_OR_TAG was a full image
@@ -67,36 +64,6 @@ docker logout docker.elastic.co
echo "Image push to $TARGET_IMAGE successful."
echo "Promotion successful! Henceforth, thou shall be named Sir $TARGET_IMAGE"
MANIFEST_UPLOAD_PATH="Skipped"
if [[ "${PUBLISH_MANIFEST:-}" =~ ^(1|true)$ && "$SOURCE_IMAGE_OR_TAG" =~ ^git-[0-9a-fA-F]{12}$ ]]; then
echo "--- Uploading latest-verified manifest to GCS"
cat << EOT >> $MANIFEST_FILE_NAME
{
"build_url": "$BUILDKITE_BUILD_URL",
"kibana_commit": "$BUILDKITE_COMMIT",
"kibana_branch": "$BUILDKITE_BRANCH",
"elasticsearch_serverless_tag": "$SOURCE_IMAGE_OR_TAG",
"elasticsearch_serverless_image_url": "$SOURCE_IMAGE",
"elasticsearch_serverless_commit": "TODO: this currently can't be decided",
"elasticsearch_commit": "$ELASTIC_COMMIT_HASH",
"created_at": "`date`",
"timestamp": "`FORCE_COLOR=0 node -p 'Date.now()'`"
}
EOT
gsutil -h "Cache-Control:no-cache, max-age=0, no-transform" \
cp $MANIFEST_FILE_NAME "gs://$ES_SERVERLESS_BUCKET/$MANIFEST_FILE_NAME"
gsutil acl ch -u AllUsers:R "gs://$ES_SERVERLESS_BUCKET/$MANIFEST_FILE_NAME"
MANIFEST_UPLOAD_PATH="<a href=\"https://storage.googleapis.com/$ES_SERVERLESS_BUCKET/$MANIFEST_FILE_NAME\">$MANIFEST_FILE_NAME</a>"
elif [[ "${PUBLISH_MANIFEST:-}" =~ ^(1|true)$ ]]; then
echo "--- Skipping upload of latest-verified manifest to GCS, ES Serverless build tag is not pointing to a hash"
elif [[ "$SOURCE_IMAGE_OR_TAG" =~ ^git-[0-9a-fA-F]{12}$ ]]; then
echo "--- Skipping upload of latest-verified manifest to GCS, flag was not provided"
else
echo "--- Skipping upload of latest-verified manifest to GCS, no flag and hash provided"
fi
echo "--- Annotating build with info"
cat << EOT | buildkite-agent annotate --style "success"
<h2>Promotion successful!</h2>
@@ -104,5 +71,4 @@ cat << EOT | buildkite-agent annotate --style "success"
<br/>Source image: $SOURCE_IMAGE
<br/>Kibana commit: <a href="https://github.com/elastic/kibana/commit/$BUILDKITE_COMMIT">$BUILDKITE_COMMIT</a>
<br/>Elasticsearch commit: <a href="https://github.com/elastic/elasticsearch/commit/$ELASTIC_COMMIT_HASH">$ELASTIC_COMMIT_HASH</a>
<br/>Manifest file: $MANIFEST_UPLOAD_PATH
EOT


@@ -103,6 +103,7 @@ interface ManifestEntry {
set -euo pipefail
echo '--- Upload files to GCS'
.buildkite/scripts/common/activate_service_account.sh ${BASE_BUCKET_DAILY}
cd "${destination}"
gsutil -m cp -r *.* gs://${BASE_BUCKET_DAILY}/${DESTINATION}
cp manifest.json manifest-latest.json


@@ -38,6 +38,7 @@ import { BASE_BUCKET_DAILY, BASE_BUCKET_PERMANENT } from './bucket_config';
execSync(
`
set -euo pipefail
.buildkite/scripts/common/activate_service_account.sh ${bucket}
cp manifest.json manifest-latest-verified.json
gsutil -h "Cache-Control:no-cache, max-age=0, no-transform" cp manifest-latest-verified.json gs://${BASE_BUCKET_DAILY}/${version}/
rm manifest.json


@@ -41,6 +41,9 @@ download_artifact kibana-default-plugins.tar.gz "${OUTPUT_DIR}/" --build "${KIBA
echo "--- Adding commit info"
echo "${BUILDKITE_COMMIT}" > "${OUTPUT_DIR}/KIBANA_COMMIT_HASH"
echo "--- Activating service-account for gsutil to access gs://kibana-performance"
.buildkite/scripts/common/activate_service_account.sh gs://kibana-performance
echo "--- Uploading ${OUTPUT_REL} dir to ${GCS_BUCKET}"
cd "${OUTPUT_DIR}/.."
gsutil -m cp -r "${BUILD_ID}" "${GCS_BUCKET}"


@@ -19,6 +19,9 @@ rm -rf "${KIBANA_LOAD_TESTING_DIR}"
rm -rf "${GCS_ARTIFACTS_DIR}"
download_artifacts() {
echo Activating service-account for gsutil to access gs://kibana-performance
.buildkite/scripts/common/activate_service_account.sh gs://kibana-performance
mkdir -p "${GCS_ARTIFACTS_DIR}"
gsutil cp "$GCS_BUCKET/latest" "${GCS_ARTIFACTS_DIR}/"


@@ -9,6 +9,7 @@
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { getKibanaDir } from '#pipeline-utils';
// TODO - how to generate this dynamically?
const STORYBOOKS = [
@@ -117,7 +118,12 @@ const upload = () => {
fs.writeFileSync('index.html', html);
console.log('--- Uploading Storybooks');
const activateScript = path.relative(
process.cwd(),
path.join(getKibanaDir(), '.buildkite', 'scripts', 'common', 'activate_service_account.sh')
);
exec(`
${activateScript} gs://ci-artifacts.kibana.dev
gsutil -q -m cp -r -z js,css,html,json,map,txt,svg '*' 'gs://${STORYBOOK_BUCKET}/${STORYBOOK_DIRECTORY}/${process.env.BUILDKITE_COMMIT}/'
gsutil -h "Cache-Control:no-cache, max-age=0, no-transform" cp -z html 'index.html' 'gs://${STORYBOOK_BUCKET}/${STORYBOOK_DIRECTORY}/latest/'
`);


@@ -9,6 +9,7 @@
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { getKibanaDir } from '#pipeline-utils';
const GITHUB_CONTEXT = 'Build and Publish Webpack bundle analyzer reports';
@@ -54,7 +55,12 @@ const upload = () => {
fs.writeFileSync('index.html', html);
console.log('--- Uploading Webpack Bundle Analyzer reports');
const activateScript = path.relative(
process.cwd(),
path.join(getKibanaDir(), '.buildkite', 'scripts', 'common', 'activate_service_account.sh')
);
exec(`
${activateScript} gs://ci-artifacts.kibana.dev
gsutil -q -m cp -r -z html '*' 'gs://${WEBPACK_REPORTS_BUCKET}/${WEBPACK_REPORTS}/${process.env.BUILDKITE_COMMIT}/'
gsutil -h "Cache-Control:no-cache, max-age=0, no-transform" cp -z html 'index.html' 'gs://${WEBPACK_REPORTS_BUCKET}/${WEBPACK_REPORTS}/latest/'
`);