Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)

[CI] Buildkite support with Baseline pipeline (#100492)

Commit 54c3ca142e (parent: 0ecca23e30)

37 changed files with 729 additions and 3 deletions
10  .buildkite/README.md  Normal file
@@ -0,0 +1,10 @@
# Kibana / Buildkite

## Directory Structure

- `hooks` - special directory used by Buildkite agents for [hooks](https://buildkite.com/docs/agent/v3/hooks)
- `pipelines` - contains pipeline definitions
- `scripts/common` - scripts that get `source`d by other scripts to set environment variables or import shared functions
- `scripts/lifecycle` - general scripts for tasks that run before or after individual steps or the entire build
- `scripts/steps` - scripts that define something that will run for a step defined in a pipeline
- `scripts/*` - all other scripts are building blocks that make up the tasks in pipelines. They may be run by other scripts, but should not be `source`d
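As an illustration of the layout described in the README, a hypothetical step script might look like the sketch below; the file name `example_step.sh` and the task it runs are assumptions for illustration, not files added by this commit:

```bash
#!/usr/bin/env bash
# Hypothetical .buildkite/scripts/steps/example_step.sh (illustration only)

set -euo pipefail

# scripts/common/* are meant to be source'd for shared env vars and helpers
source .buildkite/scripts/common/util.sh

# scripts/* building blocks are executed, not source'd
.buildkite/scripts/bootstrap.sh

echo "--- Example task"
node scripts/build_api_docs
```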
79  .buildkite/agents.json  Normal file
@@ -0,0 +1,79 @@
{
  "gcp": {
    "project": "elastic-kibana-ci",
    "zones": ["us-central1-a", "us-central1-b", "us-central1-c", "us-central1-f"],
    "serviceAccount": "elastic-buildkite-agent@elastic-kibana-ci.iam.gserviceaccount.com",
    "imageFamily": "kb-ubuntu",
    "subnetwork": "buildkite",
    "disableExternalIp": true,
    "diskType": "pd-ssd",
    "diskSizeGb": 75,
    "overprovision": 0,
    "minimumAgents": 0,
    "maximumAgents": 50,
    "gracefulStopAfterMins": 360,
    "hardStopAfterMins": 540,
    "idleTimeoutMins": 10,
    "exitAfterOneJob": false,

    "agents": [
      {
        "queue": "default",
        "name": "kb-default",
        "minimumAgents": 1,
        "maximumAgents": 100,
        "idleTimeoutMins": 60,
        "machineType": "e2-small"
      },
      {
        "queue": "c2-8",
        "name": "kb-c2-8",
        "machineType": "c2-standard-8",
        "localSsds": 1
      },
      {
        "queue": "c2-4",
        "name": "kb-c2-4",
        "machineType": "c2-standard-4",
        "localSsds": 1
      },
      {
        "queue": "jest",
        "name": "kb-jest",
        "machineType": "n2-standard-2",
        "diskSizeGb": 128
      },
      {
        "queue": "ci-group",
        "name": "kb-cigroup",
        "machineType": "n2-standard-8",
        "diskSizeGb": 256
      },
      {
        "queue": "ci-group-4",
        "name": "kb-cigroup-4",
        "machineType": "n2-standard-4",
        "diskSizeGb": 128
      },
      {
        "queue": "ci-group-4d",
        "name": "kb-cigroup-4d",
        "machineType": "n2d-standard-4",
        "diskSizeGb": 128
      },
      {
        "queue": "ci-group-6",
        "name": "kb-cigroup-6",
        "machineType": "n2-custom-6-16384",
        "diskSizeGb": 128
      },
      {
        "queue": "packer",
        "name": "kb-packer",
        "serviceAccount": "buildkite-packer-agent@elastic-kibana-ci.iam.gserviceaccount.com",
        "maximumAgents": 10,
        "machineType": "e2-small"
      }
    ]
  }
}
3  .buildkite/hooks/post-command  Normal file
@@ -0,0 +1,3 @@
#!/usr/bin/env bash

source .buildkite/scripts/lifecycle/post_command.sh
3  .buildkite/hooks/pre-command  Normal file
@@ -0,0 +1,3 @@
#!/usr/bin/env bash

source .buildkite/scripts/lifecycle/pre_command.sh
24  .buildkite/pipelines/on_merge.yml  Normal file
@@ -0,0 +1,24 @@
env:
  GITHUB_COMMIT_STATUS_ENABLED: 'true'
  GITHUB_COMMIT_STATUS_CONTEXT: 'buildkite/on-merge'
steps:
  - command: .buildkite/scripts/lifecycle/pre_build.sh
    label: Pre-Build

  - wait

  - command: .buildkite/scripts/steps/on_merge_build_and_metrics.sh
    label: Default Build and Metrics
    agents:
      queue: c2-8

  - command: .buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh
    label: Build TS Refs and Check Public API Docs
    agents:
      queue: c2-4

  - wait: ~
    continue_on_failure: true

  - command: .buildkite/scripts/lifecycle/post_build.sh
    label: Post-Build
22  .buildkite/scripts/bootstrap.sh  Executable file
@@ -0,0 +1,22 @@
#!/usr/bin/env bash

set -euo pipefail

source .buildkite/scripts/common/util.sh

echo "--- yarn install and bootstrap"
yarn kbn bootstrap --verbose

###
### upload ts-refs-cache artifacts as quickly as possible so they are available for download
###
if [[ "${BUILD_TS_REFS_CACHE_CAPTURE:-}" == "true" ]]; then
  echo "--- Upload ts-refs-cache"
  cd "$KIBANA_DIR/target/ts_refs_cache"
  gsutil cp "*.zip" 'gs://kibana-ci-ts-refs-cache/'
  cd "$KIBANA_DIR"
fi

if [[ "$DISABLE_BOOTSTRAP_VALIDATION" != "true" ]]; then
  verify_no_git_changes 'yarn kbn bootstrap'
fi
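The trailing check in `bootstrap.sh` only runs when `DISABLE_BOOTSTRAP_VALIDATION` is not `"true"`; a minimal sketch of opting into that validation from a caller (the inline assignment is illustrative, not something this commit adds):

```bash
# Illustration only: run bootstrap with the post-bootstrap git check enabled,
# so any files changed by `yarn kbn bootstrap` fail the step via verify_no_git_changes.
DISABLE_BOOTSTRAP_VALIDATION=false .buildkite/scripts/bootstrap.sh
```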
16  .buildkite/scripts/build_kibana.sh  Executable file
@@ -0,0 +1,16 @@
#!/usr/bin/env bash

set -euo pipefail

export KBN_NP_PLUGINS_BUILT=true

echo "--- Build Kibana Distribution"
node scripts/build --debug --no-oss

echo "--- Archive Kibana Distribution"
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$KIBANA_DIR/install/kibana"
mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
mkdir -p "$KIBANA_BUILD_LOCATION"
cp -pR install/kibana/. "$KIBANA_BUILD_LOCATION/"
32  .buildkite/scripts/build_kibana_plugins.sh  Normal file
@@ -0,0 +1,32 @@
#!/usr/bin/env bash

set -euo pipefail

echo "--- Build Platform Plugins"
node scripts/build_kibana_platform_plugins \
  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/common/fixtures/plugins" \
  --scan-dir "$KIBANA_DIR/examples" \
  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/common/fixtures/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
  --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \
  --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \
  --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \
  --scan-dir "$XPACK_DIR/examples" \
  --verbose

echo "--- Archive built plugins"
shopt -s globstar
tar -zcf \
  target/kibana-default-plugins.tar.gz \
  x-pack/plugins/**/target/public \
  x-pack/test/**/target/public \
  examples/**/target/public \
  x-pack/examples/**/target/public \
  test/**/target/public
72  .buildkite/scripts/common/env.sh  Executable file
@@ -0,0 +1,72 @@
#!/usr/bin/env bash

export CI=true

KIBANA_DIR=$(pwd)
export KIBANA_DIR
export XPACK_DIR="$KIBANA_DIR/x-pack"

export CACHE_DIR="$HOME/.kibana"
PARENT_DIR="$(cd "$KIBANA_DIR/.."; pwd)"
export PARENT_DIR
export WORKSPACE="${WORKSPACE:-$PARENT_DIR}"

KIBANA_PKG_BRANCH="$(jq -r .branch "$KIBANA_DIR/package.json")"
export KIBANA_PKG_BRANCH
export KIBANA_BASE_BRANCH="$KIBANA_PKG_BRANCH"

export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export RE2_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CYPRESS_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/cypress"

export NODE_OPTIONS="--max-old-space-size=4096"

export FORCE_COLOR=1
export TEST_BROWSER_HEADLESS=1

export ELASTIC_APM_ENVIRONMENT=ci
export ELASTIC_APM_TRANSACTION_SAMPLE_RATE=0.1

CI_REPORTING_ENABLED=false # TODO enable when ready, only controls checks reporter and APM

if is_pr; then
  export ELASTIC_APM_ACTIVE=false
  export CHECKS_REPORTER_ACTIVE="${CI_REPORTING_ENABLED-}"

  # These can be removed once we're not supporting Jenkins and Buildkite at the same time
  # These are primarily used by github checks reporter and can be configured via /github_checks_api.json
  export ghprbGhRepository="elastic/kibana"
  export ghprbActualCommit="$BUILDKITE_COMMIT"
  export BUILD_URL="$BUILDKITE_BUILD_URL"

  # set_git_merge_base # TODO for PRs
else
  export ELASTIC_APM_ACTIVE="${CI_REPORTING_ENABLED-}"
  export CHECKS_REPORTER_ACTIVE=false
fi

export FLEET_PACKAGE_REGISTRY_PORT=6104
export TEST_CORS_SERVER_PORT=6105

export DETECT_CHROMEDRIVER_VERSION=true
export CHROMEDRIVER_FORCE_DOWNLOAD=true

export GCS_UPLOAD_PREFIX=FAKE_UPLOAD_PREFIX # TODO remove the need for this

export KIBANA_BUILD_LOCATION="$WORKSPACE/kibana-build-xpack"

if [[ "${BUILD_TS_REFS_CACHE_ENABLE:-}" != "true" ]]; then
  export BUILD_TS_REFS_CACHE_ENABLE=false
fi

export BUILD_TS_REFS_DISABLE=true
export DISABLE_BOOTSTRAP_VALIDATION=true

export TEST_KIBANA_HOST=localhost
export TEST_KIBANA_PORT=6101
export TEST_KIBANA_URL="http://elastic:changeme@localhost:6101"
export TEST_ES_URL="http://elastic:changeme@localhost:6102"
export TEST_ES_TRANSPORT_PORT=6103
export TEST_CORS_SERVER_PORT=6106
export ALERTING_PROXY_PORT=6105
22  .buildkite/scripts/common/setup_bazel.sh  Normal file
@@ -0,0 +1,22 @@
#!/usr/bin/env bash

KIBANA_BUILDBUDDY_CI_API_KEY=$(vault read -field=value secret/kibana-issues/dev/kibana-buildbuddy-ci-api-key)
export KIBANA_BUILDBUDDY_CI_API_KEY

cp "$KIBANA_DIR/src/dev/ci_setup/.bazelrc-ci" "$HOME/.bazelrc"

###
### append auth token to buildbuddy into "$HOME/.bazelrc";
###
echo "# Appended by .buildkite/scripts/setup_bazel.sh" >> "$HOME/.bazelrc"
echo "build --remote_header=x-buildbuddy-api-key=$KIBANA_BUILDBUDDY_CI_API_KEY" >> "$HOME/.bazelrc"

###
### remove write permissions on buildbuddy remote cache for prs
###
if [[ "${BUILDKITE_PULL_REQUEST:-}" && "$BUILDKITE_PULL_REQUEST" != "false" ]] ; then
  {
    echo "# Uploads logs & artifacts without writing to cache"
    echo "build --noremote_upload_local_results"
  } >> "$HOME/.bazelrc"
fi
46  .buildkite/scripts/common/setup_node.sh  Executable file
@@ -0,0 +1,46 @@
#!/usr/bin/env bash

echo "--- Setup Node"

NODE_VERSION="$(cat "$KIBANA_DIR/.node-version")"
export NODE_VERSION
export NODE_DIR="$CACHE_DIR/node/$NODE_VERSION"
export NODE_BIN_DIR="$NODE_DIR/bin"
export YARN_OFFLINE_CACHE="$CACHE_DIR/yarn-offline-cache"

if [[ ! -d "$NODE_DIR" ]]; then
  hostArch="$(command uname -m)"
  case "${hostArch}" in
    x86_64 | amd64) nodeArch="x64" ;;
    aarch64) nodeArch="arm64" ;;
    *) nodeArch="${hostArch}" ;;
  esac

  nodeUrl="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-$nodeArch.tar.gz"

  echo "node.js v$NODE_VERSION not found at $NODE_DIR, downloading from $nodeUrl"

  mkdir -p "$NODE_DIR"
  curl --silent -L "$nodeUrl" | tar -xz -C "$NODE_DIR" --strip-components=1
else
  echo "node.js v$NODE_VERSION already installed to $NODE_DIR, re-using"
  ls -alh "$NODE_BIN_DIR"
fi

export PATH="$NODE_BIN_DIR:$PATH"


echo "--- Setup Yarn"

YARN_VERSION=$(node -e "console.log(String(require('./package.json').engines.yarn || '').replace(/^[^\d]+/,''))")
export YARN_VERSION

if [[ ! $(which yarn) || $(yarn --version) != "$YARN_VERSION" ]]; then
  npm install -g "yarn@^${YARN_VERSION}"
fi

yarn config set yarn-offline-mirror "$YARN_OFFLINE_CACHE"

YARN_GLOBAL_BIN=$(yarn global bin)
export YARN_GLOBAL_BIN
export PATH="$PATH:$YARN_GLOBAL_BIN"
51  .buildkite/scripts/common/util.sh  Executable file
@@ -0,0 +1,51 @@
#!/usr/bin/env bash

checks-reporter-with-killswitch() {
  if [ "$CHECKS_REPORTER_ACTIVE" == "true" ] ; then
    yarn run github-checks-reporter "$@"
  else
    arguments=("$@");
    "${arguments[@]:1}";
  fi
}

is_pr() {
  [[ "${GITHUB_PR_NUMBER-}" ]] && return
  false
}

verify_no_git_changes() {
  RED='\033[0;31m'
  C_RESET='\033[0m' # Reset color

  GIT_CHANGES="$(git ls-files --modified)"
  if [ "$GIT_CHANGES" ]; then
    echo -e "\n${RED}ERROR: '$1' caused changes to the following files:${C_RESET}\n"
    echo -e "$GIT_CHANGES\n"
    exit 1
  fi
}

# docker_run can be used in place of `docker run`
# it automatically passes along all of Buildkite's tracked environment variables, and mounts the buildkite-agent in the running container
docker_run() {
  args=()

  if [[ -n "${BUILDKITE_ENV_FILE:-}" ]] ; then
    # Read in the env file and convert to --env params for docker
    # This is because --env-file doesn't support newlines or quotes per https://docs.docker.com/compose/env-file/#syntax-rules
    while read -r var; do
      args+=( --env "${var%%=*}" )
    done < "$BUILDKITE_ENV_FILE"
  fi

  BUILDKITE_AGENT_BINARY_PATH=$(command -v buildkite-agent)
  args+=(
    "--env" "BUILDKITE_JOB_ID"
    "--env" "BUILDKITE_BUILD_ID"
    "--env" "BUILDKITE_AGENT_ACCESS_TOKEN"
    "--volume" "$BUILDKITE_AGENT_BINARY_PATH:/usr/bin/buildkite-agent"
  )

  docker run "${args[@]}" "$@"
}
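A hedged sketch of how the helpers in `util.sh` could be called from a step script; the check name, Docker image, and commands are illustrative assumptions rather than usages added by this commit:

```bash
#!/usr/bin/env bash

set -euo pipefail

source .buildkite/scripts/common/util.sh

# Wraps the command in github-checks-reporter when CHECKS_REPORTER_ACTIVE=true,
# otherwise drops the check name and runs the command directly.
checks-reporter-with-killswitch "Example check name" \
  node scripts/build_api_docs

# Like `docker run`, but forwards Buildkite-tracked env vars and mounts the
# buildkite-agent binary into the container (image name is a placeholder).
docker_run --rm ubuntu:20.04 buildkite-agent --version
```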
19  .buildkite/scripts/download_build_artifacts.sh  Executable file
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

set -euo pipefail

if [[ ! -d "$KIBANA_BUILD_LOCATION/bin" ]]; then
  echo '--- Downloading Distribution and Plugin artifacts'

  cd "$WORKSPACE"

  buildkite-agent artifact download kibana-default.tar.gz .
  buildkite-agent artifact download kibana-default-plugins.tar.gz .

  mkdir -p "$KIBANA_BUILD_LOCATION"
  tar -xzf kibana-default.tar.gz -C "$KIBANA_BUILD_LOCATION" --strip=1

  cd "$KIBANA_DIR"

  tar -xzf ../kibana-default-plugins.tar.gz
fi
59  .buildkite/scripts/lifecycle/ci_stats.js  Normal file
@@ -0,0 +1,59 @@
const https = require('https');
const token = process.env.CI_STATS_TOKEN;
const host = process.env.CI_STATS_HOST;

const request = (url, options, data = null) => {
  const httpOptions = {
    ...options,
    headers: {
      ...(options.headers || {}),
      Authorization: `token ${token}`,
    },
  };

  return new Promise((resolve, reject) => {
    console.log(`Calling https://${host}${url}`);

    const req = https.request(`https://${host}${url}`, httpOptions, (res) => {
      if (res.statusCode < 200 || res.statusCode >= 300) {
        return reject(new Error(`Status Code: ${res.statusCode}`));
      }

      const data = [];
      res.on('data', (d) => {
        data.push(d);
      });

      res.on('end', () => {
        try {
          let resp = Buffer.concat(data).toString();

          try {
            if (resp.trim()) {
              resp = JSON.parse(resp);
            }
          } catch (ex) {
            console.error(ex);
          }

          resolve(resp);
        } catch (ex) {
          reject(ex);
        }
      });
    });

    req.on('error', reject);

    if (data) {
      req.write(JSON.stringify(data));
    }

    req.end();
  });
};

module.exports = {
  get: (url) => request(url, { method: 'GET' }),
  post: (url, data) => request(url, { method: 'POST' }, data),
};
17  .buildkite/scripts/lifecycle/ci_stats_complete.js  Normal file
@@ -0,0 +1,17 @@
const ciStats = require('./ci_stats');

// TODO - this is okay for now but should really be replaced with an API call, especially once retries are enabled
const BUILD_STATUS = process.env.BUILD_FAILED === 'true' ? 'FAILURE' : 'SUCCESS';

(async () => {
  try {
    if (process.env.CI_STATS_BUILD_ID) {
      await ciStats.post(`/v1/build/_complete?id=${process.env.CI_STATS_BUILD_ID}`, {
        result: BUILD_STATUS,
      });
    }
  } catch (ex) {
    console.error(ex);
    process.exit(1);
  }
})();
30  .buildkite/scripts/lifecycle/ci_stats_start.js  Normal file
@@ -0,0 +1,30 @@
const { execSync } = require('child_process');
const ciStats = require('./ci_stats');

(async () => {
  try {
    const build = await ciStats.post('/v1/build', {
      jenkinsJobName: process.env.BUILDKITE_PIPELINE_NAME,
      jenkinsJobId: process.env.BUILDKITE_BUILD_ID,
      jenkinsUrl: process.env.BUILDKITE_BUILD_URL,
      prId: process.env.GITHUB_PR_NUMBER || null,
    });

    execSync(`buildkite-agent meta-data set ci_stats_build_id "${build.id}"`);

    // TODO Will need to set MERGE_BASE for PRs

    await ciStats.post(`/v1/git_info?buildId=${build.id}`, {
      branch: process.env.BUILDKITE_BRANCH.replace(/^(refs\/heads\/|origin\/)/, ''),
      commit: process.env.BUILDKITE_COMMIT,
      targetBranch:
        process.env.GITHUB_PR_TARGET_BRANCH ||
        process.env.BUILDKITE_PULL_REQUEST_BASE_BRANCH ||
        null,
      mergeBase: process.env.GITHUB_PR_MERGE_BASE || null, // TODO confirm GITHUB_PR_MERGE_BASE or switch to final var
    });
  } catch (ex) {
    console.error(ex);
    process.exit(1);
  }
})();
14  .buildkite/scripts/lifecycle/commit_status_complete.sh  Executable file
@@ -0,0 +1,14 @@
#!/usr/bin/env bash

set -euo pipefail

if [[ "${GITHUB_COMMIT_STATUS_ENABLED:-}" == "true" ]]; then
  COMMIT_STATUS=success
  if [[ "${BUILD_FAILED:-}" == "true" ]]; then
    COMMIT_STATUS=failure
  fi

  GITHUB_COMMIT_STATUS_CONTEXT=${GITHUB_COMMIT_STATUS_CONTEXT:-"buildkite/$BUILDKITE_PIPELINE_NAME"}

  gh api "repos/elastic/kibana/statuses/$BUILDKITE_COMMIT" -f state="$COMMIT_STATUS" -f target_url="$BUILDKITE_BUILD_URL" -f context="$GITHUB_COMMIT_STATUS_CONTEXT" --silent
fi
9  .buildkite/scripts/lifecycle/commit_status_start.sh  Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

set -euo pipefail

if [[ "${GITHUB_COMMIT_STATUS_ENABLED:-}" == "true" ]]; then
  GITHUB_COMMIT_STATUS_CONTEXT=${GITHUB_COMMIT_STATUS_CONTEXT:-"buildkite/$BUILDKITE_PIPELINE_NAME"}

  gh api "repos/elastic/kibana/statuses/$BUILDKITE_COMMIT" -f state=pending -f target_url="$BUILDKITE_BUILD_URL" -f context="$GITHUB_COMMIT_STATUS_CONTEXT" --silent
fi
10  .buildkite/scripts/lifecycle/post_build.sh  Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash

set -euo pipefail

BUILD_FAILED=$(buildkite-agent meta-data get build_failed --default "false")
export BUILD_FAILED

"$(dirname "${0}")/commit_status_complete.sh"

node "$(dirname "${0}")/ci_stats_complete.js"
7  .buildkite/scripts/lifecycle/post_command.sh  Executable file
@@ -0,0 +1,7 @@
#!/usr/bin/env bash

set -euo pipefail

if [[ "$BUILDKITE_COMMAND_EXIT_STATUS" != "0" ]]; then
  buildkite-agent meta-data set build_failed true
fi
10  .buildkite/scripts/lifecycle/pre_build.sh  Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash

set -euo pipefail

"$(dirname "${0}")/commit_status_start.sh"

export CI_STATS_TOKEN="$(vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)"
export CI_STATS_HOST="$(vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)"

node "$(dirname "${0}")/ci_stats_start.js"
67  .buildkite/scripts/lifecycle/pre_command.sh  Executable file
@@ -0,0 +1,67 @@
#!/usr/bin/env bash

set -euo pipefail

# Set up a custom ES Snapshot Manifest if one has been specified for this build
{
  ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST --default '')}
  export ES_SNAPSHOT_MANIFEST

  if [[ "${ES_SNAPSHOT_MANIFEST:-}" ]]; then
    cat << EOF | buildkite-agent annotate --style "info" --context es-snapshot-manifest
This build is running using a custom Elasticsearch snapshot.

ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST

To use this locally, simply prefix your commands with:

\`\`\`
ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST"
\`\`\`

e.g.

\`\`\`
ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" node scripts/functional_tests_server.js
\`\`\`
EOF
  fi
}

# Setup CI Stats
{
  CI_STATS_BUILD_ID="$(buildkite-agent meta-data get ci_stats_build_id --default '')"
  export CI_STATS_BUILD_ID

  if [[ "$CI_STATS_BUILD_ID" ]]; then
    echo "CI Stats Build ID: $CI_STATS_BUILD_ID"

    CI_STATS_TOKEN="$(vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)"
    export CI_STATS_TOKEN

    CI_STATS_HOST="$(vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)"
    export CI_STATS_HOST

    KIBANA_CI_STATS_CONFIG=$(jq -n \
      --arg buildId "$CI_STATS_BUILD_ID" \
      --arg apiUrl "https://$CI_STATS_HOST" \
      --arg apiToken "$CI_STATS_TOKEN" \
      '{buildId: $buildId, apiUrl: $apiUrl, apiToken: $apiToken}' \
    )
    export KIBANA_CI_STATS_CONFIG
  fi
}

GITHUB_TOKEN=$(vault read -field=github_token secret/kibana-issues/dev/kibanamachine)
export GITHUB_TOKEN

# By default, all steps should set up these things to get a full environment before running
# It can be skipped for pipeline upload steps though, to make job start time a little faster
if [[ "${SKIP_CI_SETUP:-}" != "true" ]]; then
  if [[ -d .buildkite/scripts && "${BUILDKITE_COMMAND:-}" != "buildkite-agent pipeline upload"* ]]; then
    source .buildkite/scripts/common/util.sh
    source .buildkite/scripts/common/env.sh
    source .buildkite/scripts/common/setup_node.sh
    source .buildkite/scripts/common/setup_bazel.sh
  fi
fi
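The pre-command hook resolves `ES_SNAPSHOT_MANIFEST` from build meta-data when it is not already set in the environment; a sketch of how an earlier step (or someone triggering a build) could supply one — the manifest URL below is a placeholder, not a real artifact:

```bash
# Illustration only: make a custom ES snapshot manifest visible to later steps,
# whose pre-command hook reads it via `buildkite-agent meta-data get`.
buildkite-agent meta-data set ES_SNAPSHOT_MANIFEST \
  "https://example.com/elasticsearch/manifest-latest-verified.json"
```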
8  .buildkite/scripts/packer_cache.sh  Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

set -euo pipefail

source .buildkite/scripts/common/env.sh
source .buildkite/scripts/common/setup_node.sh

yarn kbn bootstrap
18  .buildkite/scripts/post_build_kibana.sh  Executable file
@@ -0,0 +1,18 @@
#!/usr/bin/env bash

set -euo pipefail

if [[ ! "${DISABLE_CI_STATS_SHIPPING:-}" ]]; then
  echo "--- Ship Kibana Distribution Metrics to CI Stats"
  node scripts/ship_ci_stats \
    --metrics target/optimizer_bundle_metrics.json \
    --metrics packages/kbn-ui-shared-deps/target/metrics.json
fi

echo "--- Upload Build Artifacts"
# Moving to `target/` first will keep `buildkite-agent` from including directories in the artifact name
cd "$KIBANA_DIR/target"
mv kibana-*-linux-x86_64.tar.gz kibana-default.tar.gz
buildkite-agent artifact upload kibana-default.tar.gz
buildkite-agent artifact upload kibana-default-plugins.tar.gz
cd -
13  .buildkite/scripts/saved_object_field_metrics.sh  Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env bash

set -euo pipefail

source .buildkite/scripts/common/util.sh

echo '--- Default Saved Object Field Metrics'
cd "$XPACK_DIR"
checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \
  node scripts/functional_tests \
    --debug --bail \
    --kibana-install-dir "$KIBANA_BUILD_LOCATION" \
    --config test/saved_objects_field_count/config.ts
11  .buildkite/scripts/steps/on_merge_build_and_metrics.sh  Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env bash

set -euo pipefail

export DISABLE_BOOTSTRAP_VALIDATION=true
export BUILD_TS_REFS_DISABLE=true

.buildkite/scripts/bootstrap.sh
.buildkite/scripts/build_kibana.sh
.buildkite/scripts/post_build_kibana.sh
.buildkite/scripts/saved_object_field_metrics.sh
13  .buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh  Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env bash

set -euo pipefail

export BUILD_TS_REFS_CACHE_ENABLE=true
export BUILD_TS_REFS_CACHE_CAPTURE=true
export DISABLE_BOOTSTRAP_VALIDATION=true
export BUILD_TS_REFS_DISABLE=false

.buildkite/scripts/bootstrap.sh

echo "--- Build API Docs"
node scripts/build_api_docs
1  .github/CODEOWNERS  vendored
@@ -174,6 +174,7 @@
/.bazelversion @elastic/kibana-operations
/WORKSPACE.bazel @elastic/kibana-operations
#CC# /packages/kbn-expect/ @elastic/kibana-operations
/.buildkite/ @elastic/kibana-operations

# Quality Assurance
/src/dev/code_coverage @elastic/kibana-qa
@@ -121,6 +121,10 @@ export interface OptimizerCacheKey {
}

async function getLastCommit() {
  if (!Fs.existsSync(Path.join(REPO_ROOT, '.git'))) {
    return undefined;
  }

  const { stdout } = await execa(
    'git',
    ['log', '-n', '1', '--pretty=format:%H', '--', RELATIVE_DIR],
@@ -7,6 +7,7 @@
 */

jest.mock('execa');
jest.mock('fs');

import { getChanges } from './get_changes';

@@ -15,6 +16,8 @@ const execa: jest.Mock = jest.requireMock('execa');
it('parses git ls-files output', async () => {
  expect.assertions(4);

  jest.requireMock('fs').existsSync.mockImplementation(() => true);

  execa.mockImplementation((cmd, args, options) => {
    expect(cmd).toBe('git');
    expect(args).toEqual(['ls-files', '-dmt', '--', '/foo/bar/x']);
@@ -9,6 +9,7 @@
import Path from 'path';

import execa from 'execa';
import fs from 'fs';

export type Changes = Map<string, 'modified' | 'deleted'>;

@@ -16,11 +17,16 @@ export type Changes = Map<string, 'modified' | 'deleted'>;
 * get the changes in all the context directories (plugin public paths)
 */
export async function getChanges(dir: string) {
  const changes: Changes = new Map();

  if (!fs.existsSync(Path.join(dir, '.git'))) {
    return changes;
  }

  const { stdout } = await execa('git', ['ls-files', '-dmt', '--', dir], {
    cwd: dir,
  });

  const changes: Changes = new Map();
  const output = stdout.trim();

  if (output) {
@@ -5,6 +5,7 @@
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
import { relative } from 'path';

import { ToolingLog } from '@kbn/dev-utils';

@@ -120,6 +121,9 @@ export class FunctionalTestRunner {
      throw new Error('No tests defined.');
    }

    // eslint-disable-next-line
    console.log(`--- Running ${relative(process.cwd(), this.configFile)}`);

    const dockerServers = new DockerServersService(
      config.get('dockerServers'),
      this.log,
@@ -88,6 +88,8 @@ export async function runTests(options) {
      continue;
    }

    console.log(`--- Running ${relative(process.cwd(), configPath)}`);

    await withProcRunner(log, async (procs) => {
      const config = await readConfigFile(log, configPath);
@@ -16,11 +16,17 @@ export function getUniqueJunitReportPath(
  reportName: string,
  counter?: number
): string {
  const BUILDKITE_ID_SUFFIX = process.env.BUILDKITE_JOB_ID
    ? `-${process.env.BUILDKITE_JOB_ID}`
    : '';

  const path = Path.resolve(
    rootDirectory,
    'target/junit',
    process.env.JOB || '.',
    `TEST-${CI_PARALLEL_PROCESS_PREFIX}${reportName}${counter ? `-${counter}` : ''}.xml`
    `TEST-${CI_PARALLEL_PROCESS_PREFIX}${reportName}${
      counter ? `-${counter}` : ''
    }${BUILDKITE_ID_SUFFIX}.xml`
  );

  return Fs.existsSync(path)
@@ -28,7 +28,10 @@ export async function getVersionInfo({ isRelease, versionQualifier, pkg }: Optio
  );

  return {
    buildSha: (await execa('git', ['rev-parse', 'HEAD'])).stdout,
    buildSha:
      process.env.GIT_COMMIT ||
      process.env.BUILDKITE_COMMIT ||
      (await execa('git', ['rev-parse', 'HEAD'])).stdout,
    buildVersion,
    buildNumber: await getBuildNumber(),
  };
@@ -67,6 +67,9 @@ export const IGNORE_FILE_GLOBS = [
  // Bazel default files
  '**/WORKSPACE.bazel',
  '**/BUILD.bazel',

  // Buildkite
  '.buildkite/hooks/*',
];

/**
@@ -5,6 +5,7 @@
 * 2.0.
 */

import fs from 'fs';
import path from 'path';
// @ts-ignore barely used, untyped module
import simpleGit from 'simple-git';

@@ -12,6 +13,14 @@ import simpleGit from 'simple-git';
const gitDir = path.resolve(__dirname, '..', '..');

export async function gitInfo() {
  if (!fs.existsSync(path.join(gitDir, '.git'))) {
    // This info is only used for debugging purposes in the log
    // So if .git is not available for some reason, it's fine to output this
    return {
      number: 1,
      sha: process.env.GIT_COMMIT || process.env.BUILDKITE_COMMIT || 'none',
    };
  }
  const git = simpleGit(gitDir);

  return new Promise<{ number: number; sha: string }>((resolve, reject) => {