mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
[CI] Add pipeline task queue framework and merge workers into one (#64011)
This commit is contained in:
parent
6e357988a2
commit
465ed21194
51 changed files with 749 additions and 130 deletions
35
.ci/Dockerfile
Normal file
35
.ci/Dockerfile
Normal file
|
@ -0,0 +1,35 @@
|
|||
ARG NODE_VERSION=10.21.0
|
||||
|
||||
FROM node:${NODE_VERSION} AS base
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
|
||||
libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
|
||||
libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
|
||||
libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
|
||||
libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
|
||||
&& sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y rsync jq bsdtar google-chrome-stable \
|
||||
--no-install-recommends \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
|
||||
&& curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
|
||||
&& unzip vault.zip \
|
||||
&& rm vault.zip \
|
||||
&& chmod +x vault \
|
||||
&& mv vault /usr/local/bin/vault
|
||||
|
||||
RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana
|
||||
|
||||
COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
|
||||
RUN chmod +x /usr/local/bin/bash_standard_lib.sh
|
||||
|
||||
COPY ./runbld /usr/local/bin/runbld
|
||||
RUN chmod +x /usr/local/bin/runbld
|
||||
|
||||
USER kibana
|
|
@ -3,4 +3,4 @@
|
|||
profiles:
|
||||
- ".*": # Match any job
|
||||
tests:
|
||||
junit-filename-pattern: "8d8bd494-d909-4e67-a052-7e8b5aaeb5e4" # A bogus path that should never exist
|
||||
junit-filename-pattern: false
|
||||
|
|
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -47,6 +47,8 @@ npm-debug.log*
|
|||
.tern-project
|
||||
.nyc_output
|
||||
.ci/pipeline-library/build/
|
||||
.ci/runbld
|
||||
.ci/bash_standard_lib.sh
|
||||
.gradle
|
||||
|
||||
# apm plugin
|
||||
|
|
45
Jenkinsfile
vendored
45
Jenkinsfile
vendored
|
@ -8,50 +8,7 @@ kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true)
|
|||
ciStats.trackBuild {
|
||||
catchError {
|
||||
retryable.enable()
|
||||
parallel([
|
||||
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
|
||||
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
|
||||
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
|
||||
'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
|
||||
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
|
||||
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
|
||||
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
|
||||
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
|
||||
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
|
||||
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
|
||||
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
|
||||
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
|
||||
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
|
||||
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
|
||||
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
|
||||
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
|
||||
'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
|
||||
// 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
|
||||
]),
|
||||
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
|
||||
'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
|
||||
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
|
||||
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
|
||||
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
|
||||
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
|
||||
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
|
||||
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
|
||||
'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
|
||||
'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
|
||||
'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
|
||||
'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
|
||||
'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
|
||||
'xpack-savedObjectsFieldMetrics': kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'),
|
||||
// 'xpack-pageLoadMetrics': kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh'),
|
||||
'xpack-securitySolutionCypress': { processNumber ->
|
||||
whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/']) {
|
||||
kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')(processNumber)
|
||||
}
|
||||
},
|
||||
|
||||
// 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
|
||||
]),
|
||||
])
|
||||
kibanaPipeline.allCiTasks()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,10 +7,11 @@ function checkout_sibling {
|
|||
targetDir=$2
|
||||
useExistingParamName=$3
|
||||
useExisting="$(eval "echo "\$$useExistingParamName"")"
|
||||
repoAddress="https://github.com/"
|
||||
|
||||
if [ -z ${useExisting:+x} ]; then
|
||||
if [ -d "$targetDir" ]; then
|
||||
echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$PARENT_DIR]!"
|
||||
echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$WORKSPACE]!"
|
||||
echo
|
||||
echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling."
|
||||
exit 1
|
||||
|
@ -21,8 +22,9 @@ function checkout_sibling {
|
|||
cloneBranch=""
|
||||
|
||||
function clone_target_is_valid {
|
||||
|
||||
echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}"
|
||||
if [[ -n "$(git ls-remote --heads "git@github.com:${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
|
||||
if [[ -n "$(git ls-remote --heads "${repoAddress}${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
|
@ -71,7 +73,7 @@ function checkout_sibling {
|
|||
fi
|
||||
|
||||
echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..."
|
||||
git clone -b "$cloneBranch" "git@github.com:${cloneAuthor}/${project}.git" "$targetDir" --depth=1
|
||||
git clone -b "$cloneBranch" "${repoAddress}${cloneAuthor}/${project}.git" "$targetDir" --depth=1
|
||||
echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)"
|
||||
echo
|
||||
}
|
||||
|
@ -87,12 +89,12 @@ function checkout_sibling {
|
|||
fi
|
||||
}
|
||||
|
||||
checkout_sibling "elasticsearch" "${PARENT_DIR}/elasticsearch" "USE_EXISTING_ES"
|
||||
checkout_sibling "elasticsearch" "${WORKSPACE}/elasticsearch" "USE_EXISTING_ES"
|
||||
export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}
|
||||
|
||||
# Set the JAVA_HOME based on the Java property file in the ES repo
|
||||
# This assumes the naming convention used on CI (ex: ~/.java/java10)
|
||||
ES_DIR="$PARENT_DIR/elasticsearch"
|
||||
ES_DIR="$WORKSPACE/elasticsearch"
|
||||
ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
|
||||
|
||||
|
||||
|
|
|
@ -53,6 +53,8 @@ export PARENT_DIR="$parentDir"
|
|||
kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
|
||||
export KIBANA_PKG_BRANCH="$kbnBranch"
|
||||
|
||||
export WORKSPACE="${WORKSPACE:-$PARENT_DIR}"
|
||||
|
||||
###
|
||||
### download node
|
||||
###
|
||||
|
@ -161,7 +163,7 @@ export -f checks-reporter-with-killswitch
|
|||
|
||||
source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh"
|
||||
|
||||
ES_DIR="$PARENT_DIR/elasticsearch"
|
||||
ES_DIR="$WORKSPACE/elasticsearch"
|
||||
ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
|
||||
|
||||
if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then
|
||||
|
|
|
@ -49,8 +49,10 @@ export async function generateNoticeFromSource({ productName, directory, log }:
|
|||
ignore: [
|
||||
'{node_modules,build,target,dist,data,built_assets}/**',
|
||||
'packages/*/{node_modules,build,target,dist}/**',
|
||||
'src/plugins/*/{node_modules,build,target,dist}/**',
|
||||
'x-pack/{node_modules,build,target,dist,data}/**',
|
||||
'x-pack/packages/*/{node_modules,build,target,dist}/**',
|
||||
'x-pack/plugins/*/{node_modules,build,target,dist}/**',
|
||||
],
|
||||
};
|
||||
|
||||
|
|
|
@ -110,7 +110,7 @@ module.exports = function (grunt) {
|
|||
customLaunchers: {
|
||||
Chrome_Headless: {
|
||||
base: 'Chrome',
|
||||
flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222'],
|
||||
flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222', '--no-sandbox'],
|
||||
},
|
||||
},
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ const { resolve } = require('path');
|
|||
module.exports = function (grunt) {
|
||||
grunt.registerTask('test:jest', function () {
|
||||
const done = this.async();
|
||||
runJest(resolve(__dirname, '../scripts/jest.js')).then(done, done);
|
||||
runJest(resolve(__dirname, '../scripts/jest.js'), ['--maxWorkers=10']).then(done, done);
|
||||
});
|
||||
|
||||
grunt.registerTask('test:jest_integration', function () {
|
||||
|
@ -30,10 +30,10 @@ module.exports = function (grunt) {
|
|||
runJest(resolve(__dirname, '../scripts/jest_integration.js')).then(done, done);
|
||||
});
|
||||
|
||||
function runJest(jestScript) {
|
||||
function runJest(jestScript, args = []) {
|
||||
const serverCmd = {
|
||||
cmd: 'node',
|
||||
args: [jestScript, '--ci'],
|
||||
args: [jestScript, '--ci', ...args],
|
||||
opts: { stdio: 'inherit' },
|
||||
};
|
||||
|
||||
|
|
5
test/scripts/checks/doc_api_changes.sh
Executable file
5
test/scripts/checks/doc_api_changes.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:checkDocApiChanges
|
5
test/scripts/checks/file_casing.sh
Executable file
5
test/scripts/checks/file_casing.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:checkFileCasing
|
5
test/scripts/checks/i18n.sh
Executable file
5
test/scripts/checks/i18n.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:i18nCheck
|
5
test/scripts/checks/licenses.sh
Executable file
5
test/scripts/checks/licenses.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:licenses
|
5
test/scripts/checks/lock_file_symlinks.sh
Executable file
5
test/scripts/checks/lock_file_symlinks.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:checkLockfileSymlinks
|
5
test/scripts/checks/test_hardening.sh
Executable file
5
test/scripts/checks/test_hardening.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:test_hardening
|
5
test/scripts/checks/test_projects.sh
Executable file
5
test/scripts/checks/test_projects.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:test_projects
|
5
test/scripts/checks/ts_projects.sh
Executable file
5
test/scripts/checks/ts_projects.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:checkTsProjects
|
5
test/scripts/checks/type_check.sh
Executable file
5
test/scripts/checks/type_check.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:typeCheck
|
5
test/scripts/checks/verify_dependency_versions.sh
Executable file
5
test/scripts/checks/verify_dependency_versions.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:verifyDependencyVersions
|
5
test/scripts/checks/verify_notice.sh
Executable file
5
test/scripts/checks/verify_notice.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:verifyNotice
|
0
test/scripts/jenkins_build_kbn_sample_panel_action.sh
Normal file → Executable file
0
test/scripts/jenkins_build_kbn_sample_panel_action.sh
Normal file → Executable file
|
@ -2,19 +2,9 @@
|
|||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
echo " -> building examples separate from test plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--oss \
|
||||
--examples \
|
||||
--verbose;
|
||||
|
||||
echo " -> building test plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--oss \
|
||||
--no-examples \
|
||||
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
|
||||
--scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
|
||||
--verbose;
|
||||
if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
|
||||
./test/scripts/jenkins_build_plugins.sh
|
||||
fi
|
||||
|
||||
# doesn't persist, also set in kibanaPipeline.groovy
|
||||
export KBN_NP_PLUGINS_BUILT=true
|
||||
|
@ -26,4 +16,7 @@ yarn run grunt functionalTests:ensureAllTestsInCiGroup;
|
|||
if [[ -z "$CODE_COVERAGE" ]] ; then
|
||||
echo " -> building and extracting OSS Kibana distributable for use in functional tests"
|
||||
node scripts/build --debug --oss
|
||||
|
||||
mkdir -p "$WORKSPACE/kibana-build-oss"
|
||||
cp -pR build/oss/kibana-*-SNAPSHOT-linux-x86_64/. $WORKSPACE/kibana-build-oss/
|
||||
fi
|
||||
|
|
19
test/scripts/jenkins_build_plugins.sh
Executable file
19
test/scripts/jenkins_build_plugins.sh
Executable file
|
@ -0,0 +1,19 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
echo " -> building examples separate from test plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--oss \
|
||||
--examples \
|
||||
--workers 6 \
|
||||
--verbose
|
||||
|
||||
echo " -> building kibana platform plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--oss \
|
||||
--no-examples \
|
||||
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
|
||||
--scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
|
||||
--workers 6 \
|
||||
--verbose
|
|
@ -5,7 +5,7 @@ source test/scripts/jenkins_test_setup_oss.sh
|
|||
if [[ -z "$CODE_COVERAGE" ]]; then
|
||||
checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}";
|
||||
|
||||
if [ "$CI_GROUP" == "1" ]; then
|
||||
if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then
|
||||
source test/scripts/jenkins_build_kbn_sample_panel_action.sh
|
||||
yarn run grunt run:pluginFunctionalTestsRelease --from=source;
|
||||
yarn run grunt run:exampleFunctionalTestsRelease --from=source;
|
||||
|
|
15
test/scripts/jenkins_plugin_functional.sh
Executable file
15
test/scripts/jenkins_plugin_functional.sh
Executable file
|
@ -0,0 +1,15 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source test/scripts/jenkins_test_setup_oss.sh
|
||||
|
||||
cd test/plugin_functional/plugins/kbn_sample_panel_action;
|
||||
if [[ ! -d "target" ]]; then
|
||||
yarn build;
|
||||
fi
|
||||
cd -;
|
||||
|
||||
pwd
|
||||
|
||||
yarn run grunt run:pluginFunctionalTestsRelease --from=source;
|
||||
yarn run grunt run:exampleFunctionalTestsRelease --from=source;
|
||||
yarn run grunt run:interpreterFunctionalTestsRelease;
|
8
test/scripts/jenkins_security_solution_cypress.sh
Normal file → Executable file
8
test/scripts/jenkins_security_solution_cypress.sh
Normal file → Executable file
|
@ -1,12 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source test/scripts/jenkins_test_setup.sh
|
||||
|
||||
installDir="$PARENT_DIR/install/kibana"
|
||||
destDir="${installDir}-${CI_WORKER_NUMBER}"
|
||||
cp -R "$installDir" "$destDir"
|
||||
|
||||
export KIBANA_INSTALL_DIR="$destDir"
|
||||
source test/scripts/jenkins_test_setup_xpack.sh
|
||||
|
||||
echo " -> Running security solution cypress tests"
|
||||
cd "$XPACK_DIR"
|
||||
|
|
32
test/scripts/jenkins_setup_parallel_workspace.sh
Executable file
32
test/scripts/jenkins_setup_parallel_workspace.sh
Executable file
|
@ -0,0 +1,32 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
CURRENT_DIR=$(pwd)
|
||||
|
||||
# Copy everything except node_modules into the current workspace
|
||||
rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules
|
||||
rsync -a ${WORKSPACE}/kibana/.??* .
|
||||
|
||||
# Symlink all non-root, non-fixture node_modules into our new workspace
|
||||
cd ${WORKSPACE}/kibana
|
||||
find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
|
||||
find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
|
||||
cd "${CURRENT_DIR}"
|
||||
|
||||
# Symlink all of the individual root-level node_modules into the node_modules/ directory
|
||||
mkdir -p node_modules
|
||||
ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/
|
||||
ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/
|
||||
|
||||
# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked
|
||||
unlink node_modules/@kbn
|
||||
unlink node_modules/css-loader
|
||||
unlink node_modules/style-loader
|
||||
|
||||
# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink
|
||||
unlink node_modules/val-loader
|
||||
|
||||
cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/
|
||||
cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/
|
||||
cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/
|
||||
cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/
|
4
test/scripts/jenkins_test_setup.sh
Normal file → Executable file
4
test/scripts/jenkins_test_setup.sh
Normal file → Executable file
|
@ -14,3 +14,7 @@ trap 'post_work' EXIT
|
|||
export TEST_BROWSER_HEADLESS=1
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then
|
||||
cp -R $WORKSPACE/kibana/.es ./
|
||||
fi
|
||||
|
|
15
test/scripts/jenkins_test_setup_oss.sh
Normal file → Executable file
15
test/scripts/jenkins_test_setup_oss.sh
Normal file → Executable file
|
@ -2,10 +2,17 @@
|
|||
|
||||
source test/scripts/jenkins_test_setup.sh
|
||||
|
||||
if [[ -z "$CODE_COVERAGE" ]] ; then
|
||||
installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
|
||||
destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER}
|
||||
cp -R "$installDir" "$destDir"
|
||||
if [[ -z "$CODE_COVERAGE" ]]; then
|
||||
|
||||
destDir="build/kibana-build-oss"
|
||||
if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
|
||||
destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
|
||||
fi
|
||||
|
||||
if [[ ! -d $destDir ]]; then
|
||||
mkdir -p $destDir
|
||||
cp -pR "$WORKSPACE/kibana-build-oss/." $destDir/
|
||||
fi
|
||||
|
||||
export KIBANA_INSTALL_DIR="$destDir"
|
||||
fi
|
||||
|
|
15
test/scripts/jenkins_test_setup_xpack.sh
Normal file → Executable file
15
test/scripts/jenkins_test_setup_xpack.sh
Normal file → Executable file
|
@ -3,11 +3,18 @@
|
|||
source test/scripts/jenkins_test_setup.sh
|
||||
|
||||
if [[ -z "$CODE_COVERAGE" ]]; then
|
||||
installDir="$PARENT_DIR/install/kibana"
|
||||
destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}"
|
||||
cp -R "$installDir" "$destDir"
|
||||
|
||||
export KIBANA_INSTALL_DIR="$destDir"
|
||||
destDir="build/kibana-build-xpack"
|
||||
if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
|
||||
destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
|
||||
fi
|
||||
|
||||
if [[ ! -d $destDir ]]; then
|
||||
mkdir -p $destDir
|
||||
cp -pR "$WORKSPACE/kibana-build-xpack/." $destDir/
|
||||
fi
|
||||
|
||||
export KIBANA_INSTALL_DIR="$(realpath $destDir)"
|
||||
|
||||
cd "$XPACK_DIR"
|
||||
fi
|
||||
|
|
|
@ -3,21 +3,9 @@
|
|||
cd "$KIBANA_DIR"
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
echo " -> building examples separate from test plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--examples \
|
||||
--verbose;
|
||||
|
||||
echo " -> building test plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--no-examples \
|
||||
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
|
||||
--verbose;
|
||||
if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
|
||||
./test/scripts/jenkins_xpack_build_plugins.sh
|
||||
fi
|
||||
|
||||
# doesn't persist, also set in kibanaPipeline.groovy
|
||||
export KBN_NP_PLUGINS_BUILT=true
|
||||
|
@ -42,7 +30,10 @@ if [[ -z "$CODE_COVERAGE" ]] ; then
|
|||
cd "$KIBANA_DIR"
|
||||
node scripts/build --debug --no-oss
|
||||
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
|
||||
installDir="$PARENT_DIR/install/kibana"
|
||||
installDir="$KIBANA_DIR/install/kibana"
|
||||
mkdir -p "$installDir"
|
||||
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
|
||||
|
||||
mkdir -p "$WORKSPACE/kibana-build-xpack"
|
||||
cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/
|
||||
fi
|
||||
|
|
21
test/scripts/jenkins_xpack_build_plugins.sh
Executable file
21
test/scripts/jenkins_xpack_build_plugins.sh
Executable file
|
@ -0,0 +1,21 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
echo " -> building examples separate from test plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--workers 12 \
|
||||
--examples \
|
||||
--verbose
|
||||
|
||||
echo " -> building kibana platform plugins"
|
||||
node scripts/build_kibana_platform_plugins \
|
||||
--no-examples \
|
||||
--scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
|
||||
--scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
|
||||
--workers 12 \
|
||||
--verbose
|
0
test/scripts/jenkins_xpack_page_load_metrics.sh
Normal file → Executable file
0
test/scripts/jenkins_xpack_page_load_metrics.sh
Normal file → Executable file
5
test/scripts/lint/eslint.sh
Executable file
5
test/scripts/lint/eslint.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:eslint
|
5
test/scripts/lint/sasslint.sh
Executable file
5
test/scripts/lint/sasslint.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:sasslint
|
5
test/scripts/test/api_integration.sh
Executable file
5
test/scripts/test/api_integration.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:apiIntegrationTests
|
5
test/scripts/test/jest_integration.sh
Executable file
5
test/scripts/test/jest_integration.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:test_jest_integration
|
5
test/scripts/test/jest_unit.sh
Executable file
5
test/scripts/test/jest_unit.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:test_jest
|
5
test/scripts/test/karma_ci.sh
Executable file
5
test/scripts/test/karma_ci.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:test_karma_ci
|
5
test/scripts/test/mocha.sh
Executable file
5
test/scripts/test/mocha.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
yarn run grunt run:mocha
|
6
test/scripts/test/xpack_jest_unit.sh
Executable file
6
test/scripts/test/xpack_jest_unit.sh
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
cd x-pack
|
||||
checks-reporter-with-killswitch "X-Pack Jest" node --max-old-space-size=6144 scripts/jest --ci --verbose --maxWorkers=10
|
6
test/scripts/test/xpack_karma.sh
Executable file
6
test/scripts/test/xpack_karma.sh
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
cd x-pack
|
||||
checks-reporter-with-killswitch "X-Pack Karma Tests" yarn test:karma
|
6
test/scripts/test/xpack_list_cyclic_dependency.sh
Executable file
6
test/scripts/test/xpack_list_cyclic_dependency.sh
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
cd x-pack
|
||||
checks-reporter-with-killswitch "X-Pack List cyclic dependency test" node plugins/lists/scripts/check_circular_deps
|
6
test/scripts/test/xpack_siem_cyclic_dependency.sh
Executable file
6
test/scripts/test/xpack_siem_cyclic_dependency.sh
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source src/dev/ci_setup/setup_env.sh
|
||||
|
||||
cd x-pack
|
||||
checks-reporter-with-killswitch "X-Pack SIEM cyclic dependency test" node plugins/security_solution/scripts/check_circular_deps
|
|
@ -1,8 +1,15 @@
|
|||
// Basically, this is a shortcut for catchError(catchInterruptions: false) {}
|
||||
// By default, catchError will swallow aborts/timeouts, which we almost never want
|
||||
// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps
|
||||
def call(Map params = [:], Closure closure) {
|
||||
params.catchInterruptions = false
|
||||
return catchError(params, closure)
|
||||
try {
|
||||
closure()
|
||||
} catch (ex) {
|
||||
params.catchInterruptions = false
|
||||
catchError(params) {
|
||||
throw ex
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this
|
||||
|
|
|
@ -16,27 +16,34 @@ def withPostBuildReporting(Closure closure) {
|
|||
}
|
||||
}
|
||||
|
||||
def functionalTestProcess(String name, Closure closure) {
|
||||
return { processNumber ->
|
||||
def kibanaPort = "61${processNumber}1"
|
||||
def esPort = "61${processNumber}2"
|
||||
def esTransportPort = "61${processNumber}3"
|
||||
def ingestManagementPackageRegistryPort = "61${processNumber}4"
|
||||
def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) {
|
||||
// This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue
|
||||
def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER
|
||||
|
||||
withEnv([
|
||||
"CI_PARALLEL_PROCESS_NUMBER=${processNumber}",
|
||||
"TEST_KIBANA_HOST=localhost",
|
||||
"TEST_KIBANA_PORT=${kibanaPort}",
|
||||
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
|
||||
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
|
||||
"TEST_ES_TRANSPORT_PORT=${esTransportPort}",
|
||||
"INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
|
||||
"IS_PIPELINE_JOB=1",
|
||||
"JOB=${name}",
|
||||
"KBN_NP_PLUGINS_BUILT=true",
|
||||
]) {
|
||||
closure()
|
||||
}
|
||||
def kibanaPort = "61${parallelId}1"
|
||||
def esPort = "61${parallelId}2"
|
||||
def esTransportPort = "61${parallelId}3"
|
||||
def ingestManagementPackageRegistryPort = "61${parallelId}4"
|
||||
|
||||
withEnv([
|
||||
"CI_GROUP=${parallelId}",
|
||||
"REMOVE_KIBANA_INSTALL_DIR=1",
|
||||
"CI_PARALLEL_PROCESS_NUMBER=${parallelId}",
|
||||
"TEST_KIBANA_HOST=localhost",
|
||||
"TEST_KIBANA_PORT=${kibanaPort}",
|
||||
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
|
||||
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
|
||||
"TEST_ES_TRANSPORT_PORT=${esTransportPort}",
|
||||
"KBN_NP_PLUGINS_BUILT=true",
|
||||
"INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
|
||||
] + additionalEnvs) {
|
||||
closure()
|
||||
}
|
||||
}
|
||||
|
||||
def functionalTestProcess(String name, Closure closure) {
|
||||
return {
|
||||
withFunctionalTestEnv(["JOB=${name}"], closure)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -100,11 +107,17 @@ def withGcsArtifactUpload(workerName, closure) {
|
|||
def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}"
|
||||
def ARTIFACT_PATTERNS = [
|
||||
'target/kibana-*',
|
||||
'target/test-metrics/*',
|
||||
'target/kibana-security-solution/**/*.png',
|
||||
'target/junit/**/*',
|
||||
'test/**/screenshots/**/*.png',
|
||||
'target/test-suites-ci-plan.json',
|
||||
'test/**/screenshots/session/*.png',
|
||||
'test/**/screenshots/failure/*.png',
|
||||
'test/**/screenshots/diff/*.png',
|
||||
'test/functional/failure_debug/html/*.html',
|
||||
'x-pack/test/**/screenshots/**/*.png',
|
||||
'x-pack/test/**/screenshots/session/*.png',
|
||||
'x-pack/test/**/screenshots/failure/*.png',
|
||||
'x-pack/test/**/screenshots/diff/*.png',
|
||||
'x-pack/test/functional/failure_debug/html/*.html',
|
||||
'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
|
||||
]
|
||||
|
@ -119,6 +132,12 @@ def withGcsArtifactUpload(workerName, closure) {
|
|||
ARTIFACT_PATTERNS.each { pattern ->
|
||||
uploadGcsArtifact(uploadPrefix, pattern)
|
||||
}
|
||||
|
||||
dir(env.WORKSPACE) {
|
||||
ARTIFACT_PATTERNS.each { pattern ->
|
||||
uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
@ -131,6 +150,11 @@ def withGcsArtifactUpload(workerName, closure) {
|
|||
|
||||
// Publishes JUnit XML results from the main workspace and from every
// parallel task-queue workspace (parallel/*/kibana).
def publishJunit() {
  junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)

  // junit() is weird about paths for security reasons, so we need to actually change to an upper directory first
  dir(env.WORKSPACE) {
    junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
  }
}
|
||||
|
||||
def sendMail() {
|
||||
|
@ -194,12 +218,16 @@ def doSetup() {
|
|||
}
|
||||
}
|
||||
|
||||
/**
 * Builds OSS/Default Kibana via runbld.
 *
 * @param maxWorkers value for KBN_OPTIMIZER_MAX_WORKERS; an empty string
 *   (the default) leaves the optimizer's own worker-count default in place,
 *   so existing zero-argument callers keep their previous behavior.
 */
def buildOss(maxWorkers = '') {
  withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
    runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
  }
}
|
||||
|
||||
/**
 * Builds X-Pack Kibana via runbld.
 *
 * @param maxWorkers value for KBN_OPTIMIZER_MAX_WORKERS; an empty string
 *   (the default) leaves the optimizer's own worker-count default in place,
 *   so existing zero-argument callers keep their previous behavior.
 */
def buildXpack(maxWorkers = '') {
  withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
    runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
  }
}
|
||||
|
||||
def runErrorReporter() {
|
||||
|
@ -248,6 +276,100 @@ def call(Map params = [:], Closure closure) {
|
|||
}
|
||||
}
|
||||
|
||||
// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace
|
||||
// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh
|
||||
def withCiTaskQueue(Map options = [:], Closure closure) {
  // Runs once in each queue process before it starts pulling tasks.
  // This can't use runbld, because it expects the source to be there, which isn't yet.
  def prepareWorkspace = {
    bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process")
  }

  // Caller-supplied options override the defaults (24-way parallelism, workspace setup).
  withTaskQueue([parallel: 24, setup: prepareWorkspace] + options) {
    closure.call()
  }
}
|
||||
|
||||
// Wraps a runbld invocation of `script` (labelled `description`) in the
// functional-test environment, returned as a closure suitable for queueing.
def scriptTask(description, script) {
  return {
    withFunctionalTestEnv {
      runbld(script, description)
    }
  }
}
|
||||
|
||||
// Same contract as scriptTask(), but the returned task executes inside the
// kibana-ci Docker image (see buildDocker()/withDocker()).
def scriptTaskDocker(description, script) {
  def task = scriptTask(description, script)
  return {
    withDocker(task)
  }
}
|
||||
|
||||
// Builds the kibana-ci Docker image used to isolate test tasks.
// runbld and bash_standard_lib.sh are copied from the Jenkins worker into the
// build context so the image can run the same tooling as the host
// (the .ci/Dockerfile COPYs bash_standard_lib.sh).
def buildDocker() {
  sh(
    script: """
      cp /usr/local/bin/runbld .ci/
      cp /usr/local/bin/bash_standard_lib.sh .ci/
      cd .ci
      docker build -t kibana-ci -f ./Dockerfile .
    """,
    label: 'Build CI Docker image'
  )
}
|
||||
|
||||
// Runs the given closure inside the kibana-ci image built by buildDocker().
// Mounts the runbld config (read-only), the Jenkins home, and the ramdisk
// workspace so paths resolve the same as on the host; caps resources so
// several containers can share one worker.
def withDocker(Closure closure) {
  docker
    .image('kibana-ci')
    .inside(
      "-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4",
      closure
    )
}
|
||||
|
||||
// Builds the OSS test/example plugins; several integration suites need these
// built before the parallel workspaces are set up.
def buildOssPlugins() {
  def script = './test/scripts/jenkins_build_plugins.sh'
  runbld(script, 'Build OSS Plugins')
}
|
||||
|
||||
// Builds the X-Pack test/example plugins; counterpart of buildOssPlugins().
def buildXpackPlugins() {
  def script = './test/scripts/jenkins_xpack_build_plugins.sh'
  runbld(script, 'Build X-Pack Plugins')
}
|
||||
|
||||
// Provisions a CI worker with a task queue, builds the Docker image and the
// OSS/X-Pack plugins up-front in parallel, then invokes `closure` to let the
// caller queue its tasks. Worker settings can be overridden via params.worker.
def withTasks(Map params = [worker: [:]], Closure closure) {
  catchErrors {
    // Defaults: xxl worker on a ramdisk; caller-supplied worker options win.
    def config = [name: 'ci-worker', size: 'xxl', ramDisk: true] + (params.worker ?: [:])

    workers.ci(config) {
      withCiTaskQueue(parallel: 24) {
        parallel([
          docker: {
            // Image builds occasionally flake (network pulls); one retry.
            retry(2) {
              buildDocker()
            }
          },

          // There are integration tests etc that require the plugins to be built first, so let's go ahead and build them before set up the parallel workspaces
          ossPlugins: { buildOssPlugins() },
          xpackPlugins: { buildXpackPlugins() },
        ])

        // catchErrors so a failing task still lets the queue drain/report.
        catchErrors {
          closure()
        }
      }
    }
  }
}
|
||||
|
||||
// Queues every CI task group (static checks, lint, unit/integration tests,
// and the OSS/X-Pack functional suites) on a single task-queue worker.
def allCiTasks() {
  withTasks {
    tasks.check()
    tasks.lint()
    tasks.test()
    tasks.functionalOss()
    tasks.functionalXpack()
  }
}
|
||||
|
||||
def pipelineLibraryTests() {
|
||||
whenChanged(['vars/', '.ci/pipeline-library/']) {
|
||||
workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
|
||||
|
@ -258,5 +380,4 @@ def pipelineLibraryTests() {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
return this
|
||||
|
|
5
vars/task.groovy
Normal file
5
vars/task.groovy
Normal file
|
@ -0,0 +1,5 @@
|
|||
// task(closure): queues a single closure on the active withTaskQueue scope.
def call(Closure closure) {
  withTaskQueue.addTask(closure)
}
|
||||
|
||||
return this
|
118
vars/tasks.groovy
Normal file
118
vars/tasks.groovy
Normal file
|
@ -0,0 +1,118 @@
|
|||
// tasks(closures): queues a list of closures on the active withTaskQueue scope.
def call(List<Closure> closures) {
  withTaskQueue.addTasks(closures)
}
|
||||
|
||||
// Queues the quick, mostly-static check tasks (type checks, licenses, i18n, etc.).
def check() {
  // Description -> script; map literal preserves insertion order.
  def checks = [
    'Check TypeScript Projects': 'test/scripts/checks/ts_projects.sh',
    'Check Doc API Changes': 'test/scripts/checks/doc_api_changes.sh',
    'Check Types': 'test/scripts/checks/type_check.sh',
    'Check i18n': 'test/scripts/checks/i18n.sh',
    'Check File Casing': 'test/scripts/checks/file_casing.sh',
    'Check Lockfile Symlinks': 'test/scripts/checks/lock_file_symlinks.sh',
    'Check Licenses': 'test/scripts/checks/licenses.sh',
    'Verify Dependency Versions': 'test/scripts/checks/verify_dependency_versions.sh',
    'Verify NOTICE': 'test/scripts/checks/verify_notice.sh',
    'Test Projects': 'test/scripts/checks/test_projects.sh',
    'Test Hardening': 'test/scripts/checks/test_hardening.sh',
  ]

  tasks(checks.collect { description, script -> kibanaPipeline.scriptTask(description, script) })
}
|
||||
|
||||
// Queues the lint tasks (eslint and sasslint).
def lint() {
  def linters = [
    'Lint: eslint': 'test/scripts/lint/eslint.sh',
    'Lint: sasslint': 'test/scripts/lint/sasslint.sh',
  ]

  tasks(linters.collect { description, script -> kibanaPipeline.scriptTask(description, script) })
}
|
||||
|
||||
// Queues the unit/integration test tasks.
def test() {
  // These 4 tasks require isolation because of hard-coded, conflicting ports
  // and such, so they run inside the kibana-ci Docker image.
  def isolated = [
    'Jest Integration Tests': 'test/scripts/test/jest_integration.sh',
    'Mocha Tests': 'test/scripts/test/mocha.sh',
    'Karma CI Tests': 'test/scripts/test/karma_ci.sh',
    'X-Pack Karma Tests': 'test/scripts/test/xpack_karma.sh',
  ]

  // The rest can share the worker directly.
  def shared = [
    'Jest Unit Tests': 'test/scripts/test/jest_unit.sh',
    'API Integration Tests': 'test/scripts/test/api_integration.sh',
    'X-Pack SIEM cyclic dependency': 'test/scripts/test/xpack_siem_cyclic_dependency.sh',
    'X-Pack List cyclic dependency': 'test/scripts/test/xpack_list_cyclic_dependency.sh',
    'X-Pack Jest Unit Tests': 'test/scripts/test/xpack_jest_unit.sh',
  ]

  tasks(
    isolated.collect { description, script -> kibanaPipeline.scriptTaskDocker(description, script) } +
    shared.collect { description, script -> kibanaPipeline.scriptTask(description, script) }
  )
}
|
||||
|
||||
// Builds OSS Kibana and queues the OSS functional test suites.
// Suites can be toggled via `params` flags (ciGroups, firefox, accessibility,
// pluginFunctional, visualRegression).
def functionalOss(Map params = [:]) {
  // NOTE(review): `params ?: [...]` means any non-empty params map REPLACES
  // the defaults entirely (e.g. [firefox: false] also disables ciGroups etc.,
  // since the other flags become null). Confirm callers rely on this before
  // changing it to a merge.
  def config = params ?: [ciGroups: true, firefox: true, accessibility: true, pluginFunctional: true, visualRegression: false]

  task {
    // Build first; KBN_OPTIMIZER_MAX_WORKERS=6 caps optimizer parallelism
    // so other queue tasks can run alongside the build.
    kibanaPipeline.buildOss(6)

    if (config.ciGroups) {
      def ciGroups = 1..12
      tasks(ciGroups.collect { kibanaPipeline.ossCiGroupProcess(it) })
    }

    if (config.firefox) {
      task(kibanaPipeline.functionalTestProcess('oss-firefox', './test/scripts/jenkins_firefox_smoke.sh'))
    }

    if (config.accessibility) {
      task(kibanaPipeline.functionalTestProcess('oss-accessibility', './test/scripts/jenkins_accessibility.sh'))
    }

    if (config.pluginFunctional) {
      task(kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh'))
    }

    if (config.visualRegression) {
      task(kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh'))
    }
  }
}
|
||||
|
||||
// Builds X-Pack Kibana and queues the X-Pack functional test suites.
// Suites can be toggled via `params` flags.
def functionalXpack(Map params = [:]) {
  // NOTE(review): `params ?: [...]` means any non-empty params map REPLACES
  // the defaults entirely (flags not passed become null/falsy). Confirm
  // callers rely on this before changing it to a merge.
  def config = params ?: [
    ciGroups: true,
    firefox: true,
    accessibility: true,
    pluginFunctional: true,
    savedObjectsFieldMetrics:true,
    pageLoadMetrics: false,
    visualRegression: false,
  ]

  task {
    // Build first; KBN_OPTIMIZER_MAX_WORKERS=10 caps optimizer parallelism.
    kibanaPipeline.buildXpack(10)

    if (config.ciGroups) {
      def ciGroups = 1..10
      tasks(ciGroups.collect { kibanaPipeline.xpackCiGroupProcess(it) })
    }

    if (config.firefox) {
      task(kibanaPipeline.functionalTestProcess('xpack-firefox', './test/scripts/jenkins_xpack_firefox_smoke.sh'))
    }

    if (config.accessibility) {
      task(kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'))
    }

    if (config.visualRegression) {
      task(kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'))
    }

    if (config.pageLoadMetrics) {
      task(kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh'))
    }

    if (config.savedObjectsFieldMetrics) {
      task(kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'))
    }

    // Cypress suite only runs when security_solution code actually changed.
    whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/']) {
      task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh'))
    }
  }
}
|
||||
|
||||
return this
|
154
vars/withTaskQueue.groovy
Normal file
154
vars/withTaskQueue.groovy
Normal file
|
@ -0,0 +1,154 @@
|
|||
import groovy.transform.Field
|
||||
|
||||
public static @Field TASK_QUEUES = [:]
|
||||
public static @Field TASK_QUEUES_COUNTER = 0
|
||||
|
||||
/**
|
||||
withTaskQueue creates a queue of "tasks" (just plain closures to execute), and executes them with your desired level of concurrency.
|
||||
This way, you can define, for example, 40 things that need to execute, then only allow 10 of them to execute at once.
|
||||
|
||||
Each "process" will execute in a separate, unique, empty directory.
|
||||
If you want each process to have a bootstrapped kibana repo, check out kibanaPipeline.withCiTaskQueue
|
||||
|
||||
Using the queue currently requires an agent/worker.
|
||||
|
||||
Usage:
|
||||
|
||||
withTaskQueue(parallel: 10) {
|
||||
task { print "This is a task" }
|
||||
|
||||
// This is the same as calling task() multiple times
|
||||
tasks([ { print "Another task" }, { print "And another task" } ])
|
||||
|
||||
// Tasks can queue up subsequent tasks
|
||||
task {
|
||||
buildThing()
|
||||
task { print "I depend on buildThing()" }
|
||||
}
|
||||
}
|
||||
|
||||
You can also define a setup task that each process should execute one time before executing tasks:
|
||||
withTaskQueue(parallel: 10, setup: { sh "my-setup-scrupt.sh" }) {
|
||||
...
|
||||
}
|
||||
|
||||
*/
|
||||
// Entry point: creates a task queue, lets `closure` seed it, then runs up to
// config.parallel worker processes that drain it. See the file header doc.
def call(Map options = [:], Closure closure) {
  def config = [ parallel: 10 ] + options
  def counter = ++TASK_QUEUES_COUNTER

  // We're basically abusing withEnv() to create a "scope" for all steps inside of a withTaskQueue block
  // This way, we could have multiple task queue instances in the same pipeline
  withEnv(["TASK_QUEUE_ID=${counter}"]) {
    withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID] = [
      tasks: [],
      // Watch-file used by taskSleep()/taskNotify() to signal queue changes
      tmpFile: sh(script: 'mktemp', returnStdout: true).trim()
    ]

    // Let the caller queue the initial set of tasks.
    closure.call()

    def processesExecuting = 0
    def processes = [:]
    def iterationId = 0

    for(def i = 1; i <= config.parallel; i++) {
      def j = i // capture the loop variable for the closure below

      processes["task-queue-process-${j}"] = {
        catchErrors {
          withEnv([
            "TASK_QUEUE_PROCESS_ID=${j}",
            "TASK_QUEUE_ITERATION_ID=${++iterationId}"
          ]) {
            // Each process runs in its own separate, unique directory.
            dir("${WORKSPACE}/parallel/${j}/kibana") {
              if (config.setup) {
                config.setup.call(j)
              }

              def isDone = false
              while(!isDone) { // TODO some kind of timeout?
                catchErrors {
                  if (!getTasks().isEmpty()) {
                    processesExecuting++
                    catchErrors {
                      def task
                      try {
                        task = getTasks().pop()
                      } catch (java.util.NoSuchElementException ex) {
                        // Another process drained the queue between isEmpty() and pop()
                        return
                      }

                      task.call()
                    }
                    processesExecuting--
                    // If a task finishes, and no new tasks were queued up, and nothing else is executing
                    // Then all of the processes should wake up and exit
                    if (processesExecuting < 1 && getTasks().isEmpty()) {
                      taskNotify()
                    }
                    return
                  }

                  if (processesExecuting > 0) {
                    // Queue is empty but others are still working and might
                    // queue more tasks; wait for a watch-file signal.
                    taskSleep()
                    return
                  }

                  // Queue is empty, no processes are executing
                  isDone = true
                }
              }
            }
          }
        }
      }
    }
    parallel(processes)
  }
}
|
||||
|
||||
// If we sleep in a loop using Groovy code, Pipeline Steps is flooded with Sleep steps
|
||||
// So, instead, we just watch a file and `touch` it whenever something happens that could modify the queue
|
||||
// There's a 20 minute timeout just in case something goes wrong,
|
||||
// in which case this method will get called again if the process is actually supposed to be waiting.
|
||||
// Blocks until the queue's watch file is touched (see taskNotify()), polling
// its mtime every 5s for up to 20 minutes (240 iterations).
def taskSleep() {
  sh(script: """#!/bin/bash
    TIMESTAMP=\$(date '+%s' -d "0 seconds ago")
    for (( i=1; i<=240; i++ ))
    do
      if [ "\$(stat -c %Y '${getTmpFile()}')" -ge "\$TIMESTAMP" ]
      then
        break
      else
        sleep 5
        if [[ \$i == 240 ]]; then
          echo "Waited for new tasks for 20 minutes, exiting in case something went wrong"
        fi
      fi
    done
  """, label: "Waiting for new tasks...")
}
|
||||
|
||||
// Used to let the task queue processes know that either a new task has been queued up, or work is complete
|
||||
def taskNotify() {
  // touch bumps the watch file's mtime, which wakes taskSleep()'s stat poll
  sh "touch '${getTmpFile()}'"
}
|
||||
|
||||
// Returns the task list of the queue scoped to this withTaskQueue block
// (scoping is done via the TASK_QUEUE_ID env var).
def getTasks() {
  def queue = withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID]
  return queue.tasks
}
|
||||
|
||||
// Returns the watch-file path of the queue scoped to this withTaskQueue block.
def getTmpFile() {
  def queue = withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID]
  return queue.tmpFile
}
|
||||
|
||||
// Queues a single task and wakes any processes sleeping on the watch file.
def addTask(Closure closure) {
  getTasks().add(closure)
  taskNotify()
}
|
||||
|
||||
// Queues several tasks at once, then wakes sleeping processes.
def addTasks(List<Closure> closures) {
  // NOTE(review): tasks are pushed in reverse, which combined with how call()
  // removes them via pop() is presumably meant to preserve the given order —
  // this depends on the List.pop() semantics of the Groovy runtime in use; verify.
  closures.reverse().each {
    getTasks() << it
  }
  taskNotify()
}
|
|
@ -13,6 +13,8 @@ def label(size) {
|
|||
return 'docker && tests-l'
|
||||
case 'xl':
|
||||
return 'docker && tests-xl'
|
||||
case 'xl-highmem':
|
||||
return 'docker && tests-xl-highmem'
|
||||
case 'xxl':
|
||||
return 'docker && tests-xxl'
|
||||
}
|
||||
|
@ -55,6 +57,11 @@ def base(Map params, Closure closure) {
|
|||
}
|
||||
}
|
||||
|
||||
sh(
|
||||
script: "mkdir -p ${env.WORKSPACE}/tmp",
|
||||
label: "Create custom temp directory"
|
||||
)
|
||||
|
||||
def checkoutInfo = [:]
|
||||
|
||||
if (config.scm) {
|
||||
|
@ -89,6 +96,7 @@ def base(Map params, Closure closure) {
|
|||
"PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
|
||||
"TEST_BROWSER_HEADLESS=1",
|
||||
"GIT_BRANCH=${checkoutInfo.branch}",
|
||||
"TMPDIR=${env.WORKSPACE}/tmp", // For Chrome and anything else that respects it
|
||||
]) {
|
||||
withCredentials([
|
||||
string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
|
||||
|
@ -167,7 +175,9 @@ def parallelProcesses(Map params) {
|
|||
sleep(delay)
|
||||
}
|
||||
|
||||
processClosure(processNumber)
|
||||
withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}"]) {
|
||||
processClosure()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import moment from 'moment';
|
||||
import 'moment-timezone';
|
||||
|
@ -76,6 +77,12 @@ import { RenderedElement } from '../shareable_runtime/components/rendered_elemen
|
|||
jest.mock('../shareable_runtime/components/rendered_element');
|
||||
RenderedElement.mockImplementation(() => 'RenderedElement');
|
||||
|
||||
// Some of the code requires that this directory exists, but the tests don't actually require any css to be present
|
||||
const cssDir = path.resolve(__dirname, '../../../../built_assets/css');
|
||||
if (!fs.existsSync(cssDir)) {
|
||||
fs.mkdirSync(cssDir, { recursive: true });
|
||||
}
|
||||
|
||||
addSerializer(styleSheetSerializer);
|
||||
|
||||
// Initialize Storyshots and build the Jest Snapshots
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue