[6.8] Jenkins pipeline with parallel cigroups (#45285) (#45599)

* Jenkins pipeline with parallel cigroups (#45285)

* Pipeline

* WIP some work for parallelization with ciGroups

* Fix xpack kibana install dir, and add some debugging

* Attempt to quick fix a few tests

* Revert "Revert "Revert "[ci] compress jobs for CI stability" (#44584)""

This reverts commit 078ac2897f.

* Recombine test groups, and try runbld again

* Mostly cleanup, and fix failed_tests reporting to hopefully work for both pipeline and non-pipeline

* Fix typo in shell script

* Remove some debug code

* Add support for changing es transport.port during testing via TEST_ES_TRANSPORT_PORT

* Fix test that uses hard-coded es transport port and add it back in to parallel groups

* Disable checks reporter again for now

* Set env var for TEST_ES_TRANSPORT_PORT in pipeline

* Update Jenkinsfile for shorter testrunner labels

* Fix another hard-coded transport port

* Fix a new test with hard-coded URLs

* Jenkinsfile cleanup and fix one of the groups

* Fix double slash

* Testing vault credentials on jenkins server

* Add a non-existent credential

* Revert "Add a non-existent credential"

This reverts commit 0dc234c465a5483b1a994cb510a182fef766e9cc.

* Try github-checks-reporter again

* github-checks-reporter should only run for elastic/kibana, forks won't work

* Clean up some debug code

* Changing names around to try to make BlueOcean UI a little better

* Add more stages

* Make some changes to stage structure to mirror a nested example from CloudBees

* Handle TODOs, and some cleanup in Jenkinsfile

* Pass GIT_BRANCH when started without GHPRB, fix branch check

* Fix mailer problem and add code that ensures all tests are in cigroups back in

* Test adding worker/job name to junit report paths

* Remove some duplication from ci_setup scripts

* Fix unit test that uses junit path

* Don't reinstall node every time setup_env is run

* Fix yarn install logic

* Fix another unit test that uses junit output dir

* Download latest ES snapshot after kibana builds

* Make sure junit reports are always processed

* Add two failing tests for testing purposes

* Add support to Jenkinsfile for kibana build e-mails

* Remove some debug code for email sending

* Change JOB env handling in junit paths and move it to a sub-directory

* Revert "Add two failing tests for testing purposes"

This reverts commit 5715203e26922a93483feb0ebb8bb3fdcc3daf8c.

* Fix junit report path in test

* Don't send kibana emails on build abort

* Address PR feedback, formatting and use built-in url formatting library

* Fix path formatting for functional test

* Add email sending back in to Jenkinsfile

* Fix another unit test with path problem

(cherry picked from commit 27d23c4184)

* remove reference to extract_bootstrap_cache.sh

* remove extra jobs from Jenkinsfile

* increment leadfoot server port for parallel workers

* add worker index to port number, rather than concatenating with it

* well, for some reason the PR job wasn't triggered for 351eaaf...

* huh, PR job wasn't triggered for 1b99c69 either...
Spencer committed 2019-09-13 11:17:16 -07:00 (committed by GitHub)
commit 2f67e77760, parent d786bac6d9
22 changed files with 579 additions and 178 deletions

Jenkinsfile vendored Normal file (260 additions)

@@ -0,0 +1,260 @@
#!/bin/groovy
properties([
durabilityHint('PERFORMANCE_OPTIMIZED'),
])
stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
timeout(time: 180, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
parallel([
'kibana-intake-agent': legacyJobRunner('kibana-intake'),
'x-pack-intake-agent': legacyJobRunner('x-pack-intake'),
'kibana-oss-agent': withWorkers('kibana-oss-tests', { buildOss() }, [
'oss-ciGroup1': getOssCiGroupWorker(1),
'oss-ciGroup2': getOssCiGroupWorker(2),
'oss-ciGroup3': getOssCiGroupWorker(3),
'oss-ciGroup4': getOssCiGroupWorker(4),
'oss-ciGroup5': getOssCiGroupWorker(5),
'oss-ciGroup6': getOssCiGroupWorker(6),
'oss-ciGroup7': getOssCiGroupWorker(7),
'oss-ciGroup8': getOssCiGroupWorker(8),
'oss-ciGroup9': getOssCiGroupWorker(9),
'oss-ciGroup10': getOssCiGroupWorker(10),
'oss-ciGroup11': getOssCiGroupWorker(11),
'oss-ciGroup12': getOssCiGroupWorker(12),
]),
'kibana-xpack-agent': withWorkers('kibana-xpack-tests', { buildXpack() }, [
'xpack-ciGroup1': getXpackCiGroupWorker(1),
'xpack-ciGroup2': getXpackCiGroupWorker(2),
'xpack-ciGroup3': getXpackCiGroupWorker(3),
'xpack-ciGroup4': getXpackCiGroupWorker(4),
'xpack-ciGroup5': getXpackCiGroupWorker(5),
'xpack-ciGroup6': getXpackCiGroupWorker(6),
]),
])
}
node('flyweight') {
sendMail()
}
}
}
}
}
def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) {
return {
jobRunner('tests-xl') {
try {
doSetup()
preWorkerClosure()
def nextWorker = 1
def worker = { workerClosure ->
def workerNumber = nextWorker
nextWorker++
return {
workerClosure(workerNumber)
}
}
def workers = [:]
workerClosures.each { workerName, workerClosure ->
workers[workerName] = worker(workerClosure)
}
parallel(workers)
} finally {
catchError {
uploadAllGcsArtifacts(name)
}
catchError {
publishJunit()
}
}
}
}
}
def getPostBuildWorker(name, closure) {
return { workerNumber ->
def kibanaPort = "61${workerNumber}1"
def esPort = "61${workerNumber}2"
def esTransportPort = "61${workerNumber}3"
withEnv([
"PARALLEL_PIPELINE_WORKER_INDEX=${workerNumber}",
"TEST_KIBANA_HOST=localhost",
"TEST_KIBANA_PORT=${kibanaPort}",
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
"TEST_ES_TRANSPORT_PORT=${esTransportPort}",
"IS_PIPELINE_JOB=1",
]) {
closure()
}
}
}
def getOssCiGroupWorker(ciGroup) {
return getPostBuildWorker("ciGroup" + ciGroup, {
withEnv([
"CI_GROUP=${ciGroup}",
"JOB=kibana-ciGroup${ciGroup}",
]) {
runbld "./test/scripts/jenkins_ci_group.sh"
}
})
}
def getXpackCiGroupWorker(ciGroup) {
return getPostBuildWorker("xpack-ciGroup" + ciGroup, {
withEnv([
"CI_GROUP=${ciGroup}",
"JOB=xpack-kibana-ciGroup${ciGroup}",
]) {
runbld "./test/scripts/jenkins_xpack_ci_group.sh"
}
})
}
def legacyJobRunner(name) {
return {
parallel([
"${name}": {
withEnv([
"JOB=${name}",
]) {
jobRunner('linux && immutable') {
try {
runbld '.ci/run.sh'
} finally {
catchError {
uploadAllGcsArtifacts(name)
}
catchError {
publishJunit()
}
}
}
}
}
])
}
}
def jobRunner(label, closure) {
node(label) {
def scmVars = checkout scm
withEnv([
"CI=true",
"HOME=${env.JENKINS_HOME}",
"PR_SOURCE_BRANCH=${env.ghprbSourceBranch}",
"PR_TARGET_BRANCH=${env.ghprbTargetBranch}",
"PR_AUTHOR=${env.ghprbPullAuthorLogin}",
"TEST_BROWSER_HEADLESS=1",
"GIT_BRANCH=${scmVars.GIT_BRANCH}",
]) {
withCredentials([
string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
]) {
// scm is configured to check out to the ./kibana directory
dir('kibana') {
closure()
}
}
}
}
}
// TODO what should happen if GCS, Junit, or email publishing fails? Unstable build? Failed build?
def uploadGcsArtifact(workerName, pattern) {
def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" // TODO
// def storageLocation = "gs://kibana-pipeline-testing/jobs/pipeline-test/${BUILD_NUMBER}/${workerName}"
googleStorageUpload(
credentialsId: 'kibana-ci-gcs-plugin',
bucket: storageLocation,
pattern: pattern,
sharedPublicly: true,
showInline: true,
)
}
def uploadAllGcsArtifacts(workerName) {
def ARTIFACT_PATTERNS = [
'target/kibana-*',
'target/junit/**/*',
'test/**/screenshots/**/*.png',
'test/functional/failure_debug/html/*.html',
'x-pack/test/**/screenshots/**/*.png',
'x-pack/test/functional/failure_debug/html/*.html',
'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
]
ARTIFACT_PATTERNS.each { pattern ->
uploadGcsArtifact(workerName, pattern)
}
}
def publishJunit() {
junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
}
def sendMail() {
sendInfraMail()
sendKibanaMail()
}
def sendInfraMail() {
catchError {
step([
$class: 'Mailer',
notifyEveryUnstableBuild: true,
recipients: 'infra-root+build@elastic.co',
sendToIndividuals: false
])
}
}
def sendKibanaMail() {
catchError {
if(params.NOTIFY_ON_FAILURE && currentBuild.result != 'SUCCESS' && currentBuild.result != 'ABORTED') {
emailext(
// to: 'build-kibana@elastic.co',
to: 'brian.seeders@elastic.co', // TODO switch this out after testing
subject: "${env.PROJECT_NAME} - Build # ${env.BUILD_NUMBER} - ${currentBuild.result}",
body: '${SCRIPT,template="groovy-html.template"}',
mimeType: 'text/html',
)
}
}
}
def runbld(script) {
sh '#!/usr/local/bin/runbld\n' + script
}
def bash(script) {
sh "#!/bin/bash -x\n${script}"
}
def doSetup() {
runbld "./test/scripts/jenkins_setup.sh"
}
def buildOss() {
runbld "./test/scripts/jenkins_build_kibana.sh"
}
def buildXpack() {
runbld "./test/scripts/jenkins_xpack_build_kibana.sh"
}
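
A note on the port scheme above: each parallel worker derives its service ports from its index, so worker N uses 61N1 for Kibana, 61N2 for the Elasticsearch HTTP port, and 61N3 for the Elasticsearch transport port, which keeps up to nine concurrent workers from colliding on a single agent. A minimal bash sketch of the same derivation (variable names are illustrative, not part of the pipeline):

worker=3                           # PARALLEL_PIPELINE_WORKER_INDEX
kibanaPort="61${worker}1"          # 6131
esPort="61${worker}2"              # 6132
esTransportPort="61${worker}3"     # 6133
echo "worker ${worker}: kibana=${kibanaPort} es=${esPort} transport=${esTransportPort}"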


@@ -51,6 +51,7 @@ export function createEsTestCluster(options = {}) {
license,
basePath,
};
const transportPort = esTestConfig.getTransportPort();
const cluster = new Cluster(log);
@@ -84,6 +85,7 @@ export function createEsTestCluster(options = {}) {
`cluster.name=${clusterName}`,
`http.port=${port}`,
'discovery.type=single-node',
`transport.port=${transportPort}`,
...esArgs,
],
});


@@ -38,6 +38,10 @@ export const esTestConfig = new class EsTestConfig {
return process.env.TEST_ES_FROM || 'snapshot';
}
getTransportPort() {
return process.env.TEST_ES_TRANSPORT_PORT || '9300-9400';
}
getUrlParts() {
// Allow setting one complete TEST_ES_URL for Es like https://elastic:changeme@myCloudInstance:9200
if (process.env.TEST_ES_URL) {
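
getTransportPort() falls back to the range '9300-9400', so local runs keep Elasticsearch's stock transport-port scanning while CI pins one exact port per worker. A usage sketch, assuming the repo's usual functional test server entry point:

export TEST_ES_TRANSPORT_PORT=6113   # worker 1's transport port under the 61N3 scheme
node scripts/functional_tests_server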


@@ -2,44 +2,7 @@
set -e
dir="$(pwd)"
cacheDir="${CACHE_DIR:-"$HOME/.kibana"}"
RED='\033[0;31m'
C_RESET='\033[0m' # Reset color
###
### Since the Jenkins logging output collector doesn't look like a TTY
### Node/Chalk and other color libs disable their color output. But Jenkins
### can handle color fine, so this forces https://github.com/chalk/supports-color
### to enable color support in Chalk and other related modules.
###
export FORCE_COLOR=1
### To run the test suite against a different version of Elasticsearch than
### the default, uncomment this line, and change the version.
# export ES_SNAPSHOT_VERSION=7.0.0
###
### check that we seem to be in a kibana project
###
if [ -f "$dir/package.json" ] && [ -f "$dir/.node-version" ]; then
echo "Setting up node.js and yarn in $dir"
else
echo "${RED}src/dev/ci_setup/setup.sh must be run within a kibana repo${C_RESET}"
exit 1
fi
export KIBANA_DIR="$dir"
export XPACK_DIR="$KIBANA_DIR/x-pack"
export NODE_OPTIONS="--max_old_space_size=2048"
parentDir="$(cd "$KIBANA_DIR/.."; pwd)"
export PARENT_DIR="$parentDir"
kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
export KIBANA_PKG_BRANCH="$kbnBranch"
source src/dev/ci_setup/setup_env.sh true
echo " -- KIBANA_DIR='$KIBANA_DIR'"
echo " -- XPACK_DIR='$XPACK_DIR'"
@@ -47,78 +10,6 @@ echo " -- PARENT_DIR='$PARENT_DIR'"
echo " -- NODE_OPTIONS='$NODE_OPTIONS'"
echo " -- KIBANA_PKG_BRANCH='$KIBANA_PKG_BRANCH'"
###
### download node
###
UNAME=$(uname)
OS="linux"
if [[ "$UNAME" = *"MINGW64_NT"* ]]; then
OS="win"
fi
echo " -- Running on OS: $OS"
nodeVersion="$(cat "$dir/.node-version")"
nodeDir="$cacheDir/node/$nodeVersion"
if [[ "$OS" == "win" ]]; then
nodeBin="$HOME/node"
nodeUrl="https://nodejs.org/dist/v$nodeVersion/node-v$nodeVersion-win-x64.zip"
else
nodeBin="$nodeDir/bin"
nodeUrl="https://nodejs.org/dist/v$nodeVersion/node-v$nodeVersion-linux-x64.tar.gz"
fi
echo " -- node: version=v${nodeVersion} dir=$nodeDir"
echo " -- setting up node.js"
if [ -x "$nodeBin/node" ] && [ "$("$nodeBin/node" --version)" == "v$nodeVersion" ]; then
echo " -- reusing node.js install"
else
if [ -d "$nodeDir" ]; then
echo " -- clearing previous node.js install"
rm -rf "$nodeDir"
fi
echo " -- downloading node.js from $nodeUrl"
mkdir -p "$nodeDir"
if [[ "$OS" == "win" ]]; then
nodePkg="$nodeDir/${nodeUrl##*/}"
curl --silent -o "$nodePkg" "$nodeUrl"
unzip -qo "$nodePkg" -d "$nodeDir"
mv "${nodePkg%.*}" "$nodeBin"
else
curl --silent "$nodeUrl" | tar -xz -C "$nodeDir" --strip-components=1
fi
fi
###
### "install" node into this shell
###
export PATH="$nodeBin:$PATH"
hash -r
###
### downloading yarn
###
yarnVersion="$(node -e "console.log(String(require('./package.json').engines.yarn || '').replace(/^[^\d]+/,''))")"
npm install -g "yarn@^${yarnVersion}"
###
### setup yarn offline cache
###
yarn config set yarn-offline-mirror "$cacheDir/yarn-offline-cache"
###
### "install" yarn into this shell
###
yarnGlobalDir="$(yarn global bin)"
export PATH="$PATH:$yarnGlobalDir"
hash -r
# use a proxy to fetch chromedriver/geckodriver asset
export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
###
### install dependencies
###


@@ -0,0 +1,138 @@
#!/usr/bin/env bash
set -e
installNode=$1
dir="$(pwd)"
cacheDir="${CACHE_DIR:-"$HOME/.kibana"}"
RED='\033[0;31m'
C_RESET='\033[0m' # Reset color
###
### Since the Jenkins logging output collector doesn't look like a TTY
### Node/Chalk and other color libs disable their color output. But Jenkins
### can handle color fine, so this forces https://github.com/chalk/supports-color
### to enable color support in Chalk and other related modules.
###
export FORCE_COLOR=1
###
### check that we seem to be in a kibana project
###
if [ -f "$dir/package.json" ] && [ -f "$dir/.node-version" ]; then
echo "Setting up node.js and yarn in $dir"
else
echo "${RED}src/dev/ci_setup/setup.sh must be run within a kibana repo${C_RESET}"
exit 1
fi
export KIBANA_DIR="$dir"
export XPACK_DIR="$KIBANA_DIR/x-pack"
parentDir="$(cd "$KIBANA_DIR/.."; pwd)"
export PARENT_DIR="$parentDir"
kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
export KIBANA_PKG_BRANCH="$kbnBranch"
###
### download node
###
UNAME=$(uname)
OS="linux"
if [[ "$UNAME" = *"MINGW64_NT"* ]]; then
OS="win"
fi
echo " -- Running on OS: $OS"
nodeVersion="$(cat "$dir/.node-version")"
nodeDir="$cacheDir/node/$nodeVersion"
if [[ "$OS" == "win" ]]; then
nodeBin="$HOME/node"
nodeUrl="https://nodejs.org/dist/v$nodeVersion/node-v$nodeVersion-win-x64.zip"
else
nodeBin="$nodeDir/bin"
nodeUrl="https://nodejs.org/dist/v$nodeVersion/node-v$nodeVersion-linux-x64.tar.gz"
fi
if [[ "$installNode" == "true" ]]; then
echo " -- node: version=v${nodeVersion} dir=$nodeDir"
echo " -- setting up node.js"
if [ -x "$nodeBin/node" ] && [ "$("$nodeBin/node" --version)" == "v$nodeVersion" ]; then
echo " -- reusing node.js install"
else
if [ -d "$nodeDir" ]; then
echo " -- clearing previous node.js install"
rm -rf "$nodeDir"
fi
echo " -- downloading node.js from $nodeUrl"
mkdir -p "$nodeDir"
if [[ "$OS" == "win" ]]; then
nodePkg="$nodeDir/${nodeUrl##*/}"
curl --silent -o "$nodePkg" "$nodeUrl"
unzip -qo "$nodePkg" -d "$nodeDir"
mv "${nodePkg%.*}" "$nodeBin"
else
curl --silent "$nodeUrl" | tar -xz -C "$nodeDir" --strip-components=1
fi
fi
fi
###
### "install" node into this shell
###
export PATH="$nodeBin:$PATH"
if [[ "$installNode" == "true" || ! $(which yarn) ]]; then
###
### downloading yarn
###
yarnVersion="$(node -e "console.log(String(require('./package.json').engines.yarn || '').replace(/^[^\d]+/,''))")"
npm install -g "yarn@^${yarnVersion}"
fi
###
### setup yarn offline cache
###
yarn config set yarn-offline-mirror "$cacheDir/yarn-offline-cache"
###
### "install" yarn into this shell
###
yarnGlobalDir="$(yarn global bin)"
export PATH="$PATH:$yarnGlobalDir"
# use a proxy to fetch chromedriver/geckodriver asset
export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHECKS_REPORTER_ACTIVE=false
### only run on pr jobs for elastic/kibana, checks-reporter doesn't work for other repos
if [[ "$ghprbPullId" && "$ghprbGhRepository" == 'elastic/kibana' ]] ; then
export CHECKS_REPORTER_ACTIVE=true
fi
###
### Implements github-checks-reporter kill switch when scripts are called from the command line
### $@ - all arguments
###
function checks-reporter-with-killswitch() {
if [ "$CHECKS_REPORTER_ACTIVE" == "true" ] ; then
yarn run github-checks-reporter "$@"
else
arguments=("$@");
"${arguments[@]:1}";
fi
}
export -f checks-reporter-with-killswitch
source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh"


@@ -17,14 +17,26 @@
* under the License.
*/
if (!process.env.JOB_NAME) {
console.log('Unable to determine job name');
const { resolve } = require('path');
// force cwd
process.chdir(resolve(__dirname, '../../..'));
// JOB_NAME is formatted as `elastic+kibana+7.x` in some places and `elastic+kibana+7.x/JOB=kibana-intake,node=immutable` in others
const jobNameSplit = (process.env.JOB_NAME || '').split(/\+|\//);
const branch = jobNameSplit.length >= 3 ? jobNameSplit[2] : process.env.GIT_BRANCH;
const isPr = !!process.env.ghprbPullId;
if (!branch) {
console.log('Unable to determine originating branch from job name or other environment variables');
process.exit(1);
}
const [org, proj, branch] = process.env.JOB_NAME.split('+');
const masterOrVersion = branch === 'master' || branch.match(/^\d+\.(x|\d+)$/);
if (!(org === 'elastic' && proj === 'kibana' && masterOrVersion)) {
const isMasterOrVersion = branch.match(/^(origin\/){0,1}master$/) || branch.match(/^(origin\/){0,1}\d+\.(x|\d+)$/);
if (!isMasterOrVersion || isPr) {
console.log('Failure issues only created on master/version branch jobs');
process.exit(0);
}
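
The new parsing accepts both JOB_NAME shapes by splitting on either delimiter and taking the third token as the branch, falling back to GIT_BRANCH for pipeline-started builds. An equivalent shell sketch (values illustrative):

JOB_NAME='elastic+kibana+7.x/JOB=kibana-intake,node=immutable'
branch="$(awk -F'[+/]' '{print $3}' <<< "$JOB_NAME")"
echo "$branch"   # 7.x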


@@ -33,10 +33,7 @@ const ROOT_DIR = dirname(require.resolve('../../../package.json'));
*/
export default class JestJUnitReporter {
constructor(globalConfig, options = {}) {
const {
reportName = 'Jest Tests',
rootDirectory = ROOT_DIR
} = options;
const { reportName = 'Jest Tests', rootDirectory = ROOT_DIR } = options;
this._reportName = reportName;
this._rootDirectory = rootDirectory;
@@ -62,8 +59,8 @@ export default class JestJUnitReporter {
{ skipNullAttributes: true }
);
const msToIso = ms => ms ? new Date(ms).toISOString().slice(0, -5) : undefined;
const msToSec = ms => ms ? (ms / 1000).toFixed(3) : undefined;
const msToIso = ms => (ms ? new Date(ms).toISOString().slice(0, -5) : undefined);
const msToSec = ms => (ms ? (ms / 1000).toFixed(3) : undefined);
root.att({
name: 'jest',
@@ -83,7 +80,7 @@ export default class JestJUnitReporter {
tests: suite.testResults.length,
failures: suite.numFailedTests,
skipped: suite.numPendingTests,
file: suite.testFilePath
file: suite.testFilePath,
});
// nested in there are the tests in that file
@@ -93,10 +90,10 @@ export default class JestJUnitReporter {
const testEl = suiteEl.ele('testcase', {
classname,
name: [...test.ancestorTitles, test.title].join(' '),
time: msToSec(test.duration)
time: msToSec(test.duration),
});
test.failureMessages.forEach((message) => {
test.failureMessages.forEach(message => {
testEl.ele('failure').dat(escapeCdata(message));
});
@@ -106,12 +103,18 @@ export default class JestJUnitReporter {
});
});
const reportPath = resolve(rootDirectory, `target/junit/TEST-${reportName}.xml`);
const reportPath = resolve(
rootDirectory,
'target/junit',
process.env.JOB || '.',
`TEST-${reportName}.xml`
);
const reportXML = root.end({
pretty: true,
indent: ' ',
newline: '\n',
spacebeforeslash: ''
spacebeforeslash: '',
});
mkdirp.sync(dirname(reportPath));
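
Adding process.env.JOB to the report path gives every worker a private junit directory, so parallel ciGroups on one agent can't overwrite each other's XML, while the Jenkinsfile's target/junit/**/*.xml glob still collects everything. Assuming a hypothetical JOB=kibana-ciGroup3, the layout looks roughly like:

export JOB=kibana-ciGroup3
ls "target/junit/$JOB/"   # e.g. TEST-Jest Tests.xml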


@@ -49,7 +49,10 @@ describe('dev/mocha/junit report generation', () => {
mocha.addFile(resolve(PROJECT_DIR, 'test.js'));
await new Promise(resolve => mocha.run(resolve));
const report = await fcb(cb => parseString(readFileSync(resolve(PROJECT_DIR, 'target/junit/TEST-test.xml')), cb));
const report = await fcb(cb => parseString(
readFileSync(resolve(PROJECT_DIR, 'target/junit', process.env.JOB || '.', 'TEST-test.xml')),
cb
));
// test case results are wrapped in <testsuites></testsuites>
expect(report).to.eql({


@@ -142,7 +142,12 @@ export function setupJUnitReportGeneration(runner, options = {}) {
}
});
const reportPath = resolve(rootDirectory, `target/junit/TEST-${reportName}.xml`);
const reportPath = resolve(
rootDirectory,
'target/junit',
process.env.JOB || '.',
`TEST-${reportName}.xml`
);
const reportXML = builder.end({
pretty: true,
indent: ' ',


@@ -43,6 +43,8 @@ export const IGNORE_FILE_GLOBS = [
'**/{webpackShims,__mocks__}/**/*',
'x-pack/docs/**/*',
'src/dev/tslint/rules/*',
'Jenkinsfile',
'Dockerfile*',
// filename must match language code which requires capital letters
'**/translations/*.json',


@@ -59,11 +59,7 @@ module.exports = function (grunt) {
customLaunchers: {
Chrome_Headless: {
base: 'Chrome',
flags: [
'--headless',
'--disable-gpu',
'--remote-debugging-port=9222',
],
flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222'],
},
},
@@ -71,16 +67,13 @@ module.exports = function (grunt) {
reporters: process.env.CI ? ['dots', 'junit'] : ['progress'],
junitReporter: {
outputFile: resolve(ROOT, 'target/junit/TEST-karma.xml'),
outputFile: resolve(ROOT, 'target/junit', process.env.JOB || '.', 'TEST-karma.xml'),
useBrowserName: false,
nameFormatter: (browser, result) => [
...result.suite,
result.description
].join(' '),
nameFormatter: (browser, result) => [...result.suite, result.description].join(' '),
classNameFormatter: (browser, result) => {
const rootSuite = result.suite[0] || result.description;
return `Browser Unit Tests.${rootSuite.replace(/\./g, '·')}`;
}
},
},
// list of files / patterns to load in the browser
@@ -95,16 +88,16 @@ module.exports = function (grunt) {
proxies: {
'/tests/': 'http://localhost:5610/tests/',
'/bundles/': 'http://localhost:5610/bundles/',
'/built_assets/dlls/': 'http://localhost:5610/built_assets/dlls/'
'/built_assets/dlls/': 'http://localhost:5610/built_assets/dlls/',
},
client: {
mocha: {
reporter: 'html', // change Karma's debug.html to the mocha web reporter
timeout: 10000,
slow: 5000
}
}
slow: 5000,
},
},
},
dev: { singleRun: false },
@@ -113,11 +106,8 @@ module.exports = function (grunt) {
singleRun: true,
reporters: ['coverage'],
coverageReporter: {
reporters: [
{ type: 'html', dir: 'coverage' },
{ type: 'text-summary' },
]
}
reporters: [{ type: 'html', dir: 'coverage' }, { type: 'text-summary' }],
},
},
};
@@ -180,9 +170,9 @@ module.exports = function (grunt) {
`http://localhost:5610/bundles/tests.bundle.js?shards=${TOTAL_CI_SHARDS}&shard_num=${n}`,
'http://localhost:5610/built_assets/dlls/vendors.style.dll.css',
'http://localhost:5610/bundles/tests.style.css'
]
}
'http://localhost:5610/bundles/tests.style.css',
],
},
};
});


@@ -21,7 +21,7 @@ import { resolve } from 'path';
import { getFunctionalTestGroupRunConfigs } from '../function_test_groups';
const { version } = require('../../package.json');
const KIBANA_INSTALL_DIR = `./build/oss/kibana-${version}-SNAPSHOT-${process.platform}-x86_64`;
const KIBANA_INSTALL_DIR = process.env.KIBANA_INSTALL_DIR || `./build/oss/kibana-${version}-SNAPSHOT-${process.platform}-x86_64`;
const KIBANA_BIN_PATH = process.platform.startsWith('win')
? '.\\bin\\kibana.bat'
: './bin/kibana';


@@ -31,7 +31,14 @@ export async function RemoteProvider({ getService }) {
throw new Error(`Unexpected TEST_BROWSER_TYPE "${browserType}". Valid options are ` + possibleBrowsers.join(','));
}
const browserDriverApi = await BrowserDriverApi.factory(log, config.get(browserType + 'driver.url'), browserType);
let browserDriverUrl = config.get(browserType + 'driver.url');
if (process.env.PARALLEL_PIPELINE_WORKER_INDEX) {
const parsedUrl = new URL(browserDriverUrl);
parsedUrl.port = 4444 + Number.parseInt(process.env.PARALLEL_PIPELINE_WORKER_INDEX);
browserDriverUrl = parsedUrl.href;
}
const browserDriverApi = await BrowserDriverApi.factory(log, browserDriverUrl, browserType);
lifecycle.on('cleanup', async () => await browserDriverApi.stop());
await browserDriverApi.start();
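
Per the commit message above, the worker index is added to the default webdriver port rather than concatenated onto it, so worker 2 gets its own chromedriver/geckodriver on 4446. The same arithmetic in bash:

PARALLEL_PIPELINE_WORKER_INDEX=2
echo $((4444 + PARALLEL_PIPELINE_WORKER_INDEX))   # 4446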


@@ -0,0 +1,7 @@
#!/usr/bin/env bash
source src/dev/ci_setup/setup_env.sh
yarn run grunt functionalTests:ensureAllTestsInCiGroup;
node scripts/build --debug --oss
node scripts/es snapshot --license=oss --download-only;


@@ -3,14 +3,25 @@
set -e
trap 'node "$KIBANA_DIR/src/dev/failed_tests/cli"' EXIT
"$(FORCE_COLOR=0 yarn bin)/grunt" functionalTests:ensureAllTestsInCiGroup;
node scripts/build --debug --oss;
if [[ "$IS_PIPELINE_JOB" ]] ; then
source src/dev/ci_setup/setup_env.sh
fi
export TEST_BROWSER_HEADLESS=1
"$(FORCE_COLOR=0 yarn bin)/grunt" "run:functionalTests_ciGroup${CI_GROUP}";
if [[ -z "$IS_PIPELINE_JOB" ]] ; then
yarn run grunt functionalTests:ensureAllTestsInCiGroup;
node scripts/build --debug --oss;
else
installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
destDir=${installDir}-${PARALLEL_PIPELINE_WORKER_INDEX}
cp -R "$installDir" "$destDir"
export KIBANA_INSTALL_DIR="$destDir"
fi
yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}";
if [ "$CI_GROUP" == "1" ]; then
"$(FORCE_COLOR=0 yarn bin)/grunt" run:pluginFunctionalTestsRelease;
yarn run grunt run:pluginFunctionalTestsRelease;
fi

test/scripts/jenkins_setup.sh Executable file (5 additions)

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
source src/dev/ci_setup/load_env_keys.sh
source src/dev/ci_setup/setup.sh
source src/dev/ci_setup/checkout_sibling_es.sh


@@ -0,0 +1,28 @@
#!/usr/bin/env bash
echo " -> building and extracting default Kibana distributable for use in functional tests"
cd "$KIBANA_DIR"
source src/dev/ci_setup/setup_env.sh
echo " -> Ensuring all functional tests are in a ciGroup"
cd "$XPACK_DIR"
node scripts/functional_tests --assert-none-excluded \
--include-tag ciGroup1 \
--include-tag ciGroup2 \
--include-tag ciGroup3 \
--include-tag ciGroup4 \
--include-tag ciGroup5 \
--include-tag ciGroup6 \
--include-tag ciGroup7 \
--include-tag ciGroup8 \
--include-tag ciGroup9 \
--include-tag ciGroup10
cd "$KIBANA_DIR"
node scripts/build --debug --no-oss
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$PARENT_DIR/install/kibana"
mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
node scripts/es snapshot --download-only;


@@ -5,26 +5,50 @@ trap 'node "$KIBANA_DIR/src/dev/failed_tests/cli"' EXIT
export TEST_BROWSER_HEADLESS=1
echo " -> Ensuring all functional tests are in a ciGroup"
cd "$XPACK_DIR"
node scripts/functional_tests --assert-none-excluded \
--include-tag ciGroup1 \
--include-tag ciGroup2 \
--include-tag ciGroup3 \
--include-tag ciGroup4 \
--include-tag ciGroup5 \
--include-tag ciGroup6
if [[ "$IS_PIPELINE_JOB" ]] ; then
source src/dev/ci_setup/setup_env.sh
fi
if [[ -z "$IS_PIPELINE_JOB" ]] ; then
echo " -> Ensuring all functional tests are in a ciGroup"
cd "$XPACK_DIR"
node scripts/functional_tests --assert-none-excluded \
--include-tag ciGroup1 \
--include-tag ciGroup2 \
--include-tag ciGroup3 \
--include-tag ciGroup4 \
--include-tag ciGroup5 \
--include-tag ciGroup6
fi
echo " -> building and extracting default Kibana distributable for use in functional tests"
cd "$KIBANA_DIR"
node scripts/build --debug --no-oss
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$PARENT_DIR/install/kibana"
mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
if [[ -z "$IS_PIPELINE_JOB" ]] ; then
echo " -> building and extracting default Kibana distributable for use in functional tests"
node scripts/build --debug --no-oss
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$PARENT_DIR/install/kibana"
mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
export KIBANA_INSTALL_DIR="$installDir"
else
installDir="$PARENT_DIR/install/kibana"
destDir="${installDir}-${PARALLEL_PIPELINE_WORKER_INDEX}"
cp -R "$installDir" "$destDir"
export KIBANA_INSTALL_DIR="$destDir"
fi
echo " -> Running functional and api tests"
cd "$XPACK_DIR"
node scripts/functional_tests --debug --bail --kibana-install-dir "$installDir" --include-tag "ciGroup$CI_GROUP"
node scripts/functional_tests \
--debug --bail \
--kibana-install-dir "$KIBANA_INSTALL_DIR" \
--include-tag "ciGroup$CI_GROUP"
echo ""
echo ""


@@ -22,8 +22,12 @@ export default function ({ getService }) {
describe('kibana server with ssl', () => {
it('redirects http requests at redirect port to https', async () => {
const host = process.env.TEST_KIBANA_HOST || 'localhost';
const port = process.env.TEST_KIBANA_PORT || '5620';
const url = `https://${host}:${port}/`;
await supertest.get('/')
.expect('location', 'https://localhost:5620/')
.expect('location', url)
.expect(302);
await supertest.get('/')
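
The hard-coded https://localhost:5620/ expectation broke once workers ran Kibana on per-worker ports, so the expected redirect is now derived from the env vars the pipeline already sets. For worker 2 under the 61N1 scheme (hypothetical values):

export TEST_KIBANA_HOST=localhost
export TEST_KIBANA_PORT=6121
echo "https://${TEST_KIBANA_HOST}:${TEST_KIBANA_PORT}/"   # https://localhost:6121/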


@@ -16,6 +16,9 @@ const CLUSTER_NAME = `test-${getRandomString()}`;
export const initClusterHelpers = (supertest) => {
let clusters = [];
// TODO how could this get pulled out of the ES config instead?
const esTransportPort = process.env.TEST_ES_TRANSPORT_PORT ? process.env.TEST_ES_TRANSPORT_PORT.split('-')[0] : '9300';
const addCluster = (name = CLUSTER_NAME) => {
clusters.push(name);
return (
@@ -25,7 +28,7 @@ export const initClusterHelpers = (supertest) => {
.send({
"name": name,
"seeds": [
"localhost:9300"
`localhost:${esTransportPort}`
],
"skipUnavailable": true,
})
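
Because TEST_ES_TRANSPORT_PORT may hold either a single port or a range, the helper seeds the remote cluster with only the first port of the range. The same split in bash:

TEST_ES_TRANSPORT_PORT='9300-9400'
echo "localhost:${TEST_ES_TRANSPORT_PORT%%-*}"   # localhost:9300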


@@ -5,4 +5,3 @@
*/
export const API_BASE_PATH = '/api/remote_clusters';
export const NODE_SEED = 'localhost:9300';


@@ -5,11 +5,14 @@
*/
import expect from 'expect.js';
import { API_BASE_PATH, NODE_SEED } from './constants';
import { API_BASE_PATH } from './constants';
export default function ({ getService }) {
const supertest = getService('supertest');
const esTransportPort = process.env.TEST_ES_TRANSPORT_PORT ? process.env.TEST_ES_TRANSPORT_PORT.split('-')[0] : '9300';
const NODE_SEED = `localhost:${esTransportPort}`;
describe('Remote Clusters', function () {
this.tags(['skipCloud']);