Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00
Parent: 1852db6278
Commit: 0ee6a05aee
15 changed files with 406 additions and 386 deletions
@@ -3,110 +3,91 @@
library 'kibana-pipeline-library'
kibanaLibrary.load() // load from the Jenkins instance

stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
timeout(time: 180, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
kibanaPipeline(timeoutMinutes: 180) {
catchErrors {
withEnv([
'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
]) {
parallel([
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': {
withEnv([
'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
'NODE_ENV=test' // Needed for jest tests only
]) {
parallel([
'kibana-intake-agent': {
withEnv([
'NODE_ENV=test' // Needed for jest tests only
]) {
kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh')()
}
},
'x-pack-intake-agent': {
withEnv([
'NODE_ENV=test' // Needed for jest tests only
]) {
kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh')()
}
},
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent-1': kibanaPipeline.withWorkers('kibana-xpack-tests-1', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
]),
'kibana-xpack-agent-2': kibanaPipeline.withWorkers('kibana-xpack-tests-2', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
]),

'kibana-xpack-agent-3': kibanaPipeline.withWorkers('kibana-xpack-tests-3', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
]),
])
kibanaPipeline.jobRunner('tests-l', false) {
kibanaPipeline.downloadCoverageArtifacts()
kibanaPipeline.bash(
'''
# bootstrap from x-pack folder
source src/dev/ci_setup/setup_env.sh
cd x-pack
yarn kbn bootstrap --prefer-offline
cd ..
# extract archives
mkdir -p /tmp/extracted_coverage
echo extracting intakes
tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-oss-tests
tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-xpack-tests
for i in {1..3}; do
tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests-${i}/kibana-coverage.tar.gz -C /tmp/extracted_coverage
done
# replace path in json files to have valid html report
pwd=$(pwd)
du -sh /tmp/extracted_coverage/target/kibana-coverage/
echo replacing path in json files
for i in {1..9}; do
sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
done
wait
# merge oss & x-pack reports
echo merging coverage reports
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
echo copy mocha reports
mkdir -p target/kibana-coverage/mocha-combined
cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
''',
"run `yarn kbn bootstrap && merge coverage`"
)
sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
}
workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh')()
}
}
kibanaPipeline.sendMail()
},
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
]),
])
workers.base(name: 'coverage-worker', label: 'tests-l', ramDisk: false, bootstrapped: false) {
kibanaPipeline.downloadCoverageArtifacts()
kibanaPipeline.bash(
'''
# bootstrap from x-pack folder
source src/dev/ci_setup/setup_env.sh
cd x-pack
yarn kbn bootstrap --prefer-offline
cd ..
# extract archives
mkdir -p /tmp/extracted_coverage
echo extracting intakes
tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-oss-tests
tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-xpack-tests
tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
# replace path in json files to have valid html report
pwd=$(pwd)
du -sh /tmp/extracted_coverage/target/kibana-coverage/
echo replacing path in json files
for i in {1..9}; do
sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
done
wait
# merge oss & x-pack reports
echo merging coverage reports
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
echo copy mocha reports
mkdir -p target/kibana-coverage/mocha-combined
cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
''',
"run `yarn kbn bootstrap && merge coverage`"
)
sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
}
}
}
kibanaPipeline.sendMail()
}
@@ -21,53 +21,47 @@ def workerFailures = []
currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24)
currentBuild.description = "${params.CI_GROUP}<br />Agents: ${AGENT_COUNT}<br />Executions: ${params.NUMBER_EXECUTIONS}"

stage("Kibana Pipeline") {
timeout(time: 180, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
def agents = [:]
for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
def agentNumberInside = agentNumber
def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
agents["agent-${agentNumber}"] = {
catchError {
print "Agent ${agentNumberInside} - ${agentExecutions} executions"
kibanaPipeline(timeoutMinutes: 180) {
def agents = [:]
for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
def agentNumberInside = agentNumber
def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
agents["agent-${agentNumber}"] = {
catchErrors {
print "Agent ${agentNumberInside} - ${agentExecutions} executions"

kibanaPipeline.withWorkers('flaky-test-runner', {
if (NEED_BUILD) {
if (!IS_XPACK) {
kibanaPipeline.buildOss()
if (CI_GROUP == '1') {
runbld("./test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh", "Build kbn tp sample panel action for ciGroup1")
}
} else {
kibanaPipeline.buildXpack()
}
}
}, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
workers.functional('flaky-test-runner', {
if (NEED_BUILD) {
if (!IS_XPACK) {
kibanaPipeline.buildOss()
if (CI_GROUP == '1') {
runbld("./test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh", "Build kbn tp sample panel action for ciGroup1")
}
} else {
kibanaPipeline.buildXpack()
}
}
}

parallel(agents)

currentBuild.description += ", Failures: ${workerFailures.size()}"

if (workerFailures.size() > 0) {
print "There were ${workerFailures.size()} test suite failures."
print "The executions that failed were:"
print workerFailures.join("\n")
print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
}
}, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
}
}
}

parallel(agents)

currentBuild.description += ", Failures: ${workerFailures.size()}"

if (workerFailures.size() > 0) {
print "There were ${workerFailures.size()} test suite failures."
print "The executions that failed were:"
print workerFailures.join("\n")
print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
}
}

def getWorkerFromParams(isXpack, job, ciGroup) {
if (!isXpack) {
if (job == 'serverMocha') {
return kibanaPipeline.getPostBuildWorker('serverMocha', {
return kibanaPipeline.functionalTestProcess('serverMocha', {
kibanaPipeline.bash(
"""
source src/dev/ci_setup/setup_env.sh

@@ -77,20 +71,20 @@ def getWorkerFromParams(isXpack, job, ciGroup) {
)
})
} else if (job == 'firefoxSmoke') {
return kibanaPipeline.getPostBuildWorker('firefoxSmoke', { runbld('./test/scripts/jenkins_firefox_smoke.sh', 'Execute kibana-firefoxSmoke') })
return kibanaPipeline.functionalTestProcess('firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh')
} else if(job == 'visualRegression') {
return kibanaPipeline.getPostBuildWorker('visualRegression', { runbld('./test/scripts/jenkins_visual_regression.sh', 'Execute kibana-visualRegression') })
return kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh')
} else {
return kibanaPipeline.getOssCiGroupWorker(ciGroup)
return kibanaPipeline.ossCiGroupProcess(ciGroup)
}
}

if (job == 'firefoxSmoke') {
return kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', { runbld('./test/scripts/jenkins_xpack_firefox_smoke.sh', 'Execute xpack-firefoxSmoke') })
return kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh')
} else if(job == 'visualRegression') {
return kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld('./test/scripts/jenkins_xpack_visual_regression.sh', 'Execute xpack-visualRegression') })
return kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')
} else {
return kibanaPipeline.getXpackCiGroupWorker(ciGroup)
return kibanaPipeline.xpackCiGroupProcess(ciGroup)
}
}

@@ -105,10 +99,9 @@ def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWor
for(def j = 0; j < workerExecutions; j++) {
print "Execute agent-${agentNumber} worker-${workerNumber}: ${j}"
withEnv([
"JOB=agent-${agentNumber}-worker-${workerNumber}-${j}",
"REMOVE_KIBANA_INSTALL_DIR=1",
]) {
catchError {
catchErrors {
try {
worker(workerNumber)
} catch (ex) {
@@ -26,7 +26,7 @@ timeout(time: 120, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
node('linux && immutable') {
catchError {
catchErrors {
def VERSION
def SNAPSHOT_ID
def DESTINATION
@@ -19,50 +19,45 @@ currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch

def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"

timeout(time: 120, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
parallel([
// TODO we just need to run integration tests from intake?
'kibana-intake-agent': kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
]),
])
}

promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
}

kibanaPipeline.sendMail()
kibanaPipeline(timeoutMinutes: 120) {
catchErrors {
withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
parallel([
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
]),
])
}

promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
}

kibanaPipeline.sendMail()
}

def promoteSnapshot(snapshotVersion, snapshotId) {
Jenkinsfile (vendored): 108 lines changed
@@ -3,71 +3,49 @@
library 'kibana-pipeline-library'
kibanaLibrary.load()

stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
timeout(time: 135, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
githubPr.withDefaultPrComments {
catchError {
retryable.enable()
parallel([
'kibana-intake-agent': kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-firefoxSmoke': kibanaPipeline.getPostBuildWorker('firefoxSmoke', {
retryable('kibana-firefoxSmoke') {
runbld('./test/scripts/jenkins_firefox_smoke.sh', 'Execute kibana-firefoxSmoke')
}
}),
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
'oss-accessibility': kibanaPipeline.getPostBuildWorker('accessibility', {
retryable('kibana-accessibility') {
runbld('./test/scripts/jenkins_accessibility.sh', 'Execute accessibility tests')
}
}),
// 'oss-visualRegression': kibanaPipeline.getPostBuildWorker('visualRegression', { runbld('./test/scripts/jenkins_visual_regression.sh', 'Execute kibana-visualRegression') }),
]),
'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-firefoxSmoke': kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', {
retryable('xpack-firefoxSmoke') {
runbld('./test/scripts/jenkins_xpack_firefox_smoke.sh', 'Execute xpack-firefoxSmoke')
}
}),
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
'xpack-accessibility': kibanaPipeline.getPostBuildWorker('xpack-accessibility', {
retryable('xpack-accessibility') {
runbld('./test/scripts/jenkins_xpack_accessibility.sh', 'Execute xpack-accessibility tests')
}
}),
// 'xpack-visualRegression': kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld('./test/scripts/jenkins_xpack_visual_regression.sh', 'Execute xpack-visualRegression') }),
]),
])
}
}

retryable.printFlakyFailures()
kibanaPipeline.sendMail()
}
kibanaPipeline(timeoutMinutes: 135) {
githubPr.withDefaultPrComments {
catchError {
retryable.enable()
parallel([
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
// 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
// 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
]),
])
}
}

retryable.printFlakyFailures()
kibanaPipeline.sendMail()
}
@@ -20,7 +20,9 @@
import { resolve } from 'path';

const job = process.env.JOB ? `job-${process.env.JOB}-` : '';
const num = process.env.CI_WORKER_NUMBER ? `worker-${process.env.CI_WORKER_NUMBER}-` : '';
const num = process.env.CI_PARALLEL_PROCESS_NUMBER
? `worker-${process.env.CI_PARALLEL_PROCESS_NUMBER}-`
: '';

export function makeJunitReportPath(rootDirectory: string, reportName: string) {
return resolve(
@@ -4,7 +4,7 @@ source test/scripts/jenkins_test_setup.sh

if [[ -z "$CODE_COVERAGE" ]] ; then
installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
destDir=${installDir}-${CI_WORKER_NUMBER}
destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER}
cp -R "$installDir" "$destDir"

export KIBANA_INSTALL_DIR="$destDir"
@@ -4,7 +4,7 @@ source test/scripts/jenkins_test_setup.sh

if [[ -z "$CODE_COVERAGE" ]]; then
installDir="$PARENT_DIR/install/kibana"
destDir="${installDir}-${CI_WORKER_NUMBER}"
destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}"
cp -R "$installDir" "$destDir"

export KIBANA_INSTALL_DIR="$destDir"
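Both setup scripts read the renamed variable from the environment; it is supplied per parallel process by the new functionalTestProcess helper in vars/kibanaPipeline.groovy (see its hunk further down). An abbreviated excerpt of that hunk, for context only:

def functionalTestProcess(String name, Closure closure) {
  return { processNumber ->
    // CI_PARALLEL_PROCESS_NUMBER replaces the old CI_WORKER_NUMBER consumed by these shell scripts
    withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}", "JOB=${name}"]) {
      closure()
    }
  }
}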
@@ -1,5 +1,5 @@
def print() {
try {
catchError(catchInterruptions: false, buildResult: null) {
def startTime = sh(script: "date -d '-3 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
def endTime = sh(script: "date -d '+1 hour 30 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()

@@ -34,8 +34,6 @@ def print() {
echo 'SSH Command:'
echo "ssh -F ssh_config \$(hostname --ip-address)"
""", label: "Worker/Agent/Node debug links"
} catch(ex) {
print ex.toString()
}
}
vars/catchErrors.groovy (new file): 8 lines

@@ -0,0 +1,8 @@
// Basically, this is a shortcut for catchError(catchInterruptions: false) {}
// By default, catchError will swallow aborts/timeouts, which we almost never want
def call(Map params = [:], Closure closure) {
params.catchInterruptions = false
return catchError(params, closure)
}

return this
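A minimal usage sketch of the new step, assuming it is loaded as a shared-library var like the others in this commit (the shell script shown is illustrative only):

// Marks the build failed on an ordinary error, but re-throws aborts/timeouts instead of swallowing them
catchErrors {
  sh './test/scripts/jenkins_unit.sh'
}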
@@ -14,8 +14,8 @@
So, there is only ever one build status comment on a PR at any given time, the most recent one.
*/
def withDefaultPrComments(closure) {
catchError {
catchError {
catchErrors {
catchErrors {
closure()
}
@@ -1,100 +1,52 @@
def withWorkers(machineName, preWorkerClosure = {}, workerClosures = [:]) {
return {
jobRunner('tests-xl', true) {
withGcsArtifactUpload(machineName, {
withPostBuildReporting {
doSetup()
preWorkerClosure()

def nextWorker = 1
def worker = { workerClosure ->
def workerNumber = nextWorker
nextWorker++

return {
// This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
def delay = (workerNumber-1)*20
sleep(delay)

workerClosure(workerNumber)
}
}

def workers = [:]
workerClosures.each { workerName, workerClosure ->
workers[workerName] = worker(workerClosure)
}

parallel(workers)
}
})
}
}
}

def withWorker(machineName, label, Closure closure) {
return {
jobRunner(label, false) {
withGcsArtifactUpload(machineName) {
withPostBuildReporting {
doSetup()
closure()
}
}
}
}
}

def intakeWorker(jobName, String script) {
return withWorker(jobName, 'linux && immutable') {
withEnv([
"JOB=${jobName}",
]) {
runbld(script, "Execute ${jobName}")
}
}
}

def withPostBuildReporting(Closure closure) {
try {
closure()
} finally {
catchError {
catchErrors {
runErrorReporter()
}

catchError {
catchErrors {
runbld.junit()
}

catchError {
catchErrors {
publishJunit()
}
}
}

def getPostBuildWorker(name, closure) {
return { workerNumber ->
def kibanaPort = "61${workerNumber}1"
def esPort = "61${workerNumber}2"
def esTransportPort = "61${workerNumber}3"
def functionalTestProcess(String name, Closure closure) {
return { processNumber ->
def kibanaPort = "61${processNumber}1"
def esPort = "61${processNumber}2"
def esTransportPort = "61${processNumber}3"

withEnv([
"CI_WORKER_NUMBER=${workerNumber}",
"CI_PARALLEL_PROCESS_NUMBER=${processNumber}",
"TEST_KIBANA_HOST=localhost",
"TEST_KIBANA_PORT=${kibanaPort}",
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
"TEST_ES_TRANSPORT_PORT=${esTransportPort}",
"IS_PIPELINE_JOB=1",
"JOB=${name}",
]) {
closure()
}
}
}

def getOssCiGroupWorker(ciGroup) {
return getPostBuildWorker("ciGroup" + ciGroup, {
def functionalTestProcess(String name, String script) {
return functionalTestProcess(name) {
retryable(name) {
runbld(script, "Execute ${name}")
}
}
}

def ossCiGroupProcess(ciGroup) {
return functionalTestProcess("ciGroup" + ciGroup) {
withEnv([
"CI_GROUP=${ciGroup}",
"JOB=kibana-ciGroup${ciGroup}",

@@ -103,11 +55,11 @@ def getOssCiGroupWorker(ciGroup) {
runbld("./test/scripts/jenkins_ci_group.sh", "Execute kibana-ciGroup${ciGroup}")
}
}
})
}
}

def getXpackCiGroupWorker(ciGroup) {
return getPostBuildWorker("xpack-ciGroup" + ciGroup, {
def xpackCiGroupProcess(ciGroup) {
return functionalTestProcess("xpack-ciGroup" + ciGroup) {
withEnv([
"CI_GROUP=${ciGroup}",
"JOB=xpack-kibana-ciGroup${ciGroup}",

@@ -116,56 +68,6 @@ def getXpackCiGroupWorker(ciGroup) {
runbld("./test/scripts/jenkins_xpack_ci_group.sh", "Execute xpack-kibana-ciGroup${ciGroup}")
}
}
})
}

def jobRunner(label, useRamDisk, closure) {
node(label) {
agentInfo.print()

if (useRamDisk) {
// Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
def originalWorkspace = env.WORKSPACE
ws('/tmp/workspace') {
sh(
script: """
mkdir -p /dev/shm/workspace
mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
ln -s /dev/shm/workspace '${originalWorkspace}'
""",
label: "Move workspace to RAM - /dev/shm/workspace"
)
}
}

def scmVars

// Try to clone from Github up to 8 times, waiting 15 secs between attempts
retryWithDelay(8, 15) {
scmVars = checkout scm
}

withEnv([
"CI=true",
"HOME=${env.JENKINS_HOME}",
"PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
"PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
"PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
"TEST_BROWSER_HEADLESS=1",
"GIT_BRANCH=${scmVars.GIT_BRANCH}",
]) {
withCredentials([
string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
]) {
// scm is configured to check out to the ./kibana directory
dir('kibana') {
closure()
}
}
}
}
}

@@ -197,7 +99,7 @@ def withGcsArtifactUpload(workerName, closure) {
try {
closure()
} finally {
catchError {
catchErrors {
ARTIFACT_PATTERNS.each { pattern ->
uploadGcsArtifact(uploadPrefix, pattern)
}

@@ -225,7 +127,7 @@ def sendMail() {
}

def sendInfraMail() {
catchError {
catchErrors {
step([
$class: 'Mailer',
notifyEveryUnstableBuild: true,

@@ -236,7 +138,7 @@ def sendInfraMail() {
}

def sendKibanaMail() {
catchError {
catchErrors {
def buildStatus = buildUtils.getBuildStatus()
if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
emailext(

@@ -281,4 +183,18 @@ def runErrorReporter() {
)
}

def call(Map params = [:], Closure closure) {
def config = [timeoutMinutes: 135] + params

stage("Kibana Pipeline") {
timeout(time: config.timeoutMinutes, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
closure()
}
}
}
}
}

return this
@@ -2,7 +2,9 @@ def call(retryTimes, delaySecs, closure) {
retry(retryTimes) {
try {
closure()
} catch (ex) {
} catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException ex) {
throw ex // Immediately re-throw build abort exceptions, don't sleep first
} catch (Exception ex) {
sleep delaySecs
throw ex
}
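For context, a sketch of what the new catch order means at an existing call site; this mirrors the retryWithDelay usage shown elsewhere in this commit:

// Retries the checkout up to 8 times, sleeping 15 seconds between ordinary failures.
// A build abort (FlowInterruptedException) is now re-thrown immediately, without sleeping first.
retryWithDelay(8, 15) {
  scmVars = checkout scm
}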
@@ -27,7 +27,7 @@ def getFlakyFailures() {
}

def printFlakyFailures() {
catchError {
catchErrors {
def failures = getFlakyFailures()

if (failures && failures.size() > 0) {
vars/workers.groovy (new file): 147 lines
@@ -0,0 +1,147 @@
// "Workers" in this file will spin up an instance, do some setup etc depending on the configuration, and then execute some work that you define
// e.g. workers.base(name: 'my-worker') { sh "echo 'ready to execute some kibana scripts'" }

/*
The base worker that all of the others use. Will clone the scm (assumed to be kibana), and run kibana bootstrap processes by default.

Parameters:
label - gobld/agent label to use, e.g. 'linux && immutable'
ramDisk - Should the workspace be mounted in memory? Default: true
bootstrapped - If true, download kibana dependencies, run kbn bootstrap, etc. Default: true
name - Name of the worker for display purposes, filenames, etc.
scm - Jenkins scm configuration for checking out code. Use `null` to disable checkout. Default: inherited from job
*/
def base(Map params, Closure closure) {
def config = [label: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params
if (!config.label) {
error "You must specify an agent label, such as 'tests-xl' or 'linux && immutable', when using workers.base()"
}

node(config.label) {
agentInfo.print()

if (config.ramDisk) {
// Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
def originalWorkspace = env.WORKSPACE
ws('/tmp/workspace') {
sh(
script: """
mkdir -p /dev/shm/workspace
mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
ln -s /dev/shm/workspace '${originalWorkspace}'
""",
label: "Move workspace to RAM - /dev/shm/workspace"
)
}
}

def scmVars = [:]

if (config.scm) {
// Try to clone from Github up to 8 times, waiting 15 secs between attempts
retryWithDelay(8, 15) {
scmVars = checkout scm
}
}

withEnv([
"CI=true",
"HOME=${env.JENKINS_HOME}",
"PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
"PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
"PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
"TEST_BROWSER_HEADLESS=1",
"GIT_BRANCH=${scmVars.GIT_BRANCH ?: ''}",
]) {
withCredentials([
string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
]) {
// scm is configured to check out to the ./kibana directory
dir('kibana') {
if (config.bootstrapped) {
kibanaPipeline.doSetup()
}

closure()
}
}
}
}
}

// Worker for ci processes. Extends the base worker and adds GCS artifact upload, error reporting, junit processing
def ci(Map params, Closure closure) {
def config = [ramDisk: true, bootstrapped: true] + params

return base(config) {
kibanaPipeline.withGcsArtifactUpload(config.name) {
kibanaPipeline.withPostBuildReporting {
closure()
}
}
}
}

// Worker for running the current intake jobs. Just runs a single script after bootstrap.
def intake(jobName, String script) {
return {
ci(name: jobName, label: 'linux && immutable', ramDisk: false) {
withEnv(["JOB=${jobName}"]) {
runbld(script, "Execute ${jobName}")
}
}
}
}

// Worker for running functional tests. Runs a setup process (e.g. the kibana build) then executes a map of closures in parallel (e.g. one for each ciGroup)
def functional(name, Closure setup, Map processes) {
return {
parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, label: 'tests-xl')
}
}

/*
Creates a ci worker that can run a setup process, followed by a group of processes in parallel.

Parameters:
name: Name of the worker for display purposes, filenames, etc.
setup: Closure to execute after the agent is bootstrapped, before starting the parallel work
processes: Map of closures that will execute in parallel after setup. Each closure is passed a unique number.
delayBetweenProcesses: Number of seconds to wait between starting the parallel processes. Useful to spread the load of heavy init processes, e.g. Elasticsearch starting up. Default: 0
label: gobld/agent label to use, e.g. 'linux && immutable'. Default: 'tests-xl', a 32 CPU machine used for running many functional test suites in parallel
*/
def parallelProcesses(Map params) {
def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, label: 'tests-xl'] + params

ci(label: config.label, name: config.name) {
config.setup()

def nextProcessNumber = 1
def process = { processName, processClosure ->
def processNumber = nextProcessNumber
nextProcessNumber++

return {
if (config.delayBetweenProcesses && config.delayBetweenProcesses > 0) {
// This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
def delay = (processNumber-1)*config.delayBetweenProcesses
sleep(delay)
}

processClosure(processNumber)
}
}

def processes = [:]
config.processes.each { processName, processClosure ->
processes[processName] = process(processName, processClosure)
}

parallel(processes)
}
}

return this
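Putting the new vars/workers.groovy helpers together, an abbreviated sketch of how the Jenkinsfiles in this commit compose them (only the first two OSS ciGroups are shown):

kibanaPipeline(timeoutMinutes: 135) {
  parallel([
    'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
    'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
      'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
      'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
    ]),
  ])
}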