mirror of https://github.com/elastic/kibana.git
[CI] [6.8] Pipeline backports (#72293)

This commit is contained in:
parent e8fa2dc9c5
commit ff81d1b02b

35 changed files with 1747 additions and 247 deletions

@@ -19,59 +19,50 @@ currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch

def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"

timeout(time: 150, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
slackNotifications.onFailure(
title: "*<${env.BUILD_URL}|[${SNAPSHOT_VERSION}] ES Snapshot Verification Failure>*",
message: "[${SNAPSHOT_VERSION}] ES Snapshot Verification Failure",
) {
retryable.enable(2)
withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
parallel([
// TODO we just need to run integration tests from intake?
'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'),
'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'),
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
]),
])
}
promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
}
kibanaPipeline(timeoutMinutes: 150) {
catchErrors {
slackNotifications.onFailure(
title: "*<${env.BUILD_URL}|[${SNAPSHOT_VERSION}] ES Snapshot Verification Failure>*",
message: "[${SNAPSHOT_VERSION}] ES Snapshot Verification Failure",
) {
retryable.enable(2)
withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
parallel([
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
]),
])
}

kibanaPipeline.sendMail()
promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
}
}

kibanaPipeline.sendMail()
}

def promoteSnapshot(snapshotVersion, snapshotId) {
node('linux && immutable') {
node(workers.label('s')) {
esSnapshots.promote(snapshotVersion, snapshotId)
}
}

8  .ci/pipeline-library/README.md  Normal file

@@ -0,0 +1,8 @@
# Kibana Jenkins Pipeline Library

## Running tests

```bash
cd .ci/pipeline-library
./gradlew test
```
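
For orientation, a minimal test against this library might look like the sketch below. The class name and assertion values are illustrative; `KibanaBasePipelineTest` and the `vars/` layout it loads from are added later in this same commit, and the real suites follow the same pattern:

```groovy
import org.junit.Before
import org.junit.Test
import static groovy.test.GroovyAssert.*

// Hypothetical example test: loads a shared step from vars/ the same way
// the real suites in this commit do.
class ExampleTest extends KibanaBasePipelineTest {
  def buildState

  @Before
  void setUp() {
    super.setUp()
    buildState = loadScript('vars/buildState.groovy')
  }

  @Test
  void 'stored values can be read back'() {
    buildState.add('es_snapshot_id', '123') // illustrative key/value
    assertEquals('123', buildState.get('es_snapshot_id'))
  }
}
```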

46  .ci/pipeline-library/build.gradle  Normal file

@@ -0,0 +1,46 @@
plugins {
  id 'groovy'
  id 'idea'
}

group = 'co.elastic.kibana.pipeline'
version = '0.0.1'

sourceCompatibility = 1.8
targetCompatibility = 1.8

repositories {
  maven { url 'https://repo.jenkins-ci.org/releases/' }
  maven { url 'https://repo.maven.apache.org/maven2' }
}

dependencies {
  implementation 'org.codehaus.groovy:groovy-all:2.4.12'
  implementation 'org.jenkins-ci.main:jenkins-core:2.23'
  implementation 'org.jenkins-ci.plugins.workflow:workflow-step-api:2.19@jar'
  testImplementation 'com.lesfurets:jenkins-pipeline-unit:1.4'
  testImplementation 'junit:junit:4.12'
  testImplementation 'org.mockito:mockito-core:2.+'
  testImplementation 'org.assertj:assertj-core:3.15+' // Temporary https://github.com/jenkinsci/JenkinsPipelineUnit/issues/209
}

sourceSets {
  main {
    groovy {
      srcDirs = ['vars']
    }
  }

  test {
    groovy {
      srcDirs = ['src/test']
    }
  }
}

test {
  testLogging {
    events 'passed', 'skipped', 'failed'
    exceptionFormat = 'full'
  }
}

BIN  .ci/pipeline-library/gradle/wrapper/gradle-wrapper.jar  vendored  Normal file

Binary file not shown.

5  .ci/pipeline-library/gradle/wrapper/gradle-wrapper.properties  vendored  Normal file

@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.4.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

185  .ci/pipeline-library/gradlew  vendored  Executable file

@@ -0,0 +1,185 @@
#!/usr/bin/env sh

#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar


# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`    ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=`expr $i + 1`
    done
    case $i in
        0) set -- ;;
        1) set -- "$args0" ;;
        2) set -- "$args0" "$args1" ;;
        3) set -- "$args0" "$args1" "$args2" ;;
        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=`save "$@"`

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

exec "$JAVACMD" "$@"

104  .ci/pipeline-library/gradlew.bat  vendored  Normal file

@@ -0,0 +1,104 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem      https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem

@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar


@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega

109  .ci/pipeline-library/src/test/KibanaBasePipelineTest.groovy  Normal file

@@ -0,0 +1,109 @@
import com.lesfurets.jenkins.unit.*
import org.junit.Before

class KibanaBasePipelineTest extends BasePipelineTest {
  Map env = [:]
  Map params = [:]

  public def Mocks = [
    TEST_FAILURE_URL: 'https://localhost/',
    TEST_FAILURE_NAME: 'Kibana Pipeline / kibana-xpack-agent / Chrome X-Pack UI Functional Tests.x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &',
  ]

  @Before
  void setUp() {
    super.setUp()

    env.BRANCH_NAME = 'master'
    env.BUILD_ID = '1'
    env.BUILD_DISPLAY_NAME = "#${env.BUILD_ID}"

    env.JENKINS_URL = 'http://jenkins.localhost:8080'
    env.BUILD_URL = "${env.JENKINS_URL}/job/elastic+kibana+${env.BRANCH_NAME}/${env.BUILD_ID}/".toString()

    env.JOB_BASE_NAME = "elastic / kibana # ${env.BRANCH_NAME}".toString()
    env.JOB_NAME = env.JOB_BASE_NAME

    env.WORKSPACE = 'WS'

    props([
      buildUtils: [
        getBuildStatus: { 'SUCCESS' },
        printStacktrace: { ex -> print ex },
      ],
      githubPr: [
        isPr: { false },
      ],
      jenkinsApi: [ getFailedSteps: { [] } ],
      testUtils: [ getFailures: { [] } ],
    ])

    vars([
      env: env,
      params: params,
    ])

    // Some wrappers that can just be mocked to immediately call the closure passed in
    [
      'catchError',
      'catchErrors',
      'timestamps',
      'withGithubCredentials',
    ].each {
      helper.registerAllowedMethod(it, [Closure.class], null)
    }
  }

  void props(Map properties) {
    properties.each {
      binding.setProperty(it.key, it.value)
    }
  }

  void prop(String propertyName, Object propertyValue) {
    binding.setProperty(propertyName, propertyValue)
  }

  void vars(Map variables) {
    variables.each {
      binding.setVariable(it.key, it.value)
    }
  }

  void var(String variableName, Object variableValue) {
    binding.setVariable(variableName, variableValue)
  }

  def fnMock(String name) {
    return helper.callStack.find { it.methodName == name }
  }

  void mockFailureBuild() {
    props([
      buildUtils: [
        getBuildStatus: { 'FAILURE' },
        printStacktrace: { ex -> print ex },
      ],
      jenkinsApi: [ getFailedSteps: { [
        [
          displayName: 'Check out from version control',
          logs: 'http://jenkins.localhost:8080',
        ],
        [
          displayName: 'Execute test task',
          logs: 'http://jenkins.localhost:8080',
        ],
      ] } ],
      testUtils: [
        getFailures: {
          return [
            [
              url: Mocks.TEST_FAILURE_URL,
              fullDisplayName: Mocks.TEST_FAILURE_NAME,
            ]
          ]
        },
      ],
    ])
  }
}

48  .ci/pipeline-library/src/test/buildState.groovy  Normal file

@@ -0,0 +1,48 @@
import org.junit.*
import static groovy.test.GroovyAssert.*

class BuildStateTest extends KibanaBasePipelineTest {
  def buildState

  @Before
  void setUp() {
    super.setUp()

    buildState = loadScript("vars/buildState.groovy")
  }

  @Test
  void 'get() returns existing data'() {
    buildState.add('test', 1)
    def actual = buildState.get('test')
    assertEquals(1, actual)
  }

  @Test
  void 'get() returns null for missing data'() {
    def actual = buildState.get('missing_key')
    assertEquals(null, actual)
  }

  @Test
  void 'add() does not overwrite existing keys'() {
    assertTrue(buildState.add('test', 1))
    assertFalse(buildState.add('test', 2))

    def actual = buildState.get('test')

    assertEquals(1, actual)
  }

  @Test
  void 'set() overwrites existing keys'() {
    assertFalse(buildState.has('test'))
    buildState.set('test', 1)
    assertTrue(buildState.has('test'))
    buildState.set('test', 2)

    def actual = buildState.get('test')

    assertEquals(2, actual)
  }
}

85  .ci/pipeline-library/src/test/githubCommitStatus.groovy  Normal file

@@ -0,0 +1,85 @@
import org.junit.*
import static org.mockito.Mockito.*;

class GithubCommitStatusTest extends KibanaBasePipelineTest {
  def githubCommitStatus
  def githubApiMock
  def buildStateMock

  def EXPECTED_STATUS_URL = 'repos/elastic/kibana/statuses/COMMIT_HASH'
  def EXPECTED_CONTEXT = 'kibana-ci'
  def EXPECTED_BUILD_URL = 'http://jenkins.localhost:8080/job/elastic+kibana+master/1/'

  interface BuildState {
    Object get(String key)
  }

  interface GithubApi {
    Object post(String url, Map data)
  }

  @Before
  void setUp() {
    super.setUp()

    buildStateMock = mock(BuildState)
    githubApiMock = mock(GithubApi)

    when(buildStateMock.get('checkoutInfo')).thenReturn([ commit: 'COMMIT_HASH', ])
    when(githubApiMock.post(any(), any())).thenReturn(null)

    props([
      buildState: buildStateMock,
      githubApi: githubApiMock,
    ])

    githubCommitStatus = loadScript("vars/githubCommitStatus.groovy")
  }

  void verifyStatusCreate(String state, String description) {
    verify(githubApiMock).post(
      EXPECTED_STATUS_URL,
      [
        'state': state,
        'description': description,
        'context': EXPECTED_CONTEXT,
        'target_url': EXPECTED_BUILD_URL,
      ]
    )
  }

  @Test
  void 'onStart() should create a pending status'() {
    githubCommitStatus.onStart()
    verifyStatusCreate('pending', 'Build started.')
  }

  @Test
  void 'onFinish() should create a success status'() {
    githubCommitStatus.onFinish()
    verifyStatusCreate('success', 'Build completed successfully.')
  }

  @Test
  void 'onFinish() should create an error status for failed builds'() {
    mockFailureBuild()
    githubCommitStatus.onFinish()
    verifyStatusCreate('error', 'Build failed.')
  }

  @Test
  void 'onStart() should exit early for PRs'() {
    prop('githubPr', [ isPr: { true } ])

    githubCommitStatus.onStart()
    verifyZeroInteractions(githubApiMock)
  }

  @Test
  void 'onFinish() should exit early for PRs'() {
    prop('githubPr', [ isPr: { true } ])

    githubCommitStatus.onFinish()
    verifyZeroInteractions(githubApiMock)
  }
}

100  .ci/pipeline-library/src/test/prChanges.groovy  Normal file

@@ -0,0 +1,100 @@
import org.junit.*
import static groovy.test.GroovyAssert.*

class PrChangesTest extends KibanaBasePipelineTest {
  def prChanges

  @Before
  void setUp() {
    super.setUp()

    env.ghprbPullId = '1'

    props([
      githubPr: [
        isPr: { true },
      ],
    ])

    prChanges = loadScript("vars/prChanges.groovy")
  }

  @Test
  void 'areChangesSkippable() with no changes'() {
    props([
      githubPrs: [
        getChanges: { [] },
      ],
    ])

    assertTrue(prChanges.areChangesSkippable())
  }

  @Test
  void 'areChangesSkippable() with skippable changes'() {
    props([
      githubPrs: [
        getChanges: { [
          [filename: 'docs/test/a-fake-doc.asciidoc'],
          [filename: 'README.md'],
        ] },
      ],
    ])

    assertTrue(prChanges.areChangesSkippable())
  }

  @Test
  void 'areChangesSkippable() with skippable renames'() {
    props([
      githubPrs: [
        getChanges: { [
          [ filename: 'docs/test/a-fake-doc.asciidoc', previousFilename: 'docs/test/a-different-fake-doc.asciidoc' ],
          [ filename: 'README.md', previousFilename: 'README-old.md' ],
        ] },
      ],
    ])

    assertTrue(prChanges.areChangesSkippable())
  }

  @Test
  void 'areChangesSkippable() with unskippable changes'() {
    props([
      githubPrs: [
        getChanges: { [
          [filename: 'src/core/index.ts'],
        ] },
      ],
    ])

    assertFalse(prChanges.areChangesSkippable())
  }

  @Test
  void 'areChangesSkippable() with skippable and unskippable changes'() {
    props([
      githubPrs: [
        getChanges: { [
          [filename: 'README.md'],
          [filename: 'src/core/index.ts'],
        ] },
      ],
    ])

    assertFalse(prChanges.areChangesSkippable())
  }

  @Test
  void 'areChangesSkippable() with skippable changes that are in notSkippablePaths'() {
    props([
      githubPrs: [
        getChanges: { [
          [filename: 'docs/developer/architecture/code-exploration.asciidoc'],
        ] },
      ],
    ])

    assertFalse(prChanges.areChangesSkippable())
  }
}

62  .ci/pipeline-library/src/test/slackNotifications.groovy  Normal file

@@ -0,0 +1,62 @@
import org.junit.*
import static groovy.test.GroovyAssert.*

class SlackNotificationsTest extends KibanaBasePipelineTest {
  def slackNotifications

  @Before
  void setUp() {
    super.setUp()

    helper.registerAllowedMethod('slackSend', [Map.class], null)
    slackNotifications = loadScript('vars/slackNotifications.groovy')
  }

  @Test
  void 'getTestFailures() should properly format failure steps'() {
    mockFailureBuild()

    def failureMessage = slackNotifications.getTestFailures()

    assertEquals(
      "*Test Failures*\n• <${Mocks.TEST_FAILURE_URL}|x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &>",
      failureMessage
    )
  }

  @Test
  void 'sendFailedBuild() should call slackSend() with message'() {
    mockFailureBuild()

    slackNotifications.sendFailedBuild()

    def args = fnMock('slackSend').args[0]

    def expected = [
      channel: '#kibana-operations-alerts',
      username: 'Kibana Operations',
      iconEmoji: ':jenkins:',
      color: 'danger',
      message: ':broken_heart: elastic / kibana # master #1',
    ]

    expected.each {
      assertEquals(it.value.toString(), args[it.key].toString())
    }

    assertEquals(
      ":broken_heart: *<http://jenkins.localhost:8080/job/elastic+kibana+master/1/|elastic / kibana # master #1>*",
      args.blocks[0].text.text.toString()
    )

    assertEquals(
      "*Failed Steps*\n• <http://jenkins.localhost:8080|Execute test task>",
      args.blocks[1].text.text.toString()
    )

    assertEquals(
      "*Test Failures*\n• <https://localhost/|x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &>",
      args.blocks[2].text.text.toString()
    )
  }
}

1  .ci/pipeline-library/vars  Symbolic link

@@ -0,0 +1 @@
../../vars

2  .gitignore  vendored

@@ -44,3 +44,5 @@ package-lock.json
.vscode
npm-debug.log*
.tern-project
.ci/pipeline-library/build/
.gradle

76  Jenkinsfile  vendored

@@ -3,47 +3,43 @@
library 'kibana-pipeline-library'
kibanaLibrary.load()

stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
timeout(time: 135, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
githubPr.withDefaultPrComments {
catchError {
retryable.enable()
parallel([
'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'),
'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'),
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
]),
])
}
}

if (params.NOTIFY_ON_FAILURE) {
slackNotifications.onFailure()
kibanaPipeline.sendMail()
}
kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true) {
githubPr.withDefaultPrComments {
ciStats.trackBuild {
catchError {
retryable.enable()
parallel([
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
]),
])
}
}
}

if (params.NOTIFY_ON_FAILURE) {
slackNotifications.onFailure()
kibanaPipeline.sendMail()
}
}

@@ -46,6 +46,7 @@ export const IGNORE_FILE_GLOBS = [
  '**/Jenkinsfile*',
  'Dockerfile*',
  'vars/*',
  '.ci/pipeline-library/**/*',

  // filename must match language code which requires capital letters
  '**/translations/*.json',
@@ -2,6 +2,8 @@

set -e

source src/dev/ci_setup/setup_env.sh

if [[ -z "$IS_PIPELINE_JOB" ]] ; then
  trap 'node "$KIBANA_DIR/src/dev/failed_tests/cli"' EXIT
fi
@@ -2,6 +2,8 @@

set -e

source src/dev/ci_setup/setup_env.sh

if [[ -z "$IS_PIPELINE_JOB" ]] ; then
  trap 'node "$KIBANA_DIR/src/dev/failed_tests/cli"' EXIT
fi
@@ -1,5 +1,5 @@
def print() {
try {
catchError(catchInterruptions: false, buildResult: null) {
def startTime = sh(script: "date -d '-3 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
def endTime = sh(script: "date -d '+1 hour 30 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()

@@ -34,8 +34,6 @@ def print() {
echo 'SSH Command:'
echo "ssh -F ssh_config \$(hostname --ip-address)"
""", label: "Worker/Agent/Node debug links"
} catch(ex) {
print ex.toString()
}
}

30  vars/buildState.groovy  Normal file

@@ -0,0 +1,30 @@
import groovy.transform.Field

public static @Field JENKINS_BUILD_STATE = [:]

def add(key, value) {
  if (!buildState.JENKINS_BUILD_STATE.containsKey(key)) {
    buildState.JENKINS_BUILD_STATE[key] = value
    return true
  }

  return false
}

def set(key, value) {
  buildState.JENKINS_BUILD_STATE[key] = value
}

def get(key) {
  return buildState.JENKINS_BUILD_STATE[key]
}

def has(key) {
  return buildState.JENKINS_BUILD_STATE.containsKey(key)
}

def get() {
  return buildState.JENKINS_BUILD_STATE
}

return this
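
Taken together: `add()` is write-once (it returns false rather than clobbering an existing key) while `set()` always overwrites. A rough usage sketch inside a pipeline, with illustrative key and values (the surrounding pipeline context is assumed):

```groovy
// Sketch: record checkout info once, early in the build, then read it later.
def checkoutInfo = [branch: 'master', commit: 'abc123'] // illustrative values
buildState.add('checkoutInfo', checkoutInfo) // first write wins, returns true
buildState.add('checkoutInfo', [:])          // ignored, returns false

if (buildState.has('checkoutInfo')) {
  print "commit: ${buildState.get('checkoutInfo').commit}"
}
```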

8  vars/catchErrors.groovy  Normal file

@@ -0,0 +1,8 @@
// Basically, this is a shortcut for catchError(catchInterruptions: false) {}
// By default, catchError will swallow aborts/timeouts, which we almost never want
def call(Map params = [:], Closure closure) {
  params.catchInterruptions = false
  return catchError(params, closure)
}

return this
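
In practice this lets cleanup or reporting steps continue past a failure without masking a user-initiated abort or a timeout. A minimal sketch (the failing `sh` body is illustrative):

```groovy
// Sketch: a failing step inside catchErrors marks the build failed,
// but an abort or timeout still interrupts the pipeline as expected.
catchErrors {
  sh 'exit 1' // illustrative failing step
}
print 'still reached after the failure above'
```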

45  vars/getCheckoutInfo.groovy  Normal file

@@ -0,0 +1,45 @@
def call(branchOverride) {
  def repoInfo = [
    branch: branchOverride ?: env.ghprbSourceBranch,
    targetBranch: env.ghprbTargetBranch,
  ]

  if (repoInfo.branch == null) {
    if (!(params.branch_specifier instanceof String)) {
      throw new Exception(
        "Unable to determine branch automatically, either pass a branch name to getCheckoutInfo() or use the branch_specifier param."
      )
    }

    // strip prefix from the branch specifier to make it consistent with ghprbSourceBranch
    repoInfo.branch = params.branch_specifier.replaceFirst(/^(refs\/heads\/|origin\/)/, "")
  }

  repoInfo.commit = sh(
    script: "git rev-parse HEAD",
    label: "determining checked out sha",
    returnStdout: true
  ).trim()

  if (repoInfo.targetBranch) {
    // Try to clone fetch from Github up to 8 times, waiting 15 secs between attempts
    retryWithDelay(8, 15) {
      sh(
        script: "git fetch origin ${repoInfo.targetBranch}",
        label: "fetch latest from '${repoInfo.targetBranch}' at origin"
      )
    }

    repoInfo.mergeBase = sh(
      script: "git merge-base HEAD FETCH_HEAD",
      label: "determining merge point with '${repoInfo.targetBranch}' at origin",
      returnStdout: true
    ).trim()
  }

  print "repoInfo: ${repoInfo}"

  return repoInfo
}

return this
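
Callers get back a map; a hedged sketch of consuming it, using the key names defined above (the checkout itself is assumed to have already happened on this node):

```groovy
// Sketch: use the returned map to report what was checked out.
def checkoutInfo = getCheckoutInfo(null) // or pass an explicit branch name
print "building ${checkoutInfo.branch} at ${checkoutInfo.commit}"
if (checkoutInfo.mergeBase) {
  print "merge base with ${checkoutInfo.targetBranch}: ${checkoutInfo.mergeBase}"
}
```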

47  vars/githubCommitStatus.groovy  Normal file

@@ -0,0 +1,47 @@
def shouldCreateStatuses() {
  return !githubPr.isPr() && buildState.get('checkoutInfo')
}

def onStart() {
  catchError {
    if (!shouldCreateStatuses()) {
      return
    }

    def checkoutInfo = buildState.get('checkoutInfo')
    create(checkoutInfo.commit, 'pending', 'Build started.')
  }
}

def onFinish() {
  catchError {
    if (!shouldCreateStatuses()) {
      return
    }

    def checkoutInfo = buildState.get('checkoutInfo')
    def status = buildUtils.getBuildStatus()

    if (status == 'SUCCESS' || status == 'UNSTABLE') {
      create(checkoutInfo.commit, 'success', 'Build completed successfully.')
    } else if(status == 'ABORTED') {
      create(checkoutInfo.commit, 'error', 'Build aborted or timed out.')
    } else {
      create(checkoutInfo.commit, 'error', 'Build failed.')
    }
  }
}

// state: error|failure|pending|success
def create(sha, state, description, context = 'kibana-ci') {
  withGithubCredentials {
    return githubApi.post("repos/elastic/kibana/statuses/${sha}", [
      state: state,
      description: description,
      context: context,
      target_url: env.BUILD_URL
    ])
  }
}

return this
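
The two hooks are meant to bracket a build, and both no-op unless `checkoutInfo` is already in `buildState`. One plausible wiring, as a sketch (the surrounding pipeline wrapper and build steps are assumed):

```groovy
// Sketch: create a pending status up front, then resolve it when done.
buildState.add('checkoutInfo', getCheckoutInfo(null))
githubCommitStatus.onStart()
try {
  // ... run the actual build here ...
} finally {
  githubCommitStatus.onFinish()
}
```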

@@ -14,8 +14,8 @@
So, there is only ever one build status comment on a PR at any given time, the most recent one.
*/
def withDefaultPrComments(closure) {
catchError {
catchError {
catchErrors {
catchErrors {
closure()
}

@@ -169,7 +169,20 @@ def getNextCommentMessage(previousCommentInfo = [:]) {
## :broken_heart: Build Failed
* [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
* Commit: ${getCommitHash()}
* [Pipeline Steps](${env.BUILD_URL}flowGraphTable) (look for red circles / failed steps)
* [Interpreting CI Failures](https://www.elastic.co/guide/en/kibana/current/interpreting-ci-failures.html)
"""

try {
def steps = getFailedSteps()
if (steps?.size() > 0) {
def list = steps.collect { "* [${it.displayName}](${it.logs})" }.join("\n")
messages << "### Failed CI Steps\n${list}"
}
} catch (ex) {
buildUtils.printStacktrace(ex)
print "Error retrieving failed pipeline steps for PR comment, will skip this section"
}
}

messages << getTestFailuresMessage()

@@ -194,14 +207,6 @@ def getNextCommentMessage(previousCommentInfo = [:]) {
.join("\n\n")
}

def withGithubCredentials(closure) {
withCredentials([
string(credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7', variable: 'GITHUB_TOKEN'),
]) {
closure()
}
}

def postComment(message) {
if (!isPr()) {
error "Trying to post a GitHub PR comment on a non-PR or non-elastic PR build"

@@ -228,3 +233,9 @@ def deleteComment(commentId) {
def getCommitHash() {
return env.ghprbActualCommit
}

def getFailedSteps() {
return jenkinsApi.getFailedSteps()?.findAll { step ->
step.displayName != 'Check out from version control'
}
}

21  vars/jenkinsApi.groovy  Normal file

@@ -0,0 +1,21 @@
def getSteps() {
  def url = "${env.BUILD_URL}api/json?tree=actions[nodes[iconColor,running,displayName,id,parents]]"
  def responseRaw = httpRequest([ method: "GET", url: url ])
  def response = toJSON(responseRaw)

  def graphAction = response?.actions?.find { it._class == "org.jenkinsci.plugins.workflow.job.views.FlowGraphAction" }

  return graphAction?.nodes
}

def getFailedSteps() {
  def steps = getSteps()
  def failedSteps = steps?.findAll { it.iconColor == "red" && it._class == "org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode" }
  failedSteps.each { step ->
    step.logs = "${env.BUILD_URL}execution/node/${step.id}/log".toString()
  }

  return failedSteps
}

return this
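
Downstream consumers in this commit (the Slack and PR-comment steps) iterate the returned nodes and render `displayName`/`logs` pairs; a small sketch of the same pattern:

```groovy
// Sketch: turn failed steps into one line each, as the notifiers do.
def failedSteps = jenkinsApi.getFailedSteps() ?: []
failedSteps.each { step ->
  print "${step.displayName}: ${step.logs}"
}
```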

255  vars/kibanaCoverage.groovy  Normal file

@@ -0,0 +1,255 @@
def downloadPrevious(title) {
  def vaultSecret = 'secret/gce/elastic-bekitzur/service-account/kibana'

  withGcpServiceAccount.fromVaultSecret(vaultSecret, 'value') {
    kibanaPipeline.bash('''

      gsutil -m cp -r gs://elastic-bekitzur-kibana-coverage-live/previous_pointer/previous.txt . || echo "### Previous Pointer NOT FOUND?"

      if [ -e ./previous.txt ]; then
        mv previous.txt downloaded_previous.txt
        echo "### downloaded_previous.txt"
        cat downloaded_previous.txt
      fi

    ''', title)

    def previous = sh(script: 'cat downloaded_previous.txt', label: '### Capture Previous Sha', returnStdout: true).trim()

    return previous
  }
}

def uploadPrevious(title) {
  def vaultSecret = 'secret/gce/elastic-bekitzur/service-account/kibana'

  withGcpServiceAccount.fromVaultSecret(vaultSecret, 'value') {
    kibanaPipeline.bash('''

      collectPrevious() {
        PREVIOUS=$(git log --pretty=format:%h -1)
        echo "### PREVIOUS: ${PREVIOUS}"
        echo $PREVIOUS > previous.txt
      }
      collectPrevious

      gsutil cp previous.txt gs://elastic-bekitzur-kibana-coverage-live/previous_pointer/


    ''', title)

  }
}

def uploadCoverageStaticSite(timestamp) {
  def uploadPrefix = "gs://elastic-bekitzur-kibana-coverage-live/"
  def uploadPrefixWithTimeStamp = "${uploadPrefix}${timestamp}/"

  uploadBaseWebsiteFiles(uploadPrefix)
  uploadCoverageHtmls(uploadPrefixWithTimeStamp)
}

def uploadBaseWebsiteFiles(prefix) {
  [
    'src/dev/code_coverage/www/index.html',
    'src/dev/code_coverage/www/404.html'
  ].each { uploadWithVault(prefix, it) }
}

def uploadCoverageHtmls(prefix) {
  [
    'target/kibana-coverage/functional-combined',
    'target/kibana-coverage/jest-combined',
    'target/kibana-coverage/mocha-combined',
  ].each { uploadWithVault(prefix, it) }
}

def uploadWithVault(prefix, path) {
  def vaultSecret = 'secret/gce/elastic-bekitzur/service-account/kibana'

  withGcpServiceAccount.fromVaultSecret(vaultSecret, 'value') {
    kibanaPipeline.bash("""
      gsutil -m cp -r -a public-read -z js,css,html ${path} '${prefix}'
    """, "### Upload files to GCS with vault, path: ${path}")
  }
}

def prokLinks(title) {
  kibanaPipeline.bash('''
cat << EOF > src/dev/code_coverage/www/index_partial_2.html
        <a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a>
        <a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/mocha-combined/index.html">Latest Mocha</a>
        <a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/functional-combined/index.html">Latest FTR</a>
      </nav>
    </div>
  </header>

  <main role="main" class="inner cover">
    <!-- <h1 class="cover-heading"> - Master Branch</h1>-->
    <p class="lead">Use Kibana Stats to mine coverage data</p>
    <p class="lead">
      <a href="https://kibana-stats.elastic.dev/app/kibana#/dashboard/58b8db70-62f9-11ea-8312-7f2d69b79843?_g=(filters%3A!()%2CrefreshInterval%3A(pause%3A!t%2Cvalue%3A0)%2Ctime%3A(from%3Anow-7d%2Cto%3Anow))" class="btn btn-lg btn-primary">Dashboard</a>
    </p>
  </main>

  <footer class="mastfoot mt-auto">
    <div class="inner">
      <p>Please slack us at <a href="https://app.slack.com/client/T0CUZ52US/C0TR0FAET">#kibana-qa</a> if you've questions</p>
    </div>
  </footer>
</div>
</body>
</html>
EOF
  ''', title)
  kibanaPipeline.bash('''
    cat src/dev/code_coverage/www/index_partial.html > src/dev/code_coverage/www/index.html
    cat src/dev/code_coverage/www/index_partial_2.html >> src/dev/code_coverage/www/index.html

    echo "### Prok'd Index File: ..."
    cat src/dev/code_coverage/www/index.html
  ''', "### Combine Index Partials")
}

def collectVcsInfo(title) {
  kibanaPipeline.bash('''
    predicate() {
      x=$1
      if [ -n "$x" ]; then
        return
      else
        echo "### 1 or more variables that Code Coverage needs, are undefined"
        exit 1
      fi
    }
    CMD="git log --pretty=format"
    XS=("${GIT_BRANCH}" \
        "$(${CMD}":%h" -1)" \
        "$(${CMD}":%an" -1)" \
        "$(${CMD}":%s" -1)")
    touch VCS_INFO.txt
    for X in "${!XS[@]}"; do
      {
        predicate "${XS[X]}"
        echo "${XS[X]}" >> VCS_INFO.txt
      }
    done
    echo "### VCS_INFO:"
    cat VCS_INFO.txt
  ''', title
  )
}

def generateReports(title) {
  kibanaPipeline.bash("""
    source src/dev/ci_setup/setup_env.sh true
    # bootstrap from x-pack folder
    cd x-pack
    yarn kbn bootstrap --prefer-offline
    # Return to project root
    cd ..
    . src/dev/code_coverage/shell_scripts/extract_archives.sh
    . src/dev/code_coverage/shell_scripts/fix_html_reports_parallel.sh
    . src/dev/code_coverage/shell_scripts/merge_jest_and_functional.sh
    . src/dev/code_coverage/shell_scripts/copy_mocha_reports.sh
    # zip combined reports
    tar -czf kibana-coverage.tar.gz target/kibana-coverage/**/*
  """, title)
}

def uploadCombinedReports() {
  kibanaPipeline.bash("""
    ls -laR target/kibana-coverage/
  """, "### List Combined Reports"
  )

  kibanaPipeline.uploadGcsArtifact(
    "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/coverage/combined",
    'kibana-coverage.tar.gz'
  )
}

def ingestData(jobName, buildNum, buildUrl, previousSha, title) {
  kibanaPipeline.bash("""
    source src/dev/ci_setup/setup_env.sh
    yarn kbn bootstrap --prefer-offline
    # Using existing target/kibana-coverage folder
    . src/dev/code_coverage/shell_scripts/ingest_coverage.sh '${jobName}' ${buildNum} '${buildUrl}' ${previousSha}
  """, title)
}

def ingestWithVault(jobName, buildNum, buildUrl, previousSha, title) {
  def vaultSecret = 'secret/kibana-issues/prod/coverage/elasticsearch'
  withVaultSecret(secret: vaultSecret, secret_field: 'host', variable_name: 'HOST_FROM_VAULT') {
    withVaultSecret(secret: vaultSecret, secret_field: 'username', variable_name: 'USER_FROM_VAULT') {
      withVaultSecret(secret: vaultSecret, secret_field: 'password', variable_name: 'PASS_FROM_VAULT') {
        ingestData(jobName, buildNum, buildUrl, previousSha, title)
      }
    }
  }
}

def ingest(jobName, buildNumber, buildUrl, timestamp, previousSha, title) {
  withEnv([
    "TIME_STAMP=${timestamp}",
  ]) {
    ingestWithVault(jobName, buildNumber, buildUrl, previousSha, title)
  }
}

def runTests() {
  parallel([
    'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
    'x-pack-intake-agent': {
      withEnv([
        'NODE_ENV=test' // Needed for jest tests only
      ]) {
        workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh')()
      }
    },
    'kibana-oss-agent' : workers.functional(
      'kibana-oss-tests',
      { kibanaPipeline.buildOss() },
      ossProks()
    ),
    'kibana-xpack-agent' : workers.functional(
      'kibana-xpack-tests',
      { kibanaPipeline.buildXpack() },
      xpackProks()
    ),
  ])
}

def ossProks() {
  return [
    'oss-ciGroup1' : kibanaPipeline.ossCiGroupProcess(1),
    'oss-ciGroup2' : kibanaPipeline.ossCiGroupProcess(2),
    'oss-ciGroup3' : kibanaPipeline.ossCiGroupProcess(3),
    'oss-ciGroup4' : kibanaPipeline.ossCiGroupProcess(4),
    'oss-ciGroup5' : kibanaPipeline.ossCiGroupProcess(5),
    'oss-ciGroup6' : kibanaPipeline.ossCiGroupProcess(6),
    'oss-ciGroup7' : kibanaPipeline.ossCiGroupProcess(7),
    'oss-ciGroup8' : kibanaPipeline.ossCiGroupProcess(8),
    'oss-ciGroup9' : kibanaPipeline.ossCiGroupProcess(9),
    'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
    'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
    'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
  ]
}

def xpackProks() {
  return [
    'xpack-ciGroup1' : kibanaPipeline.xpackCiGroupProcess(1),
    'xpack-ciGroup2' : kibanaPipeline.xpackCiGroupProcess(2),
    'xpack-ciGroup3' : kibanaPipeline.xpackCiGroupProcess(3),
    'xpack-ciGroup4' : kibanaPipeline.xpackCiGroupProcess(4),
    'xpack-ciGroup5' : kibanaPipeline.xpackCiGroupProcess(5),
    'xpack-ciGroup6' : kibanaPipeline.xpackCiGroupProcess(6),
    'xpack-ciGroup7' : kibanaPipeline.xpackCiGroupProcess(7),
    'xpack-ciGroup8' : kibanaPipeline.xpackCiGroupProcess(8),
    'xpack-ciGroup9' : kibanaPipeline.xpackCiGroupProcess(9),
    'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
  ]
}

return this
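
The functions above compose into a full coverage run; the exact job wiring is not shown in this commit, so treat the following orchestration (and the timestamp format) as a sketch in the order the helpers suggest:

```groovy
// Sketch: download the previous pointer, run tests, build and publish
// reports, ingest the data, then advance the pointer.
def previousSha = kibanaCoverage.downloadPrevious('### Download Previous Pointer')
kibanaCoverage.runTests()
kibanaCoverage.collectVcsInfo('### Collect VCS Info')
kibanaCoverage.generateReports('### Generate Reports')
kibanaCoverage.uploadCombinedReports()
def timestamp = new Date().format('yyyy-MM-dd_HH-mm-ss') // illustrative format
kibanaCoverage.uploadCoverageStaticSite(timestamp)
kibanaCoverage.ingest(env.JOB_NAME, env.BUILD_NUMBER, env.BUILD_URL, timestamp, previousSha, '### Ingest Coverage')
kibanaCoverage.uploadPrevious('### Upload New Previous Pointer')
```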
|
@ -1,74 +1,57 @@
|
|||
def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) {
|
||||
return {
|
||||
jobRunner('tests-xl', true) {
|
||||
try {
|
||||
doSetup()
|
||||
preWorkerClosure()
|
||||
def withPostBuildReporting(Closure closure) {
|
||||
try {
|
||||
closure()
|
||||
} finally {
|
||||
// disabled in 6.8
|
||||
// catchErrors {
|
||||
// runErrorReporter()
|
||||
// }
|
||||
|
||||
def nextWorker = 1
|
||||
def worker = { workerClosure ->
|
||||
def workerNumber = nextWorker
|
||||
nextWorker++
|
||||
catchErrors {
|
||||
runbld.junit()
|
||||
}
|
||||
|
||||
return {
|
||||
// This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
|
||||
def delay = (workerNumber-1)*20
|
||||
sleep(delay)
|
||||
|
||||
workerClosure(workerNumber)
|
||||
}
|
||||
}
|
||||
|
||||
def workers = [:]
|
||||
workerClosures.each { workerName, workerClosure ->
|
||||
workers[workerName] = worker(workerClosure)
|
||||
}
|
||||
|
||||
parallel(workers)
|
||||
} finally {
|
||||
catchError {
|
||||
uploadAllGcsArtifacts(name)
|
||||
}
|
||||
|
||||
catchError {
|
||||
runbld.junit()
|
||||
}
|
||||
|
||||
catchError {
|
||||
publishJunit()
|
||||
}
|
||||
|
||||
// disabled in 6.8
|
||||
// catchError {
|
||||
// runErrorReporter()
|
||||
// }
|
||||
}
|
||||
catchErrors {
|
||||
publishJunit()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def getPostBuildWorker(name, closure) {
|
||||
return { workerNumber ->
|
||||
def kibanaPort = "61${workerNumber}1"
|
||||
def esPort = "61${workerNumber}2"
|
||||
def esTransportPort = "61${workerNumber}3"
|
||||
def functionalTestProcess(String name, Closure closure) {
|
||||
return { processNumber ->
|
||||
def kibanaPort = "61${processNumber}1"
|
||||
def esPort = "61${processNumber}2"
|
||||
def esTransportPort = "61${processNumber}3"
|
||||
def ingestManagementPackageRegistryPort = "61${processNumber}4"
|
||||
|
||||
withEnv([
|
||||
"PARALLEL_PIPELINE_WORKER_INDEX=${workerNumber}",
|
||||
"CI_PARALLEL_PROCESS_NUMBER=${processNumber}",
|
||||
"PARALLEL_PIPELINE_WORKER_INDEX=${processNumber}",
|
||||
"TEST_KIBANA_HOST=localhost",
|
||||
"TEST_KIBANA_PORT=${kibanaPort}",
|
||||
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
|
||||
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
|
||||
"TEST_ES_TRANSPORT_PORT=${esTransportPort}",
|
||||
"INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
|
||||
"IS_PIPELINE_JOB=1",
|
||||
"JOB=${name}",
|
||||
"KBN_NP_PLUGINS_BUILT=true",
|
||||
]) {
|
||||
closure()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def getOssCiGroupWorker(ciGroup) {
|
||||
return getPostBuildWorker("ciGroup" + ciGroup, {
|
||||
def functionalTestProcess(String name, String script) {
|
||||
return functionalTestProcess(name) {
|
||||
retryable(name) {
|
||||
runbld(script, "Execute ${name}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def ossCiGroupProcess(ciGroup) {
|
||||
return functionalTestProcess("ciGroup" + ciGroup) {
|
||||
withEnv([
|
||||
"CI_GROUP=${ciGroup}",
|
||||
"JOB=kibana-ciGroup${ciGroup}",
|
||||
|
@ -77,11 +60,11 @@ def getOssCiGroupWorker(ciGroup) {
|
|||
runbld("./test/scripts/jenkins_ci_group.sh", "Execute kibana-ciGroup${ciGroup}")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
def getXpackCiGroupWorker(ciGroup) {
|
||||
return getPostBuildWorker("xpack-ciGroup" + ciGroup, {
|
||||
def xpackCiGroupProcess(ciGroup) {
|
||||
return functionalTestProcess("xpack-ciGroup" + ciGroup) {
|
||||
withEnv([
|
||||
"CI_GROUP=${ciGroup}",
|
||||
"JOB=xpack-kibana-ciGroup${ciGroup}",
|
||||
|
@ -90,104 +73,36 @@ def getXpackCiGroupWorker(ciGroup) {
|
|||
runbld("./test/scripts/jenkins_xpack_ci_group.sh", "Execute xpack-kibana-ciGroup${ciGroup}")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
def legacyJobRunner(name) {
|
||||
return {
|
||||
parallel([
|
||||
"${name}": {
|
||||
withEnv([
|
||||
"JOB=${name}",
|
||||
]) {
|
||||
jobRunner('linux && immutable', false) {
|
||||
try {
|
||||
runbld('.ci/run.sh', "Execute ${name}", true)
|
||||
} finally {
|
||||
catchError {
|
||||
uploadAllGcsArtifacts(name)
|
||||
}
|
||||
catchError {
|
||||
publishJunit()
|
||||
}
|
||||
catchError {
|
||||
runErrorReporter()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
])
|
||||
}
|
||||
}
|
||||
|
def jobRunner(label, useRamDisk, closure) {
  node(label) {
    agentInfo.print()

    if (useRamDisk) {
      // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
      def originalWorkspace = env.WORKSPACE
      ws('/tmp/workspace') {
        sh(
          script: """
            mkdir -p /dev/shm/workspace
            mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
            rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
            ln -s /dev/shm/workspace '${originalWorkspace}'
          """,
          label: "Move workspace to RAM - /dev/shm/workspace"
        )
      }
    }

    def scmVars

    // Try to clone from Github up to 8 times, waiting 15 secs between attempts
    retryWithDelay(8, 15) {
      scmVars = checkout scm
    }

    withEnv([
      "CI=true",
      "HOME=${env.JENKINS_HOME}",
      "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
      "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
      "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
      "TEST_BROWSER_HEADLESS=1",
      "GIT_BRANCH=${scmVars.GIT_BRANCH}",
    ]) {
      withCredentials([
        string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
        string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
        string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
      ]) {
        // scm is configured to check out to the ./kibana directory
        dir('kibana') {
          closure()
        }
      }
    }
  }
}

// TODO what should happen if GCS, Junit, or email publishing fails? Unstable build? Failed build?

def uploadGcsArtifact(workerName, pattern) {
  def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" // TODO

def uploadGcsArtifact(uploadPrefix, pattern) {
  googleStorageUpload(
    credentialsId: 'kibana-ci-gcs-plugin',
    bucket: storageLocation,
    bucket: "gs://${uploadPrefix}",
    pattern: pattern,
    sharedPublicly: true,
    showInline: true,
  )
}

def uploadAllGcsArtifacts(workerName) {
def downloadCoverageArtifacts() {
  def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/coverage/"
  def targetLocation = "/tmp/downloaded_coverage"

  sh "mkdir -p '${targetLocation}' && gsutil -m cp -r '${storageLocation}' '${targetLocation}'"
}

def uploadCoverageArtifacts(prefix, pattern) {
  def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/coverage/${prefix}"
  uploadGcsArtifact(uploadPrefix, pattern)
}

def withGcsArtifactUpload(workerName, closure) {
  def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}"
  def ARTIFACT_PATTERNS = [
    'target/kibana-*',
    'target/kibana-security-solution/**/*.png',
    'target/junit/**/*',
    'test/**/screenshots/**/*.png',
    'test/functional/failure_debug/html/*.html',

@@ -196,8 +111,23 @@ def uploadAllGcsArtifacts(workerName) {
    'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
  ]

  ARTIFACT_PATTERNS.each { pattern ->
    uploadGcsArtifact(workerName, pattern)
  withEnv([
    "GCS_UPLOAD_PREFIX=${uploadPrefix}"
  ], {
    try {
      closure()
    } finally {
      catchErrors {
        ARTIFACT_PATTERNS.each { pattern ->
          uploadGcsArtifact(uploadPrefix, pattern)
        }
      }
    }
  })

  if (env.CODE_COVERAGE) {
    sh 'tar -czf kibana-coverage.tar.gz target/kibana-coverage/**/*'
    uploadGcsArtifact("kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/coverage/${workerName}", 'kibana-coverage.tar.gz')
  }
}

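For orientation, a minimal usage sketch of the new helper (illustrative only; the worker name and script path are assumptions, not from the diff):

  // Runs the closure with GCS_UPLOAD_PREFIX set, then uploads any matching
  // artifacts (junit reports, screenshots, etc.) even if the closure failed.
  kibanaPipeline.withGcsArtifactUpload('my-worker') {
    sh './test/scripts/run_some_tests.sh' // hypothetical script
  }
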
@@ -220,7 +150,7 @@ def sendMail() {
}

def sendInfraMail() {
  catchError {
  catchErrors {
    step([
      $class: 'Mailer',
      notifyEveryUnstableBuild: true,

@@ -231,7 +161,7 @@ def sendInfraMail() {
}

def sendKibanaMail() {
  catchError {
  catchErrors {
    def buildStatus = buildUtils.getBuildStatus()
    if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
      emailext(

@@ -284,4 +214,48 @@ def runErrorReporter() {
  )
}

def call(Map params = [:], Closure closure) {
  def config = [timeoutMinutes: 135, checkPrChanges: false, setCommitStatus: false] + params

  stage("Kibana Pipeline") {
    timeout(time: config.timeoutMinutes, unit: 'MINUTES') {
      timestamps {
        ansiColor('xterm') {
          if (config.setCommitStatus) {
            buildState.set('shouldSetCommitStatus', true)
          }
          if (config.checkPrChanges && githubPr.isPr()) {
            pipelineLibraryTests()

            print "Checking PR for changes to determine if CI needs to be run..."

            if (prChanges.areChangesSkippable()) {
              print "No changes requiring CI found in PR, skipping."
              return
            }
          }
          try {
            closure()
          } finally {
            if (config.setCommitStatus) {
              githubCommitStatus.onFinish()
            }
          }
        }
      }
    }
  }
}

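Because this `call` lives in a shared-library var, a Jenkinsfile invokes the whole wrapper by the var's name; a minimal sketch using only the config keys defined above (values are illustrative):

  kibanaPipeline(timeoutMinutes: 120, checkPrChanges: true, setCommitStatus: true) {
    // build/test steps run inside stage + timeout + timestamps + ansiColor
  }
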
def pipelineLibraryTests() {
  whenChanged(['vars/', '.ci/pipeline-library/']) {
    workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
      dir('.ci/pipeline-library') {
        sh './gradlew test'
      }
    }
  }
}

return this

25 vars/kibanaTeamAssign.groovy Normal file

@@ -0,0 +1,25 @@
def loadIngestionPipeline(ingestionPipelineName, title) {
  kibanaPipeline.bash("""
    source src/dev/ci_setup/setup_env.sh
    yarn kbn bootstrap --prefer-offline

    . src/dev/code_coverage/shell_scripts/assign_teams.sh '${ingestionPipelineName}'
  """, title)
}

def loadWithVault(ingestionPipelineName, title) {
  def vaultSecret = 'secret/kibana-issues/prod/coverage/elasticsearch'
  withVaultSecret(secret: vaultSecret, secret_field: 'host', variable_name: 'HOST_FROM_VAULT') {
    withVaultSecret(secret: vaultSecret, secret_field: 'username', variable_name: 'USER_FROM_VAULT') {
      withVaultSecret(secret: vaultSecret, secret_field: 'password', variable_name: 'PASS_FROM_VAULT') {
        loadIngestionPipeline(ingestionPipelineName, title)
      }
    }
  }
}

def load(ingestionPipelineName, title) {
  loadWithVault(ingestionPipelineName, title)
}

return this
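A minimal, hypothetical invocation of this new var (the ingestion pipeline name is illustrative, not taken from the diff):

  // Fetches the coverage-cluster host/user/password from Vault, then runs
  // the team-assignment script against that cluster.
  kibanaTeamAssign.load('my_ingestion_pipeline', 'Load team assignment data')
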
77 vars/prChanges.groovy Normal file

@@ -0,0 +1,77 @@
import groovy.transform.Field

public static @Field PR_CHANGES_CACHE = []

// if all the changed files in a PR match one of these regular
// expressions then CI will be skipped for that PR
def getSkippablePaths() {
  return [
    /^docs\//,
    /^rfcs\//,
    /^.ci\/.+\.yml$/,
    /^.ci\/es-snapshots\//,
    /^.ci\/pipeline-library\//,
    /^\.github\//,
    /\.md$/,
  ]
}

// exclusion regular expressions that will invalidate paths that
// match one of the skippable path regular expressions
def getNotSkippablePaths() {
  return [
    // this file is auto-generated and changes to it need to be validated with CI
    /^docs\/developer\/architecture\/code-exploration.asciidoc$/,
  ]
}

def areChangesSkippable() {
  if (!githubPr.isPr()) {
    return false
  }

  try {
    def skippablePaths = getSkippablePaths()
    def notSkippablePaths = getNotSkippablePaths()
    def files = getChangedFiles()

    // 3000 is the max files GH API will return
    if (files.size() >= 3000) {
      return false
    }

    files = files.findAll { file ->
      def skippable = skippablePaths.find { regex -> file =~ regex } && !notSkippablePaths.find { regex -> file =~ regex }
      return !skippable
    }

    return files.size() < 1
  } catch (ex) {
    buildUtils.printStacktrace(ex)
    print "Error while checking to see if CI is skippable based on changes. Will run CI."
    return false
  }
}

def getChanges() {
  if (!PR_CHANGES_CACHE && env.ghprbPullId) {
    withGithubCredentials {
      def changes = githubPrs.getChanges(env.ghprbPullId)
      if (changes) {
        PR_CHANGES_CACHE.addAll(changes)
      }
    }
  }

  return PR_CHANGES_CACHE
}

def getChangedFiles() {
  def changes = getChanges()
  def changedFiles = changes.collect { it.filename }
  def renamedFiles = changes.collect { it.previousFilename }.findAll { it }

  return changedFiles + renamedFiles
}

return this

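A worked example of the skip rules above (the file lists are illustrative):

  // ['docs/setup.asciidoc', 'README.md']                      -> every file matches a skippable path, CI is skipped
  // ['docs/setup.asciidoc', 'src/core/server.ts']             -> the .ts file matches no skippable path, CI runs
  // ['docs/developer/architecture/code-exploration.asciidoc'] -> matches the not-skippable exclusion, CI runs
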
@@ -2,7 +2,9 @@ def call(retryTimes, delaySecs, closure) {
  retry(retryTimes) {
    try {
      closure()
    } catch (ex) {
    } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException ex) {
      throw ex // Immediately re-throw build abort exceptions, don't sleep first
    } catch (Exception ex) {
      sleep delaySecs
      throw ex
    }

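This mirrors how the helper is already used elsewhere in this diff, e.g. around checkout:

  // Retry a flaky clone up to 8 times, sleeping 15s between attempts; a build
  // abort (FlowInterruptedException) is re-thrown immediately without sleeping.
  retryWithDelay(8, 15) {
    checkout scm
  }
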
@@ -27,7 +27,7 @@ def getFlakyFailures() {
}

def printFlakyFailures() {
  catchError {
  catchErrors {
    def failures = getFlakyFailures()

    if (failures && failures.size() > 0) {

@@ -62,7 +62,17 @@ def getTestFailures() {
  def messages = []
  messages << "*Test Failures*"

  def list = failures.collect { "• <${it.url}|${it.fullDisplayName.split(/\./, 2)[-1]}>" }.join("\n")
  def list = failures.collect {
    def name = it
      .fullDisplayName
      .split(/\./, 2)[-1]
      // Only the following three characters need to be escaped for link text, per Slack's docs
      .replaceAll('&', '&amp;')
      .replaceAll('<', '&lt;')
      .replaceAll('>', '&gt;')

    return "• <${it.url}|${name}>"
  }.join("\n")
  return "*Test Failures*\n${list}"
}

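For example, a failure named `suite & <group> test` would render as the link text `suite &amp; &lt;group&gt; test`, so Slack's own markup characters cannot break the generated link.
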
@@ -100,6 +110,7 @@ def sendFailedBuild(Map params = [:]) {
  ] + params

  def title = "${getStatusIcon()} ${config.title}"
  def message = "${getStatusIcon()} ${config.message}"

  def blocks = [markdownBlock(title)]
  getFailedBuildBlocks().each { blocks << it }

@@ -111,7 +122,7 @@ def sendFailedBuild(Map params = [:]) {
    username: config.username,
    iconEmoji: config.icon,
    color: config.color,
    message: config.message,
    message: message,
    blocks: blocks
  )
}

57 vars/whenChanged.groovy Normal file

@@ -0,0 +1,57 @@
/*
  whenChanged('some/path') { yourCode() } can be used to execute pipeline code in PRs only when changes are detected on paths that you specify.
  The specified code blocks will also always be executed during the non-PR jobs for tracked branches.

  You have the option of passing in path prefixes, or regexes. Single or multiple.
  Path specifications are NOT globby, they are only prefixes.
  Specifying multiple will treat them as ORs.

  Example Usages:
    whenChanged('a/path/prefix/') { someCode() }
    whenChanged(startsWith: 'a/path/prefix/') { someCode() } // Same as above
    whenChanged(['prefix1/', 'prefix2/']) { someCode() }
    whenChanged(regex: /\.test\.js$/) { someCode() }
    whenChanged(regex: [/abc/, /xyz/]) { someCode() }
*/

def call(String startsWithString, Closure closure) {
  return whenChanged([ startsWith: startsWithString ], closure)
}

def call(List<String> startsWithStrings, Closure closure) {
  return whenChanged([ startsWith: startsWithStrings ], closure)
}

def call(Map params, Closure closure) {
  if (!githubPr.isPr()) {
    return closure()
  }

  def files = prChanges.getChangedFiles()
  def hasMatch = false

  if (params.regex) {
    params.regex = [] + params.regex
    print "Checking PR for changes that match: ${params.regex.join(', ')}"
    hasMatch = !!files.find { file ->
      params.regex.find { regex -> file =~ regex }
    }
  }

  if (!hasMatch && params.startsWith) {
    params.startsWith = [] + params.startsWith
    print "Checking PR for changes that start with: ${params.startsWith.join(', ')}"
    hasMatch = !!files.find { file ->
      params.startsWith.find { str -> file.startsWith(str) }
    }
  }

  if (hasMatch) {
    print "Changes found, executing pipeline."
    closure()
  } else {
    print "No changes found, skipping."
  }
}

return this

9 vars/withGithubCredentials.groovy Normal file

@@ -0,0 +1,9 @@
def call(closure) {
  withCredentials([
    string(credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7', variable: 'GITHUB_TOKEN'),
  ]) {
    closure()
  }
}

return this

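As used by prChanges.getChanges() earlier in this diff — a minimal sketch:

  // Runs the closure with GITHUB_TOKEN bound from the stored credential.
  withGithubCredentials {
    def changes = githubPrs.getChanges(env.ghprbPullId)
  }
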
183 vars/workers.groovy Normal file

@@ -0,0 +1,183 @@
// "Workers" in this file will spin up an instance, do some setup etc depending on the configuration, and then execute some work that you define
// e.g. workers.base(name: 'my-worker') { sh "echo 'ready to execute some kibana scripts'" }

def label(size) {
  switch(size) {
    case 'flyweight':
      return 'flyweight'
    case 's':
      return 'docker && linux && immutable'
    case 's-highmem':
      return 'docker && tests-s'
    case 'l':
      return 'docker && tests-l'
    case 'xl':
      return 'docker && tests-xl'
    case 'xxl':
      return 'docker && tests-xxl'
  }

  error "unknown size '${size}'"
}

/*
  The base worker that all of the others use. Will clone the scm (assumed to be kibana), and run kibana bootstrap processes by default.

  Parameters:
    size - size of worker label to use, e.g. 's' or 'xl'
    ramDisk - Should the workspace be mounted in memory? Default: true
    bootstrapped - If true, download kibana dependencies, run kbn bootstrap, etc. Default: true
    name - Name of the worker for display purposes, filenames, etc.
    scm - Jenkins scm configuration for checking out code. Use `null` to disable checkout. Default: inherited from job
*/
def base(Map params, Closure closure) {
  def config = [size: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params
  if (!config.size) {
    error "You must specify an agent size, such as 'xl' or 's', when using workers.base()"
  }

  node(label(config.size)) {
    agentInfo.print()

    if (config.ramDisk) {
      // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
      def originalWorkspace = env.WORKSPACE
      ws('/tmp/workspace') {
        sh(
          script: """
            mkdir -p /dev/shm/workspace
            mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
            rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
            ln -s /dev/shm/workspace '${originalWorkspace}'
          """,
          label: "Move workspace to RAM - /dev/shm/workspace"
        )
      }
    }

    def checkoutInfo = [:]

    if (config.scm) {
      // Try to clone from Github up to 8 times, waiting 15 secs between attempts
      retryWithDelay(8, 15) {
        checkout scm
      }

      dir("kibana") {
        checkoutInfo = getCheckoutInfo()

        // use `checkoutInfo` as a flag to indicate that we've already reported the pending commit status
        if (buildState.get('shouldSetCommitStatus') && !buildState.has('checkoutInfo')) {
          buildState.set('checkoutInfo', checkoutInfo)
          githubCommitStatus.onStart()
        }
      }

      ciStats.reportGitInfo(
        checkoutInfo.branch,
        checkoutInfo.commit,
        checkoutInfo.targetBranch,
        checkoutInfo.mergeBase
      )
    }

    withEnv([
      "CI=true",
      "HOME=${env.JENKINS_HOME}",
      "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
      "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
      "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
      "TEST_BROWSER_HEADLESS=1",
      "GIT_BRANCH=${checkoutInfo.branch}",
    ]) {
      withCredentials([
        string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
        string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
        string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
      ]) {
        // scm is configured to check out to the ./kibana directory
        dir('kibana') {
          if (config.bootstrapped) {
            kibanaPipeline.doSetup()
          }

          closure()
        }
      }
    }
  }
}

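A concrete call to this appears earlier in the diff (pipelineLibraryTests); restated as a minimal sketch:

  // A small agent with no kibana bootstrap and no RAM-disk workspace.
  workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
    sh './gradlew test'
  }
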
// Worker for ci processes. Extends the base worker and adds GCS artifact upload, error reporting, junit processing
def ci(Map params, Closure closure) {
  def config = [ramDisk: true, bootstrapped: true] + params

  return base(config) {
    kibanaPipeline.withGcsArtifactUpload(config.name) {
      kibanaPipeline.withPostBuildReporting {
        closure()
      }
    }
  }
}

// Worker for running the current intake jobs. Just runs a single script after bootstrap.
def intake(jobName, String script) {
  return {
    ci(name: jobName, size: 's-highmem', ramDisk: true) {
      withEnv(["JOB=${jobName}"]) {
        runbld(script, "Execute ${jobName}")
      }
    }
  }
}

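A minimal sketch of an intake worker (the job name and script path are hypothetical):

  // intake() returns a closure, suitable for use as a parallel branch; the
  // script runs once on an s-highmem agent after bootstrap.
  parallel([
    'my-intake-job': workers.intake('my-intake-job', './test/scripts/run_my_checks.sh')
  ])
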
// Worker for running functional tests. Runs a setup process (e.g. the kibana build) then executes a map of closures in parallel (e.g. one for each ciGroup)
def functional(name, Closure setup, Map processes) {
  return {
    parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, size: 'xl')
  }
}

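A minimal sketch of a functional worker (the worker name and setup step are hypothetical; ossCiGroupProcess comes from the kibanaPipeline changes above):

  parallel([
    'functional-agent': workers.functional('my-functional-tests', { sh './scripts/do_build.sh' }, [
      'ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
      'ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
    ])
  ])
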
/*
  Creates a ci worker that can run a setup process, followed by a group of processes in parallel.

  Parameters:
    name: Name of the worker for display purposes, filenames, etc.
    setup: Closure to execute after the agent is bootstrapped, before starting the parallel work
    processes: Map of closures that will execute in parallel after setup. Each closure is passed a unique number.
    delayBetweenProcesses: Number of seconds to wait between starting the parallel processes. Useful to spread the load of heavy init processes, e.g. Elasticsearch starting up. Default: 0
    size: size of worker label to use, e.g. 's' or 'xl'
*/
def parallelProcesses(Map params) {
  def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, size: 'xl'] + params

  ci(size: config.size, name: config.name) {
    config.setup()

    def nextProcessNumber = 1
    def process = { processName, processClosure ->
      def processNumber = nextProcessNumber
      nextProcessNumber++

      return {
        if (config.delayBetweenProcesses && config.delayBetweenProcesses > 0) {
          // This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
          def delay = (processNumber - 1) * config.delayBetweenProcesses
          sleep(delay)
        }

        processClosure(processNumber)
      }
    }

    def processes = [:]
    config.processes.each { processName, processClosure ->
      processes[processName] = process(processName, processClosure)
    }

    parallel(processes)
  }
}

return this
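To make the stagger concrete: with `delayBetweenProcesses: 20`, process 1 starts immediately, process 2 sleeps 20 seconds, process 3 sleeps 40 seconds, and so on, before each closure is invoked with its process number.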