Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
[ci] Remove jenkins config (#173745)
We've migrated off and shut down our instance. These files are no longer needed.
parent 93c26b931b
commit 229e7eff0c

113 changed files with 6 additions and 5140 deletions
@@ -15,17 +15,13 @@
       "build_drafts": false,
       "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^\\/ci$",
       "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^\\/ci$",
-      "skip_ci_labels": ["skip-ci", "jenkins-ci"],
+      "skip_ci_labels": ["skip-ci"],
       "skip_target_branches": ["6.8", "7.11", "7.12"],
       "enable_skippable_commits": true,
       "skip_ci_on_only_changed": [
         "^dev_docs/",
         "^docs/",
         "^rfcs/",
-        "^.ci/.+\\.yml$",
-        "^.ci/es-snapshots/",
-        "^.ci/pipeline-library/",
-        "^.ci/Jenkinsfile_[^/]+$",
         "^\\.github/",
         "\\.md$",
         "\\.mdx$",
@@ -71,10 +67,6 @@
         "^dev_docs/",
         "^docs/",
         "^rfcs/",
-        "^.ci/.+\\.yml$",
-        "^.ci/es-snapshots/",
-        "^.ci/pipeline-library/",
-        "^.ci/Jenkinsfile_[^/]+$",
         "^\\.github/",
         "\\.md$",
         "\\.mdx$",
@@ -1,38 +0,0 @@
-# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable.
-# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts
-
-ARG NODE_VERSION=18.18.2
-
-FROM node:${NODE_VERSION} AS base
-
-RUN apt-get update && \
-    apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
-    libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
-    libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
-    libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
-    libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-11-jre && \
-    rm -rf /var/lib/apt/lists/*
-
-RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
-  && sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
-  && apt-get update \
-  && apt-get install -y rsync jq bsdtar google-chrome-stable \
-  --no-install-recommends \
-  && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-
-RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
-  && curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
-  && unzip vault.zip \
-  && rm vault.zip \
-  && chmod +x vault \
-  && mv vault /usr/local/bin/vault
-
-RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana
-
-COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
-RUN chmod +x /usr/local/bin/bash_standard_lib.sh
-
-COPY ./runbld /usr/local/bin/runbld
-RUN chmod +x /usr/local/bin/runbld
-
-USER kibana
@@ -1,48 +0,0 @@
-#!/bin/groovy
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-kibanaPipeline(timeoutMinutes: 210) {
-  githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') {
-    ciStats.trackBuild {
-      catchErrors {
-        slackNotifications.onFailure(
-          title: "*<${env.BUILD_URL}|[${params.branch}] Baseline Capture Failure>*",
-          message: "[${params.branch}/${params.commit}] Baseline Capture Failure",
-        ) {
-          retryable.enable(2)
-
-          catchErrors {
-            workers.ci(
-              name: 'baseline-worker',
-              size: 'xl',
-              ramDisk: true,
-              runErrorReporter: false,
-              bootstrapped: false
-            ) {
-              withGcpServiceAccount.fromVaultSecret('secret/ci/elastic-kibana/ci-artifacts-key', 'value') {
-                withEnv([
-                  'DISABLE_BOOTSTRAP_VALIDATION=true',
-                ]) {
-                  kibanaPipeline.doSetup()
-                }
-              }
-
-              kibanaPipeline.withCiTaskQueue([parallel: 2]) {
-                catchErrors {
-                  tasks([
-                    kibanaPipeline.functionalTestProcess('xpack-baseline', './test/scripts/jenkins_xpack_baseline.sh'),
-                    kibanaPipeline.scriptTask('Check Public API Docs', 'test/scripts/checks/baseline_plugin_public_api_docs.sh'),
-                  ])
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-
-    kibanaPipeline.sendMail()
-  }
-}
@@ -1,72 +0,0 @@
-#!/bin/groovy
-
-return
-
-def MAXIMUM_COMMITS_TO_CHECK = 10
-def MAXIMUM_COMMITS_TO_BUILD = 5
-
-if (!params.branches_yaml) {
-  error "'branches_yaml' parameter must be specified"
-}
-
-def additionalBranches = []
-
-def branches = readYaml(text: params.branches_yaml) + additionalBranches
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-withGithubCredentials {
-  branches.each { branch ->
-    if (branch == '6.8') {
-      // skip 6.8, it is tracked but we don't need snapshots for it and haven't backported
-      // the baseline capture scripts to it.
-      return;
-    }
-
-    stage(branch) {
-      def commits = getCommits(branch, MAXIMUM_COMMITS_TO_CHECK, MAXIMUM_COMMITS_TO_BUILD)
-
-      commits.take(MAXIMUM_COMMITS_TO_BUILD).each { commit ->
-        catchErrors {
-          githubCommitStatus.create(commit, 'pending', 'Baseline started.', 'kibana-ci-baseline')
-
-          build(
-            propagate: false,
-            wait: false,
-            job: 'elastic+kibana+baseline-capture',
-            parameters: [
-              string(name: 'branch_specifier', value: branch),
-              string(name: 'commit', value: commit),
-            ]
-          )
-        }
-      }
-    }
-  }
-}
-
-def getCommits(String branch, maximumCommitsToCheck, maximumCommitsToBuild) {
-  print "Getting latest commits for ${branch}..."
-  def commits = githubApi.get("repos/elastic/kibana/commits?sha=${branch}").take(maximumCommitsToCheck).collect { it.sha }
-  def commitsToBuild = []
-
-  for (commit in commits) {
-    print "Getting statuses for ${commit}"
-    def status = githubApi.get("repos/elastic/kibana/statuses/${commit}").find { it.context == 'kibana-ci-baseline' }
-    print "Commit '${commit}' already built? ${status ? 'Yes' : 'No'}"
-
-    if (!status) {
-      commitsToBuild << commit
-    } else {
-      // Stop at the first commit we find that's already been triggered
-      break
-    }
-
-    if (commitsToBuild.size() >= maximumCommitsToBuild) {
-      break
-    }
-  }
-
-  return commitsToBuild.reverse() // We want the builds to trigger oldest-to-newest
-}
@@ -1,150 +0,0 @@
-#!/bin/groovy
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-def TASK_PARAM = params.TASK ?: params.CI_GROUP
-// Looks like 'oss:ciGroup:1', 'oss:firefoxSmoke'
-def JOB_PARTS = TASK_PARAM.split(':')
-def IS_XPACK = JOB_PARTS[0] == 'xpack'
-def JOB = JOB_PARTS.size() > 1 ? JOB_PARTS[1] : JOB_PARTS[0]
-def CI_GROUP = JOB_PARTS.size() > 2 ? JOB_PARTS[2] : ''
-def EXECUTIONS = params.NUMBER_EXECUTIONS.toInteger()
-def AGENT_COUNT = getAgentCount(EXECUTIONS)
-def NEED_BUILD = JOB != 'jestIntegration' && JOB != 'apiIntegration'
-
-currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24)
-currentBuild.description = "${params.CI_GROUP}<br />Agents: ${AGENT_COUNT}<br />Executions: ${params.NUMBER_EXECUTIONS}"
-
-kibanaPipeline(timeoutMinutes: 180) {
-  def agents = [:]
-  def workerFailures = []
-
-  def worker = getWorkerFromParams(IS_XPACK, JOB, CI_GROUP)
-
-  for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
-    def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
-
-    agents["agent-${agentNumber}"] = {
-      agentProcess(
-        agentNumber: agentNumber,
-        agentExecutions: agentExecutions,
-        worker: worker,
-        workerFailures: workerFailures,
-        needBuild: NEED_BUILD,
-        isXpack: IS_XPACK,
-        ciGroup: CI_GROUP
-      )
-    }
-  }
-
-  parallel(agents)
-
-  currentBuild.description += ", Failures: ${workerFailures.size()}"
-
-  if (workerFailures.size() > 0) {
-    print "There were ${workerFailures.size()} test suite failures."
-    print "The executions that failed were:"
-    print workerFailures.join("\n")
-    print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
-  }
-}
-
-def agentProcess(Map params = [:]) {
-  def config = [
-    agentNumber: 1,
-    agentExecutions: 0,
-    worker: {},
-    workerFailures: [],
-    needBuild: false,
-    isXpack: false,
-    ciGroup: null,
-  ] + params
-
-  catchErrors {
-    print "Agent ${config.agentNumber} - ${config.agentExecutions} executions"
-
-    withEnv([
-      'IGNORE_SHIP_CI_STATS_ERROR=true',
-    ]) {
-      kibanaPipeline.withTasks([
-        parallel: 20,
-      ]) {
-        task {
-          if (config.needBuild) {
-            kibanaPipeline.buildKibana()
-          }
-
-          for(def i = 0; i < config.agentExecutions; i++) {
-            def taskNumber = i
-            task({
-              withEnv([
-                "REMOVE_KIBANA_INSTALL_DIR=1",
-              ]) {
-                catchErrors {
-                  try {
-                    config.worker()
-                  } catch (ex) {
-                    config.workerFailures << "agent-${config.agentNumber}-${taskNumber}"
-                    throw ex
-                  }
-                }
-              }
-            })
-          }
-        }
-      }
-    }
-  }
-}
-
-def getWorkerFromParams(isXpack, job, ciGroup) {
-  if (!isXpack) {
-    if (job == 'accessibility') {
-      return kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh')
-    } else if (job == 'firefoxSmoke') {
-      return kibanaPipeline.functionalTestProcess('firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh')
-    } else if (job == 'visualRegression') {
-      return kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh')
-    } else if (job == 'jestIntegration') {
-      return kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh')
-    } else if (job == 'apiIntegration') {
-      return kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh')
-    } else if (job == 'pluginFunctional') {
-      return kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh')
-    } else {
-      return kibanaPipeline.ossCiGroupProcess(ciGroup)
-    }
-  }
-
-  if (job == 'accessibility') {
-    return kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh')
-  } else if (job == 'firefoxSmoke') {
-    return kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh')
-  } else if (job == 'visualRegression') {
-    return kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')
-  } else {
-    return kibanaPipeline.xpackCiGroupProcess(ciGroup)
-  }
-}
-
-def getAgentCount(executions) {
-  // Increase agent count every 20 worker processes, up to 3 agents maximum
-  return Math.min(3, 1 + floor(executions/20))
-}
-
-def trunc(str, length) {
-  if (str.size() >= length) {
-    return str.take(length) + "..."
-  }
-
-  return str;
-}
-
-// All of the real rounding/truncating methods are sandboxed
-def floor(num) {
-  return num
-    .toString()
-    .split('\\.')[0]
-    .toInteger()
-}
@@ -1,31 +0,0 @@
-#!/bin/groovy
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-kibanaPipeline(timeoutMinutes: 180) {
-  slackNotifications.onFailure(
-    disabled: !params.NOTIFY_ON_FAILURE,
-    channel: '#security-solution-slack-testing'
-  ) {
-    catchError {
-      withEnv([
-        'CI_PARALLEL_PROCESS_NUMBER=1',
-        'IGNORE_SHIP_CI_STATS_ERROR=true',
-      ]) {
-        def job = 'xpack-securityCypress'
-
-        workers.ci(name: job, size: 'l', ramDisk: true) {
-          kibanaPipeline.bash('test/scripts/jenkins_build_kibana.sh', 'Build Distributable')
-          kibanaPipeline.functionalTestProcess(job, 'test/scripts/jenkins_security_solution_cypress_chrome.sh')()
-          // Temporarily disabled to figure out test flake
-          // kibanaPipeline.functionalTestProcess(job, 'test/scripts/jenkins_security_solution_cypress_firefox.sh')()
-        }
-      }
-    }
-  }
-
-  if (params.NOTIFY_ON_FAILURE) {
-    kibanaPipeline.sendMail(to: 'siem_dev_team@elastic.co')
-  }
-}
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd "$(dirname "${0}")"
-
-cp /usr/local/bin/runbld ./
-cp /usr/local/bin/bash_standard_lib.sh ./
-
-if which docker >/dev/null; then
-  docker build -t kibana-ci -f ./Dockerfile .
-else
-  echo "Docker binary is not available. Skipping the docker build this time."
-fi
@@ -1,46 +0,0 @@
-root:
-  - ciGroup1
-  - ciGroup2
-  - ciGroup3
-  - ciGroup4
-  - ciGroup5
-  - ciGroup6
-  - ciGroup7
-  - ciGroup8
-  - ciGroup9
-  - ciGroup10
-  - ciGroup11
-  - ciGroup12
-
-xpack:
-  - ciGroup1
-  - ciGroup2
-  - ciGroup3
-  - ciGroup4
-  - ciGroup5
-  - ciGroup6
-  - ciGroup7
-  - ciGroup8
-  - ciGroup9
-  - ciGroup10
-  - ciGroup11
-  - ciGroup12
-  - ciGroup13
-  - ciGroup14
-  - ciGroup15
-  - ciGroup16
-  - ciGroup17
-  - ciGroup18
-  - ciGroup19
-  - ciGroup20
-  - ciGroup21
-  - ciGroup22
-  - ciGroup23
-  - ciGroup24
-  - ciGroup25
-  - ciGroup26
-  - ciGroup27
-  - ciGroup28
-  - ciGroup29
-  - ciGroup30
-  - ciGroup31
@@ -1,165 +0,0 @@
-#!/bin/groovy
-
-// This job effectively has two SCM configurations:
-// one for kibana, used to check out this Jenkinsfile (which means it's the job's main SCM configuration), as well as kick-off the downstream verification job
-// one for elasticsearch, used to check out the elasticsearch source before building it
-
-// There are two parameters that drive which branch is checked out for each of these, but they will typically be the same
-// 'branch_specifier' is for kibana / the job itself
-// ES_BRANCH is for elasticsearch
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-def ES_BRANCH = params.ES_BRANCH
-
-if (!ES_BRANCH) {
-  error "Parameter 'ES_BRANCH' must be specified."
-}
-
-currentBuild.displayName += " - ${ES_BRANCH}"
-currentBuild.description = "ES: ${ES_BRANCH}<br />Kibana: ${params.branch_specifier}"
-
-def PROMOTE_WITHOUT_VERIFY = !!params.PROMOTE_WITHOUT_VERIFICATION
-
-timeout(time: 120, unit: 'MINUTES') {
-  timestamps {
-    ansiColor('xterm') {
-      slackNotifications.onFailure {
-        node(workers.label('l')) {
-          catchErrors {
-            def VERSION
-            def SNAPSHOT_ID
-            def DESTINATION
-
-            def scmVars = checkoutEs(ES_BRANCH)
-            def GIT_COMMIT = scmVars.GIT_COMMIT
-            def GIT_COMMIT_SHORT = sh(script: "git rev-parse --short ${GIT_COMMIT}", returnStdout: true).trim()
-
-            buildArchives('to-archive')
-
-            dir('to-archive') {
-              def now = new Date()
-              def date = now.format("yyyyMMdd-HHmmss")
-
-              def filesRaw = sh(script: "ls -1", returnStdout: true).trim()
-              def files = filesRaw
-                .split("\n")
-                .collect { filename ->
-                  // Filename examples
-                  // elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
-                  // elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
-                  def parts = filename.replace("elasticsearch-oss", "oss").split("-")
-
-                  VERSION = VERSION ?: parts[1]
-                  SNAPSHOT_ID = SNAPSHOT_ID ?: "${date}_${GIT_COMMIT_SHORT}"
-                  DESTINATION = DESTINATION ?: "${VERSION}/archives/${SNAPSHOT_ID}"
-
-                  return [
-                    filename: filename,
-                    checksum: filename + '.sha512',
-                    url: "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${DESTINATION}/${filename}".toString(),
-                    version: parts[1],
-                    platform: parts[3],
-                    architecture: parts[4].split('\\.')[0],
-                    license: parts[0] == 'oss' ? 'oss' : 'default',
-                  ]
-                }
-
-              sh 'find * -exec bash -c "shasum -a 512 {} > {}.sha512" \\;'
-
-              def manifest = [
-                bucket: "kibana-ci-es-snapshots-daily/${DESTINATION}".toString(),
-                branch: ES_BRANCH,
-                sha: GIT_COMMIT,
-                sha_short: GIT_COMMIT_SHORT,
-                version: VERSION,
-                generated: now.format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone("UTC")),
-                archives: files,
-              ]
-              def manifestJson = toJSON(manifest).toString()
-              writeFile file: 'manifest.json', text: manifestJson
-
-              upload(DESTINATION, '*.*')
-
-              sh "cp manifest.json manifest-latest.json"
-              upload(VERSION, 'manifest-latest.json')
-            }
-
-            if (PROMOTE_WITHOUT_VERIFY) {
-              esSnapshots.promote(VERSION, SNAPSHOT_ID)
-
-              emailext(
-                to: 'build-kibana@elastic.co',
-                subject: "ES snapshot promoted without verification: ${params.ES_BRANCH}",
-                body: '${SCRIPT,template="groovy-html.template"}',
-                mimeType: 'text/html',
-              )
-            } else {
-              build(
-                propagate: false,
-                wait: false,
-                job: 'elasticsearch+snapshots+verify',
-                parameters: [
-                  string(name: 'branch_specifier', value: branch_specifier),
-                  string(name: 'SNAPSHOT_VERSION', value: VERSION),
-                  string(name: 'SNAPSHOT_ID', value: SNAPSHOT_ID),
-                ]
-              )
-            }
-          }
-
-          kibanaPipeline.sendMail()
-        }
-      }
-    }
-  }
-}
-
-def checkoutEs(branch) {
-  retryWithDelay(8, 15) {
-    return checkout([
-      $class: 'GitSCM',
-      branches: [[name: branch]],
-      doGenerateSubmoduleConfigurations: false,
-      extensions: [],
-      submoduleCfg: [],
-      userRemoteConfigs: [[
-        credentialsId: 'f6c7695a-671e-4f4f-a331-acdce44ff9ba',
-        url: 'git@github.com:elastic/elasticsearch',
-      ]],
-    ])
-  }
-}
-
-def upload(destination, pattern) {
-  return googleStorageUpload(
-    credentialsId: 'kibana-ci-gcs-plugin',
-    bucket: "gs://kibana-ci-es-snapshots-daily/${destination}",
-    pattern: pattern,
-    sharedPublicly: false,
-    showInline: false,
-  )
-}
-
-def buildArchives(destination) {
-  def props = readProperties file: '.ci/java-versions.properties'
-  withEnv([
-    // Select the correct JDK for this branch
-    "PATH=/var/lib/jenkins/.java/${props.ES_BUILD_JAVA}/bin:${env.PATH}",
-
-    // These Jenkins env vars trigger some automation in the elasticsearch repo that we don't want
-    "BUILD_NUMBER=",
-    "JENKINS_URL=",
-    "BUILD_URL=",
-    "JOB_NAME=",
-    "NODE_NAME=",
-  ]) {
-    sh """
-      ./gradlew -Dbuild.docker=true assemble --parallel
-      mkdir -p ${destination}
-      find distribution -type f \\( -name 'elasticsearch-*-*-*-*.tar.gz' -o -name 'elasticsearch-*-*-*-*.zip' \\) -not -path *no-jdk* -not -path *build-context* -exec cp {} ${destination} \\;
-      docker images "docker.elastic.co/elasticsearch/elasticsearch" --format "{{.Tag}}" | xargs -n1 bash -c 'docker save docker.elastic.co/elasticsearch/elasticsearch:\${0} | gzip > ${destination}/elasticsearch-\${0}-docker-image.tar.gz'
-    """
-  }
-}
@@ -1,16 +0,0 @@
-#!/bin/groovy
-
-// Only run this pipeline for 6.8. Higher branches are now running in Buildkite.
-def branches = ['6.8']
-
-branches.each { branch ->
-  build(
-    propagate: false,
-    wait: false,
-    job: 'elasticsearch+snapshots+build',
-    parameters: [
-      string(name: 'branch_specifier', value: branch),
-      string(name: 'ES_BRANCH', value: branch),
-    ]
-  )
-}
@@ -1,61 +0,0 @@
-#!/bin/groovy
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-def SNAPSHOT_VERSION = params.SNAPSHOT_VERSION
-def SNAPSHOT_ID = params.SNAPSHOT_ID
-
-if (!SNAPSHOT_VERSION) {
-  error "Parameter SNAPSHOT_VERSION must be specified"
-}
-
-if (!SNAPSHOT_ID) {
-  error "Parameter SNAPSHOT_ID must be specified"
-}
-
-currentBuild.displayName += " - ${SNAPSHOT_VERSION}"
-currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch_specifier}"
-
-def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"
-
-kibanaPipeline(timeoutMinutes: 210) {
-  catchErrors {
-    slackNotifications.onFailure(
-      title: "*<${env.BUILD_URL}|[${SNAPSHOT_VERSION}] ES Snapshot Verification Failure>*",
-      message: "[${SNAPSHOT_VERSION}] ES Snapshot Verification Failure",
-    ) {
-      retryable.enable(2)
-      withEnv([
-        "ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}",
-        'IGNORE_SHIP_CI_STATS_ERROR=true',
-      ]) {
-        kibanaPipeline.withTasks {
-          tasks([
-            kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh'),
-            kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'),
-          ])
-
-          task {
-            kibanaPipeline.buildKibana(16)
-            tasks.ossCiGroups()
-            tasks.xpackCiGroups()
-            tasks.xpackCiGroupDocker()
-          }
-        }
-      }
-
-      promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
-    }
-  }
-
-  kibanaPipeline.sendMail()
-}
-
-def promoteSnapshot(snapshotVersion, snapshotId) {
-  if (buildUtils.getBuildStatus() == 'SUCCESS') {
-    node(workers.label('s')) {
-      esSnapshots.promote(snapshotVersion, snapshotId)
-    }
-  }
-}
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-if [[ "$(which docker)" != "" && "$(command uname -m)" != "aarch64" ]]; then
-  # cache image used by kibana-load-testing project
-  docker pull "maven:3.6.3-openjdk-8-slim"
-fi
-
-./.ci/packer_cache_for_branch.sh main
-./.ci/packer_cache_for_branch.sh 7.16
@@ -1,68 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-branch="$1"
-checkoutDir="$(pwd)"
-
-function cleanup()
-{
-  if [[ "$branch" != "main" ]]; then
-    rm --preserve-root -rf "$checkoutDir"
-  fi
-
-  exit 0
-}
-
-trap 'cleanup' 0
-
-if [[ "$branch" != "main" ]]; then
-  checkoutDir="/tmp/kibana-$branch"
-  git clone https://github.com/elastic/kibana.git --branch "$branch" --depth 1 "$checkoutDir"
-  cd "$checkoutDir"
-fi
-
-source src/dev/ci_setup/setup.sh;
-
-# download es snapshots
-node scripts/es snapshot --download-only;
-
-# download reporting browsers
-(cd "x-pack" && node ../node_modules/.bin/gulp downloadChromium);
-
-# cache the chromedriver archive
-chromedriverDistVersion="$(node -e "console.log(require('chromedriver').version)")"
-chromedriverPkgVersion="$(node -e "console.log(require('./package.json').devDependencies.chromedriver)")"
-if [ -z "$chromedriverDistVersion" ] || [ -z "$chromedriverPkgVersion" ]; then
-  echo "UNABLE TO DETERMINE CHROMEDRIVER VERSIONS"
-  exit 1
-fi
-mkdir -p .chromedriver
-curl "https://chromedriver.storage.googleapis.com/$chromedriverDistVersion/chromedriver_linux64.zip" > .chromedriver/chromedriver.zip
-echo "$chromedriverPkgVersion" > .chromedriver/pkgVersion
-
-# cache the geckodriver archive
-geckodriverPkgVersion="$(node -e "console.log(require('./package.json').devDependencies.geckodriver)")"
-if [ -z "$geckodriverPkgVersion" ]; then
-  echo "UNABLE TO DETERMINE geckodriver VERSIONS"
-  exit 1
-fi
-mkdir -p ".geckodriver"
-cp "node_modules/geckodriver/geckodriver.tar.gz" .geckodriver/geckodriver.tar.gz
-echo "$geckodriverPkgVersion" > .geckodriver/pkgVersion
-
-echo "Creating bootstrap_cache archive"
-
-# archive cacheable directories
-mkdir -p "$HOME/.kibana/bootstrap_cache"
-tar -cf "$HOME/.kibana/bootstrap_cache/$branch.tar" \
-  .chromium \
-  .es \
-  .chromedriver \
-  .geckodriver \
-  .yarn-local-mirror;
-
-echo "created $HOME/.kibana/bootstrap_cache/$branch.tar"
-
-.ci/build_docker.sh
-
@@ -1,8 +0,0 @@
-# Kibana Jenkins Pipeline Library
-
-## Running tests
-
-```bash
-cd .ci/pipeline-library
-./gradlew test
-```
@@ -1,46 +0,0 @@
-plugins {
-  id 'groovy'
-  id 'idea'
-}
-
-group = 'co.elastic.kibana.pipeline'
-version = '0.0.1'
-
-sourceCompatibility = 1.8
-targetCompatibility = 1.8
-
-repositories {
-  maven { url 'https://repo.jenkins-ci.org/releases/' }
-  maven { url 'https://repo.maven.apache.org/maven2' }
-}
-
-dependencies {
-  implementation 'org.codehaus.groovy:groovy-all:2.4.12'
-  implementation 'org.jenkins-ci.main:jenkins-core:2.23'
-  implementation 'org.jenkins-ci.plugins.workflow:workflow-step-api:2.19@jar'
-  testImplementation 'com.lesfurets:jenkins-pipeline-unit:1.4'
-  testImplementation 'junit:junit:4.12'
-  testImplementation 'org.mockito:mockito-core:2.+'
-  testImplementation 'org.assertj:assertj-core:3.15+' // Temporary https://github.com/jenkinsci/JenkinsPipelineUnit/issues/209
-}
-
-sourceSets {
-  main {
-    groovy {
-      srcDirs = ['vars']
-    }
-  }
-
-  test {
-    groovy {
-      srcDirs = ['src/test']
-    }
-  }
-}
-
-test {
-  testLogging {
-    events 'passed', 'skipped', 'failed'
-    exceptionFormat = 'full'
-  }
-}
Binary file not shown.
@@ -1,5 +0,0 @@
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.4.1-bin.zip
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
.ci/pipeline-library/gradlew (vendored, 185 lines)
@@ -1,185 +0,0 @@
-#!/usr/bin/env sh
-
-#
-# Copyright 2015 the original author or authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-##############################################################################
-##
-##  Gradle start up script for UN*X
-##
-##############################################################################
-
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
-    ls=`ls -ld "$PRG"`
-    link=`expr "$ls" : '.*-> \(.*\)$'`
-    if expr "$link" : '/.*' > /dev/null; then
-        PRG="$link"
-    else
-        PRG=`dirname "$PRG"`"/$link"
-    fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/" >/dev/null
-APP_HOME="`pwd -P`"
-cd "$SAVED" >/dev/null
-
-APP_NAME="Gradle"
-APP_BASE_NAME=`basename "$0"`
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD="maximum"
-
-warn () {
-    echo "$*"
-}
-
-die () {
-    echo
-    echo "$*"
-    echo
-    exit 1
-}
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-nonstop=false
-case "`uname`" in
-  CYGWIN* )
-    cygwin=true
-    ;;
-  Darwin* )
-    darwin=true
-    ;;
-  MINGW* )
-    msys=true
-    ;;
-  NONSTOP* )
-    nonstop=true
-    ;;
-esac
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
-    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
-        # IBM's JDK on AIX uses strange locations for the executables
-        JAVACMD="$JAVA_HOME/jre/sh/java"
-    else
-        JAVACMD="$JAVA_HOME/bin/java"
-    fi
-    if [ ! -x "$JAVACMD" ] ; then
-        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-    fi
-else
-    JAVACMD="java"
-    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
-    MAX_FD_LIMIT=`ulimit -H -n`
-    if [ $? -eq 0 ] ; then
-        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
-            MAX_FD="$MAX_FD_LIMIT"
-        fi
-        ulimit -n $MAX_FD
-        if [ $? -ne 0 ] ; then
-            warn "Could not set maximum file descriptor limit: $MAX_FD"
-        fi
-    else
-        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
-    fi
-fi
-
-# For Darwin, add options to specify how the application appears in the dock
-if $darwin; then
-    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
-fi
-
-# For Cygwin or MSYS, switch paths to Windows format before running java
-if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
-    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
-    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
-
-    JAVACMD=`cygpath --unix "$JAVACMD"`
-
-    # We build the pattern for arguments to be converted via cygpath
-    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
-    SEP=""
-    for dir in $ROOTDIRSRAW ; do
-        ROOTDIRS="$ROOTDIRS$SEP$dir"
-        SEP="|"
-    done
-    OURCYGPATTERN="(^($ROOTDIRS))"
-    # Add a user-defined pattern to the cygpath arguments
-    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
-        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
-    fi
-    # Now convert the arguments - kludge to limit ourselves to /bin/sh
-    i=0
-    for arg in "$@" ; do
-        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
-        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
-
-        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
-            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
-        else
-            eval `echo args$i`="\"$arg\""
-        fi
-        i=`expr $i + 1`
-    done
-    case $i in
-        0) set -- ;;
-        1) set -- "$args0" ;;
-        2) set -- "$args0" "$args1" ;;
-        3) set -- "$args0" "$args1" "$args2" ;;
-        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
-        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
-        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
-        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
-        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
-        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
-    esac
-fi
-
-# Escape application args
-save () {
-    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
-    echo " "
-}
-APP_ARGS=`save "$@"`
-
-# Collect all arguments for the java command, following the shell quoting and substitution rules
-eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
-
-exec "$JAVACMD" "$@"
.ci/pipeline-library/gradlew.bat (vendored, 104 lines)
@@ -1,104 +0,0 @@
-@rem
-@rem Copyright 2015 the original author or authors.
-@rem
-@rem Licensed under the Apache License, Version 2.0 (the "License");
-@rem you may not use this file except in compliance with the License.
-@rem You may obtain a copy of the License at
-@rem
-@rem https://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing, software
-@rem distributed under the License is distributed on an "AS IS" BASIS,
-@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-@rem See the License for the specific language governing permissions and
-@rem limitations under the License.
-@rem
-
-@if "%DEBUG%" == "" @echo off
-@rem ##########################################################################
-@rem
-@rem  Gradle startup script for Windows
-@rem
-@rem ##########################################################################
-
-@rem Set local scope for the variables with windows NT shell
-if "%OS%"=="Windows_NT" setlocal
-
-set DIRNAME=%~dp0
-if "%DIRNAME%" == "" set DIRNAME=.
-set APP_BASE_NAME=%~n0
-set APP_HOME=%DIRNAME%
-
-@rem Resolve any "." and ".." in APP_HOME to make it shorter.
-for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
-
-@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
-
-@rem Find java.exe
-if defined JAVA_HOME goto findJavaFromJavaHome
-
-set JAVA_EXE=java.exe
-%JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto init
-
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:findJavaFromJavaHome
-set JAVA_HOME=%JAVA_HOME:"=%
-set JAVA_EXE=%JAVA_HOME%/bin/java.exe
-
-if exist "%JAVA_EXE%" goto init
-
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:init
-@rem Get command-line arguments, handling Windows variants
-
-if not "%OS%" == "Windows_NT" goto win9xME_args
-
-:win9xME_args
-@rem Slurp the command line arguments.
-set CMD_LINE_ARGS=
-set _SKIP=2
-
-:win9xME_args_slurp
-if "x%~1" == "x" goto execute
-
-set CMD_LINE_ARGS=%*
-
-:execute
-@rem Setup the command line
-
-set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
-
-
-@rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
-
-:end
-@rem End local scope for the variables with windows NT shell
-if "%ERRORLEVEL%"=="0" goto mainEnd
-
-:fail
-rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
-rem the _cmd.exe /c_ return code!
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
-exit /b 1
-
-:mainEnd
-if "%OS%"=="Windows_NT" endlocal
-
-:omega
@@ -1,113 +0,0 @@
-import com.lesfurets.jenkins.unit.*
-import org.junit.Before
-
-class KibanaBasePipelineTest extends BasePipelineTest {
-  Map env = [:]
-  Map params = [:]
-
-  public def Mocks = [
-    TEST_FAILURE_URL: 'https://localhost/',
-    TEST_FAILURE_NAME: 'Kibana Pipeline / kibana-xpack-agent / Chrome X-Pack UI Functional Tests.x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &',
-  ]
-
-  @Before
-  void setUp() {
-    super.setUp()
-
-    env.BRANCH_NAME = 'master'
-    env.BUILD_ID = '1'
-    env.BUILD_DISPLAY_NAME = "#${env.BUILD_ID}"
-
-    env.JENKINS_URL = 'http://jenkins.localhost:8080'
-    env.BUILD_URL = "${env.JENKINS_URL}/job/elastic+kibana+${env.BRANCH_NAME}/${env.BUILD_ID}/".toString()
-
-    env.JOB_BASE_NAME = "elastic / kibana # ${env.BRANCH_NAME}".toString()
-    env.JOB_NAME = env.JOB_BASE_NAME
-
-    env.WORKSPACE = 'WS'
-
-    props([
-      buildUtils: [
-        getBuildStatus: { 'SUCCESS' },
-        printStacktrace: { ex -> print ex },
-      ],
-      githubPr: [
-        isPr: { false },
-      ],
-      jenkinsApi: [ getFailedSteps: { [] } ],
-      testUtils: [ getFailures: { [] } ],
-    ])
-
-    vars([
-      env: env,
-      params: params,
-    ])
-
-    // Some wrappers that can just be mocked to immediately call the closure passed in
-    [
-      'catchError',
-      'catchErrors',
-      'timestamps',
-      'withGithubCredentials',
-    ].each {
-      helper.registerAllowedMethod(it, [Closure.class], null)
-    }
-  }
-
-  void props(Map properties) {
-    properties.each {
-      binding.setProperty(it.key, it.value)
-    }
-  }
-
-  void prop(String propertyName, Object propertyValue) {
-    binding.setProperty(propertyName, propertyValue)
-  }
-
-  void vars(Map variables) {
-    variables.each {
-      binding.setVariable(it.key, it.value)
-    }
-  }
-
-  void var(String variableName, Object variableValue) {
-    binding.setVariable(variableName, variableValue)
-  }
-
-  def fnMock(String name) {
-    return helper.callStack.find { it.methodName == name }
-  }
-
-  def fnMocks(String name) {
-    helper.callStack.findAll { it.methodName == name }
-  }
-
-  void mockFailureBuild() {
-    props([
-      buildUtils: [
-        getBuildStatus: { 'FAILURE' },
-        printStacktrace: { ex -> print ex },
-      ],
-      jenkinsApi: [ getFailedSteps: { [
-        [
-          displayName: 'Check out from version control',
-          logs: 'http://jenkins.localhost:8080',
-        ],
-        [
-          displayName: 'Execute test task',
-          logs: 'http://jenkins.localhost:8080',
-        ],
-      ] } ],
-      testUtils: [
-        getFailures: {
-          return [
-            [
-              url: Mocks.TEST_FAILURE_URL,
-              fullDisplayName: Mocks.TEST_FAILURE_NAME,
-            ]
-          ]
-        },
-      ],
-    ])
-  }
-}
@@ -1,48 +0,0 @@
-import org.junit.*
-import static groovy.test.GroovyAssert.*
-
-class BuildStateTest extends KibanaBasePipelineTest {
-  def buildState
-
-  @Before
-  void setUp() {
-    super.setUp()
-
-    buildState = loadScript("vars/buildState.groovy")
-  }
-
-  @Test
-  void 'get() returns existing data'() {
-    buildState.add('test', 1)
-    def actual = buildState.get('test')
-    assertEquals(1, actual)
-  }
-
-  @Test
-  void 'get() returns null for missing data'() {
-    def actual = buildState.get('missing_key')
-    assertEquals(null, actual)
-  }
-
-  @Test
-  void 'add() does not overwrite existing keys'() {
-    assertTrue(buildState.add('test', 1))
-    assertFalse(buildState.add('test', 2))
-
-    def actual = buildState.get('test')
-
-    assertEquals(1, actual)
-  }
-
-  @Test
-  void 'set() overwrites existing keys'() {
-    assertFalse(buildState.has('test'))
-    buildState.set('test', 1)
-    assertTrue(buildState.has('test'))
-    buildState.set('test', 2)
-
-    def actual = buildState.get('test')
-
-    assertEquals(2, actual)
-  }
-}
@@ -1,87 +0,0 @@
-import org.junit.*
-import static org.mockito.Mockito.*;
-
-class GithubCommitStatusTest extends KibanaBasePipelineTest {
-  def githubCommitStatus
-  def githubApiMock
-  def buildStateMock
-
-  def EXPECTED_STATUS_URL = 'repos/elastic/kibana/statuses/COMMIT_HASH'
-  def EXPECTED_CONTEXT = 'kibana-ci'
-  def EXPECTED_BUILD_URL = 'http://jenkins.localhost:8080/job/elastic+kibana+master/1/'
-
-  interface BuildState {
-    Object get(String key)
-    Object has(String key)
-  }
-
-  interface GithubApi {
-    Object post(String url, Map data)
-  }
-
-  @Before
-  void setUp() {
-    super.setUp()
-
-    buildStateMock = mock(BuildState)
-    githubApiMock = mock(GithubApi)
-
-    when(buildStateMock.has('checkoutInfo')).thenReturn(true)
-    when(buildStateMock.get('checkoutInfo')).thenReturn([ commit: 'COMMIT_HASH', ])
-    when(githubApiMock.post(any(), any())).thenReturn(null)
-
-    props([
-      buildState: buildStateMock,
-      githubApi: githubApiMock,
-    ])
-
-    githubCommitStatus = loadScript("vars/githubCommitStatus.groovy")
-  }
-
-  void verifyStatusCreate(String state, String description) {
-    verify(githubApiMock).post(
-      EXPECTED_STATUS_URL,
-      [
-        'state': state,
-        'description': description,
-        'context': EXPECTED_CONTEXT,
-        'target_url': EXPECTED_BUILD_URL,
-      ]
-    )
-  }
-
-  @Test
-  void 'onStart() should create a pending status'() {
-    githubCommitStatus.onStart()
-    verifyStatusCreate('pending', 'Build started.')
-  }
-
-  @Test
-  void 'onFinish() should create a success status'() {
-    githubCommitStatus.onFinish()
-    verifyStatusCreate('success', 'Build completed successfully.')
-  }
-
-  @Test
-  void 'onFinish() should create an error status for failed builds'() {
-    mockFailureBuild()
-    githubCommitStatus.onFinish()
-    verifyStatusCreate('error', 'Build failed.')
-  }
-
-  @Test
-  void 'onStart() should exit early for PRs'() {
-    prop('githubPr', [ isPr: { true } ])
-
-    githubCommitStatus.onStart()
-    verifyZeroInteractions(githubApiMock)
-  }
-
-  @Test
-  void 'onFinish() should exit early for PRs'() {
-    prop('githubPr', [ isPr: { true } ])
-
-    githubCommitStatus.onFinish()
-    verifyZeroInteractions(githubApiMock)
-  }
-}
@@ -1,113 +0,0 @@
-import org.junit.*
-import static groovy.test.GroovyAssert.*
-
-class PrChangesTest extends KibanaBasePipelineTest {
-  def prChanges
-
-  @Before
-  void setUp() {
-    super.setUp()
-
-    env.ghprbPullId = '1'
-
-    props([
-      githubPr: [
-        isPr: { true },
-      ],
-    ])
-
-    prChanges = loadScript("vars/prChanges.groovy")
-  }
-
-  @Test
-  void 'areChangesSkippable() with no changes'() {
-    props([
-      githubPrs: [
-        getChanges: { [] },
-      ],
-    ])
-
-    assertTrue(prChanges.areChangesSkippable())
-  }
-
-  @Test
-  void 'areChangesSkippable() with skippable changes'() {
-    props([
-      githubPrs: [
-        getChanges: { [
-          [filename: 'docs/test/a-fake-doc.asciidoc'],
-          [filename: 'README.md'],
-        ] },
-      ],
-    ])
-
-    assertTrue(prChanges.areChangesSkippable())
-  }
-
-  @Test
-  void 'areChangesSkippable() with skippable renames'() {
-    props([
-      githubPrs: [
-        getChanges: { [
-          [ filename: 'docs/test/a-fake-doc.asciidoc', previousFilename: 'docs/test/a-different-fake-doc.asciidoc' ],
-          [ filename: 'README.md', previousFilename: 'README-old.md' ],
-        ] },
-      ],
-    ])
-
-    assertTrue(prChanges.areChangesSkippable())
-  }
-
-  @Test
-  void 'areChangesSkippable() with unskippable changes'() {
-    props([
-      githubPrs: [
-        getChanges: { [
-          [filename: 'src/core/index.ts'],
-        ] },
-      ],
-    ])
-
-    assertFalse(prChanges.areChangesSkippable())
-  }
-
-  @Test
-  void 'areChangesSkippable() with skippable and unskippable changes'() {
-    props([
-      githubPrs: [
-        getChanges: { [
-          [filename: 'README.md'],
-          [filename: 'src/core/index.ts'],
-        ] },
-      ],
-    ])
-
-    assertFalse(prChanges.areChangesSkippable())
-  }
-
-  @Test
-  void 'areChangesSkippable() with skippable changes that are in notSkippablePaths'() {
-    props([
-      githubPrs: [
-        getChanges: { [
-          [filename: 'docs/developer/plugin-list.asciidoc'],
-        ] },
-      ],
-    ])
-
-    assertFalse(prChanges.areChangesSkippable())
-  }
-
-  @Test
-  void 'areChangesSkippable() with plugin readme changes'() {
-    props([
-      githubPrs: [
-        getChanges: { [
-          [filename: 'src/plugins/foo/README.asciidoc'],
-        ] },
-      ],
-    ])
-
-    assertFalse(prChanges.areChangesSkippable())
-  }
-}
@@ -1,185 +0,0 @@
-import org.junit.*
-import static groovy.test.GroovyAssert.*
-
-class SlackNotificationsTest extends KibanaBasePipelineTest {
-  def slackNotifications
-
-  @Before
-  void setUp() {
-    super.setUp()
-
-    helper.registerAllowedMethod('slackSend', [Map.class], null)
-    prop('buildState', loadScript("vars/buildState.groovy"))
-    slackNotifications = loadScript('vars/slackNotifications.groovy')
-  }
-
-  @Test
-  void 'getTestFailures() should properly format failure steps'() {
-    mockFailureBuild()
-
-    def failureMessage = slackNotifications.getTestFailures()
-
-    assertEquals(
-      "*Test Failures*\n• <${Mocks.TEST_FAILURE_URL}|x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &>",
-      failureMessage
-    )
-  }
-
-  @Test
-  void 'sendFailedBuild() should call slackSend() with an in-progress message'() {
-    mockFailureBuild()
-
-    slackNotifications.sendFailedBuild()
-
-    def args = fnMock('slackSend').args[0]
-
-    def expected = [
-      channel: '#kibana-operations-alerts',
-      username: 'Kibana Operations',
-      iconEmoji: ':jenkins:',
-      color: 'danger',
-      message: ':hourglass_flowing_sand: elastic / kibana # master #1',
-    ]
-
-    expected.each {
-      assertEquals(it.value.toString(), args[it.key].toString())
-    }
-
-    assertEquals(
-      ":hourglass_flowing_sand: *<http://jenkins.localhost:8080/job/elastic+kibana+master/1/|elastic / kibana # master #1>*",
-      args.blocks[0].text.text.toString()
-    )
-
-    assertEquals(
-      "*Failed Steps*\n• <http://jenkins.localhost:8080|Execute test task>",
-      args.blocks[1].text.text.toString()
-    )
-
-    assertEquals(
-      "*Test Failures*\n• <https://localhost/|x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &>",
-      args.blocks[2].text.text.toString()
-    )
-  }
-
-  @Test
-  void 'sendFailedBuild() should call slackSend() with message'() {
-    mockFailureBuild()
-
-    slackNotifications.sendFailedBuild(isFinal: true)
-
-    def args = fnMock('slackSend').args[0]
-
-    def expected = [
-      channel: '#kibana-operations-alerts',
-      username: 'Kibana Operations',
-      iconEmoji: ':jenkins:',
-      color: 'danger',
-      message: ':broken_heart: elastic / kibana # master #1',
-    ]
-
-    expected.each {
-      assertEquals(it.value.toString(), args[it.key].toString())
-    }
-
-    assertEquals(
-      ":broken_heart: *<http://jenkins.localhost:8080/job/elastic+kibana+master/1/|elastic / kibana # master #1>*",
-      args.blocks[0].text.text.toString()
-    )
-
-    assertEquals(
-      "*Failed Steps*\n• <http://jenkins.localhost:8080|Execute test task>",
-      args.blocks[1].text.text.toString()
-    )
-
-    assertEquals(
-      "*Test Failures*\n• <https://localhost/|x-pack/test/functional/apps/fake/test·ts.Fake test <Component> should & pass &>",
-      args.blocks[2].text.text.toString()
-    )
-  }
-
-  @Test
-  void 'sendFailedBuild() should call slackSend() with a backup message when first attempt fails'() {
-    mockFailureBuild()
-    def counter = 0
-    helper.registerAllowedMethod('slackSend', [Map.class], { ++counter > 1 })
-    slackNotifications.sendFailedBuild(isFinal: true)
-
-    def args = fnMocks('slackSend')[1].args[0]
-
-    def expected = [
-      channel: '#kibana-operations-alerts',
-      username: 'Kibana Operations',
-      iconEmoji: ':jenkins:',
-      color: 'danger',
-      message: ':broken_heart: elastic / kibana # master #1',
-    ]
-
-    expected.each {
-      assertEquals(it.value.toString(), args[it.key].toString())
-    }
-
-    assertEquals(
-      ":broken_heart: *<http://jenkins.localhost:8080/job/elastic+kibana+master/1/|elastic / kibana # master #1>*" +
-        "\n\nFirst attempt at sending this notification failed. Please check the build.",
-      args.blocks[0].text.text.toString()
-    )
-  }
-
-  @Test
-  void 'sendFailedBuild() should call slackSend() with a channel id and timestamp on second call'() {
-    mockFailureBuild()
-    helper.registerAllowedMethod('slackSend', [Map.class], { [ channelId: 'CHANNEL_ID', ts: 'TIMESTAMP' ] })
-    slackNotifications.sendFailedBuild(isFinal: false)
-    slackNotifications.sendFailedBuild(isFinal: true)
-
-    def args = fnMocks('slackSend')[1].args[0]
-
-    def expected = [
-      channel: 'CHANNEL_ID',
-      timestamp: 'TIMESTAMP',
-      username: 'Kibana Operations',
-      iconEmoji: ':jenkins:',
-      color: 'danger',
-      message: ':broken_heart: elastic / kibana # master #1',
-    ]
-
-    expected.each {
-      assertEquals(it.value.toString(), args[it.key].toString())
-    }
-  }
-
-  @Test
-  void 'getTestFailures() should truncate list of failures to 10'() {
-    prop('testUtils', [
-      getFailures: {
-        return (1..12).collect {
-          return [
-            url: Mocks.TEST_FAILURE_URL,
-            fullDisplayName: "Failure #${it}",
-          ]
-        }
-      },
-    ])
-
-    def message = (String) slackNotifications.getTestFailures()
-
-    assertTrue("Message ends with truncated indicator", message.endsWith("...and 2 more"))
-    assertTrue("Message contains Failure #10", message.contains("Failure #10"))
-    assertTrue("Message does not contain Failure #11", !message.contains("Failure #11"))
-  }
-
-  @Test
-  void 'shortenMessage() should truncate a long message, but leave parts that fit'() {
-    assertEquals('Hello\nHello\n[...truncated...]', slackNotifications.shortenMessage('Hello\nHello\nthis is a long string', 29))
-  }
-
-  @Test
-  void 'shortenMessage() should not modify a short message'() {
-    assertEquals('Hello world', slackNotifications.shortenMessage('Hello world', 11))
-  }
-
-  @Test
-  void 'shortenMessage() should truncate an entire message with only one part'() {
-    assertEquals('[...truncated...]', slackNotifications.shortenMessage('Hello world this is a really long message', 40))
-  }
-}
@@ -1 +0,0 @@
-../../vars
@@ -1,6 +0,0 @@
-# This file is not picked up by anything automatically
-# It is used by being passed as an argument to runbld, when automatic processing of junit reports is not desired
-profiles:
-  - ".*": # Match any job
-      tests:
-        junit-filename-pattern: false
Jenkinsfile (vendored, 26 lines)
@@ -1,26 +0,0 @@
-#!/bin/groovy
-
-if (!env.ghprbPullId) {
-  print "Non-PR builds are now in Buildkite."
-  return
-}
-
-library 'kibana-pipeline-library'
-kibanaLibrary.load()
-
-kibanaPipeline(timeoutMinutes: 210, checkPrChanges: true, setCommitStatus: true) {
-  slackNotifications.onFailure(disabled: !params.NOTIFY_ON_FAILURE) {
-    githubPr.withDefaultPrComments {
-      ciStats.trackBuild(requireSuccess: githubPr.isTrackedBranchPr()) {
-        catchError {
-          retryable.enable()
-          kibanaPipeline.allCiTasks()
-        }
-      }
-    }
-  }
-
-  if (params.NOTIFY_ON_FAILURE) {
-    kibanaPipeline.sendMail()
-  }
-}
@@ -15,9 +15,6 @@ Tip: Look for a `README.md` in a folder to learn about its contents.
 
 Managed by the operations team to set up a new buildkite ci system. Can be ignored by folks outside the Operations team.
 
-## [.ci](https://github.com/elastic/kibana/tree/main/.ci)
-
-Managed by the operations team to contain Jenkins settings. Can be ignored by folks outside the Operations team.
 
 ## [.github](https://github.com/elastic/kibana/tree/main/.github)
 
@@ -81,11 +81,12 @@ export TEST_BROWSER_HEADLESS=1
 export TEST_THROTTLE_NETWORK=1
 ----------
 
-** When running against a Cloud deployment, some tests are not applicable. To skip tests that do not apply, use --exclude-tag. An example shell file can be found at: {kibana-blob}test/scripts/jenkins_cloud.sh[test/scripts/jenkins_cloud.sh]
+** When running against a Cloud deployment, some tests are not applicable. To skip tests that do not apply, use --exclude-tag.
 +
 ["source", "shell"]
 ----------
 node scripts/functional_test_runner --exclude-tag skipCloud
+node scripts/functional_test_runner --exclude-tag skipMKI
 ----------
 
 [discrete]
@@ -118,7 +118,7 @@ Inflammatory feedback such as "this is crap" isn't feedback at all. It's both me
 Establishing a comprehensive checklist for all of the things that should happen in all possible pull requests is impractical, but that doesn't mean we lack a concrete set of minimum requirements that we can enumerate. The following items should be double checked for any pull request:
 
 * CLA check passes
-* Jenkins job runs and passes
+* CI job runs and passes
 * Adheres to the spirit of our various styleguides
 * Has thorough unit test coverage
 * Automated tests provide high confidence the change continues to work without manual verification
@@ -1,118 +0,0 @@
#!/bin/bash

set -e

function checkout_sibling {
  project=$1
  targetDir=$2
  useExistingParamName=$3
  useExisting="$(eval "echo "\$$useExistingParamName"")"
  repoAddress="https://github.com/"

  if [ -z ${useExisting:+x} ]; then
    if [ -d "$targetDir" ]; then
      echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$WORKSPACE]!"
      echo
      echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling."
      exit 1
    fi

    # read by clone_target_is_valid, and checkout_clone_target populated by pick_clone_target
    cloneAuthor=""
    cloneBranch=""

    function clone_target_is_valid {

      echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}"
      if [[ -n "$(git ls-remote --heads "${repoAddress}${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
        return 0
      else
        return 1
      fi
    }

    function pick_clone_target {
      echo "To develop Kibana features against a specific branch of ${project} and being able to"
      echo "test that feature also on CI, the CI is trying to find branches on ${project} with the same name as"
      echo "the Kibana branch (first on your fork and then upstream) before building from master."
      echo "picking which branch of ${project} to clone:"
      if [[ -n "$PR_AUTHOR" && -n "$PR_SOURCE_BRANCH" ]]; then
        cloneAuthor="$PR_AUTHOR"
        cloneBranch="$PR_SOURCE_BRANCH"
        if clone_target_is_valid ; then
          return 0
        fi
      fi

      cloneAuthor="elastic"
      cloneBranch="$GIT_BRANCH"
      if clone_target_is_valid ; then
        return 0
      fi

      cloneBranch="${PR_TARGET_BRANCH:-$KIBANA_PKG_BRANCH}"
      if clone_target_is_valid ; then
        return 0
      fi

      cloneBranch="$KIBANA_PKG_BRANCH"
      if clone_target_is_valid; then
        return 0
      fi

      echo "failed to find a valid branch to clone"
      return 1
    }

    function checkout_clone_target {
      pick_clone_target

      if [[ "$cloneAuthor/$cloneBranch" != "elastic/$KIBANA_PKG_BRANCH" ]]; then
        echo " -> Setting TEST_ES_FROM=source so that ES in tests will be built from $cloneAuthor/$cloneBranch"
        export TEST_ES_FROM=source
      fi

      echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..."
      git clone -b "$cloneBranch" "${repoAddress}${cloneAuthor}/${project}.git" "$targetDir" --depth=1
      echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)"
      echo
    }

    checkout_clone_target
  else
    if [ -d "$targetDir" ]; then
      echo "Using existing '${project}' checkout"
    else
      echo "You have defined '${useExistingParamName}' but no existing ${targetDir} directory exists!"
      exit 2
    fi
  fi
}

checkout_sibling "elasticsearch" "${WORKSPACE}/elasticsearch" "USE_EXISTING_ES"
export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}

# Set the JAVA_HOME based on the Java property file in the ES repo
# This assumes the naming convention used on CI (ex: ~/.java/java10)
ES_DIR="$WORKSPACE/elasticsearch"
ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties


if [ ! -f "$ES_JAVA_PROP_PATH" ]; then
  echo "Unable to set JAVA_HOME, $ES_JAVA_PROP_PATH does not exist"
  exit 1
fi

# While sourcing the property file would currently work, we want
# to support the case where whitespace surrounds the equals.
# This has the added benefit of explicitly exporting property values
ES_BUILD_JAVA="$(grep "^ES_BUILD_JAVA" "$ES_JAVA_PROP_PATH" | cut -d'=' -f2 | tr -d '[:space:]')"
export ES_BUILD_JAVA

if [ -z "$ES_BUILD_JAVA" ]; then
  echo "Unable to set JAVA_HOME, ES_BUILD_JAVA not present in $ES_JAVA_PROP_PATH"
  exit 1
fi

echo "Setting JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA"
export JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
@@ -1,31 +0,0 @@
#!/usr/bin/env bash

set -e

targetBranch="${PR_TARGET_BRANCH:-$GIT_BRANCH}"
bootstrapCache="$HOME/.kibana/bootstrap_cache/$targetBranch.tar"

###
### Extract the bootstrap cache that we create in the packer_cache.sh script
###
if [ -f "$bootstrapCache" ]; then
  echo "extracting bootstrap_cache from $bootstrapCache";
  tar -xf "$bootstrapCache";
else
  branchBootstrapCache="$HOME/.kibana/bootstrap_cache/$(jq -r .branch package.json).tar"

  if [ -f "$branchBootstrapCache" ]; then
    echo "extracting bootstrap_cache from $branchBootstrapCache";
    tar -xf "$branchBootstrapCache";
  else
    echo ""
    echo ""
    echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~";
    echo "     bootstrap_cache missing";
    echo "     looked for '$bootstrapCache'";
    echo "     and '$branchBootstrapCache'";
    echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~";
    echo ""
    echo ""
  fi
fi
@@ -1,45 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

const execa = require('execa');
const pkg = require('../../../package.json');

const { stdout: commit } = execa.sync('git', ['rev-parse', 'HEAD']);
const shortCommit = commit.slice(0, 8);

const isPr = !!process.env.ghprbPullId;
if (isPr && !(process.env.PR_TARGET_BRANCH && process.env.PR_SOURCE_BRANCH)) {
  throw new Error(
    'getPercyEnv: Unable to determine percy environment in prs without [PR_TARGET_BRANCH] and [PR_SOURCE_BRANCH] environment variables'
  );
}

let branch;
if (isPr) {
  branch = process.env.PR_SOURCE_BRANCH;
} else {
  if (!process.env.branch_specifier) {
    throw new Error('getPercyEnv: [branch_specifier] environment variable required');
  }

  branch = process.env.branch_specifier.split('refs/heads/')[1];

  if (!branch) {
    throw new Error(
      `getPercyEnv: [branch_specifier=${process.env.branch_specifier}] must start with 'refs/heads/'`
    );
  }
}

console.log(`export PERCY_PARALLEL_TOTAL=2;`);
console.log(
  `export PERCY_PARALLEL_NONCE="${shortCommit}/${isPr ? 'PR' : branch}/${process.env.BUILD_ID}";`
);
console.log(`export PERCY_BRANCH="${branch}";`);
// percy snapshots always target pkg.branch, so that feature branches can be based on master/7.x/etc.
console.log(`export PERCY_TARGET_BRANCH="${isPr ? process.env.PR_TARGET_BRANCH : pkg.branch}";`);
@@ -1,39 +0,0 @@
#!/usr/bin/env bash

set -e

if [ -z "$VAULT_SECRET_ID" ]; then
  if [ -n "$GITHUB_TOKEN" ] && [ -n "$KIBANA_CI_REPORTER_KEY" ] && [ -n "$PERCY_TOKEN" ]; then
    echo " -- secrets already loaded from vault";
  else
    echo ""
    echo ""
    echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~";
    echo "    VAULT_SECRET_ID not set, not loading tokens into env";
    echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~";
    echo ""
    echo ""
  fi
else
  # load shared helpers to get `retry` function
  source /usr/local/bin/bash_standard_lib.sh

  set +x

  # export after define to avoid https://github.com/koalaman/shellcheck/wiki/SC2155
  VAULT_TOKEN=$(retry 5 vault write -field=token auth/approle/login role_id="$VAULT_ROLE_ID" secret_id="$VAULT_SECRET_ID")
  export VAULT_TOKEN

  # Set GITHUB_TOKEN for reporting test failures
  GITHUB_TOKEN=$(retry 5 vault read -field=github_token secret/kibana-issues/dev/kibanamachine)
  export GITHUB_TOKEN

  KIBANA_CI_REPORTER_KEY=$(retry 5 vault read -field=value secret/kibana-issues/dev/kibanamachine-reporter)
  export KIBANA_CI_REPORTER_KEY

  PERCY_TOKEN=$(retry 5 vault read -field=value secret/kibana-issues/dev/percy)
  export PERCY_TOKEN

  # remove vault related secrets
  unset VAULT_ROLE_ID VAULT_SECRET_ID VAULT_TOKEN VAULT_ADDR
fi
@@ -1,35 +0,0 @@
#!/usr/bin/env bash

set -e

source src/dev/ci_setup/setup_env.sh true

echo " -- KIBANA_DIR='$KIBANA_DIR'"
echo " -- XPACK_DIR='$XPACK_DIR'"
echo " -- PARENT_DIR='$PARENT_DIR'"
echo " -- KIBANA_PKG_BRANCH='$KIBANA_PKG_BRANCH'"
echo " -- TEST_ES_SNAPSHOT_VERSION='$TEST_ES_SNAPSHOT_VERSION'"

###
### install dependencies
###
echo " -- installing node.js dependencies"
yarn kbn bootstrap --verbose

###
### Download es snapshots
###
echo " -- downloading es snapshot"
node scripts/es snapshot --download-only;

###
### verify no git modifications caused by bootstrap
###
if [[ "$DISABLE_BOOTSTRAP_VALIDATION" != "true" ]]; then
  GIT_CHANGES="$(git ls-files --modified)"
  if [ "$GIT_CHANGES" ]; then
    echo -e "\n${RED}ERROR: 'yarn kbn bootstrap' caused changes to the following files:${C_RESET}\n"
    echo -e "$GIT_CHANGES\n"
    exit 1
  fi
fi
@@ -1,172 +0,0 @@
#!/usr/bin/env bash

set -e

if [[ "$CI_ENV_SETUP" ]]; then
  return 0
fi

installNode=$1

dir="$(pwd)"
cacheDir="$HOME/.kibana"

RED='\033[0;31m'
C_RESET='\033[0m' # Reset color

export NODE_OPTIONS="$NODE_OPTIONS --max-old-space-size=4096"

###
### Since the Jenkins logging output collector doesn't look like a TTY
### Node/Chalk and other color libs disable their color output. But Jenkins
### can handle color fine, so this forces https://github.com/chalk/supports-color
### to enable color support in Chalk and other related modules.
###
export FORCE_COLOR=1

###
### APM tracking
###
export ELASTIC_APM_ENVIRONMENT=ci

###
### check that we seem to be in a kibana project
###
if [ -f "$dir/package.json" ] && [ -f "$dir/.node-version" ]; then
  echo "Setting up node.js and yarn in $dir"
else
  echo "${RED}src/dev/ci_setup/setup.sh must be run within a kibana repo${C_RESET}"
  exit 1
fi


export KIBANA_DIR="$dir"
export XPACK_DIR="$KIBANA_DIR/x-pack"

parentDir="$(cd "$KIBANA_DIR/.."; pwd)"
export PARENT_DIR="$parentDir"

kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
export KIBANA_PKG_BRANCH="$kbnBranch"

export WORKSPACE="${WORKSPACE:-$PARENT_DIR}"

###
### download node
###
nodeVersion="$(cat "$dir/.node-version")"
nodeDir="$cacheDir/node/$nodeVersion"
nodeBin="$nodeDir/bin"
hostArch="$(command uname -m)"
case "${hostArch}" in
  x86_64 | amd64) nodeArch="x64" ;;
  aarch64) nodeArch="arm64" ;;
  *) nodeArch="${hostArch}" ;;
esac
classifier="$nodeArch.tar.gz"

UNAME=$(uname)
OS="linux"
if [[ "$UNAME" = *"MINGW64_NT"* ]]; then
  OS="win"
  nodeBin="$HOME/node"
  classifier="x64.zip"
elif [[ "$UNAME" == "Darwin" ]]; then
  OS="darwin"
fi
echo " -- Running on OS: $OS"

nodeUrl="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/dist/v$nodeVersion/node-v$nodeVersion-${OS}-${classifier}"

if [[ "$installNode" == "true" ]]; then
  echo " -- node: version=v${nodeVersion} dir=$nodeDir"

  echo " -- setting up node.js"
  if [ -x "$nodeBin/node" ] && [ "$("$nodeBin/node" --version)" == "v$nodeVersion" ]; then
    echo " -- reusing node.js install"
  else
    if [ -d "$nodeDir" ]; then
      echo " -- clearing previous node.js install"
      rm -rf "$nodeDir"
    fi

    echo " -- downloading node.js from $nodeUrl"
    mkdir -p "$nodeDir"
    if [[ "$OS" == "win" ]]; then
      nodePkg="$nodeDir/${nodeUrl##*/}"
      curl --silent -L -o "$nodePkg" "$nodeUrl"
      unzip -qo "$nodePkg" -d "$nodeDir"
      mv "${nodePkg%.*}" "$nodeBin"
    else
      curl --silent -L "$nodeUrl" | tar -xz -C "$nodeDir" --strip-components=1
    fi
  fi
fi

###
### "install" node into this shell
###
export PATH="$nodeBin:$PATH"

if [[ "$installNode" == "true" || ! $(which yarn) ]]; then
  ###
  ### downloading yarn
  ###
  yarnVersion="$(node -e "console.log(String(require('./package.json').engines.yarn || '').replace(/^[^\d]+/,''))")"
  npm install -g "yarn@^${yarnVersion}"
fi

###
### setup yarn offline cache
###
yarn config set yarn-offline-mirror "$cacheDir/yarn-offline-cache"

###
### "install" yarn into this shell
###
yarnGlobalDir="$(yarn global bin)"
export PATH="$PATH:$yarnGlobalDir"

# use a proxy to fetch chromedriver/geckodriver asset
export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHROMEDRIVER_CDNBINARIESURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export RE2_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export SASS_BINARY_SITE="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-sass"
export CYPRESS_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/cypress"

export CHECKS_REPORTER_ACTIVE=false

# This is mainly for release-manager builds, which run in an environment that doesn't have Chrome installed
if [[ "$(which google-chrome-stable)" || "$(which google-chrome)" ]]; then
  echo "Chrome detected, setting DETECT_CHROMEDRIVER_VERSION=true"
  export DETECT_CHROMEDRIVER_VERSION=true
  export CHROMEDRIVER_FORCE_DOWNLOAD=true
else
  echo "Chrome not detected, installing default chromedriver binary for the package version"
fi

### only run on pr jobs for elastic/kibana, checks-reporter doesn't work for other repos
if [[ "$ghprbPullId" && "$ghprbGhRepository" == 'elastic/kibana' ]] ; then
  export CHECKS_REPORTER_ACTIVE=true
fi


source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh"

ES_DIR="$WORKSPACE/elasticsearch"
ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties

if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then
  ES_BUILD_JAVA="$(grep "^ES_BUILD_JAVA" "$ES_JAVA_PROP_PATH" | cut -d'=' -f2 | tr -d '[:space:]')"
  export ES_BUILD_JAVA

  if [ -z "$ES_BUILD_JAVA" ]; then
    echo "Unable to set JAVA_HOME, ES_BUILD_JAVA not present in $ES_JAVA_PROP_PATH"
    exit 1
  fi

  echo "Setting JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA"
  export JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
fi

export CI_ENV_SETUP=true
@@ -1,19 +0,0 @@
#!/usr/bin/env bash

set -e

###
### skip chromium download, use the system chrome install
###
export PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
PUPPETEER_EXECUTABLE_PATH="$(command -v google-chrome-stable)"
export PUPPETEER_EXECUTABLE_PATH

###
### Set Percy parallel build support environment vars
###
eval "$(node ./src/dev/ci_setup/get_percy_env)"
echo " -- PERCY_PARALLEL_NONCE='$PERCY_PARALLEL_NONCE'"
echo " -- PERCY_PARALLEL_TOTAL='$PERCY_PARALLEL_TOTAL'"
echo " -- PERCY_BRANCH='$PERCY_BRANCH'"
echo " -- PERCY_TARGET_BRANCH='$PERCY_TARGET_BRANCH'"
@@ -37,7 +37,6 @@ export const IGNORE_FILE_GLOBS = [
  'packages/core/apps/core-apps-server-internal/assets/fonts/**/*',
  'src/dev/code_coverage/ingest_coverage/integration_tests/mocks/**/*',
  'packages/kbn-utility-types/test-d/**/*',
  '**/Jenkinsfile*',
  'Dockerfile*',
  'vars/*',
  '.ci/pipeline-library/**/*',
@@ -6,7 +6,7 @@
 * Side Public License, v 1.
 */

// Please also add new aliases to test/scripts/jenkins_storybook.sh
// Please also add new aliases to .buildkite/scripts/steps/storybooks/build_and_upload.ts
//
// If you wish for your Storybook to be built and included in CI, also add your
// alias to .buildkite/scripts/steps/storybooks/build_and_upload.ts
@@ -1,9 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

###
### rebuild plugin api docs to ensure it's not out of date
###
echo " -- building api docs"
node --max-old-space-size=12000 scripts/build_api_docs

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/build_kibana_platform_plugins --validate-limits

@@ -1,10 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

# Runs pre-commit hook script for the files touched in the last commit.
# That way we can ensure a set of quick commit checks earlier as we removed
# the pre-commit hook installation by default.
# If files are more than 200 we will skip it and just use
# the further ci steps that already check linting and file casing for the entire repo.
"$(dirname "${0}")/commit_check_runner.sh"

@@ -1,13 +0,0 @@
#!/usr/bin/env bash

run_quick_commit_checks() {
  echo "!!!!!!!! ATTENTION !!!!!!!!
That check is intended to provide earlier CI feedback after we remove the automatic install for the local pre-commit hook.
If you want, you can still manually install the pre-commit hook locally by running 'node scripts/register_git_hook locally'
!!!!!!!!!!!!!!!!!!!!!!!!!!!
"

  node scripts/precommit_hook.js --ref HEAD~1..HEAD --max-files 200
}

run_quick_commit_checks

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/check_file_casing --quiet

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/i18n_check --ignore-missing

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/check_jest_configs

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/check_licenses --dev

@@ -1,19 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

###
### rebuild plugin list to ensure it's not out of date
###
echo " -- building plugin list docs"
node scripts/build_plugin_list_docs

###
### verify no git modifications
###
GIT_CHANGES="$(git ls-files --modified)"
if [ "$GIT_CHANGES" ]; then
  echo -e "\n${RED}ERROR: 'node scripts/build_plugin_list_docs' caused changes to the following files:${C_RESET}\n"
  echo -e "$GIT_CHANGES\n"
  exit 1
fi

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/telemetry_check

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/test_hardening

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

yarn kbn run-in-packages test

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/check_ts_projects

@@ -1,13 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/build_ts_refs \
  --clean \
  --no-cache \
  --force

node scripts/type_check

echo " -- building api docs"
node --max-old-space-size=12000 scripts/build_api_docs

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/notice --validate

@@ -1,8 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

node scripts/functional_tests \
  --debug --bail \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --config test/accessibility/config.ts;
@@ -1,11 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

echo " -> Running APM cypress tests"
cd "$XPACK_DIR"

node plugins/apm/scripts/test/e2e.js

echo ""
echo ""

@@ -1,9 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

cd test/plugin_functional/plugins/kbn_sample_panel_action;
if [[ ! -d "target" ]]; then
  yarn build;
fi
cd -;
@@ -1,42 +0,0 @@
#!/usr/bin/env bash

cd "$KIBANA_DIR"
source src/dev/ci_setup/setup_env.sh

if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
  ./test/scripts/jenkins_build_plugins.sh
fi

# doesn't persist, also set in kibanaPipeline.groovy
export KBN_NP_PLUGINS_BUILT=true

# Do not build kibana for code coverage run
if [[ -z "$CODE_COVERAGE" ]] ; then
  echo " -> building and extracting default Kibana distributable for use in functional tests"
  node scripts/build --debug

  echo " -> shipping metrics from build to ci-stats"
  node scripts/ship_ci_stats \
    --metrics target/optimizer_bundle_metrics.json \
    --metrics build/kibana/node_modules/@kbn/ui-shared-deps-src/shared_built_assets/metrics.json

  linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
  installDir="$KIBANA_DIR/install/kibana"
  mkdir -p "$installDir"
  tar -xzf "$linuxBuild" -C "$installDir" --strip=1
  cp "$linuxBuild" "$WORKSPACE/kibana-default.tar.gz"

  mkdir -p "$WORKSPACE/kibana-build"
  cp -pR install/kibana/. $WORKSPACE/kibana-build/

  echo " -> Archive built plugins"
  shopt -s globstar
  tar -zcf \
    "$WORKSPACE/kibana-default-plugins.tar.gz" \
    x-pack/plugins/**/target/public \
    x-pack/test/**/target/public \
    examples/**/target/public \
    x-pack/examples/**/target/public \
    test/**/target/public
  shopt -u globstar
fi
@@ -1,93 +0,0 @@
#!/usr/bin/env bash

while getopts s: flag
do
  case "${flag}" in
    s) simulations=${OPTARG};;
  esac
done
echo "Simulation classes: $simulations";

cd "$KIBANA_DIR"
source src/dev/ci_setup/setup_env.sh

if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
  ./test/scripts/jenkins_xpack_build_plugins.sh
fi

echo " -> Configure Metricbeat monitoring"
# Configure Metricbeat monitoring for Kibana and ElasticSearch, ingest monitoring data into Kibana Stats cluster
# Getting the URL
TOP="$(curl -L http://snapshots.elastic.co/latest/master.json)"
MB_BUILD=$(echo $TOP | sed 's/.*"version" : "\(.*\)", "build_id.*/\1/')
echo $MB_BUILD
MB_BUILD_ID=$(echo $TOP | sed 's/.*"build_id" : "\(.*\)", "manifest_url.*/\1/')

URL=https://snapshots.elastic.co/${MB_BUILD_ID}/downloads/beats/metricbeat/metricbeat-${MB_BUILD}-linux-x86_64.tar.gz
URL=https://artifacts.elastic.co/downloads/beats/metricbeat/metricbeat-7.11.0-linux-x86_64.tar.gz
echo $URL
# Downloading the Metricbeat package
while [ 1 ]; do
  wget -q --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 0 --continue --no-check-certificate --tries=3 $URL
  if [ $? = 0 ]; then break; fi; # check return value, break if successful (0)
  sleep 1s;
done;

# Install Metricbeat
echo "untar metricbeat and config"
#tar -xzf metricbeat-${MB_BUILD}-linux-x86_64.tar.gz
tar -xzf metricbeat-7.11.0-linux-x86_64.tar.gz
#mv metricbeat-${MB_BUILD}-linux-x86_64 metricbeat-install
mv metricbeat-7.11.0-linux-x86_64 metricbeat-install

# Configure Metricbeat
echo " -> Changing metricbeat config"
pushd ../kibana-load-testing
cp cfg/metricbeat/elasticsearch-xpack.yml $KIBANA_DIR/metricbeat-install/modules.d/elasticsearch-xpack.yml
cp cfg/metricbeat/kibana-xpack.yml $KIBANA_DIR/metricbeat-install/modules.d/kibana-xpack.yml
echo "fields.build: ${BUILD_ID}" >> cfg/metricbeat/metricbeat.yml
echo "path.config: ${KIBANA_DIR}/metricbeat-install" >> cfg/metricbeat/metricbeat.yml
echo "cloud.auth: ${USER_FROM_VAULT}:${PASS_FROM_VAULT}" >> cfg/metricbeat/metricbeat.yml
cp cfg/metricbeat/metricbeat.yml $KIBANA_DIR/metricbeat-install/metricbeat.yml
# Disable system monitoring: enabled for now to have more data
#mv $KIBANA_DIR/metricbeat-install/modules.d/system.yml $KIBANA_DIR/metricbeat-install/modules.d/system.yml.disabled
echo " -> Building puppeteer project"
cd puppeteer
yarn install && yarn build
popd

# doesn't persist, also set in kibanaPipeline.groovy
export KBN_NP_PLUGINS_BUILT=true

echo " -> Building and extracting default Kibana distributable for use in functional tests"
cd "$KIBANA_DIR"
node scripts/build --debug
linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$KIBANA_DIR/install/kibana"
mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1

mkdir -p "$WORKSPACE/kibana-build"
cp -pR install/kibana/. $WORKSPACE/kibana-build/

echo " -> Setup env for tests"
source test/scripts/jenkins_test_setup_xpack.sh

# Start Metricbeat
echo " -> Starting metricbeat"
pushd $KIBANA_DIR/metricbeat-install
nohup ./metricbeat > metricbeat.log 2>&1 &
popd

echo " -> Running gatling load testing"
export GATLING_SIMULATIONS="$simulations"
node scripts/functional_tests \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --config test/load/config.ts;


echo " -> Simulations run is finished"

# Show output of Metricbeat. Disabled. Enable for debug purposes
#echo "output of metricbeat.log"
#cat $KIBANA_DIR/metricbeat-install/metricbeat.log
@@ -1,23 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

echo " -> building kibana platform plugins"
node scripts/build_kibana_platform_plugins \
  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/health_gateway/plugins" \
  --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/common/plugins" \
  --scan-dir "$KIBANA_DIR/examples" \
  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/common/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/plugins" \
  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
  --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \
  --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \
  --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \
  --scan-dir "$XPACK_DIR/examples" \
  --workers 12
@@ -1,38 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

if [[ -z "$CODE_COVERAGE" ]]; then
  echo " -> Running functional and api tests"

  node scripts/functional_tests \
    --debug --bail \
    --kibana-install-dir "$KIBANA_INSTALL_DIR" \
    --include-tag "ciGroup$CI_GROUP"

  if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then
    source test/scripts/jenkins_build_kbn_sample_panel_action.sh
    ./test/scripts/test/plugin_functional.sh
    ./test/scripts/test/health_gateway.sh
    ./test/scripts/test/interpreter_functional.sh
  fi
else
  echo " -> Running Functional tests with code coverage"
  export NODE_OPTIONS=--max_old_space_size=8192

  echo " -> making hard link clones"
  cd ..
  cp -RlP kibana "kibana${CI_GROUP}"
  cd "kibana${CI_GROUP}"

  echo " -> running tests from the clone folder"
  node scripts/functional_tests --debug --include-tag "ciGroup$CI_GROUP" --exclude-tag "skipCoverage" || true;

  echo " -> moving junit output, silently fail in case of no report"
  mkdir -p ../kibana/target/junit
  mv target/junit/* ../kibana/target/junit/ || echo "copying junit failed"

  echo " -> copying screenshots and html for failures"
  cp -r test/functional/screenshots/* ../kibana/test/functional/screenshots/ || echo "copying screenshots failed"
  cp -r test/functional/failure_debug ../kibana/test/functional/ || echo "copying html failed"
fi
@@ -1,26 +0,0 @@
#!/usr/bin/env bash

# This script runs kibana tests compatible with cloud.
#
# The cloud instance setup is done in the elastic/elastic-stack-testing framework,
# where the following environment variables are set pointing to the cloud instance.
#
# export TEST_KIBANA_HOSTNAME=
# export TEST_KIBANA_PROTOCOL=
# export TEST_KIBANA_PORT=
# export TEST_KIBANA_USER=
# export TEST_KIBANA_PASS=
#
# export TEST_ES_HOSTNAME=
# export TEST_ES_PROTOCOL=
# export TEST_ES_PORT=
# export TEST_ES_USER=
# export TEST_ES_PASS=
#

set -e

source "$(dirname $0)/../../src/dev/ci_setup/setup.sh"

export TEST_BROWSER_HEADLESS=1
node scripts/functional_test_runner --debug --exclude-tag skipCloud $@
@@ -1,9 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

node scripts/functional_tests \
  --bail --debug \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --include-tag "includeFirefox" \
  --config test/functional/config.firefox.js;

@@ -1,12 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

echo " -> Running fleet cypress tests"
cd "$XPACK_DIR"

cd x-pack/plugins/fleet
yarn --cwd x-pack/plugins/fleet cypress:run

echo ""
echo ""

@@ -1,14 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

echo " -> Running osquery cypress tests"
cd "$XPACK_DIR"

node scripts/functional_tests \
  --debug --bail \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --config test/osquery_cypress/cli_config.ts

echo ""
echo ""

@@ -1,15 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

cd test/plugin_functional/plugins/kbn_sample_panel_action;
if [[ ! -d "target" ]]; then
  yarn build;
fi
cd -;

pwd

./test/scripts/test/plugin_functional.sh
./test/scripts/test/health_gateway.sh
./test/scripts/test/interpreter_functional.sh
@@ -1,2 +0,0 @@
# This file just exists to give runbld something to invoke before processing junit reports
echo 'Processing junit reports with runbld...'

@@ -1,14 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

echo " -> Running security solution cypress tests"
cd "$XPACK_DIR"

node scripts/functional_tests \
  --debug --bail \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --config test/security_solution_cypress/cli_config.ts

echo ""
echo ""

@@ -1,6 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/load_env_keys.sh
source src/dev/ci_setup/extract_bootstrap_cache.sh
source src/dev/ci_setup/setup.sh
source src/dev/ci_setup/checkout_sibling_es.sh
@@ -1,32 +0,0 @@
#!/usr/bin/env bash
set -e

CURRENT_DIR=$(pwd)

# Copy everything except node_modules into the current workspace
rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules
rsync -a ${WORKSPACE}/kibana/.??* .

# Symlink all non-root, non-fixture node_modules into our new workspace
cd ${WORKSPACE}/kibana
find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
cd "${CURRENT_DIR}"

# Symlink all of the individual root-level node_modules into the node_modules/ directory
mkdir -p node_modules
ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/
ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/

# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked
unlink node_modules/@kbn
unlink node_modules/css-loader
unlink node_modules/style-loader

# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink
unlink node_modules/val-loader

cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/
cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/
cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/
cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/
@@ -1,33 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

cd "$KIBANA_DIR"

yarn storybook --site apm
yarn storybook --site canvas
yarn storybook --site cell_actions
yarn storybook --site ci_composite
yarn storybook --site content_management
yarn storybook --site custom_integrations
yarn storybook --site dashboard
yarn storybook --site dashboard_enhanced
yarn storybook --site data
yarn storybook --site embeddable
yarn storybook --site expression_error
yarn storybook --site expression_image
yarn storybook --site expression_metric
yarn storybook --site expression_repeat_image
yarn storybook --site expression_reveal_image
yarn storybook --site expression_shape
yarn storybook --site expression_tagcloud
yarn storybook --site fleet
yarn storybook --site infra
yarn storybook --site kibana_react
yarn storybook --site lists
yarn storybook --site observability
yarn storybook --site presentation
yarn storybook --site security_solution
yarn storybook --site solution_side_nav
yarn storybook --site shared_ux
yarn storybook --site ui_actions_enhanced
@@ -1,22 +0,0 @@
#!/usr/bin/env bash

set -e

function post_work() {
  set +e
  if [[ -z "$REMOVE_KIBANA_INSTALL_DIR" && -z "$KIBANA_INSTALL_DIR" && -d "$KIBANA_INSTALL_DIR" ]]; then
    rm -rf "$REMOVE_KIBANA_INSTALL_DIR"
  fi
}

trap 'post_work' EXIT

export TEST_BROWSER_HEADLESS=1

source src/dev/ci_setup/setup_env.sh

# For parallel workspaces, we should copy the .es directory from the root, because it should already have downloaded snapshots in it
# This isn't part of jenkins_setup_parallel_workspace.sh just because not all tasks require ES
if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then
  cp -R $WORKSPACE/kibana/.es ./
fi
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup.sh

if [[ -z "$CODE_COVERAGE" ]]; then
  destDir="$WORKSPACE/kibana-build-${TASK_QUEUE_PROCESS_ID:-$CI_PARALLEL_PROCESS_NUMBER}"

  if [[ ! -d $destDir ]]; then
    mkdir -p $destDir
    cp -pR "$WORKSPACE/kibana-build/." $destDir/
  fi

  export KIBANA_INSTALL_DIR="$destDir"
fi

@@ -1,16 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup.sh

if [[ -z "$CODE_COVERAGE" ]]; then
  destDir="$WORKSPACE/kibana-build-${TASK_QUEUE_PROCESS_ID:-$CI_PARALLEL_PROCESS_NUMBER}"

  if [[ ! -d $destDir ]]; then
    mkdir -p $destDir
    cp -pR "$WORKSPACE/kibana-build/." $destDir/
  fi

  export KIBANA_INSTALL_DIR="$(realpath $destDir)"

  cd "$XPACK_DIR"
fi
@@ -1,11 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

echo " -> Running synthetics @elastic/synthetics tests"
cd "$XPACK_DIR"

node plugins/synthetics/scripts/e2e.js

echo ""
echo ""

@@ -1,11 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

echo " -> Running User Experience plugin @elastic/synthetics tests"
cd "$XPACK_DIR"

node plugins/ux/scripts/e2e.js

echo ""
echo ""

@@ -1,8 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

node scripts/functional_tests \
  --debug --bail \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --config test/accessibility/config.ts;
@@ -1,24 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh
source "$KIBANA_DIR/src/dev/ci_setup/setup_percy.sh"

echo " -> building and extracting default Kibana distributable"
cd "$KIBANA_DIR"
node scripts/build --debug

echo " -> shipping metrics from build to ci-stats"
node scripts/ship_ci_stats \
  --metrics target/optimizer_bundle_metrics.json \
  --metrics build/kibana/node_modules/@kbn/ui-shared-deps-src/shared_built_assets/metrics.json

linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
installDir="$KIBANA_DIR/install/kibana"
mkdir -p "$installDir"
tar -xzf "$linuxBuild" -C "$installDir" --strip=1

mkdir -p "$WORKSPACE/kibana-build"
cp -pR install/kibana/. $WORKSPACE/kibana-build/

cd "$KIBANA_DIR"
source "test/scripts/jenkins_xpack_saved_objects_field_metrics.sh"

@@ -1,19 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

echo " -> building kibana platform plugins"
node scripts/build_kibana_platform_plugins \
  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
  --scan-dir "$KIBANA_DIR/test/common/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/plugins" \
  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
  --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \
  --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \
  --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \
  --scan-dir "$KIBANA_DIR/examples" \
  --scan-dir "$XPACK_DIR/examples" \
  --workers 12
@@ -1,34 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

if [[ -z "$CODE_COVERAGE" ]]; then
  echo " -> Running functional and api tests"

  node scripts/functional_tests \
    --debug --bail \
    --kibana-install-dir "$KIBANA_INSTALL_DIR" \
    --include-tag "ciGroup$CI_GROUP"

  echo ""
  echo ""
else
  echo " -> Running X-Pack functional tests with code coverage"
  export NODE_OPTIONS=--max_old_space_size=8192

  echo " -> making hard link clones"
  cd ..
  cp -RlP kibana "kibana${CI_GROUP}"
  cd "kibana${CI_GROUP}/x-pack"

  echo " -> running tests from the clone folder"
  node scripts/functional_tests --debug --include-tag "ciGroup$CI_GROUP" --exclude-tag "skipCoverage" || true;

  echo " -> moving junit output, silently fail in case of no report"
  mkdir -p ../../kibana/target/junit
  mv ../target/junit/* ../../kibana/target/junit/ || echo "copying junit failed"

  echo " -> copying screenshots and html for failures"
  cp -r test/functional/screenshots/* ../../kibana/x-pack/test/functional/screenshots/ || echo "copying screenshots failed"
  cp -r test/functional/failure_debug ../../kibana/x-pack/test/functional/ || echo "copying html failed"
fi

@@ -1,10 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

node scripts/functional_tests \
  --debug --bail \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --include-tag "includeFirefox" \
  --config test/functional/config.firefox.js \
  --config test/functional_embedded/config.firefox.ts;

@@ -1,8 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_xpack.sh

node scripts/functional_tests \
  --debug --bail \
  --kibana-install-dir "$KIBANA_INSTALL_DIR" \
  --config test/saved_objects_field_count/config.ts;
@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/eslint --no-cache

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/stylelint

@@ -1,8 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/functional_tests \
  --config test/api_integration/config.js \
  --bail \
  --debug

@@ -1,9 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

node scripts/functional_tests \
  --config test/health_gateway/config.ts \
  --bail \
  --debug \
  --kibana-install-dir $KIBANA_INSTALL_DIR

@@ -1,9 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

node scripts/functional_tests \
  --config test/interpreter_functional/config.ts \
  --bail \
  --debug \
  --kibana-install-dir $KIBANA_INSTALL_DIR

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node --max-old-space-size=5120 scripts/jest_integration --ci

@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source src/dev/ci_setup/setup_env.sh

node scripts/jest --ci --maxWorkers=6

@@ -1,8 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

node scripts/functional_tests \
  --config test/plugin_functional/config.ts \
  --bail \
  --debug

@@ -1,19 +0,0 @@
#!/usr/bin/env bash

source test/scripts/jenkins_test_setup_oss.sh

node scripts/functional_tests \
  --config test/server_integration/http/ssl/config.js \
  --config test/server_integration/http/ssl_redirect/config.js \
  --config test/server_integration/http/platform/config.ts \
  --config test/server_integration/http/ssl_with_p12/config.js \
  --config test/server_integration/http/ssl_with_p12_intermediate/config.js \
  --bail \
  --debug \
  --kibana-install-dir $KIBANA_INSTALL_DIR

# Tests that must be run against source in order to build test plugins
node scripts/functional_tests \
  --config test/server_integration/http/platform/config.status.ts \
  --bail \
  --debug
@@ -1,40 +0,0 @@
def print() {
  catchError(catchInterruptions: false, buildResult: null) {
    def startTime = sh(script: "date -d '-3 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
    def endTime = sh(script: "date -d '+1 hour 30 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()

    def resourcesUrl =
      (
        "https://infra-stats.elastic.co/app/kibana#/visualize/edit/8bd92360-1b92-11ea-b719-aba04518cc34" +
        "?_g=(time:(from:'${startTime}',to:'${endTime}'))" +
        "&_a=(query:'host.name:${env.NODE_NAME}')"
      )
      .replaceAll("'", '%27') // Need to escape ' because of the shell echo below, but can't really replace "'" with "\'" because of groovy sandbox
      .replaceAll(/\)$/, '%29') // This is just here because the URL parsing in the Jenkins console doesn't work right

    def logsStartTime = sh(script: "date -d '-3 minutes' +%s", returnStdout: true).trim()
    def logsUrl =
      (
        "https://infra-stats.elastic.co/app/infra#/logs" +
        "?_g=()&flyoutOptions=(flyoutId:!n,flyoutVisibility:hidden,surroundingLogsId:!n)" +
        "&logFilter=(expression:'host.name:${env.NODE_NAME}',kind:kuery)" +
        "&logPosition=(position:(time:${logsStartTime}000),streamLive:!f)"
      )
      .replaceAll("'", '%27')
      .replaceAll('\\)', '%29')

    sh script: """
      set +x
      echo 'Resource Graph:'
      echo '${resourcesUrl}'
      echo ''
      echo 'Agent Logs:'
      echo '${logsUrl}'
      echo ''
      echo 'SSH Command:'
      echo "ssh -F ssh_config \$(hostname --ip-address)"
    """, label: "Worker/Agent/Node debug links"
  }
}

return this
@@ -1,30 +0,0 @@
import groovy.transform.Field

public static @Field JENKINS_BUILD_STATE = [:]

def add(key, value) {
  if (!buildState.JENKINS_BUILD_STATE.containsKey(key)) {
    buildState.JENKINS_BUILD_STATE[key] = value
    return true
  }

  return false
}

def set(key, value) {
  buildState.JENKINS_BUILD_STATE[key] = value
}

def get(key) {
  return buildState.JENKINS_BUILD_STATE[key]
}

def has(key) {
  return buildState.JENKINS_BUILD_STATE.containsKey(key)
}

def get() {
  return buildState.JENKINS_BUILD_STATE
}

return this
@@ -1,15 +0,0 @@
// Basically, this is a shortcut for catchError(catchInterruptions: false) {}
// By default, catchError will swallow aborts/timeouts, which we almost never want
// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps
def call(Map params = [:], Closure closure) {
  try {
    closure()
  } catch (ex) {
    params.catchInterruptions = false
    catchError(params) {
      throw ex
    }
  }
}

return this
@@ -1,50 +0,0 @@
def promote(snapshotVersion, snapshotId) {
  def snapshotDestination = "${snapshotVersion}/archives/${snapshotId}"
  def MANIFEST_URL = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${snapshotDestination}/manifest.json"

  dir('verified-manifest') {
    def verifiedSnapshotFilename = 'manifest-latest-verified.json'

    sh """
      curl -O '${MANIFEST_URL}'
      mv manifest.json ${verifiedSnapshotFilename}
    """

    googleStorageUpload(
      credentialsId: 'kibana-ci-gcs-plugin',
      bucket: "gs://kibana-ci-es-snapshots-daily/${snapshotVersion}",
      pattern: verifiedSnapshotFilename,
      sharedPublicly: false,
      showInline: false,
    )
  }

  // This would probably be more efficient if we could just copy using gsutil and specifying buckets for src and dest
  // But we don't currently have access to the GCS credentials in a way that can be consumed easily from here...
  dir('transfer-to-permanent') {
    googleStorageDownload(
      credentialsId: 'kibana-ci-gcs-plugin',
      bucketUri: "gs://kibana-ci-es-snapshots-daily/${snapshotDestination}/*",
      localDirectory: '.',
      pathPrefix: snapshotDestination,
    )

    def manifestJson = readFile file: 'manifest.json'
    writeFile(
      file: 'manifest.json',
      text: manifestJson.replace("kibana-ci-es-snapshots-daily/${snapshotDestination}", "kibana-ci-es-snapshots-permanent/${snapshotVersion}")
    )

    // Ideally we would have some delete logic here before uploading,
    // But we don't currently have access to the GCS credentials in a way that can be consumed easily from here...
    googleStorageUpload(
      credentialsId: 'kibana-ci-gcs-plugin',
      bucket: "gs://kibana-ci-es-snapshots-permanent/${snapshotVersion}",
      pattern: '*.*',
      sharedPublicly: false,
      showInline: false,
    )
  }
}

return this
@@ -1,50 +0,0 @@
def call(branchOverride) {
  def repoInfo = [
    branch: branchOverride ?: env.ghprbSourceBranch,
    targetBranch: env.ghprbTargetBranch,
    targetsTrackedBranch: true
  ]

  if (repoInfo.branch == null) {
    if (!(params.branch_specifier instanceof String)) {
      throw new Exception(
        "Unable to determine branch automatically, either pass a branch name to getCheckoutInfo() or use the branch_specifier param."
      )
    }

    // strip prefix from the branch specifier to make it consistent with ghprbSourceBranch
    repoInfo.branch = params.branch_specifier.replaceFirst(/^(refs\/heads\/|origin\/)/, "")
  }

  repoInfo.commit = sh(
    script: "git rev-parse HEAD",
    label: "determining checked out sha",
    returnStdout: true
  ).trim()

  if (repoInfo.targetBranch) {
    // Try to clone fetch from Github up to 8 times, waiting 15 secs between attempts
    retryWithDelay(8, 15) {
      sh(
        script: "git fetch origin ${repoInfo.targetBranch}",
        label: "fetch latest from '${repoInfo.targetBranch}' at origin"
      )
    }

    repoInfo.mergeBase = sh(
      script: "git merge-base HEAD FETCH_HEAD",
      label: "determining merge point with '${repoInfo.targetBranch}' at origin",
      returnStdout: true
    ).trim()

    def pkgJson = readFile("package.json")
    def releaseBranch = toJSON(pkgJson).branch
    repoInfo.targetsTrackedBranch = releaseBranch == repoInfo.targetBranch
  }

  print "repoInfo: ${repoInfo}"

  return repoInfo
}

return this
@@ -1,57 +0,0 @@
def defaultCommit() {
  if (buildState.has('checkoutInfo')) {
    return buildState.get('checkoutInfo').commit
  }
}

def onStart(commit = defaultCommit(), context = 'kibana-ci') {
  catchError {
    if (githubPr.isPr() || !commit) {
      return
    }

    create(commit, 'pending', 'Build started.', context)
  }
}

def onFinish(commit = defaultCommit(), context = 'kibana-ci') {
  catchError {
    if (githubPr.isPr() || !commit) {
      return
    }

    def status = buildUtils.getBuildStatus()

    if (status == 'SUCCESS' || status == 'UNSTABLE') {
      create(commit, 'success', 'Build completed successfully.', context)
    } else if(status == 'ABORTED') {
      create(commit, 'error', 'Build aborted or timed out.', context)
    } else {
      create(commit, 'error', 'Build failed.', context)
    }
  }
}

def trackBuild(commit, context, Closure closure) {
  onStart(commit, context)
  catchError {
    closure()
  }
  onFinish(commit, context)
}

// state: error|failure|pending|success
def create(sha, state, description, context, targetUrl = null) {
  targetUrl = targetUrl ?: env.BUILD_URL

  withGithubCredentials {
    return githubApi.post("repos/elastic/kibana/statuses/${sha}", [
      state: state,
      description: description,
      context: context,
      target_url: targetUrl.toString()
    ])
  }
}

return this
@ -1,369 +0,0 @@
|
|||
/**
|
||||
Wraps the main/important part of a job, executes it, and then publishes a comment to GitHub with the status.
|
||||
|
||||
It will check for the existence of GHPRB env variables before doing any actual PR work,
|
||||
so it can be used to wrap code that is executed in both PR and non-PR contexts.
|
||||
|
||||
Inside the comment, it will hide a JSON blob containing build data (status, etc).
|
||||
|
||||
Then, the next time it posts a comment, it will:
|
||||
1. Read the previous comment and parse the json
|
||||
2. Create a new comment, add a summary of up to 5 previous builds to it, and append this build's data to the hidden JSON
|
||||
3. Delete the old comment
|
||||
|
||||
So, there is only ever one build status comment on a PR at any given time, the most recent one.
|
||||
*/
|
||||
def withDefaultPrComments(closure) {
|
||||
catchErrors {
|
||||
// kibanaPipeline.notifyOnError() needs to know if comments are enabled, so lets track it with a global
|
||||
// isPr() just ensures this functionality is skipped for non-PR builds
|
||||
buildState.set('PR_COMMENTS_ENABLED', isPr())
|
||||
catchErrors {
|
||||
closure()
|
||||
}
|
||||
sendComment(true)
|
||||
}
|
||||
}
|
||||
|
||||
def sendComment(isFinal = false) {
|
||||
if (!buildState.get('PR_COMMENTS_ENABLED')) {
|
||||
return
|
||||
}
|
||||
|
||||
def status = buildUtils.getBuildStatus()
|
||||
if (status == "ABORTED") {
|
||||
return
|
||||
}
|
||||
|
||||
def lastComment = getLatestBuildComment()
|
||||
def info = getLatestBuildInfo(lastComment) ?: [:]
|
||||
info.builds = (info.builds ?: []).takeRight(5) // Rotate out old builds
|
||||
|
||||
// If two builds are running at the same time, the first one should not post a comment after the second one
|
||||
if (info.number && info.number.toInteger() > env.BUILD_NUMBER.toInteger()) {
|
||||
return
|
||||
}
|
||||
|
||||
def shouldUpdateComment = !!info.builds.find { it.number == env.BUILD_NUMBER }
|
||||
|
||||
def message = getNextCommentMessage(info, isFinal)
|
||||
|
||||
if (shouldUpdateComment) {
|
||||
updateComment(lastComment.id, message)
|
||||
} else {
|
||||
createComment(message)
|
||||
|
||||
if (lastComment && lastComment.user.login == 'kibanamachine') {
|
||||
deleteComment(lastComment.id)
|
||||
}
|
||||
}
|
||||
}

// Checks whether or not this currently executing build was triggered via a PR in the elastic/kibana repo
def isPr() {
  return !!(env.ghprbPullId && env.ghprbPullLink && env.ghprbPullLink =~ /\/elastic\/kibana\//)
}

def isTrackedBranchPr() {
  return isPr() && (env.ghprbTargetBranch == 'master' || env.ghprbTargetBranch == '6.8' || env.ghprbTargetBranch =~ /[7-8]\.[x0-9]+/)
}

def getLatestBuildComment() {
  return getComments()
    .reverse()
    .find { (it.user.login == 'elasticmachine' || it.user.login == 'kibanamachine') && it.body =~ /<!--PIPELINE/ }
}

def getBuildInfoFromComment(commentText) {
  def matches = commentText =~ /(?ms)<!--PIPELINE(.*?)PIPELINE-->/
  if (!matches || !matches[0]) {
    return null
  }

  return toJSON(matches[0][1].trim())
}
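To make the hidden-JSON mechanism concrete, here is a standalone round trip in plain Groovy using the same `<!--PIPELINE ... PIPELINE-->` markers and regex as above. `JsonSlurper`/`JsonOutput` stand in for the shared library's `toJSON` helper, which is an assumption about that helper's behavior:

```groovy
import groovy.json.JsonOutput
import groovy.json.JsonSlurper

// Hide build data in an HTML comment so it doesn't render on GitHub
def embedBuildInfo(Map info) {
  "## Build status\n<!--PIPELINE\n${JsonOutput.toJson(info)}\nPIPELINE-->"
}

// Same extraction as getBuildInfoFromComment() above
def extractBuildInfo(String commentText) {
  def matches = commentText =~ /(?ms)<!--PIPELINE(.*?)PIPELINE-->/
  if (!matches || !matches[0]) {
    return null
  }
  new JsonSlurper().parseText(matches[0][1].trim())
}

def comment = embedBuildInfo([builds: [[number: '42', status: 'SUCCESS']]])
assert extractBuildInfo(comment).builds[0].status == 'SUCCESS'
```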

def getLatestBuildInfo() {
  return getLatestBuildInfo(getLatestBuildComment())
}

def getLatestBuildInfo(comment) {
  return comment ? getBuildInfoFromComment(comment.body) : null
}

def getHistoryText(builds) {
  if (!builds || builds.size() < 1) {
    return ""
  }

  def list = builds
    .reverse()
    .collect { build ->
      if (build.status == "SUCCESS") {
        return "* :green_heart: [Build #${build.number}](${build.url}) succeeded ${build.commit}"
      } else if (build.status == "UNSTABLE") {
        return "* :yellow_heart: [Build #${build.number}](${build.url}) was flaky ${build.commit}"
      } else {
        return "* :broken_heart: [Build #${build.number}](${build.url}) failed ${build.commit}"
      }
    }
    .join("\n")

  return "### History\n${list}"
}
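For illustration, the same formatting logic as a standalone Groovy snippet, fed a couple of made-up build records (note the `.reverse()`: the newest build is listed first in the rendered history):

```groovy
def builds = [
  [number: '101', status: 'SUCCESS', url: 'https://ci.example.com/101/', commit: 'abc1111'],
  [number: '102', status: 'FAILURE', url: 'https://ci.example.com/102/', commit: 'abc2222'],
]

def list = builds.reverse().collect { build ->
  def heart = build.status == 'SUCCESS' ? ':green_heart:' :
              build.status == 'UNSTABLE' ? ':yellow_heart:' : ':broken_heart:'
  def verb = build.status == 'SUCCESS' ? 'succeeded' :
             build.status == 'UNSTABLE' ? 'was flaky' : 'failed'
  "* ${heart} [Build #${build.number}](${build.url}) ${verb} ${build.commit}"
}.join('\n')

println "### History\n${list}" // newest build (#102) comes first
```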

def getTestFailuresMessage() {
  def failures = testUtils.getFailures()
  if (!failures) {
    return ""
  }

  def messages = []
  messages << "---\n\n### [Test Failures](${env.BUILD_URL}testReport)"

  failures.take(3).each { failure ->
    messages << """
<details><summary>${failure.fullDisplayName}</summary>

[Link to Jenkins](${failure.url})
"""

    if (failure.stdOut) {
      messages << "\n#### Standard Out\n```\n${failure.stdOut}\n```"
    }

    if (failure.stdErr) {
      messages << "\n#### Standard Error\n```\n${failure.stdErr}\n```"
    }

    if (failure.stacktrace) {
      messages << "\n#### Stack Trace\n```\n${failure.stacktrace}\n```"
    }

    messages << "</details>\n\n---"
  }

  if (failures.size() > 3) {
    messages << "and ${failures.size() - 3} more failures, only showing the first 3."
  }

  return messages.join("\n")
}

def getBuildStatusIncludingMetrics() {
  def status = buildUtils.getBuildStatus()

  if (status == 'SUCCESS' && shouldCheckCiMetricSuccess() && !ciStats.getMetricsSuccess()) {
    return 'FAILURE'
  }

  return status
}

def getNextCommentMessage(previousCommentInfo = [:], isFinal = false) {
  def info = previousCommentInfo ?: [:]
  info.builds = previousCommentInfo.builds ?: []

  // When we update an in-progress comment, we need to remove the old version from the history
  info.builds = info.builds.findAll { it.number != env.BUILD_NUMBER }

  def messages = []

  def status = isFinal
    ? getBuildStatusIncludingMetrics()
    : buildUtils.getBuildStatus()

  def storybooksUrl = buildState.get('storybooksUrl')
  def storybooksMessage = storybooksUrl ? "* [Storybooks Preview](${storybooksUrl})" : "* Storybooks not built"

  if (!isFinal) {
    storybooksMessage = storybooksUrl ? storybooksMessage : "* Storybooks not built yet"

    def failuresPart = status != 'SUCCESS' ? ', with failures' : ''
    messages << """
      ## :hourglass_flowing_sand: Build in-progress${failuresPart}
      * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
      * Commit: ${getCommitHash()}
      ${storybooksMessage}
      * This comment will update when the build is complete
    """
  } else if (status == 'SUCCESS') {
    messages << """
      ## :green_heart: Build Succeeded
      * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
      * Commit: ${getCommitHash()}
      ${storybooksMessage}
      ${getDocsChangesLink()}
    """
  } else if (status == 'UNSTABLE') {
    def message = """
      ## :yellow_heart: Build succeeded, but was flaky
      * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
      * Commit: ${getCommitHash()}
      ${storybooksMessage}
      ${getDocsChangesLink()}
    """.stripIndent()

    def failures = retryable.getFlakyFailures()
    if (failures && failures.size() > 0) {
      def list = failures.collect { " * ${it.label}" }.join("\n")
      message += "* Flaky suites:\n${list}"
    }

    messages << message
  } else {
    messages << """
      ## :broken_heart: Build Failed
      * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
      * Commit: ${getCommitHash()}
      ${storybooksMessage}
      * [Pipeline Steps](${env.BUILD_URL}flowGraphTable) (look for red circles / failed steps)
      * [Interpreting CI Failures](https://www.elastic.co/guide/en/kibana/current/interpreting-ci-failures.html)
      ${getDocsChangesLink()}
    """
  }

  if (status != 'SUCCESS' && status != 'UNSTABLE') {
    try {
      def steps = getFailedSteps()
      if (steps?.size() > 0) {
        def list = steps.collect { "* [${it.displayName}](${it.logs})" }.join("\n")
        messages << "### Failed CI Steps\n${list}"
      }
    } catch (ex) {
      buildUtils.printStacktrace(ex)
      print "Error retrieving failed pipeline steps for PR comment, will skip this section"
    }
  }

  messages << getTestFailuresMessage()

  catchErrors {
    if (isFinal && isTrackedBranchPr()) {
      messages << ciStats.getMetricsReport()
    }
  }

  if (info.builds && info.builds.size() > 0) {
    messages << getHistoryText(info.builds)
  }

  messages << "To update your PR or re-run it, just comment with:\n`@elasticmachine merge upstream`"

  catchErrors {
    def assignees = getAssignees()
    if (assignees) {
      messages << "cc " + assignees.collect { "@${it}" }.join(" ")
    }
  }

  info.builds << [
    status: status,
    url: env.BUILD_URL,
    number: env.BUILD_NUMBER,
    commit: getCommitHash()
  ]

  messages << """
    <!--PIPELINE
    ${toJSON(info).toString()}
    PIPELINE-->
  """

  return messages
    .findAll { !!it } // No blank strings
    .collect { it.stripIndent().trim() } // This just allows us to indent various strings above, but leaves them un-indented in the comment
    .join("\n\n")
}
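The trailing `.collect { it.stripIndent().trim() }` is what lets the heredoc blocks above stay indented in the source while rendering flush-left on GitHub. A small, standalone Groovy illustration of that idiom (made-up messages):

```groovy
def messages = [
  """
    ## Build Succeeded
    * Commit: abc1234
  """,
  "", // blank entries are dropped, like getTestFailuresMessage() returning ""
  "### History\n* Build #1 succeeded"
]

def comment = messages
  .findAll { !!it }                    // drop blank strings
  .collect { it.stripIndent().trim() } // remove the source-code indentation
  .join("\n\n")

assert comment.startsWith("## Build Succeeded")
println comment
```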

def createComment(message) {
  if (!isPr()) {
    error "Trying to post a GitHub PR comment on a non-PR or non-elastic PR build"
  }

  withGithubCredentials {
    return githubApi.post("repos/elastic/kibana/issues/${env.ghprbPullId}/comments", [ body: message ])
  }
}

def getComments() {
  withGithubCredentials {
    return githubIssues.getComments(env.ghprbPullId)
  }
}

def updateComment(commentId, message) {
  if (!isPr()) {
    error "Trying to post a GitHub PR comment on a non-PR or non-elastic PR build"
  }

  withGithubCredentials {
    def path = "repos/elastic/kibana/issues/comments/${commentId}"
    def json = toJSON([ body: message ]).toString()

    def resp = githubApi([ path: path ], [ method: "POST", data: json, headers: [ "X-HTTP-Method-Override": "PATCH" ] ])
    return toJSON(resp)
  }
}
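`updateComment()` tunnels a PATCH through a POST via the `X-HTTP-Method-Override` header, presumably because the underlying HTTP helper didn't support PATCH directly. A standalone sketch of the same request shape in plain Groovy with `java.net` (the comment id is hypothetical and the request is constructed but never sent):

```groovy
// Build (but don't send) the override request, to show where the header sits
def commentId = 123456789
def conn = new URL("https://api.github.com/repos/elastic/kibana/issues/comments/${commentId}").openConnection()
conn.requestMethod = 'POST'
conn.doOutput = true
conn.setRequestProperty('X-HTTP-Method-Override', 'PATCH')
conn.setRequestProperty('Content-Type', 'application/json')

assert conn.requestMethod == 'POST' // POST on the wire...
assert conn.getRequestProperty('X-HTTP-Method-Override') == 'PATCH' // ...treated as PATCH server-side
```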

def deleteComment(commentId) {
  withGithubCredentials {
    def path = "repos/elastic/kibana/issues/comments/${commentId}"
    return githubApi([ path: path ], [ method: "DELETE" ])
  }
}

def getCommitHash() {
  return env.ghprbActualCommit
}

def getDocsChangesLink() {
  def url = "https://kibana_${env.ghprbPullId}.docs-preview.app.elstc.co/diff"

  try {
    // httpRequest throws on status codes >400 and failures
    def resp = httpRequest([ method: "GET", url: url ])

    if (resp.contains("There aren't any differences!")) {
      return ""
    }

    return "* [Documentation Changes](${url})"
  } catch (ex) {
    print "Failed to reach ${url}"
    buildUtils.printStacktrace(ex)
  }

  return ""
}

def getFailedSteps() {
  return jenkinsApi.getFailedSteps()?.findAll { step ->
    step.displayName != 'Check out from version control'
  }
}

def shouldCheckCiMetricSuccess() {
  // disable ciMetrics success check when a PR is targeting a non-tracked branch
  if (buildState.has('checkoutInfo') && !buildState.get('checkoutInfo').targetsTrackedBranch) {
    return false
  }

  return true
}

def getPR() {
  withGithubCredentials {
    def path = "repos/elastic/kibana/pulls/${env.ghprbPullId}"
    return githubApi.get(path)
  }
}

def getAssignees() {
  def pr = getPR()
  if (!pr) {
    return []
  }

  return pr.assignees.collect { it.login }
}

return this
@ -1,21 +0,0 @@
def getSteps() {
  def url = "${env.BUILD_URL}api/json?tree=actions[nodes[iconColor,running,displayName,id,parents]]"
  def responseRaw = httpRequest([ method: "GET", url: url ])
  def response = toJSON(responseRaw)

  def graphAction = response?.actions?.find { it._class == "org.jenkinsci.plugins.workflow.job.views.FlowGraphAction" }

  return graphAction?.nodes
}

def getFailedSteps() {
  def steps = getSteps()
  def failedSteps = steps?.findAll { (it.iconColor == "red" || it.iconColor == "red_anime") && it._class == "org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode" }
  failedSteps.each { step ->
    step.logs = "${env.BUILD_URL}execution/node/${step.id}/log".toString()
  }

  return failedSteps
}

return this
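To make the node filtering concrete, here is a standalone Groovy sketch that runs the same selection over a trimmed-down, canned copy of the Jenkins `api/json` response; `JsonSlurper` stands in for the shared library's `toJSON` helper, and the build URL is made up:

```groovy
import groovy.json.JsonSlurper

// A minimal example of what ${BUILD_URL}api/json returns
def response = new JsonSlurper().parseText('''
{
  "actions": [{
    "_class": "org.jenkinsci.plugins.workflow.job.views.FlowGraphAction",
    "nodes": [
      { "_class": "org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode", "id": "17", "displayName": "Shell Script", "iconColor": "red" },
      { "_class": "org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode", "id": "21", "displayName": "Archive", "iconColor": "blue" }
    ]
  }]
}
''')

def buildUrl = 'https://ci.example.com/job/kibana/123/' // stand-in for env.BUILD_URL
def nodes = response.actions.find { it._class == 'org.jenkinsci.plugins.workflow.job.views.FlowGraphAction' }.nodes
def failed = nodes.findAll {
  (it.iconColor == 'red' || it.iconColor == 'red_anime') &&
    it._class == 'org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode'
}
failed.each { it.logs = "${buildUrl}execution/node/${it.id}/log".toString() }

assert failed*.displayName == ['Shell Script']
println failed
```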
@ -1,496 +0,0 @@
def withPostBuildReporting(Map params, Closure closure) {
  try {
    closure()
  } finally {
    def parallelWorkspaces = []
    try {
      parallelWorkspaces = getParallelWorkspaces()
    } catch (ex) {
      print ex
    }

    if (params.runErrorReporter) {
      catchErrors {
        runErrorReporter([pwd()] + parallelWorkspaces)
      }
    }

    catchErrors {
      publishJunit()
    }

    catchErrors {
      def parallelWorkspace = "${env.WORKSPACE}/parallel"
      if (fileExists(parallelWorkspace)) {
        dir(parallelWorkspace) {
          def workspaceTasks = [:]

          parallelWorkspaces.each { workspaceDir ->
            workspaceTasks[workspaceDir] = {
              dir(workspaceDir) {
                catchErrors {
                  runbld.junit()
                }
              }
            }
          }

          if (workspaceTasks) {
            parallel(workspaceTasks)
          }
        }
      }
    }
  }
}

def getParallelWorkspaces() {
  def workspaces = []
  def parallelWorkspace = "${env.WORKSPACE}/parallel"
  if (fileExists(parallelWorkspace)) {
    dir(parallelWorkspace) {
      // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace
      workspaces = findFiles(glob: '*/kibana/package.json')
        .collect {
          // get the paths to the kibana directories for the parallel workspaces
          return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/')
        }
    }
  }

  return workspaces
}
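Since `findFiles` only matches files, the code above locates each parallel workspace by its `kibana/package.json` and then strips the file name back off. A standalone sketch of that path manipulation (plain Groovy strings in place of the `FileWrapper` objects `findFiles` returns; the paths are made up):

```groovy
def parallelWorkspace = '/var/lib/jenkins/workspace/kibana/parallel'
// What findFiles(glob: '*/kibana/package.json') might report as relative paths
def found = ['01/kibana/package.json', '02/kibana/package.json']

def workspaces = found.collect {
  // drop 'package.json', keep '<process>/kibana'
  parallelWorkspace + '/' + it.tokenize('/').dropRight(1).join('/')
}

assert workspaces == [
  '/var/lib/jenkins/workspace/kibana/parallel/01/kibana',
  '/var/lib/jenkins/workspace/kibana/parallel/02/kibana'
]
```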

def notifyOnError(Closure closure) {
  try {
    closure()
  } catch (ex) {
    // If this is the first failed step, it's likely that the error hasn't propagated up far enough to mark the build as a failure
    currentBuild.result = 'FAILURE'
    catchErrors {
      githubPr.sendComment(false)
    }
    catchErrors {
      // an empty map is a valid config, but is falsey, so let's use .has()
      if (buildState.has('SLACK_NOTIFICATION_CONFIG')) {
        slackNotifications.sendFailedBuild(buildState.get('SLACK_NOTIFICATION_CONFIG'))
      }
    }
    throw ex
  }
}

def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) {
  // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue
  def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER

  def kibanaPort = "61${parallelId}1"
  def esPort = "62${parallelId}1"
  // Ports 62x2-62x9 kept open for ES nodes
  def esTransportPort = "63${parallelId}1-63${parallelId}9"
  def fleetPackageRegistryPort = "64${parallelId}1"
  def alertingProxyPort = "64${parallelId}2"
  def corsTestServerPort = "64${parallelId}3"
  // needed for https://github.com/elastic/kibana/issues/107246
  def proxyTestServerPort = "64${parallelId}4"
  def contextPropagationOnly = githubPr.isPr() ? "true" : "false"

  withEnv([
    "CI_GROUP=${parallelId}",
    "REMOVE_KIBANA_INSTALL_DIR=1",
    "CI_PARALLEL_PROCESS_NUMBER=${parallelId}",
    "TEST_KIBANA_HOST=localhost",
    "TEST_KIBANA_PORT=${kibanaPort}",
    "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
    "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
    "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
    "TEST_CORS_SERVER_PORT=${corsTestServerPort}",
    "TEST_PROXY_SERVER_PORT=${proxyTestServerPort}",
    "KBN_NP_PLUGINS_BUILT=true",
    "FLEET_PACKAGE_REGISTRY_PORT=${fleetPackageRegistryPort}",
    "ALERTING_PROXY_PORT=${alertingProxyPort}",
    "ELASTIC_APM_ACTIVE=true",
    "ELASTIC_APM_CONTEXT_PROPAGATION_ONLY=${contextPropagationOnly}",
    "ELASTIC_APM_TRANSACTION_SAMPLE_RATE=0.1",
  ] + additionalEnvs) {
    closure()
  }
}
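Each parallel process gets a disjoint port range derived from its process number, so functional test suites running concurrently on one worker can't collide. A quick standalone illustration of the scheme (assuming a single-digit `parallelId`, which is what the `"61${parallelId}1"` interpolation relies on):

```groovy
// Derive the per-process port assignments used above
def portsFor(int parallelId) {
  assert parallelId in 0..9 // single digit keeps every port 4 digits wide
  [
    kibana         : "61${parallelId}1".toString(),
    es             : "62${parallelId}1".toString(), // 62x2-62x9 stay free for extra ES nodes
    esTransport    : "63${parallelId}1-63${parallelId}9".toString(),
    fleetRegistry  : "64${parallelId}1".toString(),
    alertingProxy  : "64${parallelId}2".toString(),
    corsTestServer : "64${parallelId}3".toString(),
    proxyTestServer: "64${parallelId}4".toString(),
  ]
}

assert portsFor(3).kibana == '6131' // process 3 gets Kibana on 6131...
assert portsFor(3).es == '6231'     // ...and Elasticsearch on 6231
println portsFor(3)
```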

def functionalTestProcess(String name, Closure closure) {
  return {
    notifyOnError {
      withFunctionalTestEnv(["JOB=${name}"], closure)
    }
  }
}

def functionalTestProcess(String name, String script) {
  return functionalTestProcess(name) {
    retryable(name) {
      runbld(script, "Execute ${name}")
    }
  }
}

def ossCiGroupProcess(ciGroup, withDelay = false) {
  return functionalTestProcess("ciGroup" + ciGroup) {
    if (withDelay && !(ciGroup instanceof String) && !(ciGroup instanceof GString)) {
      sleep((ciGroup-1)*30) // smooth out CPU spikes from ES startup
    }

    withEnv([
      "CI_GROUP=${ciGroup}",
      "JOB=kibana-ciGroup${ciGroup}",
    ]) {
      retryable("kibana-ciGroup${ciGroup}") {
        runbld("./test/scripts/jenkins_ci_group.sh", "Execute kibana-ciGroup${ciGroup}")
      }
    }
  }
}

def xpackCiGroupProcess(ciGroup, withDelay = false) {
  return functionalTestProcess("xpack-ciGroup" + ciGroup) {
    if (withDelay && !(ciGroup instanceof String) && !(ciGroup instanceof GString)) {
      sleep((ciGroup-1)*30) // smooth out CPU spikes from ES startup
    }

    withEnv([
      "CI_GROUP=${ciGroup}",
      "JOB=xpack-kibana-ciGroup${ciGroup}",
    ]) {
      retryable("xpack-kibana-ciGroup${ciGroup}") {
        runbld("./test/scripts/jenkins_xpack_ci_group.sh", "Execute xpack-kibana-ciGroup${ciGroup}")
      }
    }
  }
}

def uploadGcsArtifact(uploadPrefix, pattern) {
  googleStorageUpload(
    credentialsId: 'kibana-ci-gcs-plugin',
    bucket: "gs://${uploadPrefix}",
    pattern: pattern,
    sharedPublicly: true,
    showInline: true,
  )
}

def withGcsArtifactUpload(workerName, closure) {
  def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}"
  def ARTIFACT_PATTERNS = [
    'target/junit/**/*',
    'target/kibana-*',
    'target/kibana-coverage/jest/**/*',
    'target/kibana-security-solution/**/*.png',
    'target/kibana-fleet/**/*.png',
    'target/test-metrics/*',
    'target/test-suites-ci-plan.json',
    'test/**/screenshots/diff/*.png',
    'test/**/screenshots/failure/*.png',
    'test/**/screenshots/session/*.png',
    'test/functional/failure_debug/html/*.html',
    'x-pack/test/**/screenshots/diff/*.png',
    'x-pack/test/**/screenshots/failure/*.png',
    'x-pack/test/**/screenshots/session/*.png',
    'x-pack/test/functional/failure_debug/html/*.html',
    '.es/**/*.hprof'
  ]

  withEnv([
    "GCS_UPLOAD_PREFIX=${uploadPrefix}"
  ], {
    try {
      closure()
    } finally {
      catchErrors {
        ARTIFACT_PATTERNS.each { pattern ->
          uploadGcsArtifact(uploadPrefix, pattern)
        }

        dir(env.WORKSPACE) {
          ARTIFACT_PATTERNS.each { pattern ->
            uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}")
          }
        }
      }
    }
  })
}

def publishJunit() {
  junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)

  dir(env.WORKSPACE) {
    junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
  }
}

def sendMail(Map params = [:]) {
  // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success
  // The e-mail plugin for the infra e-mail depends upon this being set
  currentBuild.result = currentBuild.result ?: 'SUCCESS'

  def buildStatus = buildUtils.getBuildStatus()
  if (buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
    node('flyweight') {
      sendInfraMail()
      sendKibanaMail(params)
    }
  }
}

def sendInfraMail() {
  catchErrors {
    step([
      $class: 'Mailer',
      notifyEveryUnstableBuild: true,
      recipients: 'infra-root+build@elastic.co',
      sendToIndividuals: false
    ])
  }
}

def sendKibanaMail(Map params = [:]) {
  def config = [to: 'build-kibana@elastic.co'] + params

  catchErrors {
    def buildStatus = buildUtils.getBuildStatus()
    if (params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
      emailext(
        to: config.to,
        subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}",
        body: '${SCRIPT,template="groovy-html.template"}',
        mimeType: 'text/html',
      )
    }
  }
}

def bash(script, label) {
  sh(
    script: "#!/bin/bash\n${script}",
    label: label
  )
}

def doSetup() {
  notifyOnError {
    retryWithDelay(2, 15) {
      try {
        runbld("./test/scripts/jenkins_setup.sh", "Setup Build Environment and Dependencies")
      } catch (ex) {
        try {
          // Setup expects this directory to be missing, so we need to remove it before we do a retry
          bash("rm -rf ../elasticsearch", "Remove elasticsearch sibling directory, if it exists")
        } finally {
          throw ex
        }
      }
    }
  }
}

def getBuildArtifactBucket() {
  def dir = env.ghprbPullId ? "pr-${env.ghprbPullId}" : buildState.get('checkoutInfo').branch.replace("/", "__")
  return "gs://ci-artifacts.kibana.dev/default-build/${dir}/${buildState.get('checkoutInfo').commit}"
}
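Default-build artifacts are addressed by PR number when one is available, otherwise by branch name (with `/` made path-safe), plus the commit SHA. A standalone sketch of the resulting layout, with made-up values:

```groovy
def artifactBucket(String pullId, String branch, String commit) {
  // Mirror the PR-vs-branch choice made by getBuildArtifactBucket()
  def dir = pullId ? "pr-${pullId}" : branch.replace('/', '__')
  "gs://ci-artifacts.kibana.dev/default-build/${dir}/${commit}".toString()
}

assert artifactBucket('12345', null, 'abc123') ==
  'gs://ci-artifacts.kibana.dev/default-build/pr-12345/abc123'
assert artifactBucket(null, 'feature/cool-thing', 'abc123') ==
  'gs://ci-artifacts.kibana.dev/default-build/feature__cool-thing/abc123'
```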

def buildKibana(maxWorkers = '') {
  notifyOnError {
    withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
      runbld("./test/scripts/jenkins_build_kibana.sh", "Build Kibana")
    }

    withGcpServiceAccount.fromVaultSecret('secret/kibana-issues/dev/ci-artifacts-key', 'value') {
      bash("""
        cd "${env.WORKSPACE}"
        gsutil -q -m cp 'kibana-default.tar.gz' '${getBuildArtifactBucket()}/'
        gsutil -q -m cp 'kibana-default-plugins.tar.gz' '${getBuildArtifactBucket()}/'
      """, "Upload Default Build artifacts to GCS")
    }
  }
}

def downloadDefaultBuildArtifacts() {
  withGcpServiceAccount.fromVaultSecret('secret/kibana-issues/dev/ci-artifacts-key', 'value') {
    bash("""
      cd "${env.WORKSPACE}"
      gsutil -q -m cp '${getBuildArtifactBucket()}/kibana-default.tar.gz' ./
      gsutil -q -m cp '${getBuildArtifactBucket()}/kibana-default-plugins.tar.gz' ./
    """, "Download Default Build artifacts from GCS")
  }
}

def runErrorReporter() {
  return runErrorReporter([pwd()])
}

def runErrorReporter(workspaces) {
  def status = buildUtils.getBuildStatus()
  def dryRun = status != "ABORTED" ? "" : "--no-github-update"

  def globs = workspaces.collect { "'${it}/target/junit/**/*.xml'" }.join(" ")

  bash(
    """
      source src/dev/ci_setup/setup_env.sh
      node scripts/report_failed_tests --no-index-errors ${dryRun} ${globs}
    """,
    "Report failed tests, if necessary"
  )
}

def call(Map params = [:], Closure closure) {
  def config = [timeoutMinutes: 135, checkPrChanges: false, setCommitStatus: false] + params

  stage("Kibana Pipeline") {
    timeout(time: config.timeoutMinutes, unit: 'MINUTES') {
      timestamps {
        ansiColor('xterm') {
          if (config.setCommitStatus) {
            buildState.set('shouldSetCommitStatus', true)
          }
          if (config.checkPrChanges && githubPr.isPr()) {
            pipelineLibraryTests()

            print "Checking PR for changes to determine if CI needs to be run..."

            if (prChanges.areChangesSkippable()) {
              print "No changes requiring CI found in PR, skipping."
              return
            }
          }
          try {
            closure()
          } finally {
            if (config.setCommitStatus) {
              githubCommitStatus.onFinish()
            }
          }
        }
      }
    }
  }
}

// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace
// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh
def withCiTaskQueue(Map options = [:], Closure closure) {
  def setupClosure = {
    // This can't use runbld, because it expects the source to be there, which isn't yet
    bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process")
  }

  def config = [parallel: 24, setup: setupClosure] + options

  withTaskQueue(config) {
    closure.call()
  }
}

def scriptTask(description, script) {
  return {
    withFunctionalTestEnv {
      notifyOnError {
        runbld(script, description)
      }
    }
  }
}

def scriptTaskDocker(description, script) {
  return {
    withDocker(scriptTask(description, script))
  }
}

def buildDocker() {
  sh(
    script: "./.ci/build_docker.sh",
    label: 'Build CI Docker image'
  )
}

def withDocker(Closure closure) {
  docker
    .image('kibana-ci')
    .inside(
      "-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4",
      closure
    )
}

def buildPlugins() {
  runbld('./test/scripts/jenkins_build_plugins.sh', 'Build OSS Plugins')
}

def withTasks(Map params = [:], Closure closure) {
  catchErrors {
    def config = [setupWork: {}, worker: [:], parallel: 24] + params
    def workerConfig = [name: 'ci-worker', size: 'xxl', ramDisk: true] + config.worker

    workers.ci(workerConfig) {
      withCiTaskQueue([parallel: config.parallel]) {
        parallel([
          docker: {
            retry(2) {
              buildDocker()
            }
          },

          // There are integration tests etc. that require the plugins to be built first, so let's go ahead and build them before setting up the parallel workspaces
          plugins: { buildPlugins() },
        ])

        config.setupWork()

        catchErrors {
          closure()
        }
      }
    }
  }
}

def allCiTasks() {
  parallel([
    general: {
      withTasks {
        tasks.check()
        tasks.lint()
        tasks.test()
        task {
          buildKibana(16)
          tasks.functionalOss()
          tasks.functionalXpack()
        }
        tasks.storybooksCi()
      }
    },
    jest: {
      workers.ci(name: 'jest', size: 'n2-standard-16', ramDisk: false) {
        catchErrors {
          scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh')()
        }

        catchErrors {
          runbld.junit()
        }
      }
    },
  ])
}

def pipelineLibraryTests() {
  return // NOTE: short-circuits immediately, leaving the pipeline-library tests below disabled
  whenChanged(['vars/', '.ci/pipeline-library/']) {
    workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
      dir('.ci/pipeline-library') {
        sh './gradlew test'
      }
    }
  }
}

return this
Some files were not shown because too many files have changed in this diff.