[7.5] Elasticsearch snapshots automation (#53706) (#54152)

Brian Seeders 2020-01-07 14:59:27 -05:00 committed by GitHub
parent 96fa661d14
commit 896f058c10
9 changed files with 591 additions and 95 deletions


@@ -0,0 +1,162 @@
#!/bin/groovy
// This job effectively has two SCM configurations:
// one for kibana, used to check out this Jenkinsfile (which means it's the job's main SCM configuration) and to kick off the downstream verification job
// one for elasticsearch, used to check out the elasticsearch source before building it
// There are two parameters that drive which branch is checked out for each of these, but they will typically be the same
// 'branch_specifier' is for kibana / the job itself
// ES_BRANCH is for elasticsearch
library 'kibana-pipeline-library'
kibanaLibrary.load()
def ES_BRANCH = params.ES_BRANCH
if (!ES_BRANCH) {
error "Parameter 'ES_BRANCH' must be specified."
}
currentBuild.displayName += " - ${ES_BRANCH}"
currentBuild.description = "ES: ${ES_BRANCH}<br />Kibana: ${params.branch_specifier}"
def PROMOTE_WITHOUT_VERIFY = !!params.PROMOTE_WITHOUT_VERIFICATION
timeout(time: 120, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
node('linux && immutable') {
catchError {
def VERSION
def SNAPSHOT_ID
def DESTINATION
def scmVars = checkoutEs(ES_BRANCH)
def GIT_COMMIT = scmVars.GIT_COMMIT
def GIT_COMMIT_SHORT = sh(script: "git rev-parse --short ${GIT_COMMIT}", returnStdout: true).trim()
buildArchives('to-archive')
dir('to-archive') {
def now = new Date()
def date = now.format("yyyyMMdd-HHmmss")
def filesRaw = sh(script: "ls -1", returnStdout: true).trim()
def files = filesRaw
.split("\n")
.collect { filename ->
// Filename examples
// elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
// elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
def parts = filename.replace("elasticsearch-oss", "oss").split("-")
VERSION = VERSION ?: parts[1]
SNAPSHOT_ID = SNAPSHOT_ID ?: "${date}_${GIT_COMMIT_SHORT}"
DESTINATION = DESTINATION ?: "${VERSION}/archives/${SNAPSHOT_ID}"
return [
filename: filename,
checksum: filename + '.sha512',
url: "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${DESTINATION}/${filename}".toString(),
version: parts[1],
platform: parts[3],
architecture: parts[4].split('\\.')[0],
license: parts[0] == 'oss' ? 'oss' : 'default',
]
}
sh 'find * -exec bash -c "shasum -a 512 {} > {}.sha512" \\;'
def manifest = [
bucket: "kibana-ci-es-snapshots-daily/${DESTINATION}".toString(),
branch: ES_BRANCH,
sha: GIT_COMMIT,
sha_short: GIT_COMMIT_SHORT,
version: VERSION,
generated: now.format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone("UTC")),
archives: files,
]
def manifestJson = toJSON(manifest).toString()
writeFile file: 'manifest.json', text: manifestJson
upload(DESTINATION, '*.*')
sh "cp manifest.json manifest-latest.json"
upload(VERSION, 'manifest-latest.json')
}
if (PROMOTE_WITHOUT_VERIFY) {
esSnapshots.promote(VERSION, SNAPSHOT_ID)
emailext(
to: 'build-kibana@elastic.co',
subject: "ES snapshot promoted without verification: ${params.ES_BRANCH}",
body: '${SCRIPT,template="groovy-html.template"}',
mimeType: 'text/html',
)
} else {
build(
propagate: false,
wait: false,
job: 'elasticsearch+snapshots+verify',
parameters: [
string(name: 'branch_specifier', value: branch_specifier),
string(name: 'SNAPSHOT_VERSION', value: VERSION),
string(name: 'SNAPSHOT_ID', value: SNAPSHOT_ID),
]
)
}
}
kibanaPipeline.sendMail()
}
}
}
}
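
As a stand-alone illustration (not part of the Jenkinsfile), the filename parsing in the dir('to-archive') block above boils down to the split below; the two filenames are the examples from the comment in the job, and the field names match what ends up in the manifest.

// Stand-alone sketch of the filename parsing done in the dir('to-archive') block.
// Mirrors: parts = filename.replace("elasticsearch-oss", "oss").split("-")
def parse = { String filename ->
  def parts = filename.replace('elasticsearch-oss', 'oss').split('-')
  return [
    version     : parts[1],                 // '8.0.0' -- the '-SNAPSHOT' suffix becomes parts[2]
    platform    : parts[3],                 // 'linux'
    architecture: parts[4].split('\\.')[0], // 'x86_64'
    license     : parts[0] == 'oss' ? 'oss' : 'default',
  ]
}

assert parse('elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz') ==
  [version: '8.0.0', platform: 'linux', architecture: 'x86_64', license: 'oss']
assert parse('elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz') ==
  [version: '8.0.0', platform: 'linux', architecture: 'x86_64', license: 'default']
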
def checkoutEs(branch) {
retryWithDelay(8, 15) {
return checkout([
$class: 'GitSCM',
branches: [[name: branch]],
doGenerateSubmoduleConfigurations: false,
extensions: [],
submoduleCfg: [],
userRemoteConfigs: [[
credentialsId: 'f6c7695a-671e-4f4f-a331-acdce44ff9ba',
url: 'git@github.com:elastic/elasticsearch',
]],
])
}
}
def upload(destination, pattern) {
return googleStorageUpload(
credentialsId: 'kibana-ci-gcs-plugin',
bucket: "gs://kibana-ci-es-snapshots-daily/${destination}",
pattern: pattern,
sharedPublicly: false,
showInline: false,
)
}
def buildArchives(destination) {
def props = readProperties file: '.ci/java-versions.properties'
withEnv([
// Select the correct JDK for this branch
"PATH=/var/lib/jenkins/.java/${props.ES_BUILD_JAVA}/bin:${env.PATH}",
// These Jenkins env vars trigger some automation in the elasticsearch repo that we don't want
"BUILD_NUMBER=",
"JENKINS_URL=",
"BUILD_URL=",
"JOB_NAME=",
"NODE_NAME=",
]) {
sh """
./gradlew -p distribution/archives assemble --parallel
mkdir -p ${destination}
find distribution/archives -type f \\( -name 'elasticsearch-*-*-*-*.tar.gz' -o -name 'elasticsearch-*-*-*-*.zip' \\) -not -path *no-jdk* -exec cp {} ${destination} \\;
"""
}
}
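
For reference, the manifest.json this job uploads ends up with roughly the shape sketched below. The field names mirror the manifest map above; every value (version, SHA, snapshot id, URLs) is a made-up placeholder.

import groovy.json.JsonOutput

// Hypothetical example of the manifest.json this job uploads; field names mirror the
// 'manifest' map above, all values are placeholders.
def exampleManifest = [
  bucket   : 'kibana-ci-es-snapshots-daily/8.0.0/archives/20200107-120000_abc1234',
  branch   : 'master',
  sha      : 'abc1234deadbeef0000000000000000000000000',
  sha_short: 'abc1234',
  version  : '8.0.0',
  generated: '2020-01-07T12:00:00Z',
  archives : [
    [
      filename    : 'elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz',
      checksum    : 'elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz.sha512',
      url         : 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily/8.0.0/archives/20200107-120000_abc1234/elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz',
      version     : '8.0.0',
      platform    : 'linux',
      architecture: 'x86_64',
      license     : 'default',
    ],
  ],
]

println JsonOutput.prettyPrint(JsonOutput.toJson(exampleManifest))

This is the same document that esSnapshots.promote() later republishes as manifest-latest-verified.json and that kbn-es downloads to pick an archive.
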


@@ -0,0 +1,19 @@
#!/bin/groovy
if (!params.branches_yaml) {
error "'branches_yaml' parameter must be specified"
}
def branches = readYaml text: params.branches_yaml
branches.each { branch ->
build(
propagate: false,
wait: false,
job: 'elasticsearch+snapshots+build',
parameters: [
string(name: 'branch_specifier', value: branch),
string(name: 'ES_BRANCH', value: branch),
]
)
}
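
The trigger job expects branches_yaml to be a YAML list of branch names. Below is a sketch of a hypothetical parameter value and the fan-out it produces; the branch names are invented, and the downstream build() call is replaced with a println so the snippet runs outside Jenkins.

// Hypothetical 'branches_yaml' value, i.e. what readYaml would parse:
//   - master
//   - "7.x"
//   - "7.5"
def branches = ['master', '7.x', '7.5']

branches.each { branch ->
  // The real job fires 'elasticsearch+snapshots+build' here without waiting on the result.
  println "would trigger elasticsearch+snapshots+build with branch_specifier=${branch} and ES_BRANCH=${branch}"
}
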


@@ -0,0 +1,72 @@
#!/bin/groovy
library 'kibana-pipeline-library'
kibanaLibrary.load()
def SNAPSHOT_VERSION = params.SNAPSHOT_VERSION
def SNAPSHOT_ID = params.SNAPSHOT_ID
if (!SNAPSHOT_VERSION) {
error "Parameter SNAPSHOT_VERSION must be specified"
}
if (!SNAPSHOT_ID) {
error "Parameter SNAPSHOT_ID must be specified"
}
currentBuild.displayName += " - ${SNAPSHOT_VERSION}"
currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch_specifier}"
def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"
timeout(time: 120, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
parallel([
// TODO we just need to run integration tests from intake?
'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'),
'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'),
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
]),
])
}
promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
}
kibanaPipeline.sendMail()
}
}
}
def promoteSnapshot(snapshotVersion, snapshotId) {
node('linux && immutable') {
esSnapshots.promote(snapshotVersion, snapshotId)
}
}
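
The two worker maps above enumerate every ciGroup by hand. Purely as an illustration of the shape being built (not a change to this Jenkinsfile), the same maps can be assembled from a range with collectEntries; the stubWorker closure below stands in for kibanaPipeline.getOssCiGroupWorker / getXpackCiGroupWorker, which return closures in the real pipeline.

// Sketch: building the same worker maps programmatically, with a stub in place of the
// kibanaPipeline worker factories.
def stubWorker = { int n ->
  return { -> println "running ciGroup${n}" }
}

def ossWorkers   = (1..12).collectEntries { n -> ["oss-ciGroup${n}".toString(), stubWorker(n)] }
def xpackWorkers = (1..10).collectEntries { n -> ["xpack-ciGroup${n}".toString(), stubWorker(n)] }

assert ossWorkers.size() == 12 && xpackWorkers.size() == 10
assert ossWorkers.keySet().first() == 'oss-ciGroup1'
ossWorkers['oss-ciGroup3']()   // prints: running ciGroup3
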


@@ -27,16 +27,14 @@ const { createHash } = require('crypto');
const path = require('path');
const asyncPipeline = promisify(pipeline);
const V1_VERSIONS_API = 'https://artifacts-api.elastic.co/v1/versions';
const DAILY_SNAPSHOTS_BASE_URL = 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily';
const PERMANENT_SNAPSHOTS_BASE_URL =
'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent';
const { cache } = require('./utils');
const { resolveCustomSnapshotUrl } = require('./custom_snapshots');
const { createCliError, isCliError } = require('./errors');
const TEST_ES_SNAPSHOT_VERSION = process.env.TEST_ES_SNAPSHOT_VERSION
? process.env.TEST_ES_SNAPSHOT_VERSION
: 'latest';
function getChecksumType(checksumUrl) {
if (checksumUrl.endsWith('.sha512')) {
return 'sha512';
@@ -45,20 +43,6 @@ function getChecksumType(checksumUrl) {
throw new Error(`unable to determine checksum type: ${checksumUrl}`);
}
function getPlatform(key) {
if (key.includes('-linux-')) {
return 'linux';
}
if (key.includes('-windows-')) {
return 'win32';
}
if (key.includes('-darwin-')) {
return 'darwin';
}
}
function headersToString(headers, indent = '') {
return [...headers.entries()].reduce(
(acc, [key, value]) => `${acc}\n${indent}${key}: ${value}`,
@@ -85,6 +69,75 @@ async function retry(log, fn) {
return await doAttempt(1);
}
// Setting this flag provides an easy way to run the latest un-promoted snapshot without having to look it up
function shouldUseUnverifiedSnapshot() {
return !!process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED;
}
async function fetchSnapshotManifest(url, log) {
log.info('Downloading snapshot manifest from %s', chalk.bold(url));
const abc = new AbortController();
const resp = await retry(log, async () => await fetch(url, { signal: abc.signal }));
const json = await resp.text();
return { abc, resp, json };
}
async function getArtifactSpecForSnapshot(urlVersion, license, log) {
const desiredVersion = urlVersion.replace('-SNAPSHOT', '');
const desiredLicense = license === 'oss' ? 'oss' : 'default';
const customManifestUrl = process.env.ES_SNAPSHOT_MANIFEST;
const primaryManifestUrl = `${DAILY_SNAPSHOTS_BASE_URL}/${desiredVersion}/manifest-latest${
shouldUseUnverifiedSnapshot() ? '' : '-verified'
}.json`;
const secondaryManifestUrl = `${PERMANENT_SNAPSHOTS_BASE_URL}/${desiredVersion}/manifest.json`;
let { abc, resp, json } = await fetchSnapshotManifest(
customManifestUrl || primaryManifestUrl,
log
);
if (!customManifestUrl && !shouldUseUnverifiedSnapshot() && resp.status === 404) {
log.info('Daily snapshot manifest not found, falling back to permanent manifest');
({ abc, resp, json } = await fetchSnapshotManifest(secondaryManifestUrl, log));
}
if (resp.status === 404) {
abc.abort();
throw createCliError(`Snapshots for ${desiredVersion} are not available`);
}
if (!resp.ok) {
abc.abort();
throw new Error(`Unable to read snapshot manifest: ${resp.statusText}\n ${json}`);
}
const manifest = JSON.parse(json);
const platform = process.platform === 'win32' ? 'windows' : process.platform;
const archive = manifest.archives.find(
archive =>
archive.version === desiredVersion &&
archive.platform === platform &&
archive.license === desiredLicense
);
if (!archive) {
throw createCliError(
`Snapshots for ${desiredVersion} are available, but couldn't find an artifact in the manifest for [${desiredVersion}, ${desiredLicense}, ${platform}]`
);
}
return {
url: archive.url,
checksumUrl: archive.url + '.sha512',
checksumType: 'sha512',
filename: archive.filename,
};
}
exports.Artifact = class Artifact {
/**
* Fetch an Artifact from the Artifact API for a license level and version
@@ -100,71 +153,7 @@ exports.Artifact = class Artifact {
return new Artifact(customSnapshotArtifactSpec, log);
}
const urlBuild = encodeURIComponent(TEST_ES_SNAPSHOT_VERSION);
const url = `${V1_VERSIONS_API}/${urlVersion}/builds/${urlBuild}/projects/elasticsearch`;
const json = await retry(log, async () => {
log.info('downloading artifact info from %s', chalk.bold(url));
const abc = new AbortController();
const resp = await fetch(url, { signal: abc.signal });
const json = await resp.text();
if (resp.status === 404) {
abc.abort();
throw createCliError(
`Snapshots for ${version}/${TEST_ES_SNAPSHOT_VERSION} are not available`
);
}
if (!resp.ok) {
abc.abort();
throw new Error(`Unable to read artifact info from ${url}: ${resp.statusText}\n ${json}`);
}
return json;
});
// parse the api response into an array of Artifact objects
const {
project: { packages: artifactInfoMap },
} = JSON.parse(json);
const filenames = Object.keys(artifactInfoMap);
const hasNoJdkVersions = filenames.some(filename => filename.includes('-no-jdk-'));
const artifactSpecs = filenames.map(filename => ({
filename,
url: artifactInfoMap[filename].url,
checksumUrl: artifactInfoMap[filename].sha_url,
checksumType: getChecksumType(artifactInfoMap[filename].sha_url),
type: artifactInfoMap[filename].type,
isOss: filename.includes('-oss-'),
platform: getPlatform(filename),
jdkRequired: hasNoJdkVersions ? filename.includes('-no-jdk-') : true,
}));
// pick the artifact we are going to use for this license/version combo
const reqOss = license === 'oss';
const reqPlatform = artifactSpecs.some(a => a.platform !== undefined)
? process.platform
: undefined;
const reqJdkRequired = hasNoJdkVersions ? false : true;
const reqType = process.platform === 'win32' ? 'zip' : 'tar';
const artifactSpec = artifactSpecs.find(
spec =>
spec.isOss === reqOss &&
spec.type === reqType &&
spec.platform === reqPlatform &&
spec.jdkRequired === reqJdkRequired
);
if (!artifactSpec) {
throw new Error(
`Unable to determine artifact for license [${license}] and version [${version}]\n` +
` options: ${filenames.join(',')}`
);
}
const artifactSpec = await getArtifactSpecForSnapshot(urlVersion, license, log);
return new Artifact(artifactSpec, log);
}
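
To summarize the lookup order implemented by getArtifactSpecForSnapshot() above: a custom ES_SNAPSHOT_MANIFEST URL wins outright; otherwise kbn-es tries the daily manifest (manifest-latest-verified.json, or manifest-latest.json when KBN_ES_SNAPSHOT_USE_UNVERIFIED is set) and falls back to the permanent bucket on a 404. The Groovy sketch below only restates that URL order for a hypothetical version; it does no fetching and is not part of kbn-es.

// Sketch (not part of kbn-es) of the manifest URL resolution order in getArtifactSpecForSnapshot().
// No HTTP here; it just lists the candidate URLs in the order they would be tried.
def candidateManifestUrls = { String urlVersion, Map env ->
  def version = urlVersion.replace('-SNAPSHOT', '')
  def daily = 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily'
  def permanent = 'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent'
  def verifiedSuffix = env.KBN_ES_SNAPSHOT_USE_UNVERIFIED ? '' : '-verified'

  def urls = []
  if (env.ES_SNAPSHOT_MANIFEST) {
    urls << env.ES_SNAPSHOT_MANIFEST                                  // a custom manifest wins outright
  } else {
    urls << "${daily}/${version}/manifest-latest${verifiedSuffix}.json".toString()
    if (!env.KBN_ES_SNAPSHOT_USE_UNVERIFIED) {
      urls << "${permanent}/${version}/manifest.json".toString()      // only consulted on a 404
    }
  }
  return urls
}

assert candidateManifestUrls('8.0.0-SNAPSHOT', [:]) == [
  'https://storage.googleapis.com/kibana-ci-es-snapshots-daily/8.0.0/manifest-latest-verified.json',
  'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent/8.0.0/manifest.json',
]
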


@@ -0,0 +1,191 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ToolingLog } from '@kbn/dev-utils';
jest.mock('node-fetch');
import fetch from 'node-fetch';
const { Response } = jest.requireActual('node-fetch');
import { Artifact } from './artifact';
const log = new ToolingLog();
let MOCKS;
const PLATFORM = process.platform === 'win32' ? 'windows' : process.platform;
const MOCK_VERSION = 'test-version';
const MOCK_URL = 'http://127.0.0.1:12345';
const MOCK_FILENAME = 'test-filename';
const DAILY_SNAPSHOT_BASE_URL = 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily';
const PERMANENT_SNAPSHOT_BASE_URL =
'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent';
const createArchive = (params = {}) => {
const license = params.license || 'default';
return {
license: 'default',
version: MOCK_VERSION,
url: MOCK_URL + `/${license}`,
platform: PLATFORM,
filename: MOCK_FILENAME + `.${license}`,
...params,
};
};
const mockFetch = mock =>
fetch.mockReturnValue(Promise.resolve(new Response(JSON.stringify(mock))));
let previousSnapshotManifestValue = null;
beforeAll(() => {
if ('ES_SNAPSHOT_MANIFEST' in process.env) {
previousSnapshotManifestValue = process.env.ES_SNAPSHOT_MANIFEST;
delete process.env.ES_SNAPSHOT_MANIFEST;
}
});
afterAll(() => {
if (previousSnapshotManifestValue !== null) {
process.env.ES_SNAPSHOT_MANIFEST = previousSnapshotManifestValue;
} else {
delete process.env.ES_SNAPSHOT_MANIFEST;
}
});
beforeEach(() => {
jest.resetAllMocks();
MOCKS = {
valid: {
archives: [createArchive({ license: 'oss' }), createArchive({ license: 'default' })],
},
};
});
const artifactTest = (requestedLicense, expectedLicense, fetchTimesCalled = 1) => {
return async () => {
const artifact = await Artifact.getSnapshot(requestedLicense, MOCK_VERSION, log);
expect(fetch).toHaveBeenCalledTimes(fetchTimesCalled);
expect(fetch.mock.calls[0][0]).toEqual(
`${DAILY_SNAPSHOT_BASE_URL}/${MOCK_VERSION}/manifest-latest-verified.json`
);
if (fetchTimesCalled === 2) {
expect(fetch.mock.calls[1][0]).toEqual(
`${PERMANENT_SNAPSHOT_BASE_URL}/${MOCK_VERSION}/manifest.json`
);
}
expect(artifact.getUrl()).toEqual(MOCK_URL + `/${expectedLicense}`);
expect(artifact.getChecksumUrl()).toEqual(MOCK_URL + `/${expectedLicense}.sha512`);
expect(artifact.getChecksumType()).toEqual('sha512');
expect(artifact.getFilename()).toEqual(MOCK_FILENAME + `.${expectedLicense}`);
};
};
describe('Artifact', () => {
describe('getSnapshot()', () => {
describe('with default snapshot', () => {
beforeEach(() => {
mockFetch(MOCKS.valid);
});
it('should return artifact metadata for a daily oss artifact', artifactTest('oss', 'oss'));
it(
'should return artifact metadata for a daily default artifact',
artifactTest('default', 'default')
);
it(
'should default to default license with anything other than "oss"',
artifactTest('INVALID_LICENSE', 'default')
);
it('should throw when an artifact cannot be found in the manifest for the specified parameters', async () => {
await expect(Artifact.getSnapshot('default', 'INVALID_VERSION', log)).rejects.toThrow(
"couldn't find an artifact"
);
});
});
describe('with missing default snapshot', () => {
beforeEach(() => {
fetch.mockReturnValueOnce(Promise.resolve(new Response('', { status: 404 })));
mockFetch(MOCKS.valid);
});
it(
'should return artifact metadata for a permanent oss artifact',
artifactTest('oss', 'oss', 2)
);
it(
'should return artifact metadata for a permanent default artifact',
artifactTest('default', 'default', 2)
);
it(
'should default to default license with anything other than "oss"',
artifactTest('INVALID_LICENSE', 'default', 2)
);
it('should throw when an artifact cannot be found in the manifest for the specified parameters', async () => {
await expect(Artifact.getSnapshot('default', 'INVALID_VERSION', log)).rejects.toThrow(
"couldn't find an artifact"
);
});
});
describe('with custom snapshot manifest URL', () => {
const CUSTOM_URL = 'http://www.creedthoughts.gov.www/creedthoughts';
beforeEach(() => {
process.env.ES_SNAPSHOT_MANIFEST = CUSTOM_URL;
mockFetch(MOCKS.valid);
});
it('should use the custom URL when looking for a snapshot', async () => {
await Artifact.getSnapshot('oss', MOCK_VERSION, log);
expect(fetch.mock.calls[0][0]).toEqual(CUSTOM_URL);
});
afterEach(() => {
delete process.env.ES_SNAPSHOT_MANIFEST;
});
});
describe('with latest unverified snapshot', () => {
beforeEach(() => {
process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = 1;
mockFetch(MOCKS.valid);
});
it('should use the daily unverified URL when looking for a snapshot', async () => {
await Artifact.getSnapshot('oss', MOCK_VERSION, log);
expect(fetch.mock.calls[0][0]).toEqual(
`${DAILY_SNAPSHOT_BASE_URL}/${MOCK_VERSION}/manifest-latest.json`
);
});
afterEach(() => {
delete process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED;
});
});
});
});
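
The tests above pin down the archive-matching rule: an entry is used when its version, platform (win32 mapped to windows), and license all match the request. Below is a stand-alone Groovy restatement of that selection against an invented manifest; only the field names and the matching rule come from artifact.js.

// Sketch of the archive selection getArtifactSpecForSnapshot() performs on a downloaded
// manifest. The manifest content is invented; only the field names and the matching rule
// come from the code above.
def manifest = [
  archives: [
    [version: '8.0.0', platform: 'linux', license: 'oss',
     filename: 'elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz', url: 'https://example.invalid/oss.tar.gz'],
    [version: '8.0.0', platform: 'linux', license: 'default',
     filename: 'elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz', url: 'https://example.invalid/default.tar.gz'],
    [version: '8.0.0', platform: 'windows', license: 'default',
     filename: 'elasticsearch-8.0.0-SNAPSHOT-windows-x86_64.zip', url: 'https://example.invalid/default.zip'],
  ],
]

def pickArchive = { Map m, String desiredVersion, String license, String nodePlatform ->
  def platform = nodePlatform == 'win32' ? 'windows' : nodePlatform   // same mapping as artifact.js
  def desiredLicense = license == 'oss' ? 'oss' : 'default'
  return m.archives.find {
    it.version == desiredVersion && it.platform == platform && it.license == desiredLicense
  }
}

assert pickArchive(manifest, '8.0.0', 'oss', 'linux').filename.contains('-oss-')
assert pickArchive(manifest, '8.0.0', 'default', 'win32').filename.endsWith('.zip')
assert pickArchive(manifest, '9.9.9', 'default', 'linux') == null   // kbn-es raises a CLI error in this case
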


@@ -24,11 +24,13 @@ function isVersionFlag(a) {
}
function getCustomSnapshotUrl() {
// force use of manually created snapshots until live ones are available
if (!process.env.KBN_ES_SNAPSHOT_URL && !process.argv.some(isVersionFlag)) {
if (
!process.env.ES_SNAPSHOT_MANIFEST &&
!process.env.KBN_ES_SNAPSHOT_URL &&
!process.argv.some(isVersionFlag)
) {
// return 'https://storage.googleapis.com/kibana-ci-tmp-artifacts/{name}-{version}-{os}-x86_64.{ext}';
// return undefined;
return 'https://storage.googleapis.com/kibana-ci-tmp-artifacts/{name}-{version}-{os}-x86_64.{ext}';
return undefined;
}
if (process.env.KBN_ES_SNAPSHOT_URL && process.env.KBN_ES_SNAPSHOT_URL !== 'false') {
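
For context on the commented-out template above: KBN_ES_SNAPSHOT_URL values use {name}, {version}, {os}, and {ext} placeholders that kbn-es fills in elsewhere (the expansion is not part of this diff). A rough Groovy illustration, with hypothetical values, of how such a template would expand:

// Illustration only: how a KBN_ES_SNAPSHOT_URL template like the commented-out one above
// might expand. The placeholder names come from the template; the expansion itself lives
// elsewhere in kbn-es, and the values below are hypothetical.
def template = 'https://storage.googleapis.com/kibana-ci-tmp-artifacts/{name}-{version}-{os}-x86_64.{ext}'
def values = [name: 'elasticsearch-oss', version: '8.0.0-SNAPSHOT', os: 'linux', ext: 'tar.gz']

def expanded = template
values.each { key, value -> expanded = expanded.replace("{${key}}", value) }

assert expanded ==
  'https://storage.googleapis.com/kibana-ci-tmp-artifacts/elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz'
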

vars/esSnapshots.groovy

@@ -0,0 +1,50 @@
def promote(snapshotVersion, snapshotId) {
def snapshotDestination = "${snapshotVersion}/archives/${snapshotId}"
def MANIFEST_URL = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${snapshotDestination}/manifest.json"
dir('verified-manifest') {
def verifiedSnapshotFilename = 'manifest-latest-verified.json'
sh """
curl -O '${MANIFEST_URL}'
mv manifest.json ${verifiedSnapshotFilename}
"""
googleStorageUpload(
credentialsId: 'kibana-ci-gcs-plugin',
bucket: "gs://kibana-ci-es-snapshots-daily/${snapshotVersion}",
pattern: verifiedSnapshotFilename,
sharedPublicly: false,
showInline: false,
)
}
// This would probably be more efficient if we could just copy using gsutil and specifying buckets for src and dest
// But we don't currently have access to the GCS credentials in a way that can be consumed easily from here...
dir('transfer-to-permanent') {
googleStorageDownload(
credentialsId: 'kibana-ci-gcs-plugin',
bucketUri: "gs://kibana-ci-es-snapshots-daily/${snapshotDestination}/*",
localDirectory: '.',
pathPrefix: snapshotDestination,
)
def manifestJson = readFile file: 'manifest.json'
writeFile(
file: 'manifest.json',
text: manifestJson.replace("kibana-ci-es-snapshots-daily/${snapshotDestination}", "kibana-ci-es-snapshots-permanent/${snapshotVersion}")
)
// Ideally we would have some delete logic here before uploading,
// But we don't currently have access to the GCS credentials in a way that can be consumed easily from here...
googleStorageUpload(
credentialsId: 'kibana-ci-gcs-plugin',
bucket: "gs://kibana-ci-es-snapshots-permanent/${snapshotVersion}",
pattern: '*.*',
sharedPublicly: false,
showInline: false,
)
}
}
return this
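
The readFile/writeFile pair above rewrites the manifest so its bucket and archive URLs point at the permanent bucket rather than the dated daily path. A minimal stand-alone sketch of that rewrite, using a hypothetical manifest fragment:

// Sketch of the manifest rewrite done in the 'transfer-to-permanent' block above.
// The manifest fragment is hypothetical; only the replace() mirrors the pipeline code.
def snapshotVersion = '8.0.0'
def snapshotDestination = "${snapshotVersion}/archives/20200107-120000_abc1234"

def manifestJson = """{
  "bucket": "kibana-ci-es-snapshots-daily/${snapshotDestination}",
  "archives": [
    { "url": "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${snapshotDestination}/elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz" }
  ]
}"""

def promoted = manifestJson.replace(
  "kibana-ci-es-snapshots-daily/${snapshotDestination}",
  "kibana-ci-es-snapshots-permanent/${snapshotVersion}"
)

assert promoted.contains('kibana-ci-es-snapshots-permanent/8.0.0')
assert !promoted.contains('archives/20200107-120000_abc1234')
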


@@ -137,13 +137,8 @@ def jobRunner(label, useRamDisk, closure) {
def scmVars
// Try to clone from Github up to 8 times, waiting 15 secs between attempts
retry(8) {
try {
scmVars = checkout scm
} catch (ex) {
sleep 15
throw ex
}
retryWithDelay(8, 15) {
scmVars = checkout scm
}
withEnv([


@@ -0,0 +1,16 @@
def call(retryTimes, delaySecs, closure) {
retry(retryTimes) {
try {
closure()
} catch (ex) {
sleep delaySecs
throw ex
}
}
}
def call(retryTimes, Closure closure) {
call(retryTimes, 15, closure)
}
return this
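
retryWithDelay wraps the Jenkins retry and sleep steps, so it only runs inside a pipeline. Purely to illustrate the contract (retry up to retryTimes attempts, pausing delaySecs between failures, with the two-argument form defaulting the delay to 15 seconds), here is a plain-Groovy imitation; it is not the real step.

// Plain-Groovy imitation of vars/retryWithDelay.groovy, for illustration only. The real
// step delegates to Jenkins' retry() and sleep() steps.
def retryWithDelaySketch(int retryTimes, int delaySecs, Closure closure) {
  def lastError
  for (attempt in 1..retryTimes) {
    try {
      return closure()
    } catch (ex) {
      lastError = ex
      if (attempt < retryTimes) {
        Thread.sleep(delaySecs * 1000L)   // the real step sleeps, then rethrows so retry() re-runs the body
      }
    }
  }
  throw lastError
}

// Demo: fails twice, succeeds on the third attempt.
def attempts = 0
def result = retryWithDelaySketch(8, 0) {
  attempts++
  if (attempts < 3) { throw new RuntimeException("flaky (attempt ${attempts})") }
  return "checked out on attempt ${attempts}"
}
assert result == 'checked out on attempt 3' && attempts == 3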