mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
Backports the following commits to 6.7: - [kbn/es] use artifact api to determine snapshot url (#33893)
This commit is contained in:
parent
9f67e5a5c1
commit
92c968cc13
4 changed files with 289 additions and 98 deletions
|
@ -6,6 +6,7 @@
|
|||
"private": true,
|
||||
"dependencies": {
|
||||
"@kbn/dev-utils": "1.0.0",
|
||||
"abort-controller": "^2.0.3",
|
||||
"chalk": "^2.4.1",
|
||||
"dedent": "^0.7.0",
|
||||
"del": "^3.0.0",
|
||||
|
|
271
packages/kbn-es/src/artifact.js
Normal file
271
packages/kbn-es/src/artifact.js
Normal file
|
@ -0,0 +1,271 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
const fetch = require('node-fetch');
|
||||
const AbortController = require('abort-controller');
|
||||
const fs = require('fs');
|
||||
const { promisify } = require('util');
|
||||
const { pipeline, Transform } = require('stream');
|
||||
const mkdirp = require('mkdirp');
|
||||
const chalk = require('chalk');
|
||||
const { createHash } = require('crypto');
|
||||
const path = require('path');
|
||||
|
||||
const asyncPipeline = promisify(pipeline);
|
||||
const V1_VERSIONS_API = 'https://artifacts-api.elastic.co/v1/versions';
|
||||
|
||||
const { cache } = require('./utils');
|
||||
const { createCliError } = require('./errors');
|
||||
|
||||
const TEST_ES_SNAPSHOT_VERSION = process.env.TEST_ES_SNAPSHOT_VERSION
|
||||
? process.env.TEST_ES_SNAPSHOT_VERSION
|
||||
: 'latest';
|
||||
|
||||
/**
 * Determine the hash algorithm for a checksum file from its URL extension.
 * Only sha512 checksums are supported by the artifact API today.
 *
 * @param {string} checksumUrl
 * @return {'sha512'}
 * @throws {Error} when the extension is not recognized
 */
function getChecksumType(checksumUrl) {
  if (!checksumUrl.endsWith('.sha512')) {
    throw new Error(`unable to determine checksum type: ${checksumUrl}`);
  }

  return 'sha512';
}
|
||||
|
||||
/**
 * Map an artifact filename to the Node `process.platform` value it targets.
 *
 * @param {string} key artifact filename, e.g. `elasticsearch-7.0.0-linux-x86_64.tar.gz`
 * @return {('linux'|'win32'|'darwin'|undefined)} undefined for platform-less
 *   (pre-platform-split) artifact names
 */
function getPlatform(key) {
  const platformMarkers = [
    ['-linux-', 'linux'],
    ['-windows-', 'win32'],
    ['-darwin-', 'darwin'],
  ];

  for (const [marker, platform] of platformMarkers) {
    if (key.includes(marker)) {
      return platform;
    }
  }

  // falls through to undefined for filenames without a platform segment
}
|
||||
|
||||
exports.Artifact = class Artifact {
|
||||
/**
|
||||
* Fetch an Artifact from the Artifact API for a license level and version
|
||||
* @param {('oss'|'basic'|'trial')} license
|
||||
* @param {string} version
|
||||
* @param {ToolingLog} log
|
||||
*/
|
||||
static async get(license, version, log) {
|
||||
const urlVersion = `${encodeURIComponent(version)}-SNAPSHOT`;
|
||||
const urlBuild = encodeURIComponent(TEST_ES_SNAPSHOT_VERSION);
|
||||
const url = `${V1_VERSIONS_API}/${urlVersion}/builds/${urlBuild}/projects/elasticsearch`;
|
||||
|
||||
log.info('downloading artifact info from %s', chalk.bold(url));
|
||||
const abc = new AbortController();
|
||||
const resp = await fetch(url, { signal: abc.signal });
|
||||
const json = await resp.text();
|
||||
|
||||
if (resp.status === 404) {
|
||||
abc.abort();
|
||||
throw createCliError(
|
||||
`Snapshots for ${version}/${TEST_ES_SNAPSHOT_VERSION} are not available`
|
||||
);
|
||||
}
|
||||
|
||||
if (!resp.ok) {
|
||||
abc.abort();
|
||||
throw new Error(`Unable to read artifact info from ${url}: ${resp.statusText}\n ${json}`);
|
||||
}
|
||||
|
||||
// parse the api response into an array of Artifact objects
|
||||
const {
|
||||
project: { packages: artifactInfoMap },
|
||||
} = JSON.parse(json);
|
||||
const filenames = Object.keys(artifactInfoMap);
|
||||
const hasNoJdkVersions = filenames.some(filename => filename.includes('-no-jdk-'));
|
||||
const artifactSpecs = filenames.map(filename => ({
|
||||
filename,
|
||||
url: artifactInfoMap[filename].url,
|
||||
checksumUrl: artifactInfoMap[filename].sha_url,
|
||||
checksumType: getChecksumType(artifactInfoMap[filename].sha_url),
|
||||
type: artifactInfoMap[filename].type,
|
||||
isOss: filename.includes('-oss-'),
|
||||
platform: getPlatform(filename),
|
||||
jdkRequired: hasNoJdkVersions ? filename.includes('-no-jdk-') : true,
|
||||
}));
|
||||
|
||||
// pick the artifact we are going to use for this license/version combo
|
||||
const reqOss = license === 'oss';
|
||||
const reqPlatform = artifactSpecs.some(a => a.platform !== undefined)
|
||||
? process.platform
|
||||
: undefined;
|
||||
const reqJdkRequired = hasNoJdkVersions ? false : true;
|
||||
const reqType = process.platform === 'win32' ? 'zip' : 'tar';
|
||||
|
||||
const artifactSpec = artifactSpecs.find(
|
||||
spec =>
|
||||
spec.isOss === reqOss &&
|
||||
spec.type === reqType &&
|
||||
spec.platform === reqPlatform &&
|
||||
spec.jdkRequired === reqJdkRequired
|
||||
);
|
||||
|
||||
if (!artifactSpec) {
|
||||
throw new Error(
|
||||
`Unable to determine artifact for license [${license}] and version [${version}]\n` +
|
||||
` options: ${filenames.join(',')}`
|
||||
);
|
||||
}
|
||||
|
||||
return new Artifact(artifactSpec, log);
|
||||
}
|
||||
|
||||
constructor(spec, log) {
|
||||
this._spec = spec;
|
||||
this._log = log;
|
||||
}
|
||||
|
||||
getUrl() {
|
||||
return this._spec.url;
|
||||
}
|
||||
|
||||
getChecksumUrl() {
|
||||
return this._spec.checksumUrl;
|
||||
}
|
||||
|
||||
getChecksumType() {
|
||||
return this._spec.checksumType;
|
||||
}
|
||||
|
||||
getFilename() {
|
||||
return this._spec.filename;
|
||||
}
|
||||
|
||||
/**
|
||||
* Download the artifact to disk, skips the download if the cache is
|
||||
* up-to-date, verifies checksum when downloaded
|
||||
* @param {string} dest
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async download(dest) {
|
||||
const cacheMeta = cache.readMeta(dest);
|
||||
const tmpPath = `${dest}.tmp`;
|
||||
|
||||
const artifactResp = await this._download(tmpPath, cacheMeta.etag, cacheMeta.ts);
|
||||
if (artifactResp.cached) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this._verifyChecksum(artifactResp.checksum);
|
||||
|
||||
// cache the etag for future downloads
|
||||
cache.writeMeta(dest, { etag: artifactResp.etag });
|
||||
|
||||
// rename temp download to the destination location
|
||||
fs.renameSync(tmpPath, dest);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the artifact with an etag
|
||||
* @param {string} tmpPath
|
||||
* @param {string} etag
|
||||
* @param {string} ts
|
||||
* @return {{ cached: true }|{ checksum: string, etag: string }}
|
||||
*/
|
||||
async _download(tmpPath, etag, ts) {
|
||||
const url = this.getUrl();
|
||||
|
||||
if (etag) {
|
||||
this._log.info('verifying cache of %s', chalk.bold(url));
|
||||
} else {
|
||||
this._log.info('downloading artifact from %s', chalk.bold(url));
|
||||
}
|
||||
|
||||
const abc = new AbortController();
|
||||
const resp = await fetch(url, {
|
||||
signal: abc.signal,
|
||||
headers: {
|
||||
'If-None-Match': etag,
|
||||
},
|
||||
});
|
||||
|
||||
if (resp.status === 304) {
|
||||
this._log.info('etags match, reusing cache from %s', chalk.bold(ts));
|
||||
|
||||
abc.abort();
|
||||
return {
|
||||
cached: true,
|
||||
};
|
||||
}
|
||||
|
||||
if (!resp.ok) {
|
||||
abc.abort();
|
||||
throw new Error(`Unable to download elasticsearch snapshot: ${resp.statusText}`);
|
||||
}
|
||||
|
||||
if (etag) {
|
||||
this._log.info('cache invalid, redownloading');
|
||||
}
|
||||
|
||||
const hash = createHash(this.getChecksumType());
|
||||
|
||||
mkdirp.sync(path.dirname(tmpPath));
|
||||
await asyncPipeline(
|
||||
resp.body,
|
||||
new Transform({
|
||||
transform(chunk, encoding, cb) {
|
||||
hash.update(chunk, encoding);
|
||||
cb(null, chunk);
|
||||
},
|
||||
}),
|
||||
fs.createWriteStream(tmpPath)
|
||||
);
|
||||
|
||||
return {
|
||||
checksum: hash.digest('hex'),
|
||||
etag: resp.headers.get('etag'),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify the checksum of the downloaded artifact with the checksum at checksumUrl
|
||||
* @param {string} actualChecksum
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async _verifyChecksum(actualChecksum) {
|
||||
this._log.info('downloading artifact checksum from %s', chalk.bold(this.getUrl()));
|
||||
|
||||
const abc = new AbortController();
|
||||
const resp = await fetch(this.getChecksumUrl(), {
|
||||
signal: abc.signal,
|
||||
});
|
||||
|
||||
if (!resp.ok) {
|
||||
abc.abort();
|
||||
throw new Error(`Unable to download elasticsearch checksum: ${resp.statusText}`);
|
||||
}
|
||||
|
||||
// in format of stdout from `shasum` cmd, which is `<checksum> <filename>`
|
||||
const [expectedChecksum] = (await resp.text()).split(' ');
|
||||
if (actualChecksum !== expectedChecksum) {
|
||||
throw createCliError(
|
||||
`artifact downloaded from ${this.getUrl()} does not match expected checksum\n` +
|
||||
` expected: ${expectedChecksum}\n` +
|
||||
` received: ${actualChecksum}`
|
||||
);
|
||||
}
|
||||
|
||||
this._log.info('checksum verified');
|
||||
}
|
||||
};
|
|
@ -17,15 +17,12 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
const fetch = require('node-fetch');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const mkdirp = require('mkdirp');
|
||||
const chalk = require('chalk');
|
||||
const path = require('path');
|
||||
const { BASE_PATH } = require('../paths');
|
||||
const { installArchive } = require('./archive');
|
||||
const { log: defaultLog, cache } = require('../utils');
|
||||
const { log: defaultLog } = require('../utils');
|
||||
const { Artifact } = require('../artifact');
|
||||
|
||||
/**
|
||||
* Download an ES snapshot
|
||||
|
@ -44,15 +41,13 @@ exports.downloadSnapshot = async function installSnapshot({
|
|||
installPath = path.resolve(basePath, version),
|
||||
log = defaultLog,
|
||||
}) {
|
||||
const fileName = getFilename(license, version);
|
||||
const url = getUrl(fileName);
|
||||
const dest = path.resolve(basePath, 'cache', fileName);
|
||||
|
||||
log.info('version: %s', chalk.bold(version));
|
||||
log.info('install path: %s', chalk.bold(installPath));
|
||||
log.info('license: %s', chalk.bold(license));
|
||||
|
||||
await downloadFile(url, dest, log);
|
||||
const artifact = await Artifact.get(license, version, log);
|
||||
const dest = path.resolve(basePath, 'cache', artifact.getFilename());
|
||||
await artifact.download(dest);
|
||||
|
||||
return {
|
||||
downloadPath: dest,
|
||||
|
@ -94,91 +89,3 @@ exports.installSnapshot = async function installSnapshot({
|
|||
log,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Downloads to tmp and moves once complete
|
||||
*
|
||||
* @param {String} url
|
||||
* @param {String} dest
|
||||
* @param {ToolingLog} log
|
||||
* @returns {Promise}
|
||||
*/
|
||||
/**
 * Downloads to tmp and moves once complete. Sends the cached etag so the
 * server can answer 304 and the cached copy can be reused.
 *
 * @param {String} url
 * @param {String} dest
 * @param {ToolingLog} log
 * @returns {Promise} resolves once the file is in place (or the cache was
 *   valid), rejects on HTTP or stream errors
 */
async function downloadFile(url, dest, log) {
  const downloadPath = `${dest}.tmp`;
  const cacheMeta = cache.readMeta(dest);

  mkdirp.sync(path.dirname(dest));

  log.info('downloading from %s', chalk.bold(url));

  const res = await fetch(url, { headers: { 'If-None-Match': cacheMeta.etag } });

  if (res.status === 304) {
    log.info('etags match, using cache from %s', chalk.bold(cacheMeta.ts));
    return;
  }

  if (!res.ok) {
    throw new Error(`Unable to download elasticsearch snapshot: ${res.statusText}`);
  }

  // stream the body to the tmp path; the old `if (res.ok)` re-check in the
  // 'finish' handler was dead code (non-ok responses were rejected above,
  // before the stream was ever created) and has been removed
  await new Promise((resolve, reject) => {
    res.body
      .pipe(fs.createWriteStream(downloadPath))
      .on('error', reject)
      .on('finish', resolve);
  });

  // record the etag for the next run, then move the download into place
  cache.writeMeta(dest, { etag: res.headers.get('etag') });
  fs.renameSync(downloadPath, dest);
}
|
||||
|
||||
/**
 * Build the snapshot archive filename for a license/version pair.
 *
 * 6.x snapshots use the platform-less naming scheme, 7.x+ snapshots embed a
 * platform/arch suffix; both are supported so a single Kibana build can
 * target either series.
 *
 * @param {String} license 'oss' for the Apache-licensed build
 * @param {String} version e.g. '6.7.0' or '7.0.0'
 * @returns {String}
 * @throws {Error} for 7.x+ on a platform without published snapshots
 */
function getFilename(license, version) {
  const basename = `elasticsearch${license === 'oss' ? '-oss-' : '-'}${version}`;
  const platform = os.platform();

  // HACK to support both 6.7 and 7.0 in the same Kibana build
  if (version.startsWith('6.')) {
    const extension = platform.startsWith('win') ? 'zip' : 'tar.gz';
    return `${basename}-SNAPSHOT.${extension}`;
  }

  let suffix;
  switch (platform) {
    case 'darwin':
      suffix = 'darwin-x86_64.tar.gz';
      break;
    case 'linux':
      suffix = 'linux-x86_64.tar.gz';
      break;
    case 'win32':
      suffix = 'windows-x86_64.zip';
      break;
    default:
      throw new Error(`Unsupported platform ${platform}`);
  }

  return `${basename}-SNAPSHOT-${suffix}`;
}
|
||||
|
||||
/**
 * Build the snapshot download URL for an archive filename.
 *
 * When TEST_ES_SNAPSHOT_VERSION is set, a build-specific bucket prefix is
 * inserted so CI can pin a particular snapshot build.
 *
 * @param {String} fileName
 * @returns {String}
 */
function getUrl(fileName) {
  const snapshotVersion = process.env.TEST_ES_SNAPSHOT_VERSION;

  if (!snapshotVersion) {
    return `https://snapshots.elastic.co/downloads/elasticsearch/${fileName}`;
  }

  return `https://snapshots.elastic.co/${snapshotVersion}/downloads/elasticsearch/${fileName}`;
}
|
||||
|
|
12
yarn.lock
12
yarn.lock
|
@ -2356,6 +2356,13 @@ abbrev@1.0.x:
|
|||
resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135"
|
||||
integrity sha1-kbR5JYinc4wl813W9jdSovh3YTU=
|
||||
|
||||
abort-controller@^2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-2.0.3.tgz#b174827a732efadff81227ed4b8d1cc569baf20a"
|
||||
integrity sha512-EPSq5wr2aFyAZ1PejJB32IX9Qd4Nwus+adnp7STYFM5/23nLPBazqZ1oor6ZqbH+4otaaGXTlC8RN5hq3C8w9Q==
|
||||
dependencies:
|
||||
event-target-shim "^5.0.0"
|
||||
|
||||
abortcontroller-polyfill@^1.1.9:
|
||||
version "1.1.9"
|
||||
resolved "https://registry.yarnpkg.com/abortcontroller-polyfill/-/abortcontroller-polyfill-1.1.9.tgz#9fefe359fda2e9e0932dc85e6106453ac393b2da"
|
||||
|
@ -8516,6 +8523,11 @@ event-emitter@^0.3.5, event-emitter@~0.3.5:
|
|||
d "1"
|
||||
es5-ext "~0.10.14"
|
||||
|
||||
event-target-shim@^5.0.0:
|
||||
version "5.0.1"
|
||||
resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789"
|
||||
integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==
|
||||
|
||||
eventemitter2@~0.4.13:
|
||||
version "0.4.14"
|
||||
resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-0.4.14.tgz#8f61b75cde012b2e9eb284d4545583b5643b61ab"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue