[kbn/dev-utils] pull in extract() helper (#106277)

Co-authored-by: spalger <spalger@users.noreply.github.com>
Spencer 2021-07-20 13:13:48 -07:00 committed by GitHub
parent 20a947058d
commit ec160d5c47
11 changed files with 150 additions and 151 deletions

@@ -43,6 +43,7 @@ NPM_MODULE_EXTRA_FILES = [
SRC_DEPS = [
  "//packages/kbn-expect",
  "//packages/kbn-std",
  "//packages/kbn-utils",
  "@npm//@babel/core",
  "@npm//axios",
@@ -60,10 +61,12 @@ SRC_DEPS = [
"@npm//moment",
"@npm//normalize-path",
"@npm//rxjs",
"@npm//tar",
"@npm//tree-kill",
"@npm//tslib",
"@npm//typescript",
"@npm//vinyl"
"@npm//vinyl",
"@npm//yauzl"
]
TYPES_DEPS = [
@@ -76,8 +79,10 @@ TYPES_DEPS = [
"@npm//@types/node",
"@npm//@types/normalize-path",
"@npm//@types/react",
"@npm//@types/tar",
"@npm//@types/testing-library__jest-dom",
"@npm//@types/vinyl"
"@npm//@types/vinyl",
"@npm//@types/yauzl"
]
DEPS = SRC_DEPS + TYPES_DEPS

@@ -0,0 +1,120 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Fs from 'fs/promises';
import { createWriteStream } from 'fs';
import Path from 'path';
import { pipeline } from 'stream';
import { promisify } from 'util';

import { lastValueFrom } from '@kbn/std';
import Tar from 'tar';
import Yauzl, { ZipFile, Entry } from 'yauzl';
import * as Rx from 'rxjs';
import { map, mergeMap, takeUntil } from 'rxjs/operators';

const asyncPipeline = promisify(pipeline);

interface Options {
  /**
   * Path to the archive to extract; .tar, .tar.gz, and .zip archives are supported
   */
  archivePath: string;

  /**
   * Directory where the contents of the archive will be written. Existing files in that
   * directory will be overwritten. If the directory doesn't exist it will be created.
   */
  targetDir: string;

  /**
   * Number of path segments to strip from paths in the archive, like --strip-components from tar
   */
  stripComponents?: number;
}

/**
 * Extract tar and zip archives with a single function, supporting stripComponents
 * for both archive types. Only tested with the familiar archives we create, so it
 * might not support some exotic zip features we don't use in our own snapshot/build
 * tooling.
 */
export async function extract({ archivePath, targetDir, stripComponents = 0 }: Options) {
  await Fs.mkdir(targetDir, { recursive: true });

  if (archivePath.endsWith('.tar') || archivePath.endsWith('.tar.gz')) {
    return await Tar.x({
      file: archivePath,
      cwd: targetDir,
      stripComponents,
    });
  }

  if (!archivePath.endsWith('.zip')) {
    throw new Error('unsupported archive type');
  }

  // zip mode
  const zipFile = await new Promise<ZipFile>((resolve, reject) => {
    Yauzl.open(archivePath, { lazyEntries: true }, (error, _zipFile) => {
      if (error || !_zipFile) {
        reject(error || new Error('no zipfile provided by yauzl'));
      } else {
        resolve(_zipFile);
      }
    });
  });

  // bound version of zipFile.openReadStream which returns an observable; because of the
  // type defs the readStream result is technically optional (thanks, callbacks)
  const openReadStream$ = Rx.bindNodeCallback(zipFile.openReadStream.bind(zipFile));

  const close$ = Rx.fromEvent(zipFile, 'close');
  const error$ = Rx.fromEvent<Error>(zipFile, 'error').pipe(
    takeUntil(close$),
    map((error) => {
      throw error;
    })
  );

  const entry$ = Rx.fromEvent<Entry>(zipFile, 'entry').pipe(
    takeUntil(close$),
    mergeMap((entry) => {
      const entryPath = entry.fileName.split(/\/|\\/).slice(stripComponents).join(Path.sep);
      const fileName = Path.resolve(targetDir, entryPath);

      // detect directories
      if (entry.fileName.endsWith('/')) {
        return Rx.defer(async () => {
          // ensure the directory exists
          await Fs.mkdir(fileName, { recursive: true });
          // tell yauzl to read the next entry
          zipFile.readEntry();
        });
      }

      // file entry
      return openReadStream$(entry).pipe(
        mergeMap(async (readStream) => {
          if (!readStream) {
            throw new Error('no readstream provided by yauzl');
          }

          // write the file contents to disk
          await asyncPipeline(readStream, createWriteStream(fileName));

          // tell yauzl to read the next entry
          zipFile.readEntry();
        })
      );
    })
  );

  // trigger the initial 'entry' event; it happens async, so the event will be delivered
  // after the observables are subscribed
  zipFile.readEntry();

  await lastValueFrom(Rx.merge(entry$, error$));
}
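
For orientation, the new helper is used like this; the paths are hypothetical, but the option names come straight from the Options interface above, and stripComponents: 1 mirrors tar's --strip-components=1:

import { extract } from '@kbn/dev-utils';

async function extractSnapshot() {
  // hypothetical paths, for illustration only
  await extract({
    archivePath: '/tmp/elasticsearch-8.0.0-SNAPSHOT.tar.gz',
    targetDir: '/tmp/.es/data',
    // drop the single top-level directory from every path in the archive
    stripComponents: 1,
  });
}

A .zip archive with the same layout takes the yauzl branch rather than the tar branch, but produces the same on-disk result, which is the point of routing both formats through one function.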

@@ -31,3 +31,4 @@ export * from './plugin_list';
export * from './plugins';
export * from './streams';
export * from './babel';
export * from './extract';

@@ -8,6 +8,7 @@
import Path from 'path';

import chalk from 'chalk';
import 'core-js/features/string/repeat';
import dedent from 'dedent';
@@ -116,7 +117,7 @@ export function getHelpForAllCommands({
      : '';

    return [
      dedent(command.usage || '') || command.name,
      chalk.bold.whiteBright.bgBlack(` ${dedent(command.usage || '') || command.name} `),
      ` ${indent(dedent(command.description || 'Runs a dev task'), 2)}`,
      ...([indent(options, 2)] || []),
    ].join('\n');

@@ -6,9 +6,12 @@
 * Side Public License, v 1.
 */

export function createAnyInstanceSerializer(Class: Function, name?: string) {
export function createAnyInstanceSerializer(
  Class: Function,
  name?: string | ((instance: any) => string)
) {
  return {
    test: (v: any) => v instanceof Class,
    serialize: () => `<${name ?? Class.name}>`,
    serialize: (v: any) => `<${typeof name === 'function' ? name(v) : name ?? Class.name}>`,
  };
}
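
The widened name parameter lets callers derive the snapshot label from the matched instance rather than from a fixed string. A minimal sketch of both forms with Jest's expect.addSnapshotSerializer(), assuming a hypothetical Cluster class with a name property:

import { createAnyInstanceSerializer } from '@kbn/dev-utils';

// fixed label: every Cluster instance serializes as <Cluster>
expect.addSnapshotSerializer(createAnyInstanceSerializer(Cluster));

// derived label: the function form receives the matched instance, so the
// label can carry per-instance detail, e.g. <cluster es-snapshot>
expect.addSnapshotSerializer(
  createAnyInstanceSerializer(Cluster, (c) => `cluster ${c.name}`)
);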

@@ -39,9 +39,7 @@ DEPS = [
"@npm//glob",
"@npm//node-fetch",
"@npm//simple-git",
"@npm//tar-fs",
"@npm//tree-kill",
"@npm//yauzl",
"@npm//zlib"
]

@@ -13,18 +13,12 @@ const chalk = require('chalk');
const path = require('path');

const { downloadSnapshot, installSnapshot, installSource, installArchive } = require('./install');
const { ES_BIN } = require('./paths');
const {
  log: defaultLog,
  parseEsLog,
  extractConfigFiles,
  decompress,
  NativeRealm,
} = require('./utils');
const { log: defaultLog, parseEsLog, extractConfigFiles, NativeRealm } = require('./utils');
const { createCliError } = require('./errors');
const { promisify } = require('util');
const treeKillAsync = promisify(require('tree-kill'));
const { parseSettings, SettingsFilter } = require('./settings');
const { CA_CERT_PATH, ES_P12_PATH, ES_P12_PASSWORD } = require('@kbn/dev-utils');
const { CA_CERT_PATH, ES_P12_PATH, ES_P12_PASSWORD, extract } = require('@kbn/dev-utils');

const readFile = util.promisify(fs.readFile);

// listen to data on stream until map returns anything but undefined
@@ -144,13 +138,17 @@ exports.Cluster = class Cluster {
    this._log.info(chalk.bold(`Extracting data directory`));
    this._log.indent(4);

    // decompress excludes the root directory as that is how our archives are
    // stripComponents=1 excludes the root directory as that is how our archives are
    // structured. This works in our favor as we can explicitly extract into the data dir
    const extractPath = path.resolve(installPath, extractDirName);
    this._log.info(`Data archive: ${archivePath}`);
    this._log.info(`Extract path: ${extractPath}`);

    await decompress(archivePath, extractPath);
    await extract({
      archivePath,
      targetDir: extractPath,
      stripComponents: 1,
    });

    this._log.indent(-4);
  }
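
The stripComponents: 1 here reproduces what the old decompress() hard-coded: .slice(1) on zip entry paths and strip: true for tar-fs. Tracing one zip entry through the split/slice/join logic in extract.ts, with a hypothetical archive root:

// entry.fileName = 'elasticsearch-7.14.0/bin/elasticsearch', stripComponents = 1
'elasticsearch-7.14.0/bin/elasticsearch'
  .split(/\/|\\/)  // ['elasticsearch-7.14.0', 'bin', 'elasticsearch']
  .slice(1)        // ['bin', 'elasticsearch']
  .join(Path.sep); // 'bin/elasticsearch', then resolved against targetDir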

@@ -12,7 +12,8 @@ const chalk = require('chalk');
const execa = require('execa');
const del = require('del');
const url = require('url');
const { log: defaultLog, decompress } = require('../utils');
const { extract } = require('@kbn/dev-utils');
const { log: defaultLog } = require('../utils');
const { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } = require('../paths');
const { Artifact } = require('../artifact');
const { parseSettings, SettingsFilter } = require('../settings');
@@ -50,7 +51,11 @@ exports.installArchive = async function installArchive(archive, options = {}) {
  }

  log.info('extracting %s', chalk.bold(dest));
  await decompress(dest, installPath);
  await extract({
    archivePath: dest,
    targetDir: installPath,
    stripComponents: 1,
  });
  log.info('extracted to %s', chalk.bold(installPath));

  const tmpdir = path.resolve(installPath, 'ES_TMPDIR');

@@ -1,86 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

const fs = require('fs');
const path = require('path');

const yauzl = require('yauzl');
const zlib = require('zlib');
const tarFs = require('tar-fs');

function decompressTarball(archive, dirPath) {
  return new Promise((resolve, reject) => {
    fs.createReadStream(archive)
      .on('error', reject)
      .pipe(zlib.createGunzip())
      .on('error', reject)
      .pipe(tarFs.extract(dirPath, { strip: true }))
      .on('error', reject)
      .on('finish', resolve);
  });
}

function decompressZip(input, output) {
  fs.mkdirSync(output, { recursive: true });

  return new Promise((resolve, reject) => {
    yauzl.open(input, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        reject(err);
      }

      zipfile.readEntry();

      zipfile.on('close', () => {
        resolve();
      });

      zipfile.on('error', (err) => {
        reject(err);
      });

      zipfile.on('entry', (entry) => {
        const zipPath = entry.fileName.split(/\/|\\/).slice(1).join(path.sep);
        const fileName = path.resolve(output, zipPath);

        if (/\/$/.test(entry.fileName)) {
          fs.mkdirSync(fileName, { recursive: true });
          zipfile.readEntry();
        } else {
          // file entry
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              reject(err);
            }

            readStream.on('end', () => {
              zipfile.readEntry();
            });

            readStream.pipe(fs.createWriteStream(fileName));
          });
        }
      });
    });
  });
}

exports.decompress = async function (input, output) {
  const ext = path.extname(input);

  switch (path.extname(input)) {
    case '.zip':
      await decompressZip(input, output);
      break;

    case '.tar':
    case '.gz':
      await decompressTarball(input, output);
      break;

    default:
      throw new Error(`unknown extension "${ext}"`);
  }
};
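
Judging from the two implementations side by side, the rewrite also fixes two subtle problems in this deleted zip branch: reject(err) is never followed by a return, so execution falls through after an error, and readEntry() is triggered by the read stream's 'end' event while .pipe() gives no signal that the write side has flushed. The new code sidesteps both by awaiting a promisified stream.pipeline, which attaches error handling to every stream and settles only when the destination finishes. A minimal sketch of that difference, with hypothetical paths:

import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream';
import { promisify } from 'util';

const asyncPipeline = promisify(pipeline);

async function copy(src: string, dest: string) {
  // resolves once dest has finished writing; rejects if either stream errors
  await asyncPipeline(createReadStream(src), createWriteStream(dest));
}

// contrast with bare .pipe(), which neither forwards read-side errors nor
// reports when the write side is done:
// createReadStream(src).pipe(createWriteStream(dest));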

@@ -1,45 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

const { decompress } = require('./decompress');
const fs = require('fs');
const path = require('path');
const del = require('del');
const os = require('os');

const fixturesFolder = path.resolve(__dirname, '__fixtures__');
const randomDir = Math.random().toString(36);
const tmpFolder = path.resolve(os.tmpdir(), randomDir);
const dataFolder = path.resolve(tmpFolder, 'data');
const esFolder = path.resolve(tmpFolder, '.es');

const zipSnapshot = path.resolve(dataFolder, 'snapshot.zip');
const tarGzSnapshot = path.resolve(dataFolder, 'snapshot.tar.gz');

beforeEach(() => {
  fs.mkdirSync(tmpFolder, { recursive: true });
  fs.mkdirSync(dataFolder, { recursive: true });
  fs.mkdirSync(esFolder, { recursive: true });

  fs.copyFileSync(path.resolve(fixturesFolder, 'snapshot.zip'), zipSnapshot);
  fs.copyFileSync(path.resolve(fixturesFolder, 'snapshot.tar.gz'), tarGzSnapshot);
});

afterEach(() => {
  del.sync(tmpFolder, { force: true });
});

test('zip strips root directory', async () => {
  await decompress(zipSnapshot, path.resolve(esFolder, 'foo'));
  expect(fs.readdirSync(path.resolve(esFolder, 'foo/bin'))).toContain('elasticsearch.bat');
});

test('tar strips root directory', async () => {
  await decompress(tarGzSnapshot, path.resolve(esFolder, 'foo'));
  expect(fs.readdirSync(path.resolve(esFolder, 'foo/bin'))).toContain('elasticsearch');
});

@@ -11,7 +11,6 @@ exports.log = require('./log').log;
exports.parseEsLog = require('./parse_es_log').parseEsLog;
exports.findMostRecentlyChanged = require('./find_most_recently_changed').findMostRecentlyChanged;
exports.extractConfigFiles = require('./extract_config_files').extractConfigFiles;
exports.decompress = require('./decompress').decompress;
exports.NativeRealm = require('./native_realm').NativeRealm;
exports.buildSnapshot = require('./build_snapshot').buildSnapshot;
exports.archiveForPlatform = require('./build_snapshot').archiveForPlatform;