[kbn-es/installers] convert to TypeScript (#120560)
* [kbn-es/installers] convert to TypeScript
* catch invalid urls passed to installArchive()
This commit is contained in:
parent 44c19bd2ca
commit 760374f5a0
20 changed files with 379 additions and 354 deletions
@@ -90,10 +90,10 @@ const artifactTest = (requestedLicense, expectedLicense, fetchTimesCalled = 1) =
`${PERMANENT_SNAPSHOT_BASE_URL}/${MOCK_VERSION}/manifest.json`
);
}
expect(artifact.getUrl()).toEqual(MOCK_URL + `/${expectedLicense}`);
expect(artifact.getChecksumUrl()).toEqual(MOCK_URL + `/${expectedLicense}.sha512`);
expect(artifact.getChecksumType()).toEqual('sha512');
expect(artifact.getFilename()).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.${expectedLicense}`);
expect(artifact.spec.url).toEqual(MOCK_URL + `/${expectedLicense}`);
expect(artifact.spec.checksumUrl).toEqual(MOCK_URL + `/${expectedLicense}.sha512`);
expect(artifact.spec.checksumType).toEqual('sha512');
expect(artifact.spec.filename).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.${expectedLicense}`);
};
};

@@ -158,7 +158,7 @@ describe('Artifact', () => {
it('should return artifact metadata for the correct architecture', async () => {
const artifact = await Artifact.getSnapshot('oss', MOCK_VERSION, log);
expect(artifact.getFilename()).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.oss`);
expect(artifact.spec.filename).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.oss`);
});
});

@@ -182,7 +182,7 @@ describe('Artifact', () => {
describe('with latest unverified snapshot', () => {
beforeEach(() => {
process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = 1;
process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = '1';
mockFetch(MOCKS.valid);
});

@@ -6,25 +6,69 @@
* Side Public License, v 1.
*/

const fetch = require('node-fetch');
const AbortController = require('abort-controller');
const fs = require('fs');
const { promisify } = require('util');
const { pipeline, Transform } = require('stream');
const chalk = require('chalk');
const { createHash } = require('crypto');
const path = require('path');
import fs from 'fs';
import { promisify } from 'util';
import path from 'path';
import { createHash } from 'crypto';
import { pipeline, Transform } from 'stream';
import { setTimeout } from 'timers/promises';

import fetch, { Headers } from 'node-fetch';
import AbortController from 'abort-controller';
import chalk from 'chalk';
import { ToolingLog } from '@kbn/dev-utils';

import { cache } from './utils/cache';
import { resolveCustomSnapshotUrl } from './custom_snapshots';
import { createCliError, isCliError } from './errors';

const asyncPipeline = promisify(pipeline);
const DAILY_SNAPSHOTS_BASE_URL = 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily';
const PERMANENT_SNAPSHOTS_BASE_URL =
'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent';

const { cache } = require('./utils');
const { resolveCustomSnapshotUrl } = require('./custom_snapshots');
const { createCliError, isCliError } = require('./errors');
type ChecksumType = 'sha512';
export type ArtifactLicense = 'oss' | 'basic' | 'trial';

function getChecksumType(checksumUrl) {
interface ArtifactManifest {
id: string;
bucket: string;
branch: string;
sha: string;
sha_short: string;
version: string;
generated: string;
archives: Array<{
filename: string;
checksum: string;
url: string;
version: string;
platform: string;
architecture: string;
license: string;
}>;
}

export interface ArtifactSpec {
url: string;
checksumUrl: string;
checksumType: ChecksumType;
filename: string;
}

interface ArtifactDownloaded {
cached: false;
checksum: string;
etag?: string;
contentLength: number;
first500Bytes: Buffer;
headers: Headers;
}
interface ArtifactCached {
cached: true;
}

function getChecksumType(checksumUrl: string): ChecksumType {
if (checksumUrl.endsWith('.sha512')) {
return 'sha512';
}

@@ -32,15 +76,18 @@ function getChecksumType(checksumUrl) {
throw new Error(`unable to determine checksum type: ${checksumUrl}`);
}

function headersToString(headers, indent = '') {
function headersToString(headers: Headers, indent = '') {
return [...headers.entries()].reduce(
(acc, [key, value]) => `${acc}\n${indent}${key}: ${value}`,
''
);
}

async function retry(log, fn) {
async function doAttempt(attempt) {
async function retry<T>(log: ToolingLog, fn: () => Promise<T>): Promise<T> {
let attempt = 0;
while (true) {
attempt += 1;

try {
return await fn();
} catch (error) {

@@ -49,13 +96,10 @@ async function retry(log, fn) {
}

log.warning('...failure, retrying in 5 seconds:', error.message);
await new Promise((resolve) => setTimeout(resolve, 5000));
await setTimeout(5000);
log.info('...retrying');
return await doAttempt(attempt + 1);
}
}

return await doAttempt(1);
}
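
The converted retry helper above drops the recursive `doAttempt` in favor of a loop, and replaces the hand-rolled `new Promise((resolve) => setTimeout(resolve, 5000))` with the promise-returning `setTimeout` from Node's `timers/promises`. A minimal standalone sketch of the same pattern (the `maxAttempts` and `delayMs` parameters are illustrative additions, not part of the PR):

  import { setTimeout } from 'timers/promises';

  // Retry an async operation, pausing between attempts with the promise-based timer.
  async function retryWithDelay<T>(
    fn: () => Promise<T>,
    maxAttempts = 3, // illustrative; the PR's retry() loops until fn() succeeds or hits a CLI error
    delayMs = 5000
  ): Promise<T> {
    let lastError: unknown;
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return await fn();
      } catch (error) {
        lastError = error;
        if (attempt < maxAttempts) {
          await setTimeout(delayMs); // resolves after delayMs milliseconds
        }
      }
    }
    throw lastError;
  }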

// Setting this flag provides an easy way to run the latest un-promoted snapshot without having to look it up

@@ -63,7 +107,7 @@ function shouldUseUnverifiedSnapshot() {
return !!process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED;
}

async function fetchSnapshotManifest(url, log) {
async function fetchSnapshotManifest(url: string, log: ToolingLog) {
log.info('Downloading snapshot manifest from %s', chalk.bold(url));

const abc = new AbortController();

@@ -73,7 +117,11 @@ async function fetchSnapshotManifest(url, log) {
return { abc, resp, json };
}

async function getArtifactSpecForSnapshot(urlVersion, license, log) {
async function getArtifactSpecForSnapshot(
urlVersion: string,
license: string,
log: ToolingLog
): Promise<ArtifactSpec> {
const desiredVersion = urlVersion.replace('-SNAPSHOT', '');
const desiredLicense = license === 'oss' ? 'oss' : 'default';

@@ -103,17 +151,16 @@ async function getArtifactSpecForSnapshot(urlVersion, license, log) {
throw new Error(`Unable to read snapshot manifest: ${resp.statusText}\n ${json}`);
}

const manifest = JSON.parse(json);

const manifest: ArtifactManifest = JSON.parse(json);
const platform = process.platform === 'win32' ? 'windows' : process.platform;
const arch = process.arch === 'arm64' ? 'aarch64' : 'x86_64';

const archive = manifest.archives.find(
(archive) =>
archive.version === desiredVersion &&
archive.platform === platform &&
archive.license === desiredLicense &&
archive.architecture === arch
(a) =>
a.version === desiredVersion &&
a.platform === platform &&
a.license === desiredLicense &&
a.architecture === arch
);

if (!archive) {

@@ -130,93 +177,65 @@ async function getArtifactSpecForSnapshot(urlVersion, license, log) {
};
}

exports.Artifact = class Artifact {
export class Artifact {
/**
* Fetch an Artifact from the Artifact API for a license level and version
* @param {('oss'|'basic'|'trial')} license
* @param {string} version
* @param {ToolingLog} log
*/
static async getSnapshot(license, version, log) {
static async getSnapshot(license: ArtifactLicense, version: string, log: ToolingLog) {
const urlVersion = `${encodeURIComponent(version)}-SNAPSHOT`;

const customSnapshotArtifactSpec = resolveCustomSnapshotUrl(urlVersion, license);
if (customSnapshotArtifactSpec) {
return new Artifact(customSnapshotArtifactSpec, log);
return new Artifact(log, customSnapshotArtifactSpec);
}

const artifactSpec = await getArtifactSpecForSnapshot(urlVersion, license, log);
return new Artifact(artifactSpec, log);
return new Artifact(log, artifactSpec);
}

/**
* Fetch an Artifact from the Elasticsearch past releases url
* @param {string} url
* @param {ToolingLog} log
*/
static async getArchive(url, log) {
static async getArchive(url: string, log: ToolingLog) {
const shaUrl = `${url}.sha512`;

const artifactSpec = {
url: url,
return new Artifact(log, {
url,
filename: path.basename(url),
checksumUrl: shaUrl,
checksumType: getChecksumType(shaUrl),
};

return new Artifact(artifactSpec, log);
});
}

constructor(spec, log) {
this._spec = spec;
this._log = log;
}

getUrl() {
return this._spec.url;
}

getChecksumUrl() {
return this._spec.checksumUrl;
}

getChecksumType() {
return this._spec.checksumType;
}

getFilename() {
return this._spec.filename;
}
constructor(private readonly log: ToolingLog, public readonly spec: ArtifactSpec) {}

/**
* Download the artifact to disk, skips the download if the cache is
* up-to-date, verifies checksum when downloaded
* @param {string} dest
* @return {Promise<void>}
*/
async download(dest, { useCached = false }) {
await retry(this._log, async () => {
async download(dest: string, { useCached = false }: { useCached?: boolean } = {}) {
await retry(this.log, async () => {
const cacheMeta = cache.readMeta(dest);
const tmpPath = `${dest}.tmp`;

if (useCached) {
if (cacheMeta.exists) {
this._log.info(
this.log.info(
'use-cached passed, forcing to use existing snapshot',
chalk.bold(cacheMeta.ts)
);
return;
} else {
this._log.info('use-cached passed but no cached snapshot found. Continuing to download');
this.log.info('use-cached passed but no cached snapshot found. Continuing to download');
}
}

const artifactResp = await this._download(tmpPath, cacheMeta.etag, cacheMeta.ts);
const artifactResp = await this.fetchArtifact(tmpPath, cacheMeta.etag, cacheMeta.ts);
if (artifactResp.cached) {
return;
}

await this._verifyChecksum(artifactResp);
await this.verifyChecksum(artifactResp);

// cache the etag for future downloads
cache.writeMeta(dest, { etag: artifactResp.etag });

@@ -228,18 +247,18 @@ exports.Artifact = class Artifact {

/**
* Fetch the artifact with an etag
* @param {string} tmpPath
* @param {string} etag
* @param {string} ts
* @return {{ cached: true }|{ checksum: string, etag: string, first500Bytes: Buffer }}
*/
async _download(tmpPath, etag, ts) {
const url = this.getUrl();
private async fetchArtifact(
tmpPath: string,
etag: string,
ts: string
): Promise<ArtifactDownloaded | ArtifactCached> {
const url = this.spec.url;

if (etag) {
this._log.info('verifying cache of %s', chalk.bold(url));
this.log.info('verifying cache of %s', chalk.bold(url));
} else {
this._log.info('downloading artifact from %s', chalk.bold(url));
this.log.info('downloading artifact from %s', chalk.bold(url));
}

const abc = new AbortController();

@@ -251,7 +270,7 @@ exports.Artifact = class Artifact {
});

if (resp.status === 304) {
this._log.info('etags match, reusing cache from %s', chalk.bold(ts));
this.log.info('etags match, reusing cache from %s', chalk.bold(ts));

abc.abort();
return {

@@ -270,10 +289,10 @@ exports.Artifact = class Artifact {
}

if (etag) {
this._log.info('cache invalid, redownloading');
this.log.info('cache invalid, redownloading');
}

const hash = createHash(this.getChecksumType());
const hash = createHash(this.spec.checksumType);
let first500Bytes = Buffer.alloc(0);
let contentLength = 0;

@@ -300,8 +319,9 @@ exports.Artifact = class Artifact {
);

return {
cached: false,
checksum: hash.digest('hex'),
etag: resp.headers.get('etag'),
etag: resp.headers.get('etag') ?? undefined,
contentLength,
first500Bytes,
headers: resp.headers,

@@ -310,14 +330,12 @@ exports.Artifact = class Artifact {

/**
* Verify the checksum of the downloaded artifact with the checksum at checksumUrl
* @param {{ checksum: string, contentLength: number, first500Bytes: Buffer }} artifactResp
* @return {Promise<void>}
*/
async _verifyChecksum(artifactResp) {
this._log.info('downloading artifact checksum from %s', chalk.bold(this.getChecksumUrl()));
private async verifyChecksum(artifactResp: ArtifactDownloaded) {
this.log.info('downloading artifact checksum from %s', chalk.bold(this.spec.checksumUrl));

const abc = new AbortController();
const resp = await fetch(this.getChecksumUrl(), {
const resp = await fetch(this.spec.checksumUrl, {
signal: abc.signal,
});

@@ -338,7 +356,7 @@ exports.Artifact = class Artifact {
const lenString = `${len} / ${artifactResp.contentLength}`;

throw createCliError(
`artifact downloaded from ${this.getUrl()} does not match expected checksum\n` +
`artifact downloaded from ${this.spec.url} does not match expected checksum\n` +
` expected: ${expectedChecksum}\n` +
` received: ${artifactResp.checksum}\n` +
` headers: ${headersToString(artifactResp.headers, ' ')}\n` +

@@ -346,6 +364,6 @@ exports.Artifact = class Artifact {
);
}

this._log.info('checksum verified');
this.log.info('checksum verified');
}
};
}
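
With the getters replaced by a single constructor using TypeScript parameter properties, the log and spec become `private readonly log` and `public readonly spec`, so callers read `artifact.spec.*` directly (as the updated tests above do). A hedged usage sketch; the version string and destination path are placeholders:

  import { ToolingLog } from '@kbn/dev-utils';
  import { Artifact } from './artifact';

  async function downloadBasicSnapshot() {
    const log = new ToolingLog({ level: 'info', writeTo: process.stdout });

    // Resolve the snapshot artifact for a license/version pair, then download it.
    const artifact = await Artifact.getSnapshot('basic', '8.1.0', log); // version is illustrative
    log.info('resolved %s', artifact.spec.url);

    // The destination is illustrative; install_archive.ts builds it from basePath + spec.filename.
    await artifact.download(`/tmp/kbn-es/cache/${artifact.spec.filename}`, { useCached: true });
  }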

@@ -6,13 +6,15 @@
* Side Public License, v 1.
*/

const { basename } = require('path');
import Path from 'path';

function isVersionFlag(a) {
import type { ArtifactSpec } from './artifact';

function isVersionFlag(a: string) {
return a.startsWith('--version');
}

function getCustomSnapshotUrl() {
export function getCustomSnapshotUrl() {
// force use of manually created snapshots until ReindexPutMappings fix
if (
!process.env.ES_SNAPSHOT_MANIFEST &&

@@ -28,7 +30,10 @@ function getCustomSnapshotUrl() {
}
}

function resolveCustomSnapshotUrl(urlVersion, license) {
export function resolveCustomSnapshotUrl(
urlVersion: string,
license: string
): ArtifactSpec | undefined {
const customSnapshotUrl = getCustomSnapshotUrl();

if (!customSnapshotUrl) {

@@ -48,8 +53,6 @@ function resolveCustomSnapshotUrl(urlVersion, license) {
url: overrideUrl,
checksumUrl: overrideUrl + '.sha512',
checksumType: 'sha512',
filename: basename(overrideUrl),
filename: Path.basename(overrideUrl),
};
}

module.exports = { getCustomSnapshotUrl, resolveCustomSnapshotUrl };

packages/kbn-es/src/errors.ts (new file, 25 lines)

@@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

interface CliError extends Error {
isCliError: boolean;
}

export function createCliError(message: string) {
return Object.assign(new Error(message), {
isCliError: true,
});
}

function isObj(x: unknown): x is Record<string, unknown> {
return typeof x === 'object' && x !== null;
}

export function isCliError(error: unknown): error is CliError {
return isObj(error) && error.isCliError === true;
}
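
The new errors.ts keeps the same `isCliError` flag convention as the removed CommonJS version, but narrows `unknown` catch values with a type guard instead of a truthy check. A brief usage sketch:

  import { createCliError, isCliError } from './errors';

  try {
    throw createCliError('unable to build ES');
  } catch (error) {
    if (isCliError(error)) {
      // a known CLI failure: safe to report just the message
      console.error(error.message);
    } else {
      throw error; // unexpected error, rethrow
    }
  }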

@@ -6,5 +6,7 @@
* Side Public License, v 1.
*/

exports.run = require('./cli').run;
exports.Cluster = require('./cluster').Cluster;
// @ts-expect-error not typed yet
export { run } from './cli';
// @ts-expect-error not typed yet
export { Cluster } from './cluster';

@@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

exports.installArchive = require('./archive').installArchive;
exports.installSnapshot = require('./snapshot').installSnapshot;
exports.downloadSnapshot = require('./snapshot').downloadSnapshot;
exports.installSource = require('./source').installSource;

@@ -6,12 +6,6 @@
* Side Public License, v 1.
*/

exports.createCliError = function (message) {
const error = new Error(message);
error.isCliError = true;
return error;
};

exports.isCliError = function (error) {
return error && error.isCliError;
};
export { installArchive } from './install_archive';
export { installSnapshot, downloadSnapshot } from './install_snapshot';
export { installSource } from './install_source';

@@ -6,29 +6,40 @@
* Side Public License, v 1.
*/

const fs = require('fs');
const path = require('path');
const chalk = require('chalk');
const execa = require('execa');
const del = require('del');
const url = require('url');
const { extract } = require('@kbn/dev-utils');
const { log: defaultLog } = require('../utils');
const { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } = require('../paths');
const { Artifact } = require('../artifact');
const { parseSettings, SettingsFilter } = require('../settings');
import fs from 'fs';
import path from 'path';

import chalk from 'chalk';
import execa from 'execa';
import del from 'del';
import { extract, ToolingLog } from '@kbn/dev-utils';

import { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } from '../paths';
import { Artifact } from '../artifact';
import { parseSettings, SettingsFilter } from '../settings';
import { log as defaultLog } from '../utils/log';

interface InstallArchiveOptions {
license?: string;
password?: string;
basePath?: string;
installPath?: string;
log?: ToolingLog;
esArgs?: string[];
}

const isHttpUrl = (str: string) => {
try {
return ['http:', 'https:'].includes(new URL(str).protocol);
} catch {
return false;
}
};
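
This `isHttpUrl` helper is the change called out in the commit message ('catch invalid urls passed to installArchive()'): `new URL()` throws on strings that are not valid URLs, and the `try/catch` turns that into `false`, so a plain filesystem path falls through to the local-archive branch below. A quick sketch of the behaviour (inputs are illustrative):

  isHttpUrl('https://example.com/elasticsearch.tar.gz'); // true
  isHttpUrl('ftp://example.com/elasticsearch.tar.gz');   // false, protocol is not http/https
  isHttpUrl('/local/path/to/elasticsearch.tar.gz');      // false, new URL() throws and is caught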

/**
* Extracts an ES archive and optionally installs plugins
*
* @param {String} archive - path to tar
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installArchive = async function installArchive(archive, options = {}) {
export async function installArchive(archive: string, options: InstallArchiveOptions = {}) {
const {
license = 'basic',
password = 'changeme',

@@ -39,9 +50,9 @@ exports.installArchive = async function installArchive(archive, options = {}) {
} = options;

let dest = archive;
if (['http:', 'https:'].includes(url.parse(archive).protocol)) {
if (isHttpUrl(archive)) {
const artifact = await Artifact.getArchive(archive, log);
dest = path.resolve(basePath, 'cache', artifact.getFilename());
dest = path.resolve(basePath, 'cache', artifact.spec.filename);
await artifact.download(dest);
}

@@ -75,28 +86,23 @@ exports.installArchive = async function installArchive(archive, options = {}) {
}

return { installPath };
};
}

/**
* Appends single line to elasticsearch.yml config file
*
* @param {String} installPath
* @param {String} key
* @param {String} value
*/
async function appendToConfig(installPath, key, value) {
async function appendToConfig(installPath: string, key: string, value: string) {
fs.appendFileSync(path.resolve(installPath, ES_CONFIG), `${key}: ${value}\n`, 'utf8');
}

/**
* Creates and configures Keystore
*
* @param {String} installPath
* @param {ToolingLog} log
* @param {Array<[string, string]>} secureSettings List of custom Elasticsearch secure settings to
* add into the keystore.
*/
async function configureKeystore(installPath, log = defaultLog, secureSettings) {
async function configureKeystore(
installPath: string,
log: ToolingLog = defaultLog,
secureSettings: Array<[string, string]>
) {
const env = { JAVA_HOME: '' };
await execa(ES_KEYSTORE_BIN, ['create'], { cwd: installPath, env });

@@ -6,56 +6,58 @@
* Side Public License, v 1.
*/

const chalk = require('chalk');
const path = require('path');
const { BASE_PATH } = require('../paths');
const { installArchive } = require('./archive');
const { log: defaultLog } = require('../utils');
const { Artifact } = require('../artifact');
import path from 'path';

import chalk from 'chalk';
import { ToolingLog } from '@kbn/dev-utils';

import { BASE_PATH } from '../paths';
import { installArchive } from './install_archive';
import { log as defaultLog } from '../utils/log';
import { Artifact, ArtifactLicense } from '../artifact';

interface DownloadSnapshotOptions {
version: string;
license?: ArtifactLicense;
basePath?: string;
installPath?: string;
log?: ToolingLog;
useCached?: boolean;
}

/**
* Download an ES snapshot
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.version
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.downloadSnapshot = async function installSnapshot({
export async function downloadSnapshot({
license = 'basic',
version,
basePath = BASE_PATH,
installPath = path.resolve(basePath, version),
log = defaultLog,
useCached = false,
}) {
}: DownloadSnapshotOptions) {
log.info('version: %s', chalk.bold(version));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));

const artifact = await Artifact.getSnapshot(license, version, log);
const dest = path.resolve(basePath, 'cache', artifact.getFilename());
const dest = path.resolve(basePath, 'cache', artifact.spec.filename);
await artifact.download(dest, { useCached });

return {
downloadPath: dest,
};
};
}

interface InstallSnapshotOptions extends DownloadSnapshotOptions {
password?: string;
esArgs?: string[];
}

/**
* Installs ES from snapshot
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.password
* @property {String} options.version
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installSnapshot = async function installSnapshot({
export async function installSnapshot({
license = 'basic',
password = 'password',
version,

@@ -64,8 +66,8 @@ exports.installSnapshot = async function installSnapshot({
log = defaultLog,
esArgs,
useCached = false,
}) {
const { downloadPath } = await exports.downloadSnapshot({
}: InstallSnapshotOptions) {
const { downloadPath } = await downloadSnapshot({
license,
version,
basePath,

@@ -82,4 +84,4 @@ exports.installSnapshot = async function installSnapshot({
log,
esArgs,
});
};
}
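
Both snapshot entry points now take typed option objects (`DownloadSnapshotOptions` and `InstallSnapshotOptions`) instead of loosely destructured arguments. A hedged usage sketch; the version values are placeholders and the import path assumes the module is consumed directly rather than through the package index:

  import { downloadSnapshot, installSnapshot } from './install_snapshot';

  async function prepareSnapshot() {
    // Download only, e.g. to warm a cache directory.
    const { downloadPath } = await downloadSnapshot({ version: '8.1.0', useCached: true });

    // Download and extract, overriding a couple of the defaults.
    await installSnapshot({ version: '8.1.0', license: 'trial', password: 'changeme' });

    return downloadPath;
  }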

@@ -6,28 +6,35 @@
* Side Public License, v 1.
*/

const path = require('path');
const fs = require('fs');
const os = require('os');
const chalk = require('chalk');
const crypto = require('crypto');
const simpleGit = require('simple-git/promise');
const { installArchive } = require('./archive');
const { log: defaultLog, cache, buildSnapshot, archiveForPlatform } = require('../utils');
const { BASE_PATH } = require('../paths');
import path from 'path';
import fs from 'fs';
import os from 'os';
import crypto from 'crypto';

import chalk from 'chalk';
import simpleGit from 'simple-git/promise';
import { ToolingLog } from '@kbn/dev-utils';

import { installArchive } from './install_archive';
import { log as defaultLog } from '../utils/log';
import { cache } from '../utils/cache';
import { buildSnapshot, archiveForPlatform } from '../utils/build_snapshot';
import { BASE_PATH } from '../paths';

interface InstallSourceOptions {
sourcePath: string;
license?: string;
password?: string;
basePath?: string;
installPath?: string;
log?: ToolingLog;
esArgs?: string[];
}

/**
* Installs ES from source
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.password
* @property {String} options.sourcePath
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installSource = async function installSource({
export async function installSource({
license = 'basic',
password = 'changeme',
sourcePath,

@@ -35,7 +42,7 @@ exports.installSource = async function installSource({
installPath = path.resolve(basePath, 'source'),
log = defaultLog,
esArgs,
}) {
}: InstallSourceOptions) {
log.info('source path: %s', chalk.bold(sourcePath));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));

@@ -62,14 +69,9 @@ exports.installSource = async function installSource({
log,
esArgs,
});
};
}

/**
*
* @param {String} cwd
* @param {ToolingLog} log
*/
async function sourceInfo(cwd, license, log = defaultLog) {
async function sourceInfo(cwd: string, license: string, log: ToolingLog = defaultLog) {
if (!fs.existsSync(cwd)) {
throw new Error(`${cwd} does not exist`);
}
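
`installSource` follows the same pattern with an `InstallSourceOptions` interface, where only `sourcePath` is required. A hedged usage sketch; the checkout path is a placeholder:

  import { installSource } from './install_source';

  async function buildFromLocalCheckout() {
    // sourcePath must point at a local Elasticsearch checkout; sourceInfo() throws if it does not exist.
    await installSource({
      sourcePath: '/workspace/elasticsearch',
      license: 'basic',
    });
  }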

@@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

const os = require('os');
const path = require('path');

function maybeUseBat(bin) {
return os.platform().startsWith('win') ? `${bin}.bat` : bin;
}

const tempDir = os.tmpdir();

exports.BASE_PATH = path.resolve(tempDir, 'kbn-es');

exports.GRADLE_BIN = maybeUseBat('./gradlew');
exports.ES_BIN = maybeUseBat('bin/elasticsearch');
exports.ES_CONFIG = 'config/elasticsearch.yml';

exports.ES_KEYSTORE_BIN = maybeUseBat('./bin/elasticsearch-keystore');

packages/kbn-es/src/paths.ts (new file, 24 lines)

@@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import Os from 'os';
import Path from 'path';

function maybeUseBat(bin: string) {
return Os.platform().startsWith('win') ? `${bin}.bat` : bin;
}

const tempDir = Os.tmpdir();

export const BASE_PATH = Path.resolve(tempDir, 'kbn-es');

export const GRADLE_BIN = maybeUseBat('./gradlew');
export const ES_BIN = maybeUseBat('bin/elasticsearch');
export const ES_CONFIG = 'config/elasticsearch.yml';

export const ES_KEYSTORE_BIN = maybeUseBat('./bin/elasticsearch-keystore');

@@ -6,25 +6,25 @@
* Side Public License, v 1.
*/

const execa = require('execa');
const path = require('path');
const os = require('os');
const readline = require('readline');
const { createCliError } = require('../errors');
const { findMostRecentlyChanged } = require('../utils');
const { GRADLE_BIN } = require('../paths');
import path from 'path';
import os from 'os';

const onceEvent = (emitter, event) => new Promise((resolve) => emitter.once(event, resolve));
import { ToolingLog, withProcRunner } from '@kbn/dev-utils';

import { createCliError } from '../errors';
import { findMostRecentlyChanged } from './find_most_recently_changed';
import { GRADLE_BIN } from '../paths';

interface BuildSnapshotOptions {
license: string;
sourcePath: string;
log: ToolingLog;
platform?: string;
}

/**
* Creates archive from source
*
* @param {Object} options
* @property {('oss'|'basic'|'trial')} options.license
* @property {String} options.sourcePath
* @property {ToolingLog} options.log
* @returns {Object} containing archive and optional plugins
*
* Gradle tasks:
* $ ./gradlew tasks --all | grep 'distribution.*assemble\s'
* :distribution:archives:darwin-tar:assemble

@@ -34,39 +34,27 @@ const onceEvent = (emitter, event) => new Promise((resolve) => emitter.once(even
* :distribution:archives:oss-linux-tar:assemble
* :distribution:archives:oss-windows-zip:assemble
*/
exports.buildSnapshot = async ({ license, sourcePath, log, platform = os.platform() }) => {
export async function buildSnapshot({
license,
sourcePath,
log,
platform = os.platform(),
}: BuildSnapshotOptions) {
const { task, ext } = exports.archiveForPlatform(platform, license);
const buildArgs = [`:distribution:archives:${task}:assemble`];

log.info('%s %s', GRADLE_BIN, buildArgs.join(' '));
log.debug('cwd:', sourcePath);

const build = execa(GRADLE_BIN, buildArgs, {
cwd: sourcePath,
stdio: ['ignore', 'pipe', 'pipe'],
await withProcRunner(log, async (procs) => {
await procs.run('gradle', {
cmd: GRADLE_BIN,
args: buildArgs,
cwd: sourcePath,
wait: true,
});
});

const stdout = readline.createInterface({ input: build.stdout });
const stderr = readline.createInterface({ input: build.stderr });

stdout.on('line', (line) => log.debug(line));
stderr.on('line', (line) => log.error(line));

const [exitCode] = await Promise.all([
Promise.race([
onceEvent(build, 'exit'),
onceEvent(build, 'error').then((error) => {
throw createCliError(`Error spawning gradle: ${error.message}`);
}),
]),
onceEvent(stdout, 'close'),
onceEvent(stderr, 'close'),
]);

if (exitCode > 0) {
throw createCliError('unable to build ES');
}

const archivePattern = `distribution/archives/${task}/build/distributions/elasticsearch-*.${ext}`;
const esArchivePath = findMostRecentlyChanged(path.resolve(sourcePath, archivePattern));

@@ -75,9 +63,9 @@ exports.buildSnapshot = async ({ license, sourcePath, log, platform = os.platfor
}

return esArchivePath;
};
}

exports.archiveForPlatform = (platform, license) => {
export function archiveForPlatform(platform: NodeJS.Platform, license: string) {
const taskPrefix = license === 'oss' ? 'oss-' : '';

switch (platform) {

@@ -88,6 +76,6 @@ exports.archiveForPlatform = (platform, license) => {
case 'linux':
return { format: 'tar', ext: 'tar.gz', task: `${taskPrefix}linux-tar`, platform: 'linux' };
default:
throw new Error(`unknown platform: ${platform}`);
throw new Error(`unsupported platform: ${platform}`);
}
};
}
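
The gradle invocation above replaces the hand-rolled `execa` + `readline` + event wiring with `withProcRunner` from `@kbn/dev-utils`, which pipes child output through the ToolingLog and fails the awaited promise when the process does not exit cleanly, so the manual exit-code and stdout/stderr handling could be removed. A minimal sketch of the same pattern (the task name is illustrative):

  import { ToolingLog, withProcRunner } from '@kbn/dev-utils';

  async function runGradleTask(log: ToolingLog, cwd: string, task: string) {
    await withProcRunner(log, async (procs) => {
      await procs.run('gradle', {
        cmd: './gradlew',
        args: [task], // e.g. ':distribution:archives:linux-tar:assemble'
        cwd,
        wait: true, // resolve only once the process has finished
      });
    });
  }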

@@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

const fs = require('fs');
const path = require('path');

exports.readMeta = function readMeta(file) {
try {
const meta = fs.readFileSync(`${file}.meta`, {
encoding: 'utf8',
});

return {
exists: fs.existsSync(file),
...JSON.parse(meta),
};
} catch (e) {
if (e.code !== 'ENOENT') {
throw e;
}

return {
exists: false,
};
}
};

exports.writeMeta = function readMeta(file, details = {}) {
const meta = {
ts: new Date(),
...details,
};

fs.mkdirSync(path.dirname(file), { recursive: true });
fs.writeFileSync(`${file}.meta`, JSON.stringify(meta, null, 2));
};

packages/kbn-es/src/utils/cache.ts (new file, 40 lines)

@@ -0,0 +1,40 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import Fs from 'fs';
import Path from 'path';

export const cache = {
readMeta(path: string) {
try {
const meta = Fs.readFileSync(`${path}.meta`, {
encoding: 'utf8',
});

return {
...JSON.parse(meta),
};
} catch (e) {
if (e.code !== 'ENOENT') {
throw e;
}

return {};
}
},

writeMeta(path: string, details = {}) {
const meta = {
ts: new Date(),
...details,
};

Fs.mkdirSync(Path.dirname(path), { recursive: true });
Fs.writeFileSync(`${path}.meta`, JSON.stringify(meta, null, 2));
},
};
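
The cache helper persists a small `<file>.meta` JSON next to each download, and the new `cache` object is what artifact.ts imports from './utils/cache'. A sketch of the round trip; the path and etag values are placeholders:

  import { cache } from './cache';

  const dest = '/tmp/kbn-es/cache/elasticsearch-8.1.0-SNAPSHOT.tar.gz';

  // After a successful download, record the ETag the server returned.
  cache.writeMeta(dest, { etag: '"abc123"' });

  // On the next run, read it back (along with the ts written above) to decide
  // whether the artifact can be revalidated with an If-None-Match request.
  const meta = cache.readMeta(dest);
  if (meta.etag) {
    // artifact.ts reuses the cached archive when the server answers 304
  }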

@@ -6,6 +6,8 @@
* Side Public License, v 1.
*/

import { findMostRecentlyChanged } from './find_most_recently_changed';

jest.mock('fs', () => ({
statSync: jest.fn().mockImplementation((path) => {
if (path.includes('oldest')) {

@@ -31,8 +33,6 @@ jest.mock('fs', () => ({
}),
}));

const { findMostRecentlyChanged } = require('./find_most_recently_changed');

test('returns newest file', () => {
const file = findMostRecentlyChanged('/data/*.yml');
expect(file).toEqual('/data/newest.yml');

@@ -6,25 +6,22 @@
* Side Public License, v 1.
*/

const path = require('path');
const fs = require('fs');
const glob = require('glob');
import path from 'path';
import fs from 'fs';
import glob from 'glob';

/**
* Find the most recently modified file that matches the pattern pattern
*
* @param {String} pattern absolute path with glob expressions
* @return {String} Absolute path
*/
exports.findMostRecentlyChanged = function findMostRecentlyChanged(pattern) {
export function findMostRecentlyChanged(pattern: string) {
if (!path.isAbsolute(pattern)) {
throw new TypeError(`Pattern must be absolute, got ${pattern}`);
}

const ctime = (path) => fs.statSync(path).ctime.getTime();
const ctime = (p: string) => fs.statSync(p).ctime.getTime();

return glob
.sync(pattern)
.sort((a, b) => ctime(a) - ctime(b))
.pop();
};
}
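
`findMostRecentlyChanged` still requires an absolute glob and now has a typed signature. A brief usage sketch; the pattern below is a placeholder modelled on the build output layout used in build_snapshot.ts:

  import { findMostRecentlyChanged } from './find_most_recently_changed';

  const newest = findMostRecentlyChanged(
    '/workspace/elasticsearch/distribution/archives/linux-tar/build/distributions/elasticsearch-*.tar.gz'
  );
  // Returns the matching path with the most recent ctime, or undefined when nothing matches.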

@@ -1,16 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

exports.cache = require('./cache');
exports.log = require('./log').log;
exports.parseEsLog = require('./parse_es_log').parseEsLog;
exports.findMostRecentlyChanged = require('./find_most_recently_changed').findMostRecentlyChanged;
exports.extractConfigFiles = require('./extract_config_files').extractConfigFiles;
exports.NativeRealm = require('./native_realm').NativeRealm;
exports.buildSnapshot = require('./build_snapshot').buildSnapshot;
exports.archiveForPlatform = require('./build_snapshot').archiveForPlatform;

packages/kbn-es/src/utils/index.ts (new file, 19 lines)

@@ -0,0 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

export { cache } from './cache';
export { log } from './log';
// @ts-expect-error not typed yet
export { parseEsLog } from './parse_es_log';
export { findMostRecentlyChanged } from './find_most_recently_changed';
// @ts-expect-error not typed yet
export { extractConfigFiles } from './extract_config_files';
// @ts-expect-error not typed yet
export { NativeRealm } from './native_realm';
export { buildSnapshot } from './build_snapshot';
export { archiveForPlatform } from './build_snapshot';

@@ -6,11 +6,9 @@
* Side Public License, v 1.
*/

const { ToolingLog } = require('@kbn/dev-utils');
import { ToolingLog } from '@kbn/dev-utils';

const log = new ToolingLog({
export const log = new ToolingLog({
level: 'verbose',
writeTo: process.stdout,
});

exports.log = log;
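
With utils/index.ts re-exporting the converted modules (and `// @ts-expect-error` marking the ones still untyped), consumers keep a single import point; a brief sketch:

  import { log, cache, findMostRecentlyChanged } from './utils';

  log.info('kbn-es utils are now written in TypeScript'); // log is the ToolingLog created in utils/log.ts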