[ToolingLog] remove some manual indent management (#126636) (#126793)

* [ToolingLog] remove some manual indent management

* improve log message

* [dev/build] write the newline after a failed step

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
(cherry picked from commit 4f2321d34c)

Co-authored-by: Spencer <spencer@elastic.co>
Authored by Kibana Machine on 2022-03-03 11:46:26 -05:00, committed via GitHub
commit 20030a3510, parent b86304803b
10 changed files with 153 additions and 163 deletions


@@ -62,7 +62,10 @@ export class ToolingLog {
    * @param delta the number of spaces to increase/decrease the indentation
    * @param block a function to run and reset any indentation changes after
    */
-  public indent<T>(delta = 0, block?: () => Promise<T>) {
+  public indent(delta: number): undefined;
+  public indent<T>(delta: number, block: () => Promise<T>): Promise<T>;
+  public indent<T>(delta: number, block: () => T): T;
+  public indent<T>(delta = 0, block?: () => T | Promise<T>) {
     const originalWidth = this.indentWidth$.getValue();
     this.indentWidth$.next(Math.max(originalWidth + delta, 0));
     if (!block) {
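
The overloads above make the scoped form the primary API: a bare indent(delta) call still adjusts the width imperatively, while indent(delta, block) applies the delta only for the duration of the block and, per the JSDoc, resets any indentation changes made inside it once the block finishes. A minimal usage sketch (the import path and the ToolingLog construction reflect common usage at the time and are assumptions, not part of this diff):

import { ToolingLog } from '@kbn/dev-utils';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });

log.info('building snapshots');
await log.indent(4, async () => {
  // everything logged inside the block is indented four extra spaces
  log.info('darwin');
  log.info('linux');
});
// indentation changes made inside the block are reset here,
// so no matching log.indent(-4) is needed
log.info('done');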


@@ -42,32 +42,31 @@ exports.run = async (defaults = {}) => {
   for (const license of ['oss', 'trial']) {
     for (const platform of ['darwin', 'win32', 'linux']) {
       log.info('Building', platform, license === 'trial' ? 'default' : 'oss', 'snapshot');
-      log.indent(4);
-
-      const snapshotPath = await buildSnapshot({
-        license,
-        sourcePath: options.sourcePath,
-        log,
-        platform,
-      });
+      await log.indent(4, async () => {
+        const snapshotPath = await buildSnapshot({
+          license,
+          sourcePath: options.sourcePath,
+          log,
+          platform,
+        });
 
-      const filename = basename(snapshotPath);
-      const outputPath = resolve(outputDir, filename);
-      const hash = createHash('sha512');
-      await pipelineAsync(
-        Fs.createReadStream(snapshotPath),
-        new Transform({
-          transform(chunk, _, cb) {
-            hash.update(chunk);
-            cb(undefined, chunk);
-          },
-        }),
-        Fs.createWriteStream(outputPath)
-      );
+        const filename = basename(snapshotPath);
+        const outputPath = resolve(outputDir, filename);
+        const hash = createHash('sha512');
+        await pipelineAsync(
+          Fs.createReadStream(snapshotPath),
+          new Transform({
+            transform(chunk, _, cb) {
+              hash.update(chunk);
+              cb(undefined, chunk);
+            },
+          }),
+          Fs.createWriteStream(outputPath)
+        );
 
-      Fs.writeFileSync(`${outputPath}.sha512`, `${hash.digest('hex')} ${filename}`);
-      log.success('snapshot and shasum written to', outputPath);
-      log.indent(-4);
+        Fs.writeFileSync(`${outputPath}.sha512`, `${hash.digest('hex')} ${filename}`);
+        log.success('snapshot and shasum written to', outputPath);
+      });
     }
   }
 };


@@ -59,13 +59,10 @@ exports.Cluster = class Cluster {
    */
   async installSource(options = {}) {
     this._log.info(chalk.bold('Installing from source'));
-    this._log.indent(4);
-
-    const { installPath } = await installSource({ log: this._log, ...options });
-
-    this._log.indent(-4);
-    return { installPath };
+    return await this._log.indent(4, async () => {
+      const { installPath } = await installSource({ log: this._log, ...options });
+      return { installPath };
+    });
   }
 
   /**
@@ -78,16 +75,14 @@ exports.Cluster = class Cluster {
    */
   async downloadSnapshot(options = {}) {
     this._log.info(chalk.bold('Downloading snapshot'));
-    this._log.indent(4);
-
-    const { installPath } = await downloadSnapshot({
-      log: this._log,
-      ...options,
-    });
-
-    this._log.indent(-4);
-    return { installPath };
+    return await this._log.indent(4, async () => {
+      const { installPath } = await downloadSnapshot({
+        log: this._log,
+        ...options,
+      });
+      return { installPath };
+    });
   }
 
   /**
@@ -100,16 +95,14 @@ exports.Cluster = class Cluster {
    */
   async installSnapshot(options = {}) {
     this._log.info(chalk.bold('Installing from snapshot'));
-    this._log.indent(4);
-
-    const { installPath } = await installSnapshot({
-      log: this._log,
-      ...options,
-    });
-
-    this._log.indent(-4);
-    return { installPath };
+    return await this._log.indent(4, async () => {
+      const { installPath } = await installSnapshot({
+        log: this._log,
+        ...options,
+      });
+      return { installPath };
+    });
   }
 
   /**
@@ -122,16 +115,14 @@ exports.Cluster = class Cluster {
    */
   async installArchive(path, options = {}) {
     this._log.info(chalk.bold('Installing from an archive'));
-    this._log.indent(4);
-
-    const { installPath } = await installArchive(path, {
-      log: this._log,
-      ...options,
-    });
-
-    this._log.indent(-4);
-    return { installPath };
+    return await this._log.indent(4, async () => {
+      const { installPath } = await installArchive(path, {
+        log: this._log,
+        ...options,
+      });
+      return { installPath };
+    });
   }
 
   /**
@@ -144,21 +135,19 @@ exports.Cluster = class Cluster {
    */
   async extractDataDirectory(installPath, archivePath, extractDirName = 'data') {
     this._log.info(chalk.bold(`Extracting data directory`));
-    this._log.indent(4);
-
-    // stripComponents=1 excludes the root directory as that is how our archives are
-    // structured. This works in our favor as we can explicitly extract into the data dir
-    const extractPath = path.resolve(installPath, extractDirName);
-    this._log.info(`Data archive: ${archivePath}`);
-    this._log.info(`Extract path: ${extractPath}`);
-
-    await extract({
-      archivePath,
-      targetDir: extractPath,
-      stripComponents: 1,
-    });
-
-    this._log.indent(-4);
+    await this._log.indent(4, async () => {
+      // stripComponents=1 excludes the root directory as that is how our archives are
+      // structured. This works in our favor as we can explicitly extract into the data dir
+      const extractPath = path.resolve(installPath, extractDirName);
+      this._log.info(`Data archive: ${archivePath}`);
+      this._log.info(`Extract path: ${extractPath}`);
+
+      await extract({
+        archivePath,
+        targetDir: extractPath,
+        stripComponents: 1,
+      });
+    });
   }
 
   /**
@@ -169,24 +158,27 @@
    * @returns {Promise<void>}
    */
   async start(installPath, options = {}) {
-    this._exec(installPath, options);
-
-    await Promise.race([
-      // wait for native realm to be setup and es to be started
-      Promise.all([
-        first(this._process.stdout, (data) => {
-          if (/started/.test(data)) {
-            return true;
-          }
-        }),
-        this._setupPromise,
-      ]),
-      // await the outcome of the process in case it exits before starting
-      this._outcome.then(() => {
-        throw createCliError('ES exited without starting');
-      }),
-    ]);
+    // _exec indents and we wait for our own end condition, so reset the indent level to it's current state after we're done waiting
+    await this._log.indent(0, async () => {
+      this._exec(installPath, options);
+
+      await Promise.race([
+        // wait for native realm to be setup and es to be started
+        Promise.all([
+          first(this._process.stdout, (data) => {
+            if (/started/.test(data)) {
+              return true;
+            }
+          }),
+          this._setupPromise,
+        ]),
+        // await the outcome of the process in case it exits before starting
+        this._outcome.then(() => {
+          throw createCliError('ES exited without starting');
+        }),
+      ]);
+    });
   }
 
   /**
@@ -197,16 +189,19 @@
    * @returns {Promise<void>}
    */
   async run(installPath, options = {}) {
-    this._exec(installPath, options);
-
-    // log native realm setup errors so they aren't uncaught
-    this._setupPromise.catch((error) => {
-      this._log.error(error);
-      this.stop();
-    });
-
-    // await the final outcome of the process
-    await this._outcome;
+    // _exec indents and we wait for our own end condition, so reset the indent level to it's current state after we're done waiting
+    await this._log.indent(0, async () => {
+      this._exec(installPath, options);
+
+      // log native realm setup errors so they aren't uncaught
+      this._setupPromise.catch((error) => {
+        this._log.error(error);
+        this.stop();
+      });
+
+      // await the final outcome of the process
+      await this._outcome;
+    });
   }
 
   /**
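
The indent(0, ...) wrappers in start() and run() lean on the block form's reset behaviour rather than on a non-zero delta: a zero delta leaves the current width untouched, and whatever indentation _exec() adds while the block runs is undone once the block resolves. A small sketch of the pattern, assuming only what the JSDoc above states about blocks resetting indentation changes (the generic log variable and messages are illustrative):

await log.indent(0, async () => {
  // a helper such as _exec() may indent internally...
  log.indent(4);
  log.info('waiting for the process to start');
  // ...and no longer needs a matching log.indent(-4);
  // the block form restores the original width when it finishes
});
log.info('back at the original indentation');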


@@ -95,16 +95,16 @@ export function logOptimizerState(log: ToolingLog, config: OptimizerConfig) {
     if (state.phase === 'issue') {
       log.error(`webpack compile errors`);
-      log.indent(4);
-      for (const b of state.compilerStates) {
-        if (b.type === 'compiler issue') {
-          log.error(`[${b.bundleId}] build`);
-          log.indent(4);
-          log.error(b.failure);
-          log.indent(-4);
-        }
-      }
-      log.indent(-4);
+      log.indent(4, () => {
+        for (const b of state.compilerStates) {
+          if (b.type === 'compiler issue') {
+            log.error(`[${b.bundleId}] build`);
+            log.indent(4, () => {
+              log.error(b.failure);
+            });
+          }
+        }
+      });
       return;
     }


@@ -23,26 +23,25 @@ export async function optimize({ log, plugin, sourceDir, buildDir }: BuildContext) {
   }
 
   log.info('running @kbn/optimizer');
-  log.indent(2);
-
-  // build bundles into target
-  const config = OptimizerConfig.create({
-    repoRoot: REPO_ROOT,
-    pluginPaths: [sourceDir],
-    cache: false,
-    dist: true,
-    filter: [plugin.manifest.id],
-  });
-
-  const target = Path.resolve(sourceDir, 'target');
-  await runOptimizer(config).pipe(logOptimizerState(log, config)).toPromise();
-
-  // clean up unnecessary files
-  Fs.unlinkSync(Path.resolve(target, 'public/metrics.json'));
-  Fs.unlinkSync(Path.resolve(target, 'public/.kbn-optimizer-cache'));
-
-  // move target into buildDir
-  await asyncRename(target, Path.resolve(buildDir, 'target'));
-  log.indent(-2);
+  await log.indent(2, async () => {
+    // build bundles into target
+    const config = OptimizerConfig.create({
+      repoRoot: REPO_ROOT,
+      pluginPaths: [sourceDir],
+      cache: false,
+      dist: true,
+      filter: [plugin.manifest.id],
+    });
+
+    const target = Path.resolve(sourceDir, 'target');
+    await runOptimizer(config).pipe(logOptimizerState(log, config)).toPromise();
+
+    // clean up unnecessary files
+    Fs.unlinkSync(Path.resolve(target, 'public/metrics.json'));
+    Fs.unlinkSync(Path.resolve(target, 'public/.kbn-optimizer-cache'));
+
+    // move target into buildDir
+    await asyncRename(target, Path.resolve(buildDir, 'target'));
+  });
 }


@@ -229,10 +229,6 @@ export function createTestServers({
     writeTo: process.stdout,
   });
 
-  log.indent(6);
   log.info('starting elasticsearch');
-  log.indent(4);
-
   const es = createTestEsCluster(
     defaultsDeep({}, settings.es ?? {}, {
       log,
@@ -240,8 +236,6 @@
     })
   );
 
-  log.indent(-4);
-
   // Add time for KBN and adding users
   adjustTimeout(es.getStartTimeout() + 100000);


@@ -33,29 +33,30 @@ export interface Task {
 export function createRunner({ config, log }: Options) {
   async function execTask(desc: string, task: Task | GlobalTask, lastArg: any) {
     log.info(desc);
-    log.indent(4);
-
-    const start = Date.now();
-    const time = () => {
-      const sec = (Date.now() - start) / 1000;
-      const minStr = sec > 60 ? `${Math.floor(sec / 60)} min ` : '';
-      const secStr = `${Math.round(sec % 60)} sec`;
-      return chalk.dim(`${minStr}${secStr}`);
-    };
-
     try {
-      await task.run(config, log, lastArg);
-      log.success(chalk.green('✓'), time());
-    } catch (error) {
-      if (!isErrorLogged(error)) {
-        log.error(`failure ${time()}`);
-        log.error(error);
-        markErrorLogged(error);
-      }
-      throw error;
+      await log.indent(4, async () => {
+        const start = Date.now();
+        const time = () => {
+          const sec = (Date.now() - start) / 1000;
+          const minStr = sec > 60 ? `${Math.floor(sec / 60)} min ` : '';
+          const secStr = `${Math.round(sec % 60)} sec`;
+          return chalk.dim(`${minStr}${secStr}`);
+        };
+
+        try {
+          await task.run(config, log, lastArg);
+          log.success(chalk.green('✓'), time());
+        } catch (error) {
+          if (!isErrorLogged(error)) {
+            log.error(`failure ${time()}`);
+            log.error(error);
+            markErrorLogged(error);
+          }
+          throw error;
+        }
+      });
     } finally {
-      log.indent(-4);
       log.write('');
     }
   }


@@ -18,13 +18,15 @@ export const CreateNoticeFile: Task = {
   async run(config, log, build) {
     log.info('Generating notice from source');
-    log.indent(4);
-    const noticeFromSource = await generateNoticeFromSource({
-      productName: 'Kibana',
-      directory: build.resolvePath(),
-      log,
-    });
-    log.indent(-4);
+    const noticeFromSource = await log.indent(
+      4,
+      async () =>
+        await generateNoticeFromSource({
+          productName: 'Kibana',
+          directory: build.resolvePath(),
+          log,
+        })
+    );
 
     log.info('Discovering installed packages');
     const packages = await getInstalledPackages({


@@ -109,9 +109,9 @@ export function runEslintWithTypes() {
           return undefined;
         } else {
           log.error(`${project.name} failed`);
-          log.indent(4);
-          log.write(proc.all);
-          log.indent(-4);
+          log.indent(4, () => {
+            log.write(proc.all);
+          });
           return project;
         }
       }, concurrency),


@@ -148,12 +148,9 @@
     await init();
 
     for (const pr of prs) {
       log.info('pr #%s', pr.number);
-      log.indent(4);
-      try {
-        await updatePr(pr);
-      } finally {
-        log.indent(-4);
-      }
+      await log.indent(4, async () => {
+        await updatePr(pr);
+      });
     }
   },
   {