Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
[ci-stats] Collects additional timings about bootstrap (#112919)
Signed-off-by: Tyler Smalley <tyler.smalley@elastic.co>
Parent: e54950177d
Commit: d20a696e95

16 changed files with 5045 additions and 4815 deletions
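For orientation: every timing this commit reports flows through CiStatsReporter.timings(). A rough sketch of the record shape, assembled from the hunks below. The field names ('group', 'id', 'ms', 'meta') are taken from the diff, but this interface is illustrative, not the canonical ci-stats type:

// TimingRecord is a name invented here for illustration.
interface TimingRecord {
  group: string; // e.g. 'scripts/kbn bootstrap', or '<script path> <command>' from Metrics
  id: string; // 'total' for whole-command timings, or a step id like 'build packages'
  ms: number; // elapsed milliseconds
  meta?: Record<string, string | boolean | number | undefined>; // success, errorMessage, statsMeta entries
}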
@@ -10,7 +10,8 @@ import { inspect } from 'util';
 import Os from 'os';
 import Fs from 'fs';
 import Path from 'path';

+import crypto from 'crypto';
 import execa from 'execa';
 import Axios from 'axios';

 import { ToolingLog } from '../tooling_log';
@@ -80,6 +81,15 @@ export class CiStatsReporter {
     const timings = options.timings;
     const upstreamBranch = options.upstreamBranch ?? this.getUpstreamBranch();
     const kibanaUuid = options.kibanaUuid === undefined ? this.getKibanaUuid() : options.kibanaUuid;
+    let email;
+
+    try {
+      const { stdout } = await execa('git', ['config', 'user.email']);
+      email = stdout;
+    } catch (e) {
+      this.log.debug(e.message);
+    }
+
     const defaultMetadata = {
       osPlatform: Os.platform(),
       osRelease: Os.release(),
@@ -89,9 +99,15 @@
       cpuSpeed: Os.cpus()[0]?.speed,
       freeMem: Os.freemem(),
       totalMem: Os.totalmem(),
+      committerHash: email
+        ? crypto.createHash('sha256').update(email).digest('hex').substring(0, 20)
+        : undefined,
+      isElasticCommitter: email ? email.endsWith('@elastic.co') : undefined,
       kibanaUuid,
     };

+    this.log.debug('CIStatsReporter committerHash: %s', defaultMetadata.committerHash);
+
     return await this.req({
       auth: !!buildId,
       path: '/v1/timings',
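Note that committerHash never sends a raw email address: it is a sha256 of the configured git identity, truncated to 20 hex characters. A standalone sketch of the same derivation (the helper name is ours, not part of the reporter):

import crypto from 'crypto';
import execa from 'execa';

// committerHash() is a hypothetical helper mirroring the hunk above: hash the
// configured git email so the raw address never leaves the machine, then keep
// the first 20 hex characters as a stable, anonymous id.
async function committerHash(): Promise<string | undefined> {
  try {
    const { stdout: email } = await execa('git', ['config', 'user.email']);
    return crypto.createHash('sha256').update(email).digest('hex').substring(0, 20);
  } catch {
    // not inside a git checkout, or user.email is not configured
    return undefined;
  }
}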
packages/kbn-dev-utils/src/run/metrics.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import path from 'path';
+import { REPO_ROOT } from '@kbn/utils';
+import { CiStatsReporter } from '../ci_stats_reporter';
+import { ToolingLog } from '../tooling_log';
+
+export type MetricsMeta = Map<string, string | boolean | number>;
+
+export class Metrics {
+  private reporter: CiStatsReporter;
+  meta: MetricsMeta = new Map();
+  startTime: number;
+  filePath: string;
+
+  constructor(log: ToolingLog) {
+    this.reporter = CiStatsReporter.fromEnv(log);
+    this.meta = new Map();
+    this.startTime = Date.now();
+    this.filePath = path.relative(REPO_ROOT, process.argv[1]).replace('.js', '');
+  }
+
+  async reportSuccess(command?: string) {
+    return await this.reporter.timings({
+      timings: [
+        {
+          group: `${command ? `${this.filePath} ${command}` : this.filePath}`,
+          id: 'total',
+          ms: Date.now() - this.startTime,
+          meta: {
+            success: true,
+            ...Object.fromEntries(this.meta),
+          },
+        },
+      ],
+    });
+  }
+
+  async reportError(errorMessage?: string, command?: string) {
+    return await this.reporter.timings({
+      timings: [
+        {
+          group: `${command ? `${this.filePath} ${command}` : this.filePath}`,
+          id: 'total',
+          ms: Date.now() - this.startTime,
+          meta: {
+            success: false,
+            errorMessage,
+            ...Object.fromEntries(this.meta),
+          },
+        },
+      ],
+    });
+  }
+}
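A minimal sketch of using the new Metrics class directly, assuming a ToolingLog writing to stdout and that the module is importable as a sibling. Real callers get this wiring for free from run() and RunWithCommands, per the hunks that follow:

import { ToolingLog } from '@kbn/dev-utils';
import { Metrics } from './metrics'; // assumed sibling-module import path

async function main() {
  const log = new ToolingLog({ level: 'info', writeTo: process.stdout });
  const metrics = new Metrics(log);
  metrics.meta.set('exampleFlag', true); // surfaces in the reported timing's meta

  try {
    // ... script body ...
    await metrics.reportSuccess();
  } catch (error: any) {
    await metrics.reportError(error?.message);
    throw error;
  }
}

void main();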
@@ -12,11 +12,13 @@ import { Flags, getFlags, FlagOptions } from './flags';
 import { ProcRunner, withProcRunner } from '../proc_runner';
 import { getHelp } from './help';
 import { CleanupTask, Cleanup } from './cleanup';
+import { Metrics, MetricsMeta } from './metrics';

 export interface RunContext {
   log: ToolingLog;
   flags: Flags;
   procRunner: ProcRunner;
+  statsMeta: MetricsMeta;
   addCleanupTask: (task: CleanupTask) => void;
 }
 export type RunFn = (context: RunContext) => Promise<void> | void;
@@ -32,13 +34,6 @@ export interface RunOptions {

 export async function run(fn: RunFn, options: RunOptions = {}) {
   const flags = getFlags(process.argv.slice(2), options.flags, options.log?.defaultLevel);
-  const helpText = getHelp({
-    description: options.description,
-    usage: options.usage,
-    flagHelp: options.flags?.help,
-    defaultLogLevel: options.log?.defaultLevel,
-  });
-
   const log = new ToolingLog({
     level: pickLevelFromFlags(flags, {
       default: options.log?.defaultLevel,
@@ -46,6 +41,14 @@ export async function run(fn: RunFn, options: RunOptions = {}) {
     writeTo: process.stdout,
   });

+  const metrics = new Metrics(log);
+  const helpText = getHelp({
+    description: options.description,
+    usage: options.usage,
+    flagHelp: options.flags?.help,
+    defaultLogLevel: options.log?.defaultLevel,
+  });
+
   if (flags.help) {
     log.write(helpText);
     process.exit();
@@ -65,14 +68,18 @@ export async function run(fn: RunFn, options: RunOptions = {}) {
         log,
         flags,
         procRunner,
+        statsMeta: metrics.meta,
         addCleanupTask: cleanup.add.bind(cleanup),
       });
     });
   } catch (error) {
     cleanup.execute(error);
+    await metrics.reportError(error?.message);
     // process.exitCode is set by `cleanup` when necessary
     process.exit();
   } finally {
     cleanup.execute();
   }
+
+  await metrics.reportSuccess();
 }
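With the change above, any script built on run() can attach metadata to its reported timing through statsMeta. A minimal sketch (the script body and the meta key are invented for illustration):

import { run } from '@kbn/dev-utils';

run(
  async ({ log, flags, statsMeta }) => {
    // entries set here are merged into the timing's meta on success or failure
    statsMeta.set('exampleInput', String(flags._[0] ?? ''));
    log.info('doing work...');
  },
  { description: 'example script instrumented via statsMeta' }
);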
@@ -45,6 +45,7 @@ it('extends the context using extendContext()', async () => {
     flags: expect.any(Object),
     addCleanupTask: expect.any(Function),
     procRunner: expect.any(ProcRunner),
+    statsMeta: expect.any(Map),
     extraContext: true,
   });
@@ -13,6 +13,7 @@ import { Cleanup } from './cleanup';
 import { getHelpForAllCommands, getCommandLevelHelp } from './help';
 import { createFlagError } from './fail';
 import { withProcRunner } from '../proc_runner';
+import { Metrics } from './metrics';

 export type CommandRunFn<T> = (context: RunContext & T) => Promise<void> | void;

@@ -46,16 +47,17 @@ export class RunWithCommands<T> {
     const globalFlags = getFlags(process.argv.slice(2), {
       allowUnexpected: true,
     });
-
-    const isHelpCommand = globalFlags._[0] === 'help';
-    const commandName = isHelpCommand ? globalFlags._[1] : globalFlags._[0];
-    const command = this.commands.find((c) => c.name === commandName);
     const log = new ToolingLog({
       level: pickLevelFromFlags(globalFlags, {
         default: this.options.log?.defaultLevel,
       }),
       writeTo: process.stdout,
     });
+    const metrics = new Metrics(log);
+
+    const isHelpCommand = globalFlags._[0] === 'help';
+    const commandName = isHelpCommand ? globalFlags._[1] : globalFlags._[0];
+    const command = this.commands.find((c) => c.name === commandName);

     const globalHelp = getHelpForAllCommands({
       description: this.options.description,
@@ -111,6 +113,7 @@ export class RunWithCommands<T> {
       log,
       flags: commandFlags,
       procRunner,
+      statsMeta: metrics.meta,
       addCleanupTask: cleanup.add.bind(cleanup),
     };

@@ -123,10 +126,13 @@ export class RunWithCommands<T> {
       });
     } catch (error) {
       cleanup.execute(error);
+      await metrics.reportError(error?.message, commandName);
       // exitCode is set by `cleanup` when necessary
       process.exit();
     } finally {
       cleanup.execute();
     }
+
+    await metrics.reportSuccess(commandName);
   }
 }
@@ -40,12 +40,13 @@ export function runCli() {
         --es-ca            if Elasticsearch url points to https://localhost we default to the CA from @kbn/dev-utils, customize the CA with this flag
       `,
     },
-    async extendContext({ log, flags, addCleanupTask }) {
+    async extendContext({ log, flags, addCleanupTask, statsMeta }) {
       const configPath = flags.config || defaultConfigPath;
       if (typeof configPath !== 'string') {
         throw createFlagError('--config must be a string');
       }
       const config = await readConfigFile(log, Path.resolve(configPath));
+      statsMeta.set('ftrConfigPath', configPath);

       let esUrl = flags['es-url'];
       if (esUrl && typeof esUrl !== 'string') {
@@ -148,15 +149,19 @@ export function runCli() {
         --query            query object to limit the documents being archived, needs to be properly escaped JSON
       `,
     },
-    async run({ flags, esArchiver }) {
+    async run({ flags, esArchiver, statsMeta }) {
      const [path, ...indices] = flags._;
      if (!path) {
        throw createFlagError('missing [path] argument');
      }

      if (!indices.length) {
        throw createFlagError('missing [...indices] arguments');
      }

+      statsMeta.set('esArchiverPath', path);
+      statsMeta.set('esArchiverIndices', indices.join(','));
+
      const raw = flags.raw;
      if (typeof raw !== 'boolean') {
        throw createFlagError('--raw does not take a value');
@@ -195,7 +200,7 @@ export function runCli() {
         --use-create       use create instead of index for loading documents
       `,
     },
-    async run({ flags, esArchiver }) {
+    async run({ flags, esArchiver, statsMeta }) {
      const [path] = flags._;
      if (!path) {
        throw createFlagError('missing [path] argument');
@@ -204,6 +209,8 @@ export function runCli() {
        throw createFlagError(`unknown extra arguments: [${flags._.slice(1).join(', ')}]`);
      }

+      statsMeta.set('esArchiverPath', path);
+
      const useCreate = flags['use-create'];
      if (typeof useCreate !== 'boolean') {
        throw createFlagError('--use-create does not take a value');
@@ -216,7 +223,7 @@ export function runCli() {
      name: 'unload',
      usage: 'unload [path]',
      description: 'remove indices created by the archive at [path]',
-      async run({ flags, esArchiver }) {
+      async run({ flags, esArchiver, statsMeta }) {
        const [path] = flags._;
        if (!path) {
          throw createFlagError('missing [path] argument');
@@ -225,6 +232,8 @@ export function runCli() {
        throw createFlagError(`unknown extra arguments: [${flags._.slice(1).join(', ')}]`);
      }

+      statsMeta.set('esArchiverPath', path);
+
      await esArchiver.unload(path);
    },
  })
@@ -233,7 +242,7 @@ export function runCli() {
      usage: 'edit [path]',
      description:
        'extract the archives within or at [path], wait for edits to be completed, and then recompress the archives',
-      async run({ flags, esArchiver }) {
+      async run({ flags, esArchiver, statsMeta }) {
        const [path] = flags._;
        if (!path) {
          throw createFlagError('missing [path] argument');
@@ -242,6 +251,8 @@ export function runCli() {
        throw createFlagError(`unknown extra arguments: [${flags._.slice(1).join(', ')}]`);
      }

+      statsMeta.set('esArchiverPath', path);
+
      await esArchiver.edit(path, async () => {
        const rl = readline.createInterface({
          input: process.stdin,
@@ -47,7 +47,7 @@ export function reportOptimizerTimings(log: ToolingLog, config: OptimizerConfig)
     timings: [
       {
         group: '@kbn/optimizer',
-        id: 'overall time',
+        id: 'total',
         ms: time,
         meta: {
           optimizerBundleCount: config.filteredBundles.length,
packages/kbn-pm/dist/index.js (vendored, 9644 lines changed)
File diff suppressed because one or more lines are too long
@@ -7,6 +7,9 @@
  */

+import { resolve, sep } from 'path';
+import { CiStatsReporter } from '@kbn/dev-utils/ci_stats_reporter';
+
 import { log } from '../utils/log';
 import { spawnStreaming } from '../utils/child_process';
 import { linkProjectExecutables } from '../utils/link_project_executables';
 import { getNonBazelProjectsOnly, topologicallyBatchProjects } from '../utils/projects';
@@ -25,8 +28,8 @@ export const BootstrapCommand: ICommand = {
   name: 'bootstrap',

   reportTiming: {
-    group: 'bootstrap',
-    id: 'overall time',
+    group: 'scripts/kbn bootstrap',
+    id: 'total',
   },

   async run(projects, projectGraph, { options, kbn, rootPath }) {
@@ -34,6 +37,8 @@ export const BootstrapCommand: ICommand = {
     const batchedNonBazelProjects = topologicallyBatchProjects(nonBazelProjectsOnly, projectGraph);
     const kibanaProjectPath = projects.get('kibana')?.path || '';
     const runOffline = options?.offline === true;
+    const reporter = CiStatsReporter.fromEnv(log);
+    const timings = [];

     // Force install is set in case a flag is passed or
     // if the `.yarn-integrity` file is not found which
@@ -58,11 +63,23 @@ export const BootstrapCommand: ICommand = {
     // That way non bazel projects could depend on bazel projects but not the other way around
     // That is only intended during the migration process while non Bazel projects are not removed at all.
     //

     if (forceInstall) {
+      const forceInstallStartTime = Date.now();
       await runBazel(['run', '@nodejs//:yarn'], runOffline);
+      timings.push({
+        id: 'force install dependencies',
+        ms: Date.now() - forceInstallStartTime,
+      });
     }

     // build packages
+    const packageStartTime = Date.now();
     await runBazel(['build', '//packages:build', '--show_result=1'], runOffline);
+    timings.push({
+      id: 'build packages',
+      ms: Date.now() - packageStartTime,
+    });

     // Install monorepo npm dependencies outside of the Bazel managed ones
     for (const batch of batchedNonBazelProjects) {
@@ -113,15 +130,22 @@ export const BootstrapCommand: ICommand = {
       { prefix: '[vscode]', debug: false }
     );

     // Build typescript references
     await spawnStreaming(
       process.execPath,
-      ['scripts/build_ts_refs', '--ignore-type-failures', '--info'],
+      ['scripts/build_ts_refs', '--ignore-type-failures'],
       {
         cwd: kbn.getAbsolute(),
         env: process.env,
       },
       { prefix: '[ts refs]', debug: false }
     );

+    // send timings
+    await reporter.timings({
+      upstreamBranch: kbn.kibanaProject.json.branch,
+      // prevent loading @kbn/utils by passing null
+      kibanaUuid: kbn.getUuid() || null,
+      timings: timings.map((t) => ({ group: 'scripts/kbn bootstrap', ...t })),
+    });
   },
 };
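Based on the hunks above, bootstrap now reports per-step timings alongside the command-level 'total' contributed by reportTiming. An illustrative payload (the millisecond values are made up):

// Example only; real values come from Date.now() deltas around each step.
const exampleTimings = [
  { group: 'scripts/kbn bootstrap', id: 'force install dependencies', ms: 42_000 },
  { group: 'scripts/kbn bootstrap', id: 'build packages', ms: 180_000 },
  { group: 'scripts/kbn bootstrap', id: 'total', ms: 420_000 },
];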
@@ -13,6 +13,11 @@ export const BuildCommand: ICommand = {
   description: 'Runs a build in the Bazel built packages',
   name: 'build',

+  reportTiming: {
+    group: 'scripts/kbn build',
+    id: 'total',
+  },
+
   async run(projects, projectGraph, { options }) {
     const runOffline = options?.offline === true;
@@ -20,6 +20,11 @@ export const CleanCommand: ICommand = {
   description: 'Deletes output directories, node_modules and resets internal caches.',
   name: 'clean',

+  reportTiming: {
+    group: 'scripts/kbn clean',
+    id: 'total',
+  },
+
   async run(projects) {
     log.warning(dedent`
       This command is only necessary for the rare circumstance where you need to recover a consistent
@@ -26,6 +26,11 @@ export const ResetCommand: ICommand = {
     'Deletes node_modules and output directories, resets internal and disk caches, and stops Bazel server',
   name: 'reset',

+  reportTiming: {
+    group: 'scripts/kbn reset',
+    id: 'total',
+  },
+
   async run(projects) {
     log.warning(dedent`
       In most cases, 'yarn kbn clean' is all that should be needed to recover a consistent state when
@@ -18,6 +18,11 @@ export const RunCommand: ICommand = {
     'Run script defined in package.json in each package that contains that script (only works on packages not using Bazel yet)',
   name: 'run',

+  reportTiming: {
+    group: 'scripts/kbn run',
+    id: 'total',
+  },
+
   async run(projects, projectGraph, { extraArgs, options }) {
     log.warning(dedent`
       We are migrating packages into the Bazel build system and we will no longer support running npm scripts on
@@ -13,6 +13,11 @@ export const WatchCommand: ICommand = {
   description: 'Runs a build in the Bazel built packages and keeps watching them for changes',
   name: 'watch',

+  reportTiming: {
+    group: 'scripts/kbn watch',
+    id: 'total',
+  },
+
   async run(projects, projectGraph, { options }) {
     const runOffline = options?.offline === true;
@@ -50,7 +50,7 @@ export function runKbnArchiverCli() {
         --kibana-url       set the url that kibana can be reached at, uses the "servers.kibana" setting from --config by default
       `,
     },
-    async extendContext({ log, flags }) {
+    async extendContext({ log, flags, statsMeta }) {
      let config;
      if (flags.config) {
        if (typeof flags.config !== 'string') {
@@ -58,6 +58,7 @@ export function runKbnArchiverCli() {
        }

        config = await readConfigFile(log, Path.resolve(flags.config));
+        statsMeta.set('ftrConfigPath', flags.config);
      }

      let kibanaUrl;
@@ -82,6 +83,8 @@ export function runKbnArchiverCli() {
        throw createFlagError('--space must be a string');
      }

+      statsMeta.set('kbnArchiverArg', getSinglePositionalArg(flags));
+
      return {
        space,
        kbnClient: new KbnClient({
@@ -30,8 +30,11 @@ const isTypeFailure = (error: any) =>

 export async function runBuildRefsCli() {
   run(
-    async ({ log, flags, procRunner }) => {
-      if (process.env.BUILD_TS_REFS_DISABLE === 'true' && !flags.force) {
+    async ({ log, flags, procRunner, statsMeta }) => {
+      const enabled = process.env.BUILD_TS_REFS_DISABLE !== 'true' || !!flags.force;
+      statsMeta.set('buildTsRefsEnabled', enabled);
+
+      if (!enabled) {
        log.info(
          'Building ts refs is disabled because the BUILD_TS_REFS_DISABLE environment variable is set to "true". Pass `--force` to run the build anyway.'
        );
@@ -57,6 +60,12 @@ export async function runBuildRefsCli() {
      const doClean = !!flags.clean || doCapture;
      const doInitCache = cacheEnabled && !doCapture;

+      statsMeta.set('buildTsRefsEnabled', enabled);
+      statsMeta.set('buildTsRefsCacheEnabled', cacheEnabled);
+      statsMeta.set('buildTsRefsDoCapture', doCapture);
+      statsMeta.set('buildTsRefsDoClean', doClean);
+      statsMeta.set('buildTsRefsDoInitCache', doInitCache);
+
      if (doClean) {
        log.info('deleting', projects.outDirs.length, 'ts output directories');
        await concurrentMap(100, projects.outDirs, (outDir) => del(outDir));
@@ -114,9 +123,6 @@ export async function runBuildRefsCli() {
        --ignore-type-failures  If tsc reports type errors, ignore them and just log a small warning
      `,
    },
-    log: {
-      defaultLevel: 'debug',
-    },
   }
  );
 }