[Build] Parallel artifact builds (#217929)

## Summary

Closes #218143
Reverts #39292

This PR parallelizes the artifact outputs and archive creation tasks to
reduce build runtime by about 49% (104min down to 53min). It required
buffering the logs as they were all interleaved from each task and were
not easily parsed by a human. So, the PR also cleans up the logging a
little bit by utilizing the dropdowns available in Buildkite.


### Testing
https://buildkite.com/elastic/kibana-artifacts-snapshot/builds/6253

I also tried bumping the machine to `c2-standard-30` which increases
vCPUs from 16 to 30 and memory from 64 GB to 120 GB:
https://buildkite.com/elastic/kibana-artifacts-snapshot/builds/6254

This results in a 13% reduction in run time for the parallel builds but
costs about twice as much, which isn't worth it.
This commit is contained in:
Brad White 2025-06-02 18:49:02 -06:00 committed by GitHub
parent 3a879d46fb
commit d0fd87e6c5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 213 additions and 127 deletions

View file

@ -9,7 +9,7 @@
import { ToolingLog } from '@kbn/tooling-log';
import { Config, createRunner, Task, GlobalTask } from './lib';
import { Config, createRunner } from './lib';
import * as Tasks from './tasks';
export interface BuildOptions {
@ -48,23 +48,22 @@ export interface BuildOptions {
export async function buildDistributables(log: ToolingLog, options: BuildOptions): Promise<void> {
log.verbose('building distributables with options:', options);
const config = await Config.create(options);
log.write('--- Running global Kibana build tasks');
const run: (task: Task | GlobalTask) => Promise<void> = createRunner({
config,
log,
});
const config = await Config.create(options);
const globalRun = createRunner({ config, log });
const artifactTasks = [];
/**
* verify, reset, and initialize the build environment
*/
if (options.initialize) {
await run(Tasks.VerifyEnv);
await run(Tasks.Clean);
await run(
await globalRun(Tasks.VerifyEnv);
await globalRun(Tasks.Clean);
await globalRun(
options.downloadFreshNode ? Tasks.DownloadNodeBuilds : Tasks.VerifyExistingNodeBuilds
);
await run(Tasks.ExtractNodeBuilds);
await globalRun(Tasks.ExtractNodeBuilds);
}
/**
@ -73,33 +72,33 @@ export async function buildDistributables(log: ToolingLog, options: BuildOptions
if (options.createGenericFolders) {
// Build before copying source files
if (options.buildCanvasShareableRuntime) {
await run(Tasks.BuildCanvasShareableRuntime);
await globalRun(Tasks.BuildCanvasShareableRuntime);
}
await run(Tasks.CopyLegacySource);
await globalRun(Tasks.CopyLegacySource);
await run(Tasks.CreateEmptyDirsAndFiles);
await run(Tasks.CreateReadme);
await run(Tasks.BuildPackages);
await run(Tasks.ReplaceFavicon);
await run(Tasks.BuildKibanaPlatformPlugins);
await run(Tasks.CreatePackageJson);
await run(Tasks.InstallDependencies);
await run(Tasks.GeneratePackagesOptimizedAssets);
await globalRun(Tasks.CreateEmptyDirsAndFiles);
await globalRun(Tasks.CreateReadme);
await globalRun(Tasks.BuildPackages);
await globalRun(Tasks.ReplaceFavicon);
await globalRun(Tasks.BuildKibanaPlatformPlugins);
await globalRun(Tasks.CreatePackageJson);
await globalRun(Tasks.InstallDependencies);
await globalRun(Tasks.GeneratePackagesOptimizedAssets);
// Run on all source files
// **/packages need to be read
// before DeletePackagesFromBuildRoot
await run(Tasks.CreateNoticeFile);
await run(Tasks.CreateXPackNoticeFile);
await globalRun(Tasks.CreateNoticeFile);
await globalRun(Tasks.CreateXPackNoticeFile);
await run(Tasks.DeletePackagesFromBuildRoot);
await run(Tasks.UpdateLicenseFile);
await run(Tasks.RemovePackageJsonDeps);
await run(Tasks.CleanPackageManagerRelatedFiles);
await run(Tasks.CleanExtraFilesFromModules);
await run(Tasks.CleanEmptyFolders);
await run(Tasks.FetchAgentVersionsList);
await globalRun(Tasks.DeletePackagesFromBuildRoot);
await globalRun(Tasks.UpdateLicenseFile);
await globalRun(Tasks.RemovePackageJsonDeps);
await globalRun(Tasks.CleanPackageManagerRelatedFiles);
await globalRun(Tasks.CleanExtraFilesFromModules);
await globalRun(Tasks.CleanEmptyFolders);
await globalRun(Tasks.FetchAgentVersionsList);
}
/**
@ -107,18 +106,18 @@ export async function buildDistributables(log: ToolingLog, options: BuildOptions
* directories and perform platform/architecture-specific steps
*/
if (options.createPlatformFolders) {
await run(Tasks.CreateArchivesSources);
await run(Tasks.InstallChromium);
await run(Tasks.CopyBinScripts);
await run(Tasks.CleanNodeBuilds);
await globalRun(Tasks.CreateArchivesSources);
await globalRun(Tasks.InstallChromium);
await globalRun(Tasks.CopyBinScripts);
await globalRun(Tasks.CleanNodeBuilds);
await run(Tasks.AssertFileTime);
await run(Tasks.AssertPathLength);
await run(Tasks.AssertNoUUID);
await globalRun(Tasks.AssertFileTime);
await globalRun(Tasks.AssertPathLength);
await globalRun(Tasks.AssertNoUUID);
}
// control w/ --skip-cdn-assets
if (options.createCdnAssets) {
await run(Tasks.CreateCdnAssets);
await globalRun(Tasks.CreateCdnAssets);
}
/**
@ -127,64 +126,73 @@ export async function buildDistributables(log: ToolingLog, options: BuildOptions
*/
if (options.createArchives) {
// control w/ --skip-archives
await run(Tasks.CreateArchives);
await globalRun(Tasks.CreateArchives);
}
if (
options.downloadCloudDependencies &&
(options.createDockerCloud || options.createDockerCloudFIPS)
) {
// control w/ --skip-cloud-dependencies-download
await globalRun(Tasks.DownloadCloudDependencies);
}
if (options.createDebPackage || options.createRpmPackage) {
await run(Tasks.CreatePackageConfig);
}
if (options.createDebPackage) {
// control w/ --deb or --skip-os-packages
await run(Tasks.CreateDebPackage);
}
if (options.createRpmPackage) {
// control w/ --rpm or --skip-os-packages
await run(Tasks.CreateRpmPackage);
await globalRun(Tasks.CreatePackageConfig);
if (options.createDebPackage) {
// control w/ --deb or --skip-os-packages
artifactTasks.push(Tasks.CreateDebPackage);
}
if (options.createRpmPackage) {
// control w/ --rpm or --skip-os-packages
artifactTasks.push(Tasks.CreateRpmPackage);
}
}
if (options.createDockerUBI) {
// control w/ --docker-images or --skip-docker-ubi or --skip-os-packages
await run(Tasks.CreateDockerUBI);
artifactTasks.push(Tasks.CreateDockerUBI);
}
if (options.createDockerWolfi) {
// control w/ --docker-images or --skip-docker-wolfi or --skip-os-packages
await run(Tasks.CreateDockerWolfi);
artifactTasks.push(Tasks.CreateDockerWolfi);
}
if (options.createDockerCloud) {
// control w/ --docker-images and --skip-docker-cloud
if (options.downloadCloudDependencies) {
// control w/ --skip-cloud-dependencies-download
await run(Tasks.DownloadCloudDependencies);
}
await run(Tasks.CreateDockerCloud);
artifactTasks.push(Tasks.CreateDockerCloud);
}
if (options.createDockerServerless) {
// control w/ --docker-images and --skip-docker-serverless
await run(Tasks.CreateDockerServerless);
artifactTasks.push(Tasks.CreateDockerServerless);
}
if (options.createDockerFIPS) {
// control w/ --docker-images or --skip-docker-fips or --skip-os-packages
await run(Tasks.CreateDockerFIPS);
artifactTasks.push(Tasks.CreateDockerFIPS);
}
if (options.createDockerCloudFIPS) {
// control w/ --docker-images and --skip-docker-cloud-fips
if (options.downloadCloudDependencies) {
// control w/ --skip-cloud-dependencies-download
await run(Tasks.DownloadCloudDependencies);
}
await run(Tasks.CreateDockerCloudFIPS);
artifactTasks.push(Tasks.CreateDockerCloudFIPS);
}
if (options.createDockerContexts) {
// control w/ --skip-docker-contexts
await run(Tasks.CreateDockerContexts);
artifactTasks.push(Tasks.CreateDockerContexts);
}
await Promise.allSettled(
// createRunner for each task to ensure each task gets its own Build instance
artifactTasks.map(async (task) => await createRunner({ config, log, bufferLogs: true })(task))
);
/**
* finalize artifacts by writing sha1sums of each into the target directory
*/
await run(Tasks.WriteShaSums);
log.write('--- Finalizing Kibana artifacts');
await globalRun(Tasks.WriteShaSums);
}

View file

@ -13,10 +13,12 @@ import { Config } from './config';
import { Platform } from './platform';
export class Build {
private buildDesc: string = '';
private buildArch: string = '';
private name = 'kibana';
private logTag = chalk`{cyan [ kibana ]}`;
constructor(private config: Config) {}
constructor(private config: Config, private bufferLogs = false) {}
resolvePath(...args: string[]) {
return this.config.resolveFromRepo('build', this.name, ...args);
@ -52,4 +54,24 @@ export class Build {
getLogTag() {
return this.logTag;
}
getBufferLogs() {
return this.bufferLogs;
}
setBuildDesc(desc: string) {
this.buildDesc = desc;
}
getBuildDesc() {
return this.buildDesc;
}
setBuildArch(arch: string) {
this.buildArch = arch;
}
getBuildArch() {
return this.buildArch;
}
}

View file

@ -9,24 +9,27 @@
import execa from 'execa';
import chalk from 'chalk';
import { fromEvent, merge, map, toArray, takeUntil } from 'rxjs';
import { ToolingLog, LogLevel } from '@kbn/tooling-log';
import { watchStdioForLine } from './watch_stdio_for_line';
import { Build } from './build';
interface Options {
level?: Exclude<LogLevel, 'silent' | 'error'>;
cwd?: string;
env?: Record<string, string>;
exitAfter?: RegExp;
build?: Build;
}
export async function exec(
log: ToolingLog,
cmd: string,
args: string[],
{ level = 'debug', cwd, env, exitAfter }: Options = {}
{ level = 'debug', cwd, env, exitAfter, build }: Options = {}
) {
log[level](chalk.dim('$'), cmd, ...args);
const bufferLogs = build && build?.getBufferLogs();
const proc = execa(cmd, args, {
stdio: ['ignore', 'pipe', 'pipe'],
@ -35,5 +38,28 @@ export async function exec(
preferLocal: true,
});
await watchStdioForLine(proc, (line) => log[level](line), exitAfter);
if (bufferLogs) {
const stdout$ = fromEvent<Buffer>(proc.stdout!, 'data').pipe(map((chunk) => chunk.toString()));
const stderr$ = fromEvent<Buffer>(proc.stderr!, 'data').pipe(map((chunk) => chunk.toString()));
const close$ = fromEvent(proc, 'close');
await merge(stdout$, stderr$)
.pipe(takeUntil(close$), toArray())
.toPromise()
.then((logs) => {
log.write(`--- ${build.getBuildDesc()} [${build.getBuildArch()}]`);
log.indent(4, () => {
log[level](chalk.dim('$'), cmd, ...args);
if (logs?.length) {
logs.forEach((line: string) => log[level](line.trim()));
}
});
});
} else {
log[level](chalk.dim('$'), cmd, ...args);
await watchStdioForLine(proc, (line: string) => log[level](line), exitAfter);
}
}

View file

@ -80,7 +80,7 @@ describe('default dist', () => {
});
expect(mock).toHaveBeenCalledTimes(1);
expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]);
expect(mock).toHaveBeenLastCalledWith(config, log);
});
it('calls local tasks once, passing the default build', async () => {

View file

@ -17,12 +17,13 @@ import { Config } from './config';
interface Options {
config: Config;
log: ToolingLog;
bufferLogs?: boolean;
}
export interface GlobalTask {
global: true;
description: string;
run(config: Config, log: ToolingLog, builds: Build[]): Promise<void>;
run(config: Config, log: ToolingLog): Promise<void>;
}
export interface Task {
@ -31,11 +32,18 @@ export interface Task {
run(config: Config, log: ToolingLog, build: Build): Promise<void>;
}
export function createRunner({ config, log }: Options) {
async function execTask(desc: string, task: Task | GlobalTask, lastArg: any) {
log.info(desc);
export function createRunner({ config, log, bufferLogs = false }: Options) {
async function execTask(desc: string, task: GlobalTask): Promise<void>;
async function execTask(desc: string, task: Task, build: Build): Promise<void>;
async function execTask(desc: string, task: GlobalTask | Task, build?: Build): Promise<void> {
if (!task.global && build && bufferLogs) {
log.info(`Buffering logs for Task: ${desc}`);
} else {
log.info(desc);
}
try {
await log.indent(4, async () => {
await log.indent(bufferLogs ? 0 : 4, async () => {
const start = Date.now();
const time = () => {
const secs = Math.round((Date.now() - start) / 1000);
@ -45,7 +53,15 @@ export function createRunner({ config, log }: Options) {
};
try {
await task.run(config, log, lastArg);
if (task.global) {
await task.run(config, log);
} else {
// This shouldn't ever happen since we're passing the builds in below, but needed for TS
if (!build) {
throw new Error('Task is not global, but no build was provided');
}
await task.run(config, log, build);
}
log.success(chalk.green('✓'), time());
} catch (error) {
if (!isErrorLogged(error)) {
@ -63,7 +79,7 @@ export function createRunner({ config, log }: Options) {
}
const builds: Build[] = [];
builds.push(new Build(config));
builds.push(new Build(config, bufferLogs));
/**
* Run a task by calling its `run()` method with three arguments:
@ -73,10 +89,13 @@ export function createRunner({ config, log }: Options) {
*/
return async function run(task: Task | GlobalTask) {
if (task.global) {
await execTask(chalk`{dim [ global ]} ${task.description}`, task, builds);
await execTask(chalk`{dim [ global ]} ${task.description}`, task);
} else {
for (const build of builds) {
await execTask(`${build.getLogTag()} ${task.description}`, task, build);
const desc = `${build.getLogTag()} ${task.description}`;
build.setBuildDesc(desc);
await execTask(desc, task, build);
}
}
};

View file

@ -15,65 +15,72 @@ import { CiStatsMetric } from '@kbn/ci-stats-reporter';
import { mkdirp, compressTar, compressZip, Task } from '../lib';
interface Archive {
format: string;
path: string;
fileCount: number;
}
const asyncStat = promisify(Fs.stat);
export const CreateArchives: Task = {
description: 'Creating the archives for each platform',
async run(config, log, build) {
const archives = [];
const archives: Archive[] = [];
// archive one at a time, parallel causes OOM sometimes
for (const platform of config.getTargetPlatforms()) {
const source = build.resolvePathForPlatform(platform, '.');
const destination = build.getPlatformArchivePath(platform);
await Promise.allSettled(
config.getTargetPlatforms().map(async (platform) => {
const source = build.resolvePathForPlatform(platform, '.');
const destination = build.getPlatformArchivePath(platform);
log.info('archiving', source, 'to', destination);
log.info('archiving', source, 'to', destination);
await mkdirp(Path.dirname(destination));
await mkdirp(Path.dirname(destination));
switch (Path.extname(destination)) {
case '.zip':
archives.push({
format: 'zip',
path: destination,
fileCount: await compressZip({
source,
destination,
archiverOptions: {
zlib: {
level: 9,
switch (Path.extname(destination)) {
case '.zip':
archives.push({
format: 'zip',
path: destination,
fileCount: await compressZip({
source,
destination,
archiverOptions: {
zlib: {
level: 9,
},
},
},
createRootDirectory: true,
rootDirectoryName: build.getRootDirectory(),
}),
});
break;
createRootDirectory: true,
rootDirectoryName: build.getRootDirectory(),
}),
});
break;
case '.gz':
archives.push({
format: 'tar',
path: destination,
fileCount: await compressTar({
source,
destination,
archiverOptions: {
gzip: true,
gzipOptions: {
level: 9,
case '.gz':
archives.push({
format: 'tar',
path: destination,
fileCount: await compressTar({
source,
destination,
archiverOptions: {
gzip: true,
gzipOptions: {
level: 9,
},
},
},
createRootDirectory: true,
rootDirectoryName: build.getRootDirectory(),
}),
});
break;
createRootDirectory: true,
rootDirectoryName: build.getRootDirectory(),
}),
});
break;
default:
throw new Error(`Unexpected extension for archive destination: ${destination}`);
}
}
default:
throw new Error(`Unexpected extension for archive destination: ${destination}`);
}
})
);
const metrics: CiStatsMetric[] = [];
for (const { format, path, fileCount } of archives) {

View file

@ -77,7 +77,7 @@ async function setup({ failOnUrl }: { failOnUrl?: string } = {}) {
it('downloads node builds for each platform', async () => {
const { config } = await setup();
await DownloadNodeBuilds.run(config, log, []);
await DownloadNodeBuilds.run(config, log);
expect(downloadToDisk.mock.calls).toMatchInlineSnapshot(`
Array [
@ -119,7 +119,7 @@ it('downloads node builds for each platform', async () => {
it('rejects if any download fails', async () => {
const { config } = await setup({ failOnUrl: 'linux:url' });
await expect(DownloadNodeBuilds.run(config, log, [])).rejects.toMatchInlineSnapshot(
await expect(DownloadNodeBuilds.run(config, log)).rejects.toMatchInlineSnapshot(
`[Error: Download failed for reasons]`
);
expect(testWriter.messages).toMatchInlineSnapshot(`Array []`);

View file

@ -73,7 +73,7 @@ beforeEach(() => {
it('runs expected fs operations', async () => {
const { config } = await setup();
await ExtractNodeBuilds.run(config, log, []);
await ExtractNodeBuilds.run(config, log);
const usedMethods = Object.fromEntries(
Object.entries(Fs)

View file

@ -102,7 +102,7 @@ beforeEach(() => {
it('checks shasums for each downloaded node build', async () => {
const { config } = await setup();
await VerifyExistingNodeBuilds.run(config, log, []);
await VerifyExistingNodeBuilds.run(config, log);
expect(getNodeShasums).toMatchInlineSnapshot(`
[MockFunction] {
@ -488,7 +488,7 @@ it('rejects if any download has an incorrect sha256', async () => {
});
await expect(
VerifyExistingNodeBuilds.run(config, log, [])
VerifyExistingNodeBuilds.run(config, log)
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Download at linux:default:linux-arm64:downloadPath does not match expected checksum invalid shasum"`
);

View file

@ -74,6 +74,7 @@ export async function runDockerGenerator(
const imageTag = `docker.elastic.co/${imageNamespace}/kibana`;
const version = config.getBuildVersion();
const artifactArchitecture = flags.architecture === 'aarch64' ? 'aarch64' : 'x86_64';
build.setBuildArch(artifactArchitecture);
let artifactVariant = '';
if (flags.serverless) artifactVariant = '-serverless';
const artifactPrefix = `kibana${artifactVariant}-${version}-linux`;
@ -204,6 +205,7 @@ export async function runDockerGenerator(
await exec(log, `./build_docker.sh`, [], {
cwd: dockerBuildDir,
level: 'info',
build,
});
}

View file

@ -23,6 +23,7 @@ export async function runFpm(
) {
const linux = config.getPlatform('linux', architecture);
const version = config.getBuildVersion();
build.setBuildArch(architecture);
const resolveWithTrailingSlash = (...paths: string[]) => `${resolve(...paths)}/`;
@ -150,5 +151,6 @@ export async function runFpm(
await exec(log, 'fpm', args, {
cwd: config.resolveFromRepo('.'),
level: 'info',
build,
});
}