Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00

[kbn/pm] reduce responsibilities (#130592)

* [kbn/pm] reduce responsibilities
* [CI] Auto-commit changed files from 'node scripts/generate packages_build_manifest'
* [CI] Auto-commit changed files from 'yarn kbn run build -i @kbn/pm'

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>

parent 9430e41eb2
commit 9ed9c02bae

38 changed files with 670 additions and 27915 deletions
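The core of the change: the bazel runner moves out of kbn-pm and into the new @kbn/bazel-runner package, and its positional-argument signature is replaced by an options object carrying an explicit ToolingLog. A minimal TypeScript sketch of the call-shape migration, using only names that appear in the diff below; the `log` value is assumed to be supplied by the caller (for example kbn-pm's logger):

import { runBazel } from '@kbn/bazel-runner';
import type { ToolingLog } from '@kbn/tooling-log';

declare const log: ToolingLog; // assumed to exist in the calling code

// Old shape (removed from packages/kbn-pm/src/utils/bazel/run.ts in this commit):
//   await runBazel(['build', '//packages:build', '--show_result=1'], runOffline);
// New shape (@kbn/bazel-runner):
await runBazel({
  bazelArgs: ['build', '//packages:build', '--show_result=1'],
  log,
  offline: true,
});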
.github/CODEOWNERS (vendored, 1 change)

@@ -220,6 +220,7 @@
/packages/kbn-ambient-ui-types/ @elastic/kibana-operations
/packages/kbn-ambient-storybook-types/ @elastic/kibana-operations
/packages/kbn-bazel-packages/ @elastic/kibana-operations
/packages/kbn-bazel-runner/ @elastic/kibana-operations
/packages/kbn-cli-dev-mode/ @elastic/kibana-operations
/packages/kbn-dev-utils*/ @elastic/kibana-operations
/packages/kbn-es-archiver/ @elastic/kibana-operations
@@ -481,6 +481,7 @@
"@kbn/babel-plugin-synthetic-packages": "link:bazel-bin/packages/kbn-babel-plugin-synthetic-packages",
"@kbn/babel-preset": "link:bazel-bin/packages/kbn-babel-preset",
"@kbn/bazel-packages": "link:bazel-bin/packages/kbn-bazel-packages",
"@kbn/bazel-runner": "link:bazel-bin/packages/kbn-bazel-runner",
"@kbn/ci-stats-client": "link:bazel-bin/packages/kbn-ci-stats-client",
"@kbn/ci-stats-core": "link:bazel-bin/packages/kbn-ci-stats-core",
"@kbn/ci-stats-reporter": "link:bazel-bin/packages/kbn-ci-stats-reporter",

@@ -607,6 +608,7 @@
"@types/kbn__apm-utils": "link:bazel-bin/packages/kbn-apm-utils/npm_module_types",
"@types/kbn__axe-config": "link:bazel-bin/packages/kbn-axe-config/npm_module_types",
"@types/kbn__bazel-packages": "link:bazel-bin/packages/kbn-bazel-packages/npm_module_types",
"@types/kbn__bazel-runner": "link:bazel-bin/packages/kbn-bazel-runner/npm_module_types",
"@types/kbn__ci-stats-client": "link:bazel-bin/packages/kbn-ci-stats-client/npm_module_types",
"@types/kbn__ci-stats-core": "link:bazel-bin/packages/kbn-ci-stats-core/npm_module_types",
"@types/kbn__ci-stats-reporter": "link:bazel-bin/packages/kbn-ci-stats-reporter/npm_module_types",
@@ -24,6 +24,7 @@ filegroup(
"//packages/kbn-babel-plugin-synthetic-packages:build",
"//packages/kbn-babel-preset:build",
"//packages/kbn-bazel-packages:build",
"//packages/kbn-bazel-runner:build",
"//packages/kbn-ci-stats-client:build",
"//packages/kbn-ci-stats-core:build",
"//packages/kbn-ci-stats-reporter:build",

@@ -121,6 +122,7 @@ filegroup(
"//packages/kbn-apm-utils:build_types",
"//packages/kbn-axe-config:build_types",
"//packages/kbn-bazel-packages:build_types",
"//packages/kbn-bazel-runner:build_types",
"//packages/kbn-ci-stats-client:build_types",
"//packages/kbn-ci-stats-core:build_types",
"//packages/kbn-ci-stats-reporter:build_types",
@@ -70,6 +70,13 @@ export class BazelPackage {
return !!(this.buildBazelContent && BUILD_TYPES_RULE_NAME.test(this.buildBazelContent));
}

/**
* Returns true if the package is not intended to be in the build
*/
isDevOnly() {
return !!this.pkg.kibana?.devOnly;
}

/**
* Custom inspect handler so that logging variables in scripts/generate doesn't
* print all the BUILD.bazel files
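The new isDevOnly() flag is what the build tasks further down use to keep dev-only packages out of the distributable. A short hedged sketch of the consuming side, mirroring the filter added to the BuildBazelPackages task later in this diff:

import { discoverBazelPackages } from '@kbn/bazel-packages';

// Packages whose package.json sets `"kibana": { "devOnly": true }` are excluded
// from the production build; everything else ships.
const shippable = (await discoverBazelPackages()).filter((pkg) => !pkg.isDevOnly());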
@@ -13,17 +13,18 @@ import Fs from 'fs';
* by `assertParsedPackageJson()` and extensible as needed in the future
*/
export interface ParsedPackageJson {
/**
* The name of the package, usually `@kbn/`+something
*/
/** The name of the package, usually `@kbn/`+something */
name: string;
/** "dependenices" property from package.json */
dependencies?: Record<string, string>;
/** "devDependenices" property from package.json */
devDependencies?: Record<string, string>;
/**
* All other fields in the package.json are typed as unknown as we don't care what they are
*/
/** Some kibana specific properties about this package */
kibana?: {
/** Is this package only intended for dev? */
devOnly?: boolean;
};
/** All other fields in the package.json are typed as unknown as we don't care what they are */
[key: string]: unknown;
}

@@ -46,6 +47,17 @@ export function assertParsedPackageJson(v: unknown): asserts v is ParsedPackageJ
if (v.devDependencies && !isObj(v.devDependencies)) {
throw new Error('Expected "dependencies" to be an object');
}

const kibana = v.kibana;
if (kibana !== undefined) {
if (!isObj(kibana)) {
throw new Error('Expected "kibana" field in package.json to be an object');
}

if (kibana.devOnly !== undefined && typeof kibana.devOnly !== 'boolean') {
throw new Error('Expected "kibana.devOnly" field in package.json to be a boolean');
}
}
}

/**
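For orientation, a minimal sketch of how the assertion narrows an unknown value; assertParsedPackageJson is the function from the hunk above and is assumed to be in scope, and the file read here is purely illustrative:

import Fs from 'fs';

const raw: unknown = JSON.parse(Fs.readFileSync('packages/kbn-bazel-runner/package.json', 'utf8'));

// Throws if name, dependencies, devDependencies or kibana.devOnly have the wrong types.
assertParsedPackageJson(raw);

// After the assertion `raw` is typed as ParsedPackageJson, so this is type-safe:
const devOnly = raw.kibana?.devOnly === true;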
packages/kbn-bazel-runner/BUILD.bazel (new file, 122 lines)

@@ -0,0 +1,122 @@
load("@npm//@bazel/typescript:index.bzl", "ts_config")
load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")

PKG_DIRNAME = "kbn-bazel-runner"
PKG_REQUIRE_NAME = "@kbn/bazel-runner"

SOURCE_FILES = glob(
[
"src/**/*.ts",
],
exclude = [
"**/*.test.*",
],
)

SRCS = SOURCE_FILES

filegroup(
name = "srcs",
srcs = SRCS,
)

NPM_MODULE_EXTRA_FILES = [
"package.json",
]

# In this array place runtime dependencies, including other packages and NPM packages
# which must be available for this code to run.
#
# To reference other packages use:
# "//repo/relative/path/to/package"
# eg. "//packages/kbn-utils"
#
# To reference a NPM package use:
# "@npm//name-of-package"
# eg. "@npm//lodash"
RUNTIME_DEPS = [
"@npm//execa",
"@npm//chalk",
"@npm//rxjs",
"//packages/kbn-dev-utils",
]

# In this array place dependencies necessary to build the types, which will include the
# :npm_module_types target of other packages and packages from NPM, including @types/*
# packages.
#
# To reference the types for another package use:
# "//repo/relative/path/to/package:npm_module_types"
# eg. "//packages/kbn-utils:npm_module_types"
#
# References to NPM packages work the same as RUNTIME_DEPS
TYPES_DEPS = [
"@npm//@types/node",
"@npm//@types/jest",
"@npm//execa",
"@npm//chalk",
"@npm//rxjs",
"//packages/kbn-dev-utils:npm_module_types",
]

jsts_transpiler(
name = "target_node",
srcs = SRCS,
build_pkg_name = package_name(),
)

ts_config(
name = "tsconfig",
src = "tsconfig.json",
deps = [
"//:tsconfig.base.json",
"//:tsconfig.bazel.json",
],
)

ts_project(
name = "tsc_types",
args = ['--pretty'],
srcs = SRCS,
deps = TYPES_DEPS,
declaration = True,
emit_declaration_only = True,
out_dir = "target_types",
root_dir = "src",
tsconfig = ":tsconfig",
)

js_library(
name = PKG_DIRNAME,
srcs = NPM_MODULE_EXTRA_FILES,
deps = RUNTIME_DEPS + [":target_node"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)

pkg_npm(
name = "npm_module",
deps = [":" + PKG_DIRNAME],
)

filegroup(
name = "build",
srcs = [":npm_module"],
visibility = ["//visibility:public"],
)

pkg_npm_types(
name = "npm_module_types",
srcs = SRCS,
deps = [":tsc_types"],
package_name = PKG_REQUIRE_NAME,
tsconfig = ":tsconfig",
visibility = ["//visibility:public"],
)

filegroup(
name = "build_types",
srcs = [":npm_module_types"],
visibility = ["//visibility:public"],
)
packages/kbn-bazel-runner/README.md (new file, 3 lines)

@@ -0,0 +1,3 @@
# @kbn/bazel-runner

Helpers for running bazel commands, used everywhere we programatically run bazel.
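A hedged usage sketch for the new package; the BazelRunOptions fields come from src/bazel_runner.ts below, while the ToolingLog construction here is only illustrative:

import { runBazel, runIBazel } from '@kbn/bazel-runner';
import { ToolingLog } from '@kbn/tooling-log';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });

// One-shot build; `offline: true` appends --config=offline to the bazel args.
await runBazel({ bazelArgs: ['build', '//packages:build'], log, offline: false });

// Watch mode via ibazel; IBAZEL_USE_LEGACY_WATCHER=0 is injected by runIBazel itself.
await runIBazel({ bazelArgs: ['build', '//packages:build'], log });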
packages/kbn-bazel-runner/jest.config.js (new file, 13 lines)

@@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

module.exports = {
preset: '@kbn/test/jest_node',
rootDir: '../..',
roots: ['<rootDir>/packages/kbn-bazel-runner'],
};
packages/kbn-bazel-runner/package.json (new file, 10 lines)

@@ -0,0 +1,10 @@
{
"name": "@kbn/bazel-runner",
"private": true,
"version": "1.0.0",
"main": "./target_node/index.js",
"license": "SSPL-1.0 OR Elastic License 2.0",
"kibana": {
"devOnly": true
}
}
packages/kbn-bazel-runner/src/bazel_runner.ts (new file, 76 lines)

@@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import chalk from 'chalk';
import execa from 'execa';
import * as Rx from 'rxjs';
import { tap } from 'rxjs/operators';
import { ToolingLog } from '@kbn/tooling-log';
import { observeLines } from '@kbn/stdio-dev-helpers';

type BazelCommandRunner = 'bazel' | 'ibazel';

interface BazelRunOptions {
log: ToolingLog;
bazelArgs: string[];
offline?: boolean;
execaOpts?: execa.Options;
}

async function runBazelCommandWithRunner(runner: BazelCommandRunner, options: BazelRunOptions) {
const bazelProc = execa(
runner,
options.offline ? [...options.bazelArgs, '--config=offline'] : options.bazelArgs,
{
...options.execaOpts,
stdio: 'pipe',
preferLocal: true,
}
);

await Promise.all([
// Bazel outputs machine readable output into stdout and human readable output goes to stderr.
// Therefore we need to get both. In order to get errors we need to parse the actual text line
Rx.lastValueFrom(
Rx.merge(
observeLines(bazelProc.stdout!).pipe(
tap((line) => options.log.info(`${chalk.cyan(`[${runner}]`)} ${line}`))
),
observeLines(bazelProc.stderr!).pipe(
tap((line) => options.log.info(`${chalk.cyan(`[${runner}]`)} ${line}`))
)
).pipe(Rx.defaultIfEmpty(undefined))
),

// Wait for process and logs to finish, unsubscribing in the end
bazelProc.catch(() => {
options.log.error(
'HINT: If experiencing problems with node_modules try `yarn kbn bootstrap --force-install` or as last resort `yarn kbn reset && yarn kbn bootstrap`'
);

throw new Error(`The bazel command that was running failed to complete.`);
}),
]);
}

export async function runBazel(options: BazelRunOptions) {
await runBazelCommandWithRunner('bazel', options);
}

export async function runIBazel(options: BazelRunOptions) {
await runBazelCommandWithRunner('ibazel', {
...options,
execaOpts: {
...options.execaOpts,
env: {
...options.execaOpts?.env,
IBAZEL_USE_LEGACY_WATCHER: '0',
},
},
});
}
@@ -6,5 +6,4 @@
* Side Public License, v 1.
*/

export { buildBazelProductionProjects } from './build_bazel_production_projects';
export { buildNonBazelProductionProjects } from './build_non_bazel_production_projects';
export * from './bazel_runner';
packages/kbn-bazel-runner/tsconfig.json (new file, 17 lines)

@@ -0,0 +1,17 @@
{
"extends": "../../tsconfig.bazel.json",
"compilerOptions": {
"declaration": true,
"emitDeclarationOnly": true,
"outDir": "target_types",
"rootDir": "src",
"stripInternal": false,
"types": [
"jest",
"node"
]
},
"include": [
"src/**/*"
]
}
@@ -36,7 +36,7 @@ filegroup(
NPM_MODULE_EXTRA_FILES = [
"package.json",
"README.md",
":certs"
":certs",
]

RUNTIME_DEPS = [
packages/kbn-pm/dist/index.js (vendored, 27500 lines changed)
File diff suppressed because it is too large.
@@ -6,17 +6,17 @@
* Side Public License, v 1.
*/

import { resolve, sep } from 'path';
import Path from 'path';
import { CiStatsReporter } from '@kbn/ci-stats-reporter';
import { runBazel } from '@kbn/bazel-runner';

import { log } from '../utils/log';
import { spawnStreaming } from '../utils/child_process';
import { linkProjectExecutables } from '../utils/link_project_executables';
import { getNonBazelProjectsOnly, topologicallyBatchProjects } from '../utils/projects';
import { ICommand } from '.';
import { readYarnLock } from '../utils/yarn_lock';
import { validateDependencies } from '../utils/validate_dependencies';
import { installBazelTools, removeYarnIntegrityFileIfExists, runBazel } from '../utils/bazel';
import { installBazelTools, removeYarnIntegrityFileIfExists } from '../utils/bazel';
import { setupRemoteCache } from '../utils/bazel/setup_remote_cache';

export const BootstrapCommand: ICommand = {

@@ -29,10 +29,8 @@ export const BootstrapCommand: ICommand = {
},

async run(projects, projectGraph, { options, kbn, rootPath }) {
const nonBazelProjectsOnly = await getNonBazelProjectsOnly(projects);
const batchedNonBazelProjects = topologicallyBatchProjects(nonBazelProjectsOnly, projectGraph);
const kibanaProjectPath = projects.get('kibana')?.path || '';
const runOffline = options?.offline === true;
const offline = options?.offline === true;
const reporter = CiStatsReporter.fromEnv(log);

const timings: Array<{ id: string; ms: number }> = [];

@@ -69,14 +67,22 @@ export const BootstrapCommand: ICommand = {

if (forceInstall) {
await time('force install dependencies', async () => {
await removeYarnIntegrityFileIfExists(resolve(kibanaProjectPath, 'node_modules'));
await runBazel(['clean', '--expunge']);
await runBazel(['run', '@nodejs//:yarn'], runOffline, {
env: {
SASS_BINARY_SITE:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-sass',
RE2_DOWNLOAD_MIRROR:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2',
await removeYarnIntegrityFileIfExists(Path.resolve(kibanaProjectPath, 'node_modules'));
await runBazel({
bazelArgs: ['clean', '--expunge'],
log,
});
await runBazel({
bazelArgs: ['run', '@nodejs//:yarn'],
offline,
log,
execaOpts: {
env: {
SASS_BINARY_SITE:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-sass',
RE2_DOWNLOAD_MIRROR:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2',
},
},
});
});

@@ -84,35 +90,13 @@

// build packages
await time('build packages', async () => {
await runBazel(['build', '//packages:build', '--show_result=1'], runOffline);
await runBazel({
bazelArgs: ['build', '//packages:build', '--show_result=1'],
log,
offline,
});
});

// Install monorepo npm dependencies outside of the Bazel managed ones
for (const batch of batchedNonBazelProjects) {
for (const project of batch) {
const isExternalPlugin = project.path.includes(`${kibanaProjectPath}${sep}plugins`);

if (!project.hasDependencies()) {
continue;
}

if (isExternalPlugin) {
await project.installDependencies();
continue;
}

if (
!project.isSinglePackageJsonProject &&
!project.isEveryDependencyLocal() &&
!isExternalPlugin
) {
throw new Error(
`[${project.name}] is not eligible to hold non local dependencies. Move the non local dependencies into the top level package.json.`
);
}
}
}

const yarnLock = await time('read yarn.lock', async () => await readYarnLock(kbn));

if (options.validate) {
@@ -6,8 +6,9 @@
* Side Public License, v 1.
*/

import { runBazel } from '../utils/bazel';
import { runBazel } from '@kbn/bazel-runner';
import { ICommand } from '.';
import { log } from '../utils/log';

export const BuildCommand: ICommand = {
description: 'Runs a build in the Bazel built packages',

@@ -19,9 +20,11 @@ export const BuildCommand: ICommand = {
},

async run(projects, projectGraph, { options }) {
const runOffline = options?.offline === true;

// Call bazel with the target to build all available packages
await runBazel(['build', '//packages:build', '--show_result=1'], runOffline);
await runBazel({
bazelArgs: ['build', '//packages:build', '--show_result=1'],
log,
offline: options?.offline === true,
});
},
};
@@ -10,8 +10,9 @@ import dedent from 'dedent';
import del from 'del';
import ora from 'ora';
import { join, relative } from 'path';
import { runBazel } from '@kbn/bazel-runner';

import { isBazelBinAvailable, runBazel } from '../utils/bazel';
import { isBazelBinAvailable } from '../utils/bazel';
import { isDirectory } from '../utils/fs';
import { log } from '../utils/log';
import { ICommand } from '.';

@@ -54,7 +55,10 @@ export const CleanCommand: ICommand = {

// Runs Bazel soft clean
if (await isBazelBinAvailable(kbn.getAbsolute())) {
await runBazel(['clean']);
await runBazel({
bazelArgs: ['clean'],
log,
});
log.success('Soft cleaned bazel');
}
@@ -11,11 +11,12 @@ import del from 'del';
import ora from 'ora';
import { join, relative } from 'path';

import { runBazel } from '@kbn/bazel-runner';

import {
getBazelDiskCacheFolder,
getBazelRepositoryCacheFolder,
isBazelBinAvailable,
runBazel,
} from '../utils/bazel';
import { isDirectory } from '../utils/fs';
import { log } from '../utils/log';

@@ -66,7 +67,10 @@ export const ResetCommand: ICommand = {
// Runs Bazel hard clean and deletes Bazel Cache Folders
if (await isBazelBinAvailable(kbn.getAbsolute())) {
// Hard cleaning bazel
await runBazel(['clean', '--expunge']);
await runBazel({
bazelArgs: ['clean', '--expunge'],
log,
});
log.success('Hard cleaned bazel');

// Deletes Bazel Cache Folders
@@ -6,8 +6,9 @@
* Side Public License, v 1.
*/

import { runIBazel } from '../utils/bazel';
import { runIBazel } from '@kbn/bazel-runner';
import { ICommand } from '.';
import { log } from '../utils/log';

export const WatchCommand: ICommand = {
description: 'Runs a build in the Bazel built packages and keeps watching them for changes',

@@ -25,9 +26,10 @@ export const WatchCommand: ICommand = {
//
// Note: --run_output=false arg will disable the iBazel notifications about gazelle and buildozer when running it
// Can also be solved by adding a root `.bazel_fix_commands.json` but its not needed at the moment
await runIBazel(
['--run_output=false', 'build', '//packages:build', '--show_result=1'],
runOffline
);
await runIBazel({
bazelArgs: ['--run_output=false', 'build', '//packages:build', '--show_result=1'],
log,
offline: runOffline,
});
},
};
@@ -7,8 +7,6 @@
*/

export { run } from './cli';
export { buildBazelProductionProjects, buildNonBazelProductionProjects } from './production';
export { getProjects } from './utils/projects';
export { Project } from './utils/project';
export { transformDependencies } from './utils/package_json';
export { getProjectPaths } from './config';
@@ -1,7 +0,0 @@
{
"name": "quux",
"version": "1.0.0",
"dependencies": {
"@kbn/foo": "link:../../packages/foo"
}
}
@@ -1,7 +0,0 @@
{
"name": "quux",
"version": "1.0.0",
"dependencies": {
"bar": "link:../foo/packages/bar"
}
}
@@ -1,105 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import copy from 'cpy';
import globby from 'globby';
import { basename, join, relative, resolve } from 'path';

import { getProductionProjects } from './build_non_bazel_production_projects';
import { runBazel } from '../utils/bazel';
import { chmod, isFile, isDirectory } from '../utils/fs';
import { log } from '../utils/log';
import {
createProductionPackageJson,
readPackageJson,
writePackageJson,
} from '../utils/package_json';
import { getBazelProjectsOnly } from '../utils/projects';
import { Project } from '..';

export async function buildBazelProductionProjects({
kibanaRoot,
buildRoot,
onlyOSS,
}: {
kibanaRoot: string;
buildRoot: string;
onlyOSS?: boolean;
}) {
const projects = await getBazelProjectsOnly(await getProductionProjects(kibanaRoot, onlyOSS));

const projectNames = [...projects.values()].map((project) => project.name);
log.info(`Preparing Bazel projects production build for [${projectNames.join(', ')}]`);

await runBazel(['build', '//packages:build']);
log.info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete`);

for (const project of projects.values()) {
await copyToBuild(project, kibanaRoot, buildRoot);
await applyCorrectPermissions(project, kibanaRoot, buildRoot);
}
}

/**
* Copy all the project's files from its Bazel dist directory into the
* project build folder.
*
* When copying all the files into the build, we exclude `node_modules` because
* we want the Kibana build to be responsible for actually installing all
* dependencies. The primary reason for allowing the Kibana build process to
* manage dependencies is that it will "dedupe" them, so we don't include
* unnecessary copies of dependencies. We also exclude every related Bazel build
* files in order to get the most cleaner package module we can in the final distributable.
*/
async function copyToBuild(project: Project, kibanaRoot: string, buildRoot: string) {
// We want the package to have the same relative location within the build
const relativeProjectPath = relative(kibanaRoot, project.path);
const buildProjectPath = resolve(buildRoot, relativeProjectPath);

await copy(['**/*'], buildProjectPath, {
cwd: join(kibanaRoot, 'bazel-bin', 'packages', basename(buildProjectPath), 'npm_module'),
dot: true,
onlyFiles: true,
parents: true,
} as copy.Options);

// If a project is using an intermediate build directory, we special-case our
// handling of `package.json`, as the project build process might have copied
// (a potentially modified) `package.json` into the intermediate build
// directory already. If so, we want to use that `package.json` as the basis
// for creating the production-ready `package.json`. If it's not present in
// the intermediate build, we fall back to using the project's already defined
// `package.json`.
const packageJson = (await isFile(join(buildProjectPath, 'package.json')))
? await readPackageJson(buildProjectPath)
: project.json;

const preparedPackageJson = createProductionPackageJson(packageJson);
await writePackageJson(buildProjectPath, preparedPackageJson);
}

async function applyCorrectPermissions(project: Project, kibanaRoot: string, buildRoot: string) {
const relativeProjectPath = relative(kibanaRoot, project.path);
const buildProjectPath = resolve(buildRoot, relativeProjectPath);
const allPluginPaths = await globby([`**/*`], {
onlyFiles: false,
cwd: buildProjectPath,
dot: true,
});

for (const pluginPath of allPluginPaths) {
const resolvedPluginPath = resolve(buildProjectPath, pluginPath);
if (await isFile(resolvedPluginPath)) {
await chmod(resolvedPluginPath, 0o644);
}

if (await isDirectory(resolvedPluginPath)) {
await chmod(resolvedPluginPath, 0o755);
}
}
}
@@ -1,139 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import copy from 'cpy';
import del from 'del';
import { join, relative, resolve } from 'path';

import { getProjectPaths } from '../config';
import { isDirectory, isFile } from '../utils/fs';
import { log } from '../utils/log';
import {
createProductionPackageJson,
readPackageJson,
writePackageJson,
} from '../utils/package_json';
import {
buildProjectGraph,
getNonBazelProjectsOnly,
getProjects,
includeTransitiveProjects,
topologicallyBatchProjects,
} from '../utils/projects';
import { Project } from '..';

export async function buildNonBazelProductionProjects({
kibanaRoot,
buildRoot,
onlyOSS,
}: {
kibanaRoot: string;
buildRoot: string;
onlyOSS?: boolean;
}) {
const projects = await getNonBazelProjectsOnly(await getProductionProjects(kibanaRoot, onlyOSS));
const projectGraph = buildProjectGraph(projects);
const batchedProjects = topologicallyBatchProjects(projects, projectGraph);

const projectNames = [...projects.values()].map((project) => project.name);
log.info(`Preparing non Bazel production build for [${projectNames.join(', ')}]`);

for (const batch of batchedProjects) {
for (const project of batch) {
await deleteTarget(project);
await buildProject(project);
await copyToBuild(project, kibanaRoot, buildRoot);
}
}
}

/**
* Returns the subset of projects that should be built into the production
* bundle. As we copy these into Kibana's `node_modules` during the build step,
* and let Kibana's build process be responsible for installing dependencies,
* we only include Kibana's transitive _production_ dependencies. If onlyOSS
* is supplied, we omit projects with build.oss in their package.json set to false.
*/
export async function getProductionProjects(rootPath: string, onlyOSS?: boolean) {
const projectPaths = getProjectPaths({ rootPath });
const projects = await getProjects(rootPath, projectPaths);
const projectsSubset = [projects.get('kibana')!];

if (projects.has('x-pack')) {
projectsSubset.push(projects.get('x-pack')!);
}

const productionProjects = includeTransitiveProjects(projectsSubset, projects, {
onlyProductionDependencies: true,
});

// We remove Kibana, as we're already building Kibana
productionProjects.delete('kibana');

if (onlyOSS) {
productionProjects.forEach((project) => {
if (project.getBuildConfig().oss === false) {
productionProjects.delete(project.json.name);
}
});
}

return productionProjects;
}

async function deleteTarget(project: Project) {
const targetDir = project.targetLocation;

if (await isDirectory(targetDir)) {
await del(targetDir, { force: true });
}
}

export async function buildProject(project: Project) {
if (project.hasScript('build')) {
await project.runScript('build');
}
}

/**
* Copy all the project's files from its "intermediate build directory" and
* into the build. The intermediate directory can either be the root of the
* project or some other location defined in the project's `package.json`.
*
* When copying all the files into the build, we exclude `node_modules` because
* we want the Kibana build to be responsible for actually installing all
* dependencies. The primary reason for allowing the Kibana build process to
* manage dependencies is that it will "dedupe" them, so we don't include
* unnecessary copies of dependencies.
*/
async function copyToBuild(project: Project, kibanaRoot: string, buildRoot: string) {
// We want the package to have the same relative location within the build
const relativeProjectPath = relative(kibanaRoot, project.path);
const buildProjectPath = resolve(buildRoot, relativeProjectPath);

await copy(['**/*', '!node_modules/**'], buildProjectPath, {
cwd: project.getIntermediateBuildDirectory(),
dot: true,
onlyFiles: true,
parents: true,
} as copy.Options);

// If a project is using an intermediate build directory, we special-case our
// handling of `package.json`, as the project build process might have copied
// (a potentially modified) `package.json` into the intermediate build
// directory already. If so, we want to use that `package.json` as the basis
// for creating the production-ready `package.json`. If it's not present in
// the intermediate build, we fall back to using the project's already defined
// `package.json`.
const packageJson = (await isFile(join(buildProjectPath, 'package.json')))
? await readPackageJson(buildProjectPath)
: project.json;

const preparedPackageJson = createProductionPackageJson(packageJson);
await writePackageJson(buildProjectPath, preparedPackageJson);
}
@@ -8,5 +8,4 @@

export * from './get_cache_folders';
export * from './install_tools';
export * from './run';
export * from './yarn_integrity';
@@ -1,79 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import chalk from 'chalk';
import execa from 'execa';
import * as Rx from 'rxjs';
import { tap } from 'rxjs/operators';
import { observeLines } from '@kbn/stdio-dev-helpers';
import { spawn } from '../child_process';
import { log } from '../log';
import { CliError } from '../errors';

type BazelCommandRunner = 'bazel' | 'ibazel';

async function runBazelCommandWithRunner(
bazelCommandRunner: BazelCommandRunner,
bazelArgs: string[],
offline: boolean = false,
runOpts: execa.Options = {}
) {
// Force logs to pipe in order to control the output of them
const bazelOpts: execa.Options = {
...runOpts,
stdio: 'pipe',
};

if (offline) {
bazelArgs = [...bazelArgs, '--config=offline'];
}

const bazelProc = spawn(bazelCommandRunner, bazelArgs, bazelOpts);

const bazelLogs$ = new Rx.Subject<string>();

// Bazel outputs machine readable output into stdout and human readable output goes to stderr.
// Therefore we need to get both. In order to get errors we need to parse the actual text line
const bazelLogSubscription = Rx.merge(
observeLines(bazelProc.stdout!).pipe(
tap((line) => log.info(`${chalk.cyan(`[${bazelCommandRunner}]`)} ${line}`))
),
observeLines(bazelProc.stderr!).pipe(
tap((line) => log.info(`${chalk.cyan(`[${bazelCommandRunner}]`)} ${line}`))
)
).subscribe(bazelLogs$);

// Wait for process and logs to finish, unsubscribing in the end
try {
await bazelProc;
} catch {
log.error(
'HINT: If experiencing problems with node_modules try `yarn kbn bootstrap --force-install` or as last resort `yarn kbn reset && yarn kbn bootstrap`'
);
throw new CliError(`The bazel command that was running failed to complete.`);
}
await bazelLogs$.toPromise();
await bazelLogSubscription.unsubscribe();
}

export async function runBazel(
bazelArgs: string[],
offline: boolean = false,
runOpts: execa.Options = {}
) {
await runBazelCommandWithRunner('bazel', bazelArgs, offline, runOpts);
}

export async function runIBazel(
bazelArgs: string[],
offline: boolean = false,
runOpts: execa.Options = {}
) {
const extendedEnv = { IBAZEL_USE_LEGACY_WATCHER: '0', ...runOpts?.env };
await runBazelCommandWithRunner('ibazel', bazelArgs, offline, { ...runOpts, env: extendedEnv });
}
@@ -27,45 +27,4 @@ export function writePackageJson(path: string, json: IPackageJson) {
return writePkg(path, json);
}

export const createProductionPackageJson = (pkgJson: IPackageJson) => ({
...pkgJson,
dependencies: transformDependencies(pkgJson.dependencies),
});

export const isLinkDependency = (depVersion: string) => depVersion.startsWith('link:');

export const isBazelPackageDependency = (depVersion: string) =>
depVersion.startsWith('link:bazel-bin/');

/**
* Replaces `link:` dependencies with `file:` dependencies. When installing
* dependencies, these `file:` dependencies will be copied into `node_modules`
* instead of being symlinked.
*
* This will allow us to copy packages into the build and run `yarn`, which
* will then _copy_ the `file:` dependencies into `node_modules` instead of
* symlinking like we do in development.
*
* Additionally it also taken care of replacing `link:bazel-bin/` with
* `file:` so we can also support the copy of the Bazel packages dist already into
* build/packages to be copied into the node_modules
*/
export function transformDependencies(dependencies: IPackageDependencies = {}) {
const newDeps: IPackageDependencies = {};
for (const name of Object.keys(dependencies)) {
const depVersion = dependencies[name];

if (!isLinkDependency(depVersion)) {
newDeps[name] = depVersion;
continue;
}

if (isBazelPackageDependency(depVersion)) {
newDeps[name] = depVersion.replace('link:bazel-bin/', 'file:');
continue;
}

newDeps[name] = depVersion.replace('link:', 'file:');
}
return newDeps;
}
@@ -145,28 +145,3 @@ describe('#getExecutables()', () => {
expect(() => createProjectWith({ bin: 1 }).getExecutables()).toThrowErrorMatchingSnapshot();
});
});

describe('#getIntermediateBuildDirectory', () => {
test('is the same as the project path when not specified', () => {
const project = createProjectWith({}, 'packages/my-project');
const path = project.getIntermediateBuildDirectory();

expect(path).toBe(project.path);
});

test('appends the `intermediateBuildDirectory` to project path when specified', () => {
const project = createProjectWith(
{
kibana: {
build: {
intermediateBuildDirectory: 'quux',
},
},
},
'packages/my-project'
);
const path = project.getIntermediateBuildDirectory();

expect(path).toBe(join(project.path, 'quux'));
});
});
@@ -19,11 +19,10 @@ import {
isLinkDependency,
readPackageJson,
} from './package_json';
import { installInDir, runScriptInPackage, runScriptInPackageStreaming } from './scripts';
import { runScriptInPackage, runScriptInPackageStreaming } from './scripts';

interface BuildConfig {
skip?: boolean;
intermediateBuildDirectory?: string;
oss?: boolean;
}

@@ -135,15 +134,6 @@ export class Project {
return (this.json.kibana && this.json.kibana.build) || {};
}

/**
* Returns the directory that should be copied into the Kibana build artifact.
* This config can be specified to only include the project's build artifacts
* instead of everything located in the project directory.
*/
public getIntermediateBuildDirectory() {
return Path.resolve(this.path, this.getBuildConfig().intermediateBuildDirectory || '.');
}

public getCleanConfig(): CleanConfig {
return (this.json.kibana && this.json.kibana.clean) || {};
}

@@ -215,14 +205,6 @@
public isEveryDependencyLocal() {
return Object.values(this.allDependencies).every((dep) => isLinkDependency(dep));
}

public async installDependencies(options: { extraArgs?: string[] } = {}) {
log.info(`[${this.name}] running yarn`);

log.write('');
await installInDir(this.path, options?.extraArgs);
log.write('');
}
}

// We normalize all path separators to `/` in generated files
@@ -11,25 +11,6 @@ import { Project } from './project';

const YARN_EXEC = process.env.npm_execpath || 'yarn';

/**
* Install all dependencies in the given directory
*/
export async function installInDir(directory: string, extraArgs: string[] = []) {
const options = ['install', '--non-interactive', ...extraArgs];

// We pass the mutex flag to ensure only one instance of yarn runs at any
// given time (e.g. to avoid conflicts).
await spawn(YARN_EXEC, options, {
cwd: directory,
env: {
SASS_BINARY_SITE:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-sass',
RE2_DOWNLOAD_MIRROR:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2',
},
});
}

/**
* Run script in the given directory
*/
@@ -74,7 +74,7 @@ export async function buildDistributables(log: ToolingLog, options: BuildOptions
await run(Tasks.CreateEmptyDirsAndFiles);
await run(Tasks.CreateReadme);
await run(Tasks.BuildBazelPackages);
await run(Tasks.BuildPackages);
await run(Tasks.BuildXpack);
await run(Tasks.BuildKibanaPlatformPlugins);
await run(Tasks.TranspileBabel);
await run(Tasks.CreatePackageJson);
@@ -7,18 +7,13 @@
*/

import Fs from 'fs';
import { basename, join } from 'path';
import { promisify } from 'util';
import Fsp from 'fs/promises';
import Path from 'path';

import { asyncMap, asyncForEach } from '@kbn/std';

// @ts-ignore
import { assertAbsolute, mkdirp } from './fs';

const statAsync = promisify(Fs.stat);
const mkdirAsync = promisify(Fs.mkdir);
const utimesAsync = promisify(Fs.utimes);
const copyFileAsync = promisify(Fs.copyFile);
const readdirAsync = promisify(Fs.readdir);

interface Options {
/**
* directory to copy from

@@ -32,6 +27,10 @@ interface Options {
* function that is called with each Record
*/
filter?: (record: Record) => boolean;
/**
* define permissions for reach item copied
*/
permissions?: (record: Record) => number | undefined;
/**
* Date to use for atime/mtime
*/

@@ -52,48 +51,50 @@ class Record {
* function or modifying mtime/atime for each file.
*/
export async function scanCopy(options: Options) {
const { source, destination, filter, time } = options;
const { source, destination, filter, time, permissions } = options;

assertAbsolute(source);
assertAbsolute(destination);

// get filtered Records for files/directories within a directory
const getChildRecords = async (parent: Record) => {
const names = await readdirAsync(parent.absolute);
const records = await Promise.all(
names.map(async (name) => {
const absolute = join(parent.absolute, name);
const stat = await statAsync(absolute);
return new Record(stat.isDirectory(), name, absolute, join(parent.absoluteDest, name));
})
);

return records.filter((record) => (filter ? filter(record) : true));
};

// create or copy each child of a directory
const copyChildren = async (record: Record) => {
const children = await getChildRecords(record);
await Promise.all(children.map(async (child) => await copy(child)));
};
const copyChildren = async (parent: Record) => {
const names = await Fsp.readdir(parent.absolute);

// create or copy a record and recurse into directories
const copy = async (record: Record) => {
if (record.isDirectory) {
await mkdirAsync(record.absoluteDest);
} else {
await copyFileAsync(record.absolute, record.absoluteDest, Fs.constants.COPYFILE_EXCL);
}
const records = await asyncMap(names, async (name) => {
const absolute = Path.join(parent.absolute, name);
const stat = await Fsp.stat(absolute);
return new Record(stat.isDirectory(), name, absolute, Path.join(parent.absoluteDest, name));
});

if (time) {
await utimesAsync(record.absoluteDest, time, time);
}
await asyncForEach(records, async (rec) => {
if (filter && !filter(rec)) {
return;
}

if (record.isDirectory) {
await copyChildren(record);
}
if (rec.isDirectory) {
await Fsp.mkdir(rec.absoluteDest, {
mode: permissions ? permissions(rec) : undefined,
});
} else {
await Fsp.copyFile(rec.absolute, rec.absoluteDest, Fs.constants.COPYFILE_EXCL);
if (permissions) {
const perm = permissions(rec);
if (perm !== undefined) {
await Fsp.chmod(rec.absoluteDest, perm);
}
}
}

if (time) {
await Fsp.utimes(rec.absoluteDest, time, time);
}

if (rec.isDirectory) {
await copyChildren(rec);
}
});
};

await mkdirp(destination);
await copyChildren(new Record(true, basename(source), source, destination));
await copyChildren(new Record(true, Path.basename(source), source, destination));
}
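The new permissions callback is what the BuildBazelPackages task (further down) uses to set file modes while copying Bazel npm_module output into the build, replacing the globby + chmod pass removed from kbn-pm. A minimal hedged sketch; the absolute paths are placeholders:

await scanCopy({
  source: '/repo/bazel-bin/packages/kbn-bazel-runner/npm_module',   // placeholder path
  destination: '/repo/build/kibana/packages/kbn-bazel-runner',      // placeholder path
  // directories get 0755 and regular files 0644, applied as each entry is copied
  permissions: (rec) => (rec.isDirectory ? 0o755 : 0o644),
});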
@@ -6,74 +6,56 @@
* Side Public License, v 1.
*/

import { buildBazelProductionProjects, buildNonBazelProductionProjects } from '@kbn/pm';
import Path from 'path';

import { mkdirp, Task } from '../lib';
import { discoverBazelPackages } from '@kbn/bazel-packages';
import { runBazel } from '@kbn/bazel-runner';

/**
* High-level overview of how we enable shared packages in production:
*
* tl;dr We copy the packages directly into Kibana's `node_modules` folder,
* which means they will be available when `require(...)`d.
*
* During development we rely on `@kbn/pm` to find all the packages
* in the Kibana repo and run Yarn in all the right places to create symlinks
* between these packages. This development setup is described in-depth in the
* readme in `@kbn/pm`.
*
* However, for production we can't use `@kbn/pm` as part of the
* installation as we don't have an install "tool/step" that can kick it off.
* We also can't include symlinks in the archives for the different platform, so
* we can't run `@kbn/pm` in the same way we do for development and
* just package the result. That means we have two options: either we prepare
* everything in the built package or we perform the necessary actions when
* Kibana is starting up in production. We decided on the former: all the Kibana
* packages are prepared as part of the build process.
*
* (All of this is a bit different for Kibana plugins as they _do_ have an
* install step — the plugin CLI tool. However, Kibana plugins are not allowed
* to have separate packages yet.)
*
* How Kibana packages are prepared:
*
* 1. Run the build for each package
* 2. Copy all the packages into the `build/kibana` folder
* 3. Replace `link:` dependencies with `file:` dependencies in both Kibana's
* `package.json` and in all the dependencies. Yarn will then copy the
* sources of these dependencies into `node_modules` instead of setting up
* symlinks.
*
* In the end after the `install dependencies` build step all Kibana packages
* will be located within the top-level `node_modules` folder, which means
* normal module resolution will apply and you can `require(...)` any of these
* packages when running Kibana in production.
*
* ## Known limitations
*
* - This process _only_ include packages that used by Kibana or any of its
* transitive packages, as it depends on only running `yarn` at the top level.
* That means a Kibana plugin can only depend on Kibana packages that are used
* in some way by Kibana itself in production, as it won't otherwise be
* included in the production build.
*/
import { Task, scanCopy, write, exec } from '../lib';

export const BuildBazelPackages: Task = {
description: 'Building distributable versions of Bazel packages',
async run(config, log, build) {
await buildBazelProductionProjects({
kibanaRoot: config.resolveFromRepo(),
buildRoot: build.resolvePath(),
const packages = (await discoverBazelPackages()).filter((pkg) => !pkg.isDevOnly());

log.info(`Preparing Bazel projects production build non-devOnly packages`);
await runBazel({
log,
bazelArgs: ['build', '//packages:build'],
});

for (const pkg of packages) {
log.info(`Copying build of`, pkg.pkg.name, 'into build');

const pkgDirInBuild = build.resolvePath(pkg.normalizedRepoRelativeDir);

// copy the built npm_module target dir into the build, package.json is updated to copy
// the sources we actually end up using into the node_modules directory when we run
// yarn install
await scanCopy({
source: config.resolveFromRepo('bazel-bin', pkg.normalizedRepoRelativeDir, 'npm_module'),
destination: pkgDirInBuild,
permissions: (rec) => (rec.isDirectory ? 0o755 : 0o644),
});

await write(Path.resolve(pkgDirInBuild, 'package.json'), JSON.stringify(pkg.pkg, null, 2));
}
},
};

export const BuildPackages: Task = {
description: 'Building distributable versions of non Bazel packages',
export const BuildXpack: Task = {
description: 'Building distributable versions of x-pack',
async run(config, log, build) {
await mkdirp(config.resolveFromRepo('target'));
await buildNonBazelProductionProjects({
kibanaRoot: config.resolveFromRepo(),
buildRoot: build.resolvePath(),
log.info('running x-pack build task');
await exec(log, 'yarn', ['build'], {
level: 'debug',
cwd: config.resolveFromRepo('x-pack'),
});

log.info('copying built x-pack into build dir');
await scanCopy({
source: config.resolveFromRepo('x-pack/build/plugin/kibana/x-pack'),
destination: build.resolvePath('x-pack'),
});
},
};
@@ -6,6 +6,9 @@
* Side Public License, v 1.
*/

import { getAllRepoRelativeBazelPackageDirs } from '@kbn/bazel-packages';
import normalizePath from 'normalize-path';

import { copyAll, Task } from '../lib';

export const CopySource: Task = {

@@ -45,6 +48,8 @@ export const CopySource: Task = {
'tsconfig*.json',
'.i18nrc.json',
'kibana.d.ts',
// explicitly ignore all package roots, even if they're not selected by previous patterns
...getAllRepoRelativeBazelPackageDirs().map((dir) => `!${normalizePath(dir)}/**`),
],
});
},
@@ -6,18 +6,20 @@
* Side Public License, v 1.
*/

import { Project } from '@kbn/pm';
import { Task, exec } from '../lib';

import { Task } from '../lib';
const YARN_EXEC = process.env.npm_execpath || 'yarn';

export const InstallDependencies: Task = {
description: 'Installing node_modules, including production builds of packages',

async run(config, log, build) {
const project = await Project.fromPath(build.resolvePath());

await project.installDependencies({
extraArgs: [
await exec(
log,
YARN_EXEC,
[
'install',
'--non-interactive',
'--production',
'--ignore-optional',
'--pure-lockfile',

@@ -27,6 +29,15 @@ export const InstallDependencies: Task = {
// This is commonly seen in shared folders on virtual machines
'--no-bin-links',
],
});
{
cwd: build.resolvePath(),
env: {
SASS_BINARY_SITE:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-sass',
RE2_DOWNLOAD_MIRROR:
'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2',
},
}
);
},
};
@@ -6,11 +6,38 @@
* Side Public License, v 1.
*/

// @ts-ignore
import { transformDependencies } from '@kbn/pm';
import { findUsedDependencies } from './find_used_dependencies';
import { read, write, Task } from '../../lib';

/**
* Replaces `link:` dependencies with `file:` dependencies. When installing
* dependencies, these `file:` dependencies will be copied into `node_modules`
* instead of being symlinked.
*
* This will allow us to copy packages into the build and run `yarn`, which
* will then _copy_ the `file:` dependencies into `node_modules` instead of
* symlinking like we do in development.
*
* Additionally it also taken care of replacing `link:bazel-bin/` with
* `file:` so we can also support the copy of the Bazel packages dist already into
* build/packages to be copied into the node_modules
*/
export function transformDependencies(dependencies: Record<string, string>) {
return Object.fromEntries(
Object.entries(dependencies).map(([name, version]) => {
if (!version.startsWith('link:')) {
return [name, version];
}

if (version.startsWith('link:bazel-bin/')) {
return [name, version.replace('link:bazel-bin/', 'file:')];
}

return [name, version.replace('link:', 'file:')];
})
);
}

export const CreatePackageJson: Task = {
description: 'Creating build-ready version of package.json',
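The inlined transformDependencies keeps the behaviour of the kbn-pm helper it replaces. A quick worked example of the link: to file: rewrite (the entries and versions here are only illustrative):

const out = transformDependencies({
  '@kbn/bazel-runner': 'link:bazel-bin/packages/kbn-bazel-runner',
  '@kbn/some-pkg': 'link:packages/kbn-some-pkg',   // hypothetical package name
  execa: '^1.0.0',
});
// out === {
//   '@kbn/bazel-runner': 'file:packages/kbn-bazel-runner',
//   '@kbn/some-pkg': 'file:packages/kbn-some-pkg',
//   execa: '^1.0.0',
// }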
@@ -13,7 +13,6 @@
},
"kibana": {
"build": {
"intermediateBuildDirectory": "build/plugin/kibana/x-pack",
"oss": false
},
"clean": {
@@ -2960,6 +2960,10 @@
version "0.0.0"
uid ""

"@kbn/bazel-runner@link:bazel-bin/packages/kbn-bazel-runner":
version "0.0.0"
uid ""

"@kbn/ci-stats-client@link:bazel-bin/packages/kbn-ci-stats-client":
version "0.0.0"
uid ""

@@ -6036,6 +6040,10 @@
version "0.0.0"
uid ""

"@types/kbn__bazel-runner@link:bazel-bin/packages/kbn-bazel-runner/npm_module_types":
version "0.0.0"
uid ""

"@types/kbn__ci-stats-client@link:bazel-bin/packages/kbn-ci-stats-client/npm_module_types":
version "0.0.0"
uid ""