[7.x] build immutable bundles for new platform plugins (#53976) (#57528)

* build immutable bundles for new platform plugins (#53976)

* build immutable bundles for new platform plugins

* only inspect workers if configured to do so

* [navigation] use an index.scss file

* add yarn.lock symlink

* set pluginScanDirs in test so fixtures stay consistent

* cleanup helpers a little

* fix type error

* support KBN_OPTIMIZER_MAX_WORKERS for limiting workers via env

* test support for KBN_OPTIMIZER_MAX_WORKERS

* expand the available memory for workers when only running one or two

* add docs about KBN_OPTIMIZER_MAX_WORKERS environment variable

* fix README link

* update kbn/pm dist

* implement bundle caching/reuse

* update kbn/pm dist

* don't check for cache if --no-cache is passed

* update renovate config

* standardize on index.scss, move console styles over

* add support for --no-cache to cli

* include worker config vars in optimizer version

* ignore concatenated modules

* update integration test

* add safari to browserslist to avoid user-agent warnings in dev

* update docs, clean up optimizer message/misc naming

* always handle initialized messages, don't ignore states that are attached to specific events

* reword caching docs, add environment var to disable caching

* tweak logging and don't use optimizer.useBundleCache as that's disabled in dev

* handle change notifications

* batch changes for 1 second

* rename CompilerState type to CompilerMsg

* getChanges() no longer needs to assign changes to dirs

* remove unused deps

* split up run_worker.ts and share cacheKey generation logic

* add a couple docs

* update tests and remove unused imports

* specify files when creating bundle cache key

* remove one more unused import

* match existing dev cli output more closely

* update kbn/pm dist

* set KBN_NP_PLUGINS_BUILT to avoid warning in CI

* avoid extending global window type

* add note to keep pluginScanDirs in sync

* pass browserslistEnv in workerConfig so it is used for cache key

* load commons.bundle.js in parallel too

* emit initialized+success states if all bundles are cached

* load bootstraps as quickly as possible

* skip flaky suite

* bump

* update jest snapshots

* remove hashing from cache key generation

* remove unnecessary non-null assertion

* improve docs and break up Optimizer#run()

* remove unused import

* refactor kbn/optimizer to break up observable logic, implement more helpful cache invalidation logic with logging

* fix tests

* add initializing phase

* avoid rxjs observable constructor

* remove unnecessary rxjs helper, add tests for bundle cache

* update consumers of optimizer

* update readme with new call style

* replace "new platform" with "kibana platform"

* fix a couple more renames

* add support for several plain-text file formats

* fix naming of OptimizerMsg => OptimizerUpdate, use "store" naming too

* one more OptimizerMsg update

* ensure bundles are not cached when cache config is false

* test for initializing states and bundle cache events

* remove unnecessary timeout change

* Remove unnecessary helpers

* Add tests for BundleCache class

* Add tests for Bundle class

* test summarizeEvent$

* missing paths are no longer listed in mtimes map

* add tests for optimizer/cache_keys

* Add some extra docs

* Remove labeled loop

* add integration test for kbn-optimizer watcher components

* querystring-browser removed

* tweak logging a smidge, improve info and final message

* remove unused imports

* remove duplication of getModuleCount() method

* move type annotation that validates things

* clear up the build completion message

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>

* [kbn/optimizer] Fix windows support (#57592)

* [kbn/optimizer] simplify run_workers.ts a smidge

* use Path.resolve() to create windows paths from normalized ones

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
(cherry picked from commit 343bc9c303)

* remove istanbul/code coverage references

* fix webpack config syntax

* removal of querystring-browser was backported to 7.x

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>

@ -1,3 +1,9 @@
[production]
last 2 versions
> 5%
Safari 7 # for PhantomJS support: https://github.com/elastic/kibana/issues/27136
[dev]
last 1 chrome versions
last 1 firefox versions
last 1 safari versions


@ -134,12 +134,6 @@
"@kbn/test-subj-selector": "0.2.1",
"@kbn/ui-framework": "1.0.0",
"@kbn/ui-shared-deps": "1.0.0",
"@types/flot": "^0.0.31",
"@types/json-stable-stringify": "^1.0.32",
"@types/lodash.clonedeep": "^4.5.4",
"@types/node-forge": "^0.9.0",
"@types/react-grid-layout": "^0.16.7",
"@types/recompose": "^0.30.5",
"JSONStream": "1.3.5",
"abortcontroller-polyfill": "^1.3.0",
"angular": "^1.7.9",
@ -149,11 +143,12 @@
"angular-route": "^1.7.9",
"angular-sanitize": "^1.7.9",
"angular-sortable-view": "^0.0.17",
"autoprefixer": "9.6.1",
"autoprefixer": "^9.7.4",
"babel-loader": "^8.0.6",
"bluebird": "3.5.5",
"boom": "^7.2.0",
"brace": "0.11.1",
"browserslist-useragent": "^3.0.2",
"cache-loader": "^4.1.0",
"chalk": "^2.4.2",
"check-disk-space": "^2.1.0",
@ -162,7 +157,7 @@
"commander": "3.0.2",
"compare-versions": "3.5.1",
"core-js": "^3.2.1",
"css-loader": "2.1.1",
"css-loader": "^3.4.2",
"d3": "3.5.17",
"d3-cloud": "1.2.5",
"deep-freeze-strict": "^1.1.1",
@ -223,7 +218,7 @@
"opn": "^5.5.0",
"oppsy": "^2.0.0",
"pegjs": "0.10.0",
"postcss-loader": "3.0.0",
"postcss-loader": "^3.0.0",
"prop-types": "15.6.0",
"proxy-from-env": "1.0.0",
"pug": "^2.0.4",
@ -255,7 +250,7 @@
"script-loader": "0.7.2",
"seedrandom": "^3.0.5",
"semver": "^5.5.0",
"style-loader": "0.23.1",
"style-loader": "^1.1.3",
"symbol-observable": "^1.2.0",
"tar": "4.4.13",
"terser-webpack-plugin": "^2.3.4",
@ -275,7 +270,7 @@
"vega-schema-url-parser": "1.0.0",
"vega-tooltip": "^0.12.0",
"vision": "^5.3.3",
"webpack": "4.41.0",
"webpack": "^4.41.5",
"webpack-merge": "4.2.2",
"whatwg-fetch": "^3.0.0",
"yauzl": "2.10.0"
@ -295,6 +290,7 @@
"@kbn/eslint-plugin-eslint": "1.0.0",
"@kbn/expect": "1.0.0",
"@kbn/plugin-generator": "1.0.0",
"@kbn/optimizer": "1.0.0",
"@kbn/test": "1.0.0",
"@kbn/utility-types": "1.0.0",
"@microsoft/api-documenter": "7.7.2",
@ -307,6 +303,7 @@
"@types/babel__core": "^7.1.2",
"@types/bluebird": "^3.1.1",
"@types/boom": "^7.2.0",
"@types/browserslist-useragent": "^3.0.0",
"@types/chance": "^1.0.0",
"@types/cheerio": "^0.22.10",
"@types/chromedriver": "^2.38.0",
@ -319,6 +316,7 @@
"@types/enzyme": "^3.9.0",
"@types/eslint": "^6.1.3",
"@types/fetch-mock": "^7.3.1",
"@types/flot": "^0.0.31",
"@types/getopts": "^2.0.1",
"@types/glob": "^7.1.1",
"@types/globby": "^8.0.0",
@ -332,10 +330,12 @@
"@types/joi": "^13.4.2",
"@types/jquery": "^3.3.31",
"@types/js-yaml": "^3.11.1",
"@types/json-stable-stringify": "^1.0.32",
"@types/json5": "^0.0.30",
"@types/license-checker": "15.0.0",
"@types/listr": "^0.14.0",
"@types/lodash": "^3.10.1",
"@types/lodash.clonedeep": "^4.5.4",
"@types/lru-cache": "^5.1.0",
"@types/markdown-it": "^0.0.7",
"@types/minimatch": "^2.0.29",
@ -343,6 +343,7 @@
"@types/moment-timezone": "^0.5.12",
"@types/mustache": "^0.8.31",
"@types/node": "^10.12.27",
"@types/node-forge": "^0.9.0",
"@types/numeral": "^0.0.26",
"@types/opn": "^5.1.0",
"@types/pegjs": "^0.10.1",
@ -352,11 +353,13 @@
"@types/reach__router": "^1.2.6",
"@types/react": "^16.9.11",
"@types/react-dom": "^16.9.4",
"@types/react-grid-layout": "^0.16.7",
"@types/react-redux": "^6.0.6",
"@types/react-resize-detector": "^4.0.1",
"@types/react-router": "^5.1.3",
"@types/react-router-dom": "^5.1.3",
"@types/react-virtualized": "^9.18.7",
"@types/recompose": "^0.30.6",
"@types/redux": "^3.6.31",
"@types/redux-actions": "^2.6.1",
"@types/request": "^2.48.2",
@ -455,7 +458,7 @@
"pixelmatch": "^5.1.0",
"pkg-up": "^2.0.0",
"pngjs": "^3.4.0",
"postcss": "^7.0.5",
"postcss": "^7.0.26",
"postcss-url": "^8.0.0",
"prettier": "^1.19.1",
"proxyquire": "1.8.0",


@ -18,12 +18,7 @@
*/
export { withProcRunner, ProcRunner } from './proc_runner';
export {
ToolingLog,
ToolingLogTextWriter,
pickLevelFromFlags,
ToolingLogCollectingWriter,
} from './tooling_log';
export * from './tooling_log';
export { createAbsolutePathSerializer } from './serializers';
export {
CA_CERT_PATH,


@ -17,7 +17,9 @@
* under the License.
*/
export function createAbsolutePathSerializer(rootPath: string) {
import { REPO_ROOT } from '../repo_root';
export function createAbsolutePathSerializer(rootPath: string = REPO_ROOT) {
return {
print: (value: string) => value.replace(rootPath, '<absolute path>').replace(/\\/g, '/'),
test: (value: any) => typeof value === 'string' && value.startsWith(rootPath),


@ -19,5 +19,5 @@
export { ToolingLog } from './tooling_log';
export { ToolingLogTextWriter, ToolingLogTextWriterConfig } from './tooling_log_text_writer';
export { pickLevelFromFlags, LogLevel } from './log_levels';
export { pickLevelFromFlags, parseLogLevel, LogLevel } from './log_levels';
export { ToolingLogCollectingWriter } from './tooling_log_collecting_writer';


@ -82,20 +82,28 @@ export class ToolingLogTextWriter implements Writer {
}
}
write({ type, indent, args }: Message) {
if (!shouldWriteType(this.level, type)) {
write(msg: Message) {
if (!shouldWriteType(this.level, msg.type)) {
return false;
}
const txt = type === 'error' ? stringifyError(args[0]) : format(args[0], ...args.slice(1));
const prefix = has(MSG_PREFIXES, type) ? MSG_PREFIXES[type] : '';
const prefix = has(MSG_PREFIXES, msg.type) ? MSG_PREFIXES[msg.type] : '';
ToolingLogTextWriter.write(this.writeTo, prefix, msg);
return true;
}
static write(writeTo: ToolingLogTextWriter['writeTo'], prefix: string, msg: Message) {
const txt =
msg.type === 'error'
? stringifyError(msg.args[0])
: format(msg.args[0], ...msg.args.slice(1));
(prefix + txt).split('\n').forEach((line, i) => {
let lineIndent = '';
if (indent > 0) {
if (msg.indent > 0) {
// if we are indenting write some spaces followed by a symbol
lineIndent += ' '.repeat(indent - 1);
lineIndent += ' '.repeat(msg.indent - 1);
lineIndent += line.startsWith('-') ? '└' : '│';
}
@ -105,9 +113,7 @@ export class ToolingLogTextWriter implements Writer {
lineIndent += PREFIX_INDENT;
}
this.writeTo.write(`${lineIndent}${line}\n`);
writeTo.write(`${lineIndent}${line}\n`);
});
return true;
}
}


@ -16,6 +16,6 @@
"glob-all": "^3.1.0",
"lru-cache": "^4.1.5",
"resolve": "^1.7.1",
"webpack": "^4.41.0"
"webpack": "^4.41.5"
}
}


@ -23,15 +23,15 @@
"@kbn/dev-utils": "1.0.0",
"babel-loader": "^8.0.6",
"copy-webpack-plugin": "^5.0.4",
"css-loader": "2.1.1",
"css-loader": "^3.4.2",
"del": "^5.1.0",
"getopts": "^2.2.4",
"pegjs": "0.10.0",
"sass-loader": "^7.3.1",
"style-loader": "0.23.1",
"sass-loader": "^8.0.2",
"style-loader": "^1.1.3",
"supports-color": "^5.5.0",
"url-loader": "2.2.0",
"webpack": "4.41.0",
"webpack-cli": "^3.3.9"
"webpack": "^4.41.5",
"webpack-cli": "^3.3.10"
}
}


@ -0,0 +1,110 @@
# @kbn/optimizer
`@kbn/optimizer` is a package for building Kibana platform UI plugins (and hopefully more soon).
Kibana Platform plugins with `"ui": true` in their `kibana.json` file will have their `public/index.ts` file (and all of its dependencies) bundled into the `target/public` directory of the plugin. The build output does not need to be updated when other plugins are updated and is included in the distributable without requiring that we ship `@kbn/optimizer` 🎉.
## Webpack config
The [Webpack config][WebpackConfig] is designed to provide the majority of what was available in the legacy optimizer and is the same for all plugins to promote consistency and keep things sane for the operations team. It has support for JS/TS built with babel, url imports of image and font files, and support for importing `scss` and `css` files. SCSS is pre-processed by [postcss][PostCss], built for both light and dark mode and injected automatically into the page when the parent module is loaded (page reloads are still required for switching between light/dark mode). CSS is injected into the DOM as it is written on disk when the parent module is loaded (no postcss support).
Source maps are enabled except when building the distributable. They show the code actually being executed by the browser to strike a balance between debuggability and performance. They are not configurable at this time but will be configurable once we have a developer configuration solution that doesn't rely on the server (see [#55656](https://github.com/elastic/kibana/issues/55656)).
### IE Support
To make front-end code easier to debug, the optimizer uses the `BROWSERSLIST_ENV=dev` environment variable (by default) to build JS and CSS that is compatible with modern browsers. In order to support older browsers like IE in development, you will need to specify the `BROWSERSLIST_ENV=production` environment variable or build a distributable for testing.
## Running the optimizer
The `@kbn/optimizer` is automatically executed from the dev cli, the Kibana build scripts, and in CI. If you're running Kibana locally in some other way you might need to build the plugins manually, which you can do by running `node scripts/build_kibana_platform_plugins` (pass `--help` for options).
### Worker count
You can limit the number of workers the optimizer uses by setting the `KBN_OPTIMIZER_MAX_WORKERS` environment variable. You might want to do this if your system struggles to keep up while the optimizer is getting started and building all plugins as fast as possible. Setting `KBN_OPTIMIZER_MAX_WORKERS=1` will cause the optimizer to take the longest amount of time but will have the smallest impact on other components of your system.
The limit only applies to the number of workers started at any one time. If more workers are started later, that batch is again capped by the maximum, but workers that are already running are not counted against the limit because it is assumed they are doing very little work. This greatly simplifies the logic, since workers never have to be reallocated, and it provides the best performance in most cases.
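As a minimal sketch, the same limit can be supplied when invoking the optimizer programmatically; this assumes the `maxWorkerCount` option accepted by `OptimizerConfig.create()` (the value the CLI's `--workers` flag feeds into) and that the remaining options are optional:
```ts
import { OptimizerConfig } from '@kbn/optimizer';
import { REPO_ROOT } from '@kbn/dev-utils';

// read the env var ourselves purely for illustration; when it is unset the
// optimizer picks a worker count automatically
const maxWorkerCount = process.env.KBN_OPTIMIZER_MAX_WORKERS
  ? Number.parseInt(process.env.KBN_OPTIMIZER_MAX_WORKERS, 10)
  : undefined;

const config = OptimizerConfig.create({
  repoRoot: REPO_ROOT,
  maxWorkerCount,
});
```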
### Caching
Bundles built by the optimizer include a cache file which describes the information needed to determine whether the bundle needs to be rebuilt when the optimizer is restarted. Caching is enabled by default and is very aggressive about invalidating its output, but if you need to disable caching you can pass `--no-cache` to `node scripts/build_kibana_platform_plugins`, or set the `KBN_OPTIMIZER_NO_CACHE` environment variable to anything (the environment variable overrides everything).
When a bundle is determined to be up to date, no worker is started for it. When running the optimizer with the `--dev/--watch` flag, all the files referenced by cached bundles are watched for changes. Once a change is detected in any of the files referenced by a built bundle, a worker is started. If a changed file is referenced by several bundles then a worker is started for each of those bundles, batching them together to respect the worker limit.
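A minimal sketch of disabling the cache programmatically, assuming `cache: false` is what the CLI forwards when `--no-cache` is passed (see the CLI wiring later in this PR):
```ts
import { OptimizerConfig } from '@kbn/optimizer';
import { REPO_ROOT } from '@kbn/dev-utils';

// `cache: false` mirrors passing --no-cache to the CLI
const config = OptimizerConfig.create({
  repoRoot: REPO_ROOT,
  cache: false,
});
```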
## API
To run the optimizer from code, you can import the [`OptimizerConfig`][OptimizerConfig] class and [`runOptimizer`][Optimizer] function. Create an [`OptimizerConfig`][OptimizerConfig] instance by calling its static `create()` method with some options, then pass it to the [`runOptimizer`][Optimizer] function. `runOptimizer()` returns an observable of update objects, which are summaries of the optimizer state plus an optional `event` property which describes the internal events occurring and may be of use. You can use the [`logOptimizerState()`][LogOptimizerState] helper to write the relevant bits of state to a tooling log, or check out its implementation to see how the internal events like [`WorkerStdio`][ObserveWorker] and [`WorkerStarted`][ObserveWorker] are used.
Example:
```ts
import { runOptimizer, OptimizerConfig, logOptimizerState } from '@kbn/optimizer';
import { REPO_ROOT, ToolingLog } from '@kbn/dev-utils';
const log = new ToolingLog({
level: 'verbose',
writeTo: process.stdout,
})
const config = OptimizerConfig.create({
repoRoot: Path.resolve(__dirname, '../../..'),
watch: false,
oss: true,
dist: true
});
await runOptimizer(config)
.pipe(logOptimizerState(log, config))
.toPromise();
```
This is essentially what we're doing in [`scripts/build_kibana_platform_plugins`][Cli] and the new [build system task][BuildTask].
## Internals
The optimizer runs webpack instances in worker processes. Each worker is configured via a [`WorkerConfig`][WorkerConfig] object and an array of [`Bundle`][Bundle] objects which are JSON serialized and passed to the worker as its arguments.
Plugins/bundles are assigned to workers based on the number of modules historically seen in each bundle in an effort to evenly distribute the load across the worker pool (see [`assignBundlesToWorkers`][AssignBundlesToWorkers]).
The number of workers that will be started at any time is automatically chosen by dividing the number of cores available by 3 (minimum of 2).
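A rough sketch of that default; the exact rounding used here is an assumption:
```ts
import Os from 'os';

// one worker per three cores, but always at least two workers
const defaultMaxWorkerCount = Math.max(2, Math.round(Os.cpus().length / 3));
```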
The [`WorkerConfig`][WorkerConfig] includes the location of the repo (it might be one of many builds, or the main repo), whether we are running in watch mode, whether we are building a distributable, and other global config items.
The [`Bundle`][Bundle] objects include the details necessary to create a webpack config for a specific plugin's bundle (created using [`webpack.config.ts`][WebpackConfig]).
Each worker communicates state back to the main process by sending [`WorkerMsg`][WorkerMsg] and [`CompilerMsg`][CompilerMsg] objects using IPC.
The Optimizer captures all of these messages and produces a stream of update objects.
Optimizer phases:
<dl>
<dt><code>'initializing'</code></dt>
<dd>Initial phase; during this state the optimizer is validating caches and determining which bundles should be built initially.</dd>
<dt><code>'initialized'</code></dt>
<dd>Emitted by the optimizer once it's done initializing its internal state and has determined which bundles are going to be built initially.</dd>
<dt><code>'running'</code></dt>
<dd>Emitted when any worker is in a running state. To determine which compilers are running, look for <code>BundleState</code> objects with type <code>'running'</code>.</dd>
<dt><code>'issue'</code></dt>
<dd>Emitted when all workers are done running and any compiler completed with a <code>'compiler issue'</code> status. Compiler issues include things like "unable to resolve module" or syntax errors in the source modules and can be fixed by users when running in watch mode.</dd>
<dt><code>'success'</code></dt>
<dd>Emitted when all workers are done running and all compilers completed with <code>'compiler success'</code>.</dd>
<dt><code>'reallocating'</code></dt>
<dd>Emitted when the files referenced by a cached bundle have changed, before the worker has been started up to update that bundle.</dd>
</dl>
Workers may emit several error messages that indicate unrecoverable errors. When any of those messages are received the stream will error and the workers will be torn down.
For an example of how to handle these states, check out the [`logOptimizerState()`][LogOptimizerState] helper; a simplified sketch follows.
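The sketch below consumes the update stream and branches on the phases listed above. The `state.phase` property name and the minimal `OptimizerConfig.create()` options are assumptions; `logOptimizerState()` is the real reference implementation:
```ts
import { runOptimizer, OptimizerConfig } from '@kbn/optimizer';
import { REPO_ROOT } from '@kbn/dev-utils';
import { tap } from 'rxjs/operators';

const config = OptimizerConfig.create({ repoRoot: REPO_ROOT });

await runOptimizer(config)
  .pipe(
    tap(update => {
      // assumed shape: each update carries the current optimizer state
      switch (update.state.phase) {
        case 'issue':
          console.log('a bundle failed to build; fix the reported compiler issue');
          break;
        case 'success':
          console.log('all bundles built successfully');
          break;
      }
    })
  )
  .toPromise();
```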
[PostCss]: https://postcss.org/
[Cli]: src/cli.ts
[Optimizer]: src/optimizer.ts
[ObserveWorker]: src/observe_worker.ts
[CompilerMsg]: src/common/compiler_messages.ts
[WorkerMsg]: src/common/worker_messages.ts
[Bundle]: src/common/bundle.ts
[WebpackConfig]: src/worker/webpack.config.ts
[BundleDefinition]: src/common/bundle_definition.ts
[WorkerConfig]: src/common/worker_config.ts
[OptimizerConfig]: src/optimizer_config.ts
[LogOptimizerState]: src/log_optimizer_state.ts
[AssignBundlesToWorkers]: src/assign_bundles_to_workers.ts
[BuildTask]: ../../src/dev/build/tasks/build_kibana_platform_plugins.js


@ -17,8 +17,7 @@
* under the License.
*/
import chalk from 'chalk';
export const green = chalk.black.bgGreen;
export const red = chalk.white.bgRed;
export const yellow = chalk.black.bgYellow;
module.exports = {
presets: ['@kbn/babel-preset/node_preset'],
ignore: ['**/*.test.js'],
};


@ -17,4 +17,4 @@
* under the License.
*/
module.exports = require('tinymath/lib/tinymath.es5.js');
export * from './src/index';


@ -0,0 +1,44 @@
{
"name": "@kbn/optimizer",
"version": "1.0.0",
"private": true,
"license": "Apache-2.0",
"main": "./target/index.js",
"scripts": {
"build": "babel src --out-dir target --copy-files --delete-dir-on-start --extensions .ts --ignore *.test.ts --source-maps=inline",
"kbn:bootstrap": "yarn build",
"kbn:watch": "yarn build --watch"
},
"dependencies": {
"@babel/cli": "^7.5.5",
"@kbn/babel-preset": "1.0.0",
"@kbn/dev-utils": "1.0.0",
"@kbn/ui-shared-deps": "1.0.0",
"@types/loader-utils": "^1.1.3",
"@types/watchpack": "^1.1.5",
"@types/webpack": "^4.41.3",
"autoprefixer": "^9.7.4",
"babel-loader": "^8.0.6",
"clean-webpack-plugin": "^3.0.0",
"cpy": "^8.0.0",
"css-loader": "^3.4.2",
"del": "^5.1.0",
"file-loader": "^4.2.0",
"istanbul-instrumenter-loader": "^3.0.1",
"jest-diff": "^25.1.0",
"json-stable-stringify": "^1.0.1",
"loader-utils": "^1.2.3",
"node-sass": "^4.13.0",
"postcss-loader": "^3.0.0",
"raw-loader": "^3.1.0",
"rxjs": "^6.5.3",
"sass-loader": "^8.0.2",
"style-loader": "^1.1.3",
"terser-webpack-plugin": "^2.1.2",
"tinymath": "1.2.1",
"url-loader": "^2.2.0",
"watchpack": "^1.6.0",
"webpack": "^4.41.5",
"webpack-merge": "^4.2.2"
}
}


@ -0,0 +1,4 @@
{
"id": "bar",
"ui": true
}


@ -17,7 +17,6 @@
* under the License.
*/
// TODO these are imports from the old plugin world.
// Once the new platform is ready, they can get removed
// and handled by the platform itself in the setup method
// of the ExpressionExectorService
import { fooLibFn } from '../../foo/public/index';
export * from './lib';
export { fooLibFn };


@ -17,12 +17,6 @@
* under the License.
*/
import { resolve } from 'path';
export default function(kibana) {
return new kibana.Plugin({
uiExports: {
styleSheetPaths: resolve(__dirname, 'public/index.scss'),
},
});
export function barLibFn() {
return 'bar';
}


@ -0,0 +1,3 @@
{
"id": "baz"
}


@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from './lib';


@ -0,0 +1,22 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export function bazLibFn() {
return 'baz';
}


@ -0,0 +1,4 @@
{
"id": "foo",
"ui": true
}


@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const ext = 'TRUE';


@ -0,0 +1,21 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from './lib';
export * from './ext';


@ -0,0 +1,22 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export function fooLibFn() {
return 'foo';
}


@ -0,0 +1,3 @@
{
"id": "test_baz"
}


@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from './lib';


@ -0,0 +1,22 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export function bazLibFn() {
return 'baz';
}


@ -0,0 +1,118 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import 'source-map-support/register';
import Path from 'path';
import { run, REPO_ROOT, createFlagError } from '@kbn/dev-utils';
import { logOptimizerState } from './log_optimizer_state';
import { OptimizerConfig } from './optimizer';
import { runOptimizer } from './run_optimizer';
run(
async ({ log, flags }) => {
const watch = flags.watch ?? false;
if (typeof watch !== 'boolean') {
throw createFlagError('expected --watch to have no value');
}
const oss = flags.oss ?? false;
if (typeof oss !== 'boolean') {
throw createFlagError('expected --oss to have no value');
}
const cache = flags.cache ?? true;
if (typeof cache !== 'boolean') {
throw createFlagError('expected --cache to have no value');
}
const dist = flags.dist ?? false;
if (typeof dist !== 'boolean') {
throw createFlagError('expected --dist to have no value');
}
const examples = flags.examples ?? false;
if (typeof examples !== 'boolean') {
throw createFlagError('expected --no-examples to have no value');
}
const profileWebpack = flags.profile ?? false;
if (typeof profileWebpack !== 'boolean') {
throw createFlagError('expected --profile to have no value');
}
const inspectWorkers = flags['inspect-workers'] ?? false;
if (typeof inspectWorkers !== 'boolean') {
throw createFlagError('expected --no-inspect-workers to have no value');
}
const maxWorkerCount = flags.workers ? Number.parseInt(String(flags.workers), 10) : undefined;
if (maxWorkerCount !== undefined && (!Number.isFinite(maxWorkerCount) || maxWorkerCount < 1)) {
throw createFlagError('expected --workers to be a number greater than 0');
}
const extraPluginScanDirs = ([] as string[])
.concat((flags['scan-dir'] as string | string[]) || [])
.map(p => Path.resolve(p));
if (!extraPluginScanDirs.every(s => typeof s === 'string')) {
throw createFlagError('expected --scan-dir to be a string');
}
const config = OptimizerConfig.create({
repoRoot: REPO_ROOT,
watch,
maxWorkerCount,
oss,
dist,
cache,
examples,
profileWebpack,
extraPluginScanDirs,
inspectWorkers,
});
await runOptimizer(config)
.pipe(logOptimizerState(log, config))
.toPromise();
},
{
flags: {
boolean: ['watch', 'oss', 'examples', 'dist', 'cache', 'profile', 'inspect-workers'],
string: ['workers', 'scan-dir'],
default: {
examples: true,
cache: true,
'inspect-workers': true,
},
help: `
--watch run the optimizer in watch mode
--workers max number of workers to use
--oss only build oss plugins
--profile profile the webpack builds and write stats.json files to build outputs
--no-cache disable the cache
--no-examples don't build the example plugins
--dist create bundles that are suitable for inclusion in the Kibana distributable
--scan-dir add a directory to the list of directories scanned for plugins (specify as many times as necessary)
--no-inspect-workers when inspecting the parent process, don't inspect the workers
`,
},
}
);


@ -0,0 +1,112 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ascending, descending } from './array_helpers';
describe('ascending/descending', () => {
interface Item {
a: number;
b: number | string;
c?: number;
}
const a = (x: Item) => x.a;
const b = (x: Item) => x.b;
const c = (x: Item) => x.c;
const print = (x: Item) => `${x.a}/${x.b}/${x.c}`;
const values: Item[] = [
{ a: 1, b: 2, c: 3 },
{ a: 3, b: 2, c: 1 },
{ a: 9, b: 9, c: 9 },
{ a: 8, b: 5, c: 8 },
{ a: 8, b: 5 },
{ a: 8, b: 4 },
{ a: 8, b: 3, c: 8 },
{ a: 8, b: 2 },
{ a: 8, b: 1, c: 8 },
{ a: 8, b: 1 },
{ a: 8, b: 0 },
{ a: 8, b: -1, c: 8 },
{ a: 8, b: -2 },
{ a: 8, b: -3, c: 8 },
{ a: 8, b: -4 },
{ a: 8, b: 'foo', c: 8 },
{ a: 8, b: 'foo' },
{ a: 8, b: 'bar', c: 8 },
{ a: 8, b: 'bar' },
].sort(() => 0.5 - Math.random());
it('sorts items using getters', () => {
expect(
Array.from(values)
.sort(ascending(a, b, c))
.map(print)
).toMatchInlineSnapshot(`
Array [
"1/2/3",
"3/2/1",
"8/-4/undefined",
"8/-3/8",
"8/-2/undefined",
"8/-1/8",
"8/0/undefined",
"8/1/undefined",
"8/1/8",
"8/2/undefined",
"8/3/8",
"8/4/undefined",
"8/5/undefined",
"8/5/8",
"8/bar/undefined",
"8/bar/8",
"8/foo/undefined",
"8/foo/8",
"9/9/9",
]
`);
expect(
Array.from(values)
.sort(descending(a, b, c))
.map(print)
).toMatchInlineSnapshot(`
Array [
"9/9/9",
"8/foo/8",
"8/foo/undefined",
"8/bar/8",
"8/bar/undefined",
"8/5/8",
"8/5/undefined",
"8/4/undefined",
"8/3/8",
"8/2/undefined",
"8/1/8",
"8/1/undefined",
"8/0/undefined",
"8/-1/8",
"8/-2/undefined",
"8/-3/8",
"8/-4/undefined",
"3/2/1",
"1/2/3",
]
`);
});
});


@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
type SortPropGetter<T> = (x: T) => number | string | undefined;
type Comparator<T> = (a: T, b: T) => number;
/**
* create a sort comparator that sorts objects in ascending
* order based on the ...getters. getters are called for each
* item and return the value to compare against the other items.
*
* - if a getter returns undefined the item will be sorted
* before all other items
* - if a getter returns a string it will be compared using
* `String#localeCompare`
* - otherwise comparison is done using subtraction
* - If the values for a getter are equal the next getter is
* used to compare the items.
*/
export const ascending = <T>(...getters: Array<SortPropGetter<T>>): Comparator<T> => (a, b) => {
for (const getter of getters) {
const valA = getter(a);
const valB = getter(b);
if (valA === valB) {
continue;
}
if (valA === undefined) {
return -1;
}
if (valB === undefined) {
return 1;
}
return typeof valA === 'string' || typeof valB === 'string'
? String(valA).localeCompare(String(valB))
: valA - valB;
}
return 0;
};
/**
* create a sort comparator that sorts values in descending
* order based on the ...getters
*
* See docs for ascending()
*/
export const descending = <T>(...getters: Array<SortPropGetter<T>>): Comparator<T> => {
const sorter = ascending(...getters);
return (a, b) => sorter(b, a);
};
/**
* Alternate Array#includes() implementation with sane types, functions as a type guard
*/
export const includes = <T>(array: T[], value: any): value is T => array.includes(value);
/**
* Ponyfill for Object.fromEntries()
*/
export const entriesToObject = <T>(entries: Array<readonly [string, T]>): Record<string, T> => {
const object: Record<string, T> = {};
for (const [key, value] of entries) {
object[key] = value;
}
return object;
};


@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Bundle, BundleSpec, parseBundles } from './bundle';
jest.mock('fs');
const SPEC: BundleSpec = {
contextDir: '/foo/bar',
entry: 'entry',
id: 'bar',
outputDir: '/foo/bar/target',
sourceRoot: '/foo',
type: 'plugin',
};
it('creates cache keys', () => {
const bundle = new Bundle(SPEC);
expect(
bundle.createCacheKey(
['/foo/bar/a', '/foo/bar/c'],
new Map([
['/foo/bar/a', 123],
['/foo/bar/b', 456],
['/foo/bar/c', 789],
])
)
).toMatchInlineSnapshot(`
Object {
"mtimes": Object {
"/foo/bar/a": 123,
"/foo/bar/c": 789,
},
"spec": Object {
"contextDir": "/foo/bar",
"entry": "entry",
"id": "bar",
"outputDir": "/foo/bar/target",
"sourceRoot": "/foo",
"type": "plugin",
},
}
`);
});
it('provides serializable versions of itself', () => {
const bundle = new Bundle(SPEC);
expect(bundle.toSpec()).toEqual(SPEC);
});
it('provides the module count from the cache', () => {
const bundle = new Bundle(SPEC);
expect(bundle.cache.getModuleCount()).toBe(undefined);
bundle.cache.set({ moduleCount: 123 });
expect(bundle.cache.getModuleCount()).toBe(123);
});
it('parses bundles from JSON specs', () => {
const bundles = parseBundles(JSON.stringify([SPEC]));
expect(bundles).toMatchInlineSnapshot(`
Array [
Bundle {
"cache": BundleCache {
"path": "/foo/bar/target/.kbn-optimizer-cache",
"state": undefined,
},
"contextDir": "/foo/bar",
"entry": "entry",
"id": "bar",
"outputDir": "/foo/bar/target",
"sourceRoot": "/foo",
"type": "plugin",
},
]
`);
});


@ -0,0 +1,170 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import { BundleCache } from './bundle_cache';
import { UnknownVals } from './ts_helpers';
import { includes, ascending, entriesToObject } from './array_helpers';
const VALID_BUNDLE_TYPES = ['plugin' as const];
export interface BundleSpec {
readonly type: typeof VALID_BUNDLE_TYPES[0];
/** Unique id for this bundle */
readonly id: string;
/** Webpack entry request for this plugin, relative to the contextDir */
readonly entry: string;
/** Absolute path to the plugin source directory */
readonly contextDir: string;
/** Absolute path to the root of the repository */
readonly sourceRoot: string;
/** Absolute path to the directory where output should be written */
readonly outputDir: string;
}
export class Bundle {
/** Bundle type, only "plugin" is supported for now */
public readonly type: BundleSpec['type'];
/** Unique identifier for this bundle */
public readonly id: BundleSpec['id'];
/** Path, relative to `contextDir`, to the entry file for the Webpack bundle */
public readonly entry: BundleSpec['entry'];
/**
* Absolute path to the root of the bundle context (plugin directory);
* the entry is resolved relative to this directory and the default
* output paths are based on it
*/
public readonly contextDir: BundleSpec['contextDir'];
/** Absolute path to the root of the whole project source, repo root */
public readonly sourceRoot: BundleSpec['sourceRoot'];
/** Absolute path to the output directory for this bundle */
public readonly outputDir: BundleSpec['outputDir'];
public readonly cache: BundleCache;
constructor(spec: BundleSpec) {
this.type = spec.type;
this.id = spec.id;
this.entry = spec.entry;
this.contextDir = spec.contextDir;
this.sourceRoot = spec.sourceRoot;
this.outputDir = spec.outputDir;
this.cache = new BundleCache(Path.resolve(this.outputDir, '.kbn-optimizer-cache'));
}
/**
* Calculate the cache key for this bundle based on current
* mtime values.
*
* @param mtimes pre-fetched mtimes (ms || undefined) for all referenced files
*/
createCacheKey(files: string[], mtimes: Map<string, number | undefined>): unknown {
return {
spec: this.toSpec(),
mtimes: entriesToObject(
files.map(p => [p, mtimes.get(p)] as const).sort(ascending(e => e[0]))
),
};
}
/**
* Get the raw "specification" for the bundle, this object is JSON serialized
* in the cache key, passed to worker processes so they know what bundles
* to build, and passed to the Bundle constructor to rebuild the Bundle object.
*/
toSpec(): BundleSpec {
return {
type: this.type,
id: this.id,
entry: this.entry,
contextDir: this.contextDir,
sourceRoot: this.sourceRoot,
outputDir: this.outputDir,
};
}
}
/**
* Parse a JSON string containing an array of BundleSpec objects into an array
* of Bundle objects, validating everything.
*/
export function parseBundles(json: string) {
try {
if (typeof json !== 'string') {
throw new Error('must be a JSON string');
}
const specs: Array<UnknownVals<BundleSpec>> = JSON.parse(json);
if (!Array.isArray(specs)) {
throw new Error('must be an array');
}
return specs.map(
(spec: UnknownVals<BundleSpec>): Bundle => {
if (!(spec && typeof spec === 'object')) {
throw new Error('`bundles[]` must be an object');
}
const { type } = spec;
if (!includes(VALID_BUNDLE_TYPES, type)) {
throw new Error('`bundles[]` must have a valid `type`');
}
const { id } = spec;
if (!(typeof id === 'string')) {
throw new Error('`bundles[]` must have a string `id` property');
}
const { entry } = spec;
if (!(typeof entry === 'string')) {
throw new Error('`bundles[]` must have a string `entry` property');
}
const { contextDir } = spec;
if (!(typeof contextDir === 'string' && Path.isAbsolute(contextDir))) {
throw new Error('`bundles[]` must have an absolute path `contextDir` property');
}
const { sourceRoot } = spec;
if (!(typeof sourceRoot === 'string' && Path.isAbsolute(sourceRoot))) {
throw new Error('`bundles[]` must have an absolute path `sourceRoot` property');
}
const { outputDir } = spec;
if (!(typeof outputDir === 'string' && Path.isAbsolute(outputDir))) {
throw new Error('`bundles[]` must have an absolute path `outputDir` property');
}
return new Bundle({
type,
id,
entry,
contextDir,
sourceRoot,
outputDir,
});
}
);
} catch (error) {
throw new Error(`unable to parse bundles: ${error.message}`);
}
}


@ -0,0 +1,118 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { BundleCache, State } from './bundle_cache';
jest.mock('fs');
const mockReadFileSync: jest.Mock = jest.requireMock('fs').readFileSync;
const mockMkdirSync: jest.Mock = jest.requireMock('fs').mkdirSync;
const mockWriteFileSync: jest.Mock = jest.requireMock('fs').writeFileSync;
const SOME_STATE: State = {
cacheKey: 'abc',
files: ['123'],
moduleCount: 123,
optimizerCacheKey: 'abc',
};
beforeEach(() => {
jest.clearAllMocks();
});
it(`doesn't complain if files are not on disk`, () => {
const cache = new BundleCache('/foo/bar.json');
expect(cache.get()).toEqual({});
});
it(`updates files on disk when calling set()`, () => {
const cache = new BundleCache('/foo/bar.json');
cache.set(SOME_STATE);
expect(mockReadFileSync).not.toHaveBeenCalled();
expect(mockMkdirSync.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"/foo",
Object {
"recursive": true,
},
],
]
`);
expect(mockWriteFileSync.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"/foo/bar.json",
"{
\\"cacheKey\\": \\"abc\\",
\\"files\\": [
\\"123\\"
],
\\"moduleCount\\": 123,
\\"optimizerCacheKey\\": \\"abc\\"
}",
],
]
`);
});
it(`serves updated state from memory`, () => {
const cache = new BundleCache('/foo/bar.json');
cache.set(SOME_STATE);
jest.clearAllMocks();
expect(cache.get()).toEqual(SOME_STATE);
expect(mockReadFileSync).not.toHaveBeenCalled();
expect(mockMkdirSync).not.toHaveBeenCalled();
expect(mockWriteFileSync).not.toHaveBeenCalled();
});
it('reads state from disk on get() after refresh()', () => {
const cache = new BundleCache('/foo/bar.json');
cache.set(SOME_STATE);
cache.refresh();
jest.clearAllMocks();
cache.get();
expect(mockMkdirSync).not.toHaveBeenCalled();
expect(mockWriteFileSync).not.toHaveBeenCalled();
expect(mockReadFileSync.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"/foo/bar.json",
"utf8",
],
]
`);
});
it('provides accessors to specific state properties', () => {
const cache = new BundleCache('/foo/bar.json');
expect(cache.getModuleCount()).toBe(undefined);
expect(cache.getReferencedFiles()).toEqual(undefined);
expect(cache.getCacheKey()).toEqual(undefined);
expect(cache.getOptimizerCacheKey()).toEqual(undefined);
cache.set(SOME_STATE);
expect(cache.getModuleCount()).toBe(123);
expect(cache.getReferencedFiles()).toEqual(['123']);
expect(cache.getCacheKey()).toEqual('abc');
expect(cache.getOptimizerCacheKey()).toEqual('abc');
});


@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Fs from 'fs';
import Path from 'path';
export interface State {
optimizerCacheKey?: unknown;
cacheKey?: unknown;
moduleCount?: number;
files?: string[];
}
const DEFAULT_STATE: State = {};
const DEFAULT_STATE_JSON = JSON.stringify(DEFAULT_STATE);
/**
* Helper to read and update metadata for bundles.
*/
export class BundleCache {
private state: State | undefined = undefined;
constructor(private readonly path: string | false) {}
refresh() {
this.state = undefined;
}
get() {
if (!this.state) {
let json;
try {
if (this.path) {
json = Fs.readFileSync(this.path, 'utf8');
}
} catch (error) {
if (error.code !== 'ENOENT') {
throw error;
}
}
let partialCache: Partial<State>;
try {
partialCache = JSON.parse(json || DEFAULT_STATE_JSON);
} catch (error) {
partialCache = {};
}
this.state = {
...DEFAULT_STATE,
...partialCache,
};
}
return this.state;
}
set(updated: State) {
this.state = updated;
if (this.path) {
const directory = Path.dirname(this.path);
Fs.mkdirSync(directory, { recursive: true });
Fs.writeFileSync(this.path, JSON.stringify(this.state, null, 2));
}
}
public getModuleCount() {
return this.get().moduleCount;
}
public getReferencedFiles() {
return this.get().files;
}
public getCacheKey() {
return this.get().cacheKey;
}
public getOptimizerCacheKey() {
return this.get().optimizerCacheKey;
}
}


@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Message sent when a compiler encounters an unresolvable error.
* The worker will be shut down following this message.
*/
export interface CompilerErrorMsg {
type: 'compiler error';
id: string;
errorMsg: string;
errorStack?: string;
}
/**
* Message sent when a compiler starts running, either for the first
* time or because of changes detected when watching.
*/
export interface CompilerRunningMsg {
type: 'running';
bundleId: string;
}
/**
* Message sent when a compiler encounters an error that
* prevents the bundle from building correctly. When in
* watch mode these issues can be fixed by the user.
* (ie. unresolved import, syntax error, etc.)
*/
export interface CompilerIssueMsg {
type: 'compiler issue';
bundleId: string;
failure: string;
}
/**
* Message sent when a compiler completes successfully and
* the bundle has been written to disk or updated on disk.
*/
export interface CompilerSuccessMsg {
type: 'compiler success';
bundleId: string;
moduleCount: number;
}
export type CompilerMsg = CompilerRunningMsg | CompilerIssueMsg | CompilerSuccessMsg;
export class CompilerMsgs {
constructor(private bundle: string) {}
running(): CompilerRunningMsg {
return {
bundleId: this.bundle,
type: 'running',
};
}
compilerFailure(options: { failure: string }): CompilerIssueMsg {
return {
bundleId: this.bundle,
type: 'compiler issue',
failure: options.failure,
};
}
compilerSuccess(options: { moduleCount: number }): CompilerSuccessMsg {
return {
bundleId: this.bundle,
type: 'compiler success',
moduleCount: options.moduleCount,
};
}
error(error: Error): CompilerErrorMsg {
return {
id: this.bundle,
type: 'compiler error',
errorMsg: error.message,
errorStack: error.stack,
};
}
}


@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { toArray } from 'rxjs/operators';
import { summarizeEvent$ } from './event_stream_helpers';
it('emits each state with each event, ignoring events when reducer returns undefined', async () => {
const values = await summarizeEvent$(
Rx.of(1, 2, 3, 4, 5),
{
sum: 0,
},
(state, event) => {
if (event % 2) {
return {
sum: state.sum + event,
};
}
}
)
.pipe(toArray())
.toPromise();
expect(values).toMatchInlineSnapshot(`
Array [
Object {
"state": Object {
"sum": 0,
},
},
Object {
"event": 1,
"state": Object {
"sum": 1,
},
},
Object {
"event": 3,
"state": Object {
"sum": 4,
},
},
Object {
"event": 5,
"state": Object {
"sum": 9,
},
},
]
`);
});


@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { scan, distinctUntilChanged, startWith } from 'rxjs/operators';
export interface Update<Event, State> {
event?: Event;
state: State;
}
export type Summarizer<Event, State> = (prev: State, event: Event) => State | undefined;
/**
* Transform an event stream into a state update stream which emits
* the events and individual states for each event.
*/
export const summarizeEvent$ = <Event, State>(
event$: Rx.Observable<Event>,
initialState: State,
reducer: Summarizer<Event, State>
) => {
const initUpdate: Update<Event, State> = {
state: initialState,
};
return event$.pipe(
scan((prev, event): Update<Event, State> => {
const newState = reducer(prev.state, event);
return newState === undefined
? prev
: {
event,
state: newState,
};
}, initUpdate),
distinctUntilChanged(),
startWith(initUpdate)
);
};


@ -0,0 +1,28 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from './bundle';
export * from './bundle_cache';
export * from './worker_config';
export * from './worker_messages';
export * from './compiler_messages';
export * from './ts_helpers';
export * from './rxjs_helpers';
export * from './array_helpers';
export * from './event_stream_helpers';

View file

@ -0,0 +1,140 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { toArray, map } from 'rxjs/operators';
import { pipeClosure, debounceTimeBuffer, maybeMap, maybe } from './rxjs_helpers';
jest.useFakeTimers();
describe('pipeClosure()', () => {
it('calls closure on each subscription to setup unique state', async () => {
let counter = 0;
const foo$ = Rx.of(1, 2, 3).pipe(
pipeClosure(source$ => {
const multiplier = ++counter;
return source$.pipe(map(i => i * multiplier));
}),
toArray()
);
await expect(foo$.toPromise()).resolves.toMatchInlineSnapshot(`
Array [
1,
2,
3,
]
`);
await expect(foo$.toPromise()).resolves.toMatchInlineSnapshot(`
Array [
2,
4,
6,
]
`);
await expect(foo$.toPromise()).resolves.toMatchInlineSnapshot(`
Array [
3,
6,
9,
]
`);
});
});
describe('maybe()', () => {
it('filters out undefined values from the stream', async () => {
const foo$ = Rx.of(1, undefined, 2, undefined, 3).pipe(maybe(), toArray());
await expect(foo$.toPromise()).resolves.toEqual([1, 2, 3]);
});
});
describe('maybeMap()', () => {
it('calls map fn and filters out undefined values returned', async () => {
const foo$ = Rx.of(1, 2, 3, 4, 5).pipe(
maybeMap(i => (i % 2 ? i : undefined)),
toArray()
);
await expect(foo$.toPromise()).resolves.toEqual([1, 3, 5]);
});
});
describe('debounceTimeBuffer()', () => {
beforeEach(() => {
jest.useFakeTimers();
});
afterEach(() => {
jest.useRealTimers();
});
it('buffers items until there is n milliseconds of silence, then flushes buffer to stream', async () => {
const foo$ = new Rx.Subject<number>();
const dest = new Rx.BehaviorSubject<number | undefined>(undefined);
foo$
.pipe(
debounceTimeBuffer(100),
map(items => items.reduce((sum, n) => sum + n))
)
.subscribe(dest);
foo$.next(1);
expect(dest.getValue()).toBe(undefined);
// only wait 99 milliseconds before sending the next value
jest.advanceTimersByTime(99);
foo$.next(1);
expect(dest.getValue()).toBe(undefined);
// only wait 99 milliseconds before sending the next value
jest.advanceTimersByTime(99);
foo$.next(1);
expect(dest.getValue()).toBe(undefined);
// wait the full 100 milliseconds so the buffered values are flushed, then send another value
jest.advanceTimersByTime(100);
foo$.next(1);
expect(dest.getValue()).toBe(3);
foo$.complete();
if (!dest.isStopped) {
throw new Error('Expected destination to stop as soon as the source is completed');
}
});
it('clears queue as soon as source completes if source completes before time is up', () => {
const foo$ = new Rx.Subject<number>();
const dest = new Rx.BehaviorSubject<number | undefined>(undefined);
foo$
.pipe(
debounceTimeBuffer(100),
map(items => items.reduce((sum, n) => sum + n))
)
.subscribe(dest);
foo$.next(1);
expect(dest.getValue()).toBe(undefined);
foo$.complete();
expect(dest.getValue()).toBe(1);
});
});


@ -0,0 +1,75 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { mergeMap, tap, debounceTime, map } from 'rxjs/operators';
type Operator<T1, T2> = (source: Rx.Observable<T1>) => Rx.Observable<T2>;
type MapFn<T1, T2> = (item: T1, index: number) => T2;
/**
* Wrap an operator chain in a closure so that it can have some local
* state. The `fn` is called each time the final observable is
* subscribed so the pipeline/closure is set up for each subscription.
*/
export const pipeClosure = <T1, T2>(fn: Operator<T1, T2>): Operator<T1, T2> => {
return (source: Rx.Observable<T1>) => {
return Rx.defer(() => fn(source));
};
};
/**
* An operator that filters out undefined values from the stream while
* supporting TypeScript
*/
export const maybe = <T1>(): Operator<T1 | undefined, T1> => {
return mergeMap(item => (item === undefined ? Rx.EMPTY : [item]));
};
/**
* An operator like map(), but undefined values returned by the map fn are
* filtered out automatically, with TypeScript support. TS doesn't have great
* support for filters without an explicit type assertion in the signature of
* the filter predicate.
*/
export const maybeMap = <T1, T2>(fn: MapFn<T1, undefined | T2>): Operator<T1, T2> => {
return mergeMap((item, index) => {
const result = fn(item, index);
return result === undefined ? Rx.EMPTY : [result];
});
};
/**
* Debounce received notifications and write them to a buffer. Once the source
* has been silent for `ms` milliseconds the buffer is flushed as a single array
* to the destination stream
*/
export const debounceTimeBuffer = <T>(ms: number) =>
pipeClosure((source$: Rx.Observable<T>) => {
const buffer: T[] = [];
return source$.pipe(
tap(item => buffer.push(item)),
debounceTime(ms),
map(() => {
const items = Array.from(buffer);
buffer.length = 0;
return items;
})
);
});
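// Usage sketch (illustrative): the helpers above compose into a single
// pipeline. Numeric strings are parsed with maybeMap() (unparseable values
// are dropped) and the surviving numbers are batched by debounceTimeBuffer().
// pipeClosure() inside debounceTimeBuffer() is what gives each subscriber its
// own private buffer. The values here are made up.
const parsed$ = Rx.of('1', 'x', '2', '3').pipe(
  maybeMap(str => {
    const n = parseFloat(str);
    return Number.isNaN(n) ? undefined : n;
  }),
  debounceTimeBuffer<number>(50)
);

parsed$.subscribe(batch => {
  // batch === [1, 2, 3]; the buffer is flushed after 50ms of silence, or as
  // soon as the source completes (see the second debounceTimeBuffer test).
});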


@ -0,0 +1,26 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Convert an object type into an object with the same keys
* but with each value type replaced with `unknown`
*/
export type UnknownVals<T extends object> = {
[k in keyof T]: unknown;
};
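// Usage sketch (illustrative): UnknownVals<T> is handy when validating values
// parsed from JSON. Typing the parsed object this way forces every field to
// be checked before it can be used with its real type, which is how
// parseWorkerConfig() in the next file uses it. ExampleConfig and
// parseExampleConfig are made-up names.
interface ExampleConfig {
  watch: boolean;
}

function parseExampleConfig(json: string): ExampleConfig {
  const parsed: UnknownVals<ExampleConfig> = JSON.parse(json);

  const watch = parsed.watch;
  if (typeof watch !== 'boolean') {
    throw new Error('`watch` must be a boolean');
  }

  return { watch };
}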


@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import { UnknownVals } from './ts_helpers';
export interface WorkerConfig {
readonly repoRoot: string;
readonly watch: boolean;
readonly dist: boolean;
readonly cache: boolean;
readonly profileWebpack: boolean;
readonly browserslistEnv: string;
readonly optimizerCacheKey: unknown;
}
export function parseWorkerConfig(json: string): WorkerConfig {
try {
if (typeof json !== 'string') {
throw new Error('expected worker config to be a JSON string');
}
const parsed: UnknownVals<WorkerConfig> = JSON.parse(json);
if (!(typeof parsed === 'object' && parsed)) {
throw new Error('config must be an object');
}
const repoRoot = parsed.repoRoot;
if (typeof repoRoot !== 'string' || !Path.isAbsolute(repoRoot)) {
throw new Error('`repoRoot` config must be an absolute path');
}
const cache = parsed.cache;
if (typeof cache !== 'boolean') {
throw new Error('`cache` config must be a boolean');
}
const watch = parsed.watch;
if (typeof watch !== 'boolean') {
throw new Error('`watch` config must be a boolean');
}
const dist = parsed.dist;
if (typeof dist !== 'boolean') {
throw new Error('`dist` config must be a boolean');
}
const profileWebpack = parsed.profileWebpack;
if (typeof profileWebpack !== 'boolean') {
throw new Error('`profileWebpack` must be a boolean');
}
const optimizerCacheKey = parsed.optimizerCacheKey;
if (optimizerCacheKey === undefined) {
throw new Error('`optimizerCacheKey` must be defined');
}
const browserslistEnv = parsed.browserslistEnv;
if (typeof browserslistEnv !== 'string') {
throw new Error('`browserslistEnv` must be a string');
}
return {
repoRoot,
cache,
watch,
dist,
profileWebpack,
optimizerCacheKey,
browserslistEnv,
};
} catch (error) {
throw new Error(`unable to parse worker config: ${error.message}`);
}
}
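// Usage sketch (illustrative): the config round-trips as a JSON string. How
// that string reaches the worker (argv, env, ipc) is not shown in this file,
// so the values below are placeholders.
const exampleWorkerConfig = parseWorkerConfig(
  JSON.stringify({
    repoRoot: '/repo',
    watch: false,
    dist: false,
    cache: true,
    profileWebpack: false,
    browserslistEnv: 'dev',
    optimizerCacheKey: 'example-cache-key',
  })
);
// exampleWorkerConfig.repoRoot === '/repo', exampleWorkerConfig.cache === true, ...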


@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
CompilerRunningMsg,
CompilerIssueMsg,
CompilerSuccessMsg,
CompilerErrorMsg,
} from './compiler_messages';
export type WorkerMsg =
| CompilerRunningMsg
| CompilerIssueMsg
| CompilerSuccessMsg
| CompilerErrorMsg
| WorkerErrorMsg;
/**
* Message sent when the worker encounters an error that it can't
* recover from. No more messages will be sent and the worker
* will exit after this message.
*/
export interface WorkerErrorMsg {
type: 'worker error';
errorMsg: string;
errorStack?: string;
}
const WORKER_STATE_TYPES: ReadonlyArray<WorkerMsg['type']> = [
'running',
'compiler issue',
'compiler success',
'compiler error',
'worker error',
];
export const isWorkerMsg = (value: any): value is WorkerMsg =>
typeof value === 'object' && value && WORKER_STATE_TYPES.includes(value.type);
export class WorkerMsgs {
error(error: Error): WorkerErrorMsg {
return {
type: 'worker error',
errorMsg: error.message,
errorStack: error.stack,
};
}
}
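// Usage sketch (illustrative): isWorkerMsg() acts as a type guard so a parent
// process can ignore IPC messages it doesn't recognize, and WorkerMsgs#error()
// wraps an exception so it can be serialized as the worker's final message.
// The `send` parameter below stands in for a hypothetical IPC send function.
const workerMsgs = new WorkerMsgs();

function toWorkerMsg(msg: unknown): WorkerMsg | undefined {
  // narrow arbitrary IPC payloads down to the messages this package understands
  return isWorkerMsg(msg) ? msg : undefined;
}

function reportFatalError(send: (msg: WorkerMsg) => void, error: Error) {
  // the worker sends this as its last message and then exits
  send(workerMsgs.error(error));
}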


@ -0,0 +1,22 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { OptimizerConfig } from './optimizer';
export * from './run_optimizer';
export * from './log_optimizer_state';
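// Usage sketch (illustrative): how a consumer typically drives the optimizer
// with the exports above. The repoRoot and log settings are placeholders, and
// options not passed here (such as pluginScanDirs) are assumed to have repo
// defaults.
//
//   import { ToolingLog } from '@kbn/dev-utils';
//   import { runOptimizer, OptimizerConfig, logOptimizerState } from '@kbn/optimizer';
//
//   const log = new ToolingLog({ level: 'info', writeTo: process.stdout });
//   const config = OptimizerConfig.create({
//     repoRoot: '/path/to/kibana',
//     maxWorkerCount: 2,
//   });
//
//   runOptimizer(config)
//     .pipe(logOptimizerState(log, config))
//     .toPromise()
//     .then(() => log.success('optimizer complete'));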


@ -0,0 +1,557 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`builds expected bundles, saves bundle counts to metadata: OptimizerConfig 1`] = `
OptimizerConfig {
"bundles": Array [
Bundle {
"cache": BundleCache {
"path": <absolute path>/plugins/bar/target/public/.kbn-optimizer-cache,
"state": undefined,
},
"contextDir": <absolute path>/plugins/bar,
"entry": "./public/index",
"id": "bar",
"outputDir": <absolute path>/plugins/bar/target/public,
"sourceRoot": <absolute path>,
"type": "plugin",
},
Bundle {
"cache": BundleCache {
"path": <absolute path>/plugins/foo/target/public/.kbn-optimizer-cache,
"state": undefined,
},
"contextDir": <absolute path>/plugins/foo,
"entry": "./public/index",
"id": "foo",
"outputDir": <absolute path>/plugins/foo/target/public,
"sourceRoot": <absolute path>,
"type": "plugin",
},
],
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 1,
"plugins": Array [
Object {
"directory": <absolute path>/plugins/bar,
"id": "bar",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/plugins/baz,
"id": "baz",
"isUiPlugin": false,
},
Object {
"directory": <absolute path>/plugins/foo,
"id": "foo",
"isUiPlugin": true,
},
],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`;
exports[`builds expected bundles, saves bundle counts to metadata: bar bundle 1`] = `
"var __kbnBundles__ = typeof __kbnBundles__ === \\"object\\" ? __kbnBundles__ : {}; __kbnBundles__[\\"plugin/bar\\"] =
/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ }
/******/ };
/******/
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/ __webpack_require__.t = function(value, mode) {
/******/ if(mode & 1) value = __webpack_require__(value);
/******/ if(mode & 8) return value;
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ return ns;
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = \\"__REPLACE_WITH_PUBLIC_PATH__\\";
/******/
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = \\"./public/index.ts\\");
/******/ })
/************************************************************************/
/******/ ({
/***/ \\"../foo/public/ext.ts\\":
/*!****************************!*\\\\
!*** ../foo/public/ext.ts ***!
\\\\****************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
exports.ext = void 0;
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the \\"License\\"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* \\"AS IS\\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
const ext = 'TRUE';
exports.ext = ext;
/***/ }),
/***/ \\"../foo/public/index.ts\\":
/*!******************************!*\\\\
!*** ../foo/public/index.ts ***!
\\\\******************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
var _lib = __webpack_require__(/*! ./lib */ \\"../foo/public/lib.ts\\");
Object.keys(_lib).forEach(function (key) {
if (key === \\"default\\" || key === \\"__esModule\\") return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _lib[key];
}
});
});
var _ext = __webpack_require__(/*! ./ext */ \\"../foo/public/ext.ts\\");
Object.keys(_ext).forEach(function (key) {
if (key === \\"default\\" || key === \\"__esModule\\") return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _ext[key];
}
});
});
/***/ }),
/***/ \\"../foo/public/lib.ts\\":
/*!****************************!*\\\\
!*** ../foo/public/lib.ts ***!
\\\\****************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
exports.fooLibFn = fooLibFn;
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the \\"License\\"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* \\"AS IS\\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
function fooLibFn() {
return 'foo';
}
/***/ }),
/***/ \\"./public/index.ts\\":
/*!*************************!*\\\\
!*** ./public/index.ts ***!
\\\\*************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
var _exportNames = {
fooLibFn: true
};
Object.defineProperty(exports, \\"fooLibFn\\", {
enumerable: true,
get: function () {
return _index.fooLibFn;
}
});
var _index = __webpack_require__(/*! ../../foo/public/index */ \\"../foo/public/index.ts\\");
var _lib = __webpack_require__(/*! ./lib */ \\"./public/lib.ts\\");
Object.keys(_lib).forEach(function (key) {
if (key === \\"default\\" || key === \\"__esModule\\") return;
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _lib[key];
}
});
});
/***/ }),
/***/ \\"./public/lib.ts\\":
/*!***********************!*\\\\
!*** ./public/lib.ts ***!
\\\\***********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
exports.barLibFn = barLibFn;
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the \\"License\\"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* \\"AS IS\\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
function barLibFn() {
return 'bar';
}
/***/ })
/******/ })[\\"plugin\\"];
//# sourceMappingURL=bar.plugin.js.map"
`;
exports[`builds expected bundles, saves bundle counts to metadata: foo bundle 1`] = `
"var __kbnBundles__ = typeof __kbnBundles__ === \\"object\\" ? __kbnBundles__ : {}; __kbnBundles__[\\"plugin/foo\\"] =
/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ }
/******/ };
/******/
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/ __webpack_require__.t = function(value, mode) {
/******/ if(mode & 1) value = __webpack_require__(value);
/******/ if(mode & 8) return value;
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ return ns;
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = \\"__REPLACE_WITH_PUBLIC_PATH__\\";
/******/
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = \\"./public/index.ts\\");
/******/ })
/************************************************************************/
/******/ ({
/***/ \\"./public/ext.ts\\":
/*!***********************!*\\\\
!*** ./public/ext.ts ***!
\\\\***********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
exports.ext = void 0;
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the \\"License\\"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* \\"AS IS\\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
const ext = 'TRUE';
exports.ext = ext;
/***/ }),
/***/ \\"./public/index.ts\\":
/*!*************************!*\\\\
!*** ./public/index.ts ***!
\\\\*************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
var _lib = __webpack_require__(/*! ./lib */ \\"./public/lib.ts\\");
Object.keys(_lib).forEach(function (key) {
if (key === \\"default\\" || key === \\"__esModule\\") return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _lib[key];
}
});
});
var _ext = __webpack_require__(/*! ./ext */ \\"./public/ext.ts\\");
Object.keys(_ext).forEach(function (key) {
if (key === \\"default\\" || key === \\"__esModule\\") return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _ext[key];
}
});
});
/***/ }),
/***/ \\"./public/lib.ts\\":
/*!***********************!*\\\\
!*** ./public/lib.ts ***!
\\\\***********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
\\"use strict\\";
Object.defineProperty(exports, \\"__esModule\\", {
value: true
});
exports.fooLibFn = fooLibFn;
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the \\"License\\"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* \\"AS IS\\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
function fooLibFn() {
return 'foo';
}
/***/ })
/******/ })[\\"plugin\\"];
//# sourceMappingURL=foo.plugin.js.map"
`;


@ -0,0 +1,155 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import Fs from 'fs';
import { inspect } from 'util';
import cpy from 'cpy';
import del from 'del';
import { toArray, tap } from 'rxjs/operators';
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
import { runOptimizer, OptimizerConfig, OptimizerUpdate } from '@kbn/optimizer';
const TMP_DIR = Path.resolve(__dirname, '../__fixtures__/__tmp__');
const MOCK_REPO_SRC = Path.resolve(__dirname, '../__fixtures__/mock_repo');
const MOCK_REPO_DIR = Path.resolve(TMP_DIR, 'mock_repo');
expect.addSnapshotSerializer(createAbsolutePathSerializer(MOCK_REPO_DIR));
beforeEach(async () => {
await del(TMP_DIR);
await cpy('**/*', MOCK_REPO_DIR, {
cwd: MOCK_REPO_SRC,
parents: true,
deep: true,
});
});
afterEach(async () => {
await del(TMP_DIR);
});
it('builds expected bundles, saves bundle counts to metadata', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [Path.resolve(MOCK_REPO_DIR, 'plugins')],
maxWorkerCount: 1,
});
expect(config).toMatchSnapshot('OptimizerConfig');
const msgs = await runOptimizer(config)
.pipe(
tap(state => {
if (state.event?.type === 'worker stdio') {
// eslint-disable-next-line no-console
console.log('worker', state.event.stream, state.event.chunk.toString('utf8'));
}
}),
toArray()
)
.toPromise();
const assert = (statement: string, truth: boolean, altStates?: OptimizerUpdate[]) => {
if (!truth) {
throw new Error(
`expected optimizer to ${statement}, states: ${inspect(altStates || msgs, {
colors: true,
depth: Infinity,
})}`
);
}
};
const initializingStates = msgs.filter(msg => msg.state.phase === 'initializing');
assert('produce at least one initializing event', initializingStates.length >= 1);
const bundleCacheStates = msgs.filter(
msg =>
(msg.event?.type === 'bundle cached' || msg.event?.type === 'bundle not cached') &&
msg.state.phase === 'initializing'
);
assert('produce two bundle cache events while initializing', bundleCacheStates.length === 2);
const initializedStates = msgs.filter(msg => msg.state.phase === 'initialized');
assert('produce at least one initialized event', initializedStates.length >= 1);
const workerStarted = msgs.filter(msg => msg.event?.type === 'worker started');
assert('produce one worker started event', workerStarted.length === 1);
const runningStates = msgs.filter(msg => msg.state.phase === 'running');
assert(
'produce two or three "running" states',
runningStates.length === 2 || runningStates.length === 3
);
const bundleNotCachedEvents = msgs.filter(msg => msg.event?.type === 'bundle not cached');
assert('produce two "bundle not cached" events', bundleNotCachedEvents.length === 2);
const successStates = msgs.filter(msg => msg.state.phase === 'success');
assert(
'produce one or two "compiler success" states',
successStates.length === 1 || successStates.length === 2
);
const otherStates = msgs.filter(
msg =>
msg.state.phase !== 'initializing' &&
msg.state.phase !== 'success' &&
msg.state.phase !== 'running' &&
msg.state.phase !== 'initialized' &&
msg.event?.type !== 'bundle not cached'
);
assert('produce zero unexpected states', otherStates.length === 0, otherStates);
expect(
Fs.readFileSync(Path.resolve(MOCK_REPO_DIR, 'plugins/foo/target/public/foo.plugin.js'), 'utf8')
).toMatchSnapshot('foo bundle');
expect(
Fs.readFileSync(Path.resolve(MOCK_REPO_DIR, 'plugins/bar/target/public/bar.plugin.js'), 'utf8')
).toMatchSnapshot('bar bundle');
const foo = config.bundles.find(b => b.id === 'foo')!;
expect(foo).toBeTruthy();
foo.cache.refresh();
expect(foo.cache.getModuleCount()).toBe(3);
expect(foo.cache.getReferencedFiles()).toMatchInlineSnapshot(`
Array [
<absolute path>/plugins/foo/public/ext.ts,
<absolute path>/plugins/foo/public/index.ts,
<absolute path>/plugins/foo/public/lib.ts,
]
`);
const bar = config.bundles.find(b => b.id === 'bar')!;
expect(bar).toBeTruthy();
bar.cache.refresh();
expect(bar.cache.getModuleCount()).toBe(5);
expect(bar.cache.getReferencedFiles()).toMatchInlineSnapshot(`
Array [
<absolute path>/plugins/foo/public/ext.ts,
<absolute path>/plugins/foo/public/index.ts,
<absolute path>/plugins/foo/public/lib.ts,
<absolute path>/plugins/bar/public/index.ts,
<absolute path>/plugins/bar/public/lib.ts,
]
`);
});


@ -0,0 +1,301 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import cpy from 'cpy';
import del from 'del';
import { toArray } from 'rxjs/operators';
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
import { getMtimes } from '../optimizer/get_mtimes';
import { OptimizerConfig } from '../optimizer/optimizer_config';
import { Bundle } from '../common/bundle';
import { getBundleCacheEvent$ } from '../optimizer/bundle_cache';
const TMP_DIR = Path.resolve(__dirname, '../__fixtures__/__tmp__');
const MOCK_REPO_SRC = Path.resolve(__dirname, '../__fixtures__/mock_repo');
const MOCK_REPO_DIR = Path.resolve(TMP_DIR, 'mock_repo');
expect.addSnapshotSerializer({
print: () => '<Bundle>',
test: v => v instanceof Bundle,
});
expect.addSnapshotSerializer(createAbsolutePathSerializer(MOCK_REPO_DIR));
beforeEach(async () => {
await del(TMP_DIR);
await cpy('**/*', MOCK_REPO_DIR, {
cwd: MOCK_REPO_SRC,
parents: true,
deep: true,
});
});
afterEach(async () => {
await del(TMP_DIR);
});
it('emits "bundle cached" event when everything is updated', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [],
pluginPaths: [Path.resolve(MOCK_REPO_DIR, 'plugins/foo')],
maxWorkerCount: 1,
});
const [bundle] = config.bundles;
const optimizerCacheKey = 'optimizerCacheKey';
const files = [
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/ext.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/index.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/lib.ts'),
];
const mtimes = await getMtimes(files);
const cacheKey = bundle.createCacheKey(files, mtimes);
bundle.cache.set({
cacheKey,
optimizerCacheKey,
files,
moduleCount: files.length,
});
const cacheEvents = await getBundleCacheEvent$(config, optimizerCacheKey)
.pipe(toArray())
.toPromise();
expect(cacheEvents).toMatchInlineSnapshot(`
Array [
Object {
"bundle": <Bundle>,
"type": "bundle cached",
},
]
`);
});
it('emits "bundle not cached" event when cacheKey is up to date but caching is disabled in config', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [],
pluginPaths: [Path.resolve(MOCK_REPO_DIR, 'plugins/foo')],
maxWorkerCount: 1,
cache: false,
});
const [bundle] = config.bundles;
const optimizerCacheKey = 'optimizerCacheKey';
const files = [
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/ext.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/index.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/lib.ts'),
];
const mtimes = await getMtimes(files);
const cacheKey = bundle.createCacheKey(files, mtimes);
bundle.cache.set({
cacheKey,
optimizerCacheKey,
files,
moduleCount: files.length,
});
const cacheEvents = await getBundleCacheEvent$(config, optimizerCacheKey)
.pipe(toArray())
.toPromise();
expect(cacheEvents).toMatchInlineSnapshot(`
Array [
Object {
"bundle": <Bundle>,
"reason": "cache disabled",
"type": "bundle not cached",
},
]
`);
});
it('emits "bundle not cached" event when optimizerCacheKey is missing', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [],
pluginPaths: [Path.resolve(MOCK_REPO_DIR, 'plugins/foo')],
maxWorkerCount: 1,
});
const [bundle] = config.bundles;
const optimizerCacheKey = 'optimizerCacheKey';
const files = [
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/ext.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/index.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/lib.ts'),
];
const mtimes = await getMtimes(files);
const cacheKey = bundle.createCacheKey(files, mtimes);
bundle.cache.set({
cacheKey,
optimizerCacheKey: undefined,
files,
moduleCount: files.length,
});
const cacheEvents = await getBundleCacheEvent$(config, optimizerCacheKey)
.pipe(toArray())
.toPromise();
expect(cacheEvents).toMatchInlineSnapshot(`
Array [
Object {
"bundle": <Bundle>,
"reason": "missing optimizer cache key",
"type": "bundle not cached",
},
]
`);
});
it('emits "bundle not cached" event when optimizerCacheKey is outdated, includes diff', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [],
pluginPaths: [Path.resolve(MOCK_REPO_DIR, 'plugins/foo')],
maxWorkerCount: 1,
});
const [bundle] = config.bundles;
const optimizerCacheKey = 'optimizerCacheKey';
const files = [
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/ext.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/index.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/lib.ts'),
];
const mtimes = await getMtimes(files);
const cacheKey = bundle.createCacheKey(files, mtimes);
bundle.cache.set({
cacheKey,
optimizerCacheKey: 'old',
files,
moduleCount: files.length,
});
const cacheEvents = await getBundleCacheEvent$(config, optimizerCacheKey)
.pipe(toArray())
.toPromise();
expect(cacheEvents).toMatchInlineSnapshot(`
Array [
Object {
"bundle": <Bundle>,
"diff": "- Expected
+ Received
- old
+ optimizerCacheKey",
"reason": "optimizer cache key mismatch",
"type": "bundle not cached",
},
]
`);
});
it('emits "bundle not cached" event when cacheKey is missing', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [],
pluginPaths: [Path.resolve(MOCK_REPO_DIR, 'plugins/foo')],
maxWorkerCount: 1,
});
const [bundle] = config.bundles;
const optimizerCacheKey = 'optimizerCacheKey';
const files = [
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/ext.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/index.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/lib.ts'),
];
bundle.cache.set({
cacheKey: undefined,
optimizerCacheKey,
files,
moduleCount: files.length,
});
const cacheEvents = await getBundleCacheEvent$(config, optimizerCacheKey)
.pipe(toArray())
.toPromise();
expect(cacheEvents).toMatchInlineSnapshot(`
Array [
Object {
"bundle": <Bundle>,
"reason": "missing cache key",
"type": "bundle not cached",
},
]
`);
});
it('emits "bundle not cached" event when cacheKey is outdated', async () => {
const config = OptimizerConfig.create({
repoRoot: MOCK_REPO_DIR,
pluginScanDirs: [],
pluginPaths: [Path.resolve(MOCK_REPO_DIR, 'plugins/foo')],
maxWorkerCount: 1,
});
const [bundle] = config.bundles;
const optimizerCacheKey = 'optimizerCacheKey';
const files = [
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/ext.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/index.ts'),
Path.resolve(MOCK_REPO_DIR, 'plugins/foo/public/lib.ts'),
];
bundle.cache.set({
cacheKey: 'old',
optimizerCacheKey,
files,
moduleCount: files.length,
});
jest.spyOn(bundle, 'createCacheKey').mockImplementation(() => 'new');
const cacheEvents = await getBundleCacheEvent$(config, optimizerCacheKey)
.pipe(toArray())
.toPromise();
expect(cacheEvents).toMatchInlineSnapshot(`
Array [
Object {
"bundle": <Bundle>,
"diff": "- Expected
+ Received
- old
+ new",
"reason": "cache key mismatch",
"type": "bundle not cached",
},
]
`);
});


@ -0,0 +1,143 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { map } from 'rxjs/operators';
import ActualWatchpack from 'watchpack';
import { Bundle, ascending } from '../common';
import { watchBundlesForChanges$ } from '../optimizer/watch_bundles_for_changes';
import { BundleCacheEvent } from '../optimizer';
jest.mock('fs');
jest.mock('watchpack');
const MockWatchPack: jest.MockedClass<typeof ActualWatchpack> = jest.requireMock('watchpack');
const bundleEntryPath = (bundle: Bundle) => `${bundle.contextDir}/${bundle.entry}`;
const makeTestBundle = (id: string) => {
const bundle = new Bundle({
type: 'plugin',
id,
contextDir: `/repo/plugins/${id}/public`,
entry: 'index.ts',
outputDir: `/repo/plugins/${id}/target/public`,
sourceRoot: `/repo`,
});
bundle.cache.set({
cacheKey: 'abc',
moduleCount: 1,
optimizerCacheKey: 'abc',
files: [bundleEntryPath(bundle)],
});
return bundle;
};
const FOO_BUNDLE = makeTestBundle('foo');
const BAR_BUNDLE = makeTestBundle('bar');
const BAZ_BUNDLE = makeTestBundle('baz');
const BOX_BUNDLE = makeTestBundle('box');
const CAR_BUNDLE = makeTestBundle('car');
const BUNDLES = [FOO_BUNDLE, BAR_BUNDLE, BAZ_BUNDLE, BOX_BUNDLE, CAR_BUNDLE];
const bundleCacheEvent$ = Rx.from(BUNDLES).pipe(
map(
(bundle): BundleCacheEvent => ({
type: 'bundle cached',
bundle,
})
)
);
beforeEach(async () => {
jest.useFakeTimers();
});
afterEach(async () => {
jest.useRealTimers();
});
it('notifies of changes and completes once all bundles have changed', async () => {
expect.assertions(18);
const promise = watchBundlesForChanges$(bundleCacheEvent$, Date.now())
.pipe(
map((event, i) => {
// each time we trigger a change event we get a 'changes detected' event
if (i === 0 || i === 2 || i === 4 || i === 6) {
expect(event).toHaveProperty('type', 'changes detected');
return;
}
expect(event).toHaveProperty('type', 'changes');
// to teach TS what we're doing
if (event.type !== 'changes') {
return;
}
// first we change foo and bar, and after 1 second the batched change comes through
if (i === 1) {
expect(event.bundles).toHaveLength(2);
const [bar, foo] = event.bundles.sort(ascending(b => b.id));
expect(bar).toHaveProperty('id', 'bar');
expect(foo).toHaveProperty('id', 'foo');
}
// next we change just the baz bundle and it is reported on its own
if (i === 3) {
expect(event.bundles).toHaveLength(1);
expect(event.bundles[0]).toHaveProperty('id', 'baz');
}
// finally we change box and car together
if (i === 5) {
expect(event.bundles).toHaveLength(2);
const [box, car] = event.bundles.sort(ascending(b => b.id));
expect(box).toHaveProperty('id', 'box');
expect(car).toHaveProperty('id', 'car');
}
})
)
.toPromise();
expect(MockWatchPack.mock.instances).toHaveLength(1);
const [watcher] = (MockWatchPack.mock.instances as any) as Array<jest.Mocked<ActualWatchpack>>;
expect(watcher.on).toHaveBeenCalledTimes(1);
expect(watcher.on).toHaveBeenCalledWith('change', expect.any(Function));
const [, changeListener] = watcher.on.mock.calls[0];
// foo and bar are changed within 1 second of each other so they are batched
changeListener(bundleEntryPath(FOO_BUNDLE), 'modified');
jest.advanceTimersByTime(900);
changeListener(bundleEntryPath(BAR_BUNDLE), 'modified');
jest.advanceTimersByTime(1000);
// baz is the only change in 1sec so it is on its own
changeListener(bundleEntryPath(BAZ_BUNDLE), 'modified');
jest.advanceTimersByTime(1000);
// finish by changing box and car
changeListener(bundleEntryPath(BOX_BUNDLE), 'deleted');
changeListener(bundleEntryPath(CAR_BUNDLE), 'deleted');
jest.advanceTimersByTime(1000);
await expect(promise).resolves.toEqual(undefined);
});


@ -0,0 +1,137 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { inspect } from 'util';
import { ToolingLog } from '@kbn/dev-utils';
import { tap } from 'rxjs/operators';
import { OptimizerConfig } from './optimizer';
import { OptimizerUpdate$ } from './run_optimizer';
import { CompilerMsg, pipeClosure } from './common';
export function logOptimizerState(log: ToolingLog, config: OptimizerConfig) {
return pipeClosure((update$: OptimizerUpdate$) => {
const bundleStates = new Map<string, CompilerMsg['type']>();
const bundlesThatWereBuilt = new Set<string>();
let loggedInit = false;
return update$.pipe(
tap(update => {
const { event, state } = update;
if (event?.type === 'worker stdio') {
const chunk = event.chunk.toString('utf8');
log.warning(
`worker`,
event.stream,
chunk.slice(0, chunk.length - (chunk.endsWith('\n') ? 1 : 0))
);
}
if (event?.type === 'bundle not cached') {
log.debug(
`[${event.bundle.id}] bundle not cached because [${event.reason}]${
event.diff ? `, diff:\n${event.diff}` : ''
}`
);
}
if (event?.type === 'bundle cached') {
log.debug(`[${event.bundle.id}] bundle cached`);
}
if (event?.type === 'worker started') {
let moduleCount = 0;
for (const bundle of event.bundles) {
moduleCount += bundle.cache.getModuleCount() ?? NaN;
}
const mcString = isFinite(moduleCount) ? String(moduleCount) : '?';
const bcString = String(event.bundles.length);
log.info(`starting worker [${bcString} bundles, ${mcString} modules]`);
}
if (state.phase === 'reallocating') {
log.debug(`changes detected...`);
return;
}
if (state.phase === 'initialized') {
if (!loggedInit) {
loggedInit = true;
log.info(`initialized, ${state.offlineBundles.length} bundles cached`);
}
if (state.onlineBundles.length === 0) {
log.success(`all bundles cached, success after ${state.durSec}`);
}
return;
}
for (const { bundleId: id, type } of state.compilerStates) {
const prevBundleState = bundleStates.get(id);
if (type === prevBundleState) {
continue;
}
if (type === 'running') {
bundlesThatWereBuilt.add(id);
}
bundleStates.set(id, type);
log.debug(
`[${id}] state = "${type}"${type !== 'running' ? ` after ${state.durSec} sec` : ''}`
);
}
if (state.phase === 'running' || state.phase === 'initializing') {
return true;
}
if (state.phase === 'issue') {
log.error(`webpack compile errors`);
log.indent(4);
for (const b of state.compilerStates) {
if (b.type === 'compiler issue') {
log.error(`[${b.bundleId}] build`);
log.indent(4);
log.error(b.failure);
log.indent(-4);
}
}
log.indent(-4);
return true;
}
if (state.phase === 'success') {
const buildCount = bundlesThatWereBuilt.size;
bundlesThatWereBuilt.clear();
log.success(
`${buildCount} bundles compiled successfully after ${state.durSec} sec` +
(config.watch ? ', watching for changes' : '')
);
return true;
}
throw new Error(`unhandled optimizer message: ${inspect(update)}`);
})
);
});
}


@ -0,0 +1,226 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
jest.mock('fs');
import { Bundle } from '../common';
import { assignBundlesToWorkers, Assignments } from './assign_bundles_to_workers';
const hasModuleCount = (b: Bundle) => b.cache.getModuleCount() !== undefined;
const noModuleCount = (b: Bundle) => b.cache.getModuleCount() === undefined;
const summarizeBundles = (w: Assignments) =>
[
w.moduleCount ? `${w.moduleCount} known modules` : '',
w.newBundles ? `${w.newBundles} new bundles` : '',
]
.filter(Boolean)
.join(', ');
const readConfigs = (workers: Assignments[]) =>
workers.map(
(w, i) => `worker ${i} (${summarizeBundles(w)}) => ${w.bundles.map(b => b.id).join(',')}`
);
const assertReturnVal = (workers: Assignments[]) => {
expect(workers).toBeInstanceOf(Array);
for (const worker of workers) {
expect(worker).toEqual({
moduleCount: expect.any(Number),
newBundles: expect.any(Number),
bundles: expect.any(Array),
});
expect(worker.bundles.filter(noModuleCount).length).toBe(worker.newBundles);
expect(
worker.bundles.filter(hasModuleCount).reduce((sum, b) => sum + b.cache.getModuleCount()!, 0)
).toBe(worker.moduleCount);
}
};
const testBundle = (id: string) =>
new Bundle({
contextDir: `/repo/plugin/${id}/public`,
entry: 'index.ts',
id,
outputDir: `/repo/plugins/${id}/target/public`,
sourceRoot: `/repo`,
type: 'plugin',
});
const getBundles = ({
withCounts = 0,
withoutCounts = 0,
}: {
withCounts?: number;
withoutCounts?: number;
}) => {
const bundles: Bundle[] = [];
for (let i = 1; i <= withCounts; i++) {
const id = `foo${i}`;
const bundle = testBundle(id);
bundle.cache.set({ moduleCount: i % 5 === 0 ? i * 10 : i });
bundles.push(bundle);
}
for (let i = 0; i < withoutCounts; i++) {
const id = `bar${i}`;
bundles.push(testBundle(id));
}
return bundles;
};
it('creates less workers if maxWorkersCount is larger than bundle count', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 2 }), 10);
assertReturnVal(workers);
expect(workers.length).toBe(2);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (1 known modules) => foo1",
"worker 1 (2 known modules) => foo2",
]
`);
});
it('assigns unknown plugin counts as evenly as possible', () => {
const workers = assignBundlesToWorkers(getBundles({ withoutCounts: 10 }), 3);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (4 new bundles) => bar9,bar6,bar3,bar0",
"worker 1 (3 new bundles) => bar8,bar5,bar2",
"worker 2 (3 new bundles) => bar7,bar4,bar1",
]
`);
});
it('distributes bundles without module counts evenly after assigning modules with known counts evenly', () => {
const bundles = getBundles({ withCounts: 16, withoutCounts: 10 });
const workers = assignBundlesToWorkers(bundles, 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (78 known modules, 3 new bundles) => foo5,foo11,foo8,foo6,foo2,foo1,bar9,bar5,bar1",
"worker 1 (78 known modules, 3 new bundles) => foo16,foo14,foo13,foo12,foo9,foo7,foo4,foo3,bar8,bar4,bar0",
"worker 2 (100 known modules, 2 new bundles) => foo10,bar7,bar3",
"worker 3 (150 known modules, 2 new bundles) => foo15,bar6,bar2",
]
`);
});
it('distributes 2 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 2 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (1 known modules) => foo1",
"worker 1 (2 known modules) => foo2",
]
`);
});
it('distributes 5 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 5 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (3 known modules) => foo2,foo1",
"worker 1 (3 known modules) => foo3",
"worker 2 (4 known modules) => foo4",
"worker 3 (50 known modules) => foo5",
]
`);
});
it('distributes 10 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 10 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (20 known modules) => foo9,foo6,foo4,foo1",
"worker 1 (20 known modules) => foo8,foo7,foo3,foo2",
"worker 2 (50 known modules) => foo5",
"worker 3 (100 known modules) => foo10",
]
`);
});
it('distributes 15 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 15 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (70 known modules) => foo14,foo13,foo12,foo11,foo9,foo6,foo4,foo1",
"worker 1 (70 known modules) => foo5,foo8,foo7,foo3,foo2",
"worker 2 (100 known modules) => foo10",
"worker 3 (150 known modules) => foo15",
]
`);
});
it('distributes 20 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 20 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (153 known modules) => foo15,foo3",
"worker 1 (153 known modules) => foo10,foo16,foo13,foo11,foo7,foo6",
"worker 2 (154 known modules) => foo5,foo19,foo18,foo17,foo14,foo12,foo9,foo8,foo4,foo2,foo1",
"worker 3 (200 known modules) => foo20",
]
`);
});
it('distributes 25 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 25 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (250 known modules) => foo20,foo17,foo13,foo9,foo8,foo2,foo1",
"worker 1 (250 known modules) => foo15,foo23,foo22,foo18,foo16,foo11,foo7,foo3",
"worker 2 (250 known modules) => foo10,foo5,foo24,foo21,foo19,foo14,foo12,foo6,foo4",
"worker 3 (250 known modules) => foo25",
]
`);
});
it('distributes 30 bundles to workers evenly', () => {
const workers = assignBundlesToWorkers(getBundles({ withCounts: 30 }), 4);
assertReturnVal(workers);
expect(readConfigs(workers)).toMatchInlineSnapshot(`
Array [
"worker 0 (352 known modules) => foo30,foo22,foo14,foo11,foo4,foo1",
"worker 1 (352 known modules) => foo15,foo10,foo28,foo24,foo19,foo16,foo9,foo6",
"worker 2 (353 known modules) => foo20,foo5,foo29,foo23,foo21,foo13,foo12,foo3,foo2",
"worker 3 (353 known modules) => foo25,foo27,foo26,foo18,foo17,foo8,foo7",
]
`);
});


@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Bundle, descending, ascending } from '../common';
// helper type used inside assignBundlesToWorkers() so we don't
// have to recalculate module counts over and over
export interface Assignments {
moduleCount: number;
newBundles: number;
bundles: Bundle[];
}
/** assign a bundle to a worker and update the worker's workload totals */
const assignBundle = (worker: Assignments, bundle: Bundle) => {
const moduleCount = bundle.cache.getModuleCount();
if (moduleCount !== undefined) {
worker.moduleCount += moduleCount;
} else {
worker.newBundles += 1;
}
worker.bundles.push(bundle);
};
/**
* Assign each bundle to one of the workers we will use to build them.
*
* We need to evenly assign bundles to workers so that each worker has
* about the same amount of work to do. We do this by tracking the module count
* of each bundle in its cache and measuring the overall workload of a worker
* by the sum of modules it will have to compile across all of its bundles.
*
* We only know the module count of a bundle after its first build, so when we
* encounter bundles without a module count in the cache we assign them to
* workers round-robin, starting with the workers which have the smallest
* number of modules to build.
*/
export function assignBundlesToWorkers(bundles: Bundle[], maxWorkerCount: number) {
const workerCount = Math.min(bundles.length, maxWorkerCount);
const workers: Assignments[] = [];
for (let i = 0; i < workerCount; i++) {
workers.push({
moduleCount: 0,
newBundles: 0,
bundles: [],
});
}
/**
* separate the bundles which do and don't have module
* counts and sort them by [moduleCount, id]
*/
const bundlesWithCountsDesc = bundles
.filter(b => b.cache.getModuleCount() !== undefined)
.sort(
descending(
b => b.cache.getModuleCount(),
b => b.id
)
);
const bundlesWithoutModuleCounts = bundles
.filter(b => b.cache.getModuleCount() === undefined)
.sort(descending(b => b.id));
/**
* assign largest bundles to the smallest worker until it is
* no longer the smallest worker and repeat until all bundles
* with module counts are assigned
*/
while (bundlesWithCountsDesc.length) {
const [smallestWorker, nextSmallestWorker] = workers.sort(ascending(w => w.moduleCount));
while (!nextSmallestWorker || smallestWorker.moduleCount <= nextSmallestWorker.moduleCount) {
const bundle = bundlesWithCountsDesc.shift();
if (!bundle) {
break;
}
assignBundle(smallestWorker, bundle);
}
}
/**
* assign bundles without module counts to workers round-robin
* starting with the smallest workers
*/
workers.sort(ascending(w => w.moduleCount));
while (bundlesWithoutModuleCounts.length) {
for (const worker of workers) {
const bundle = bundlesWithoutModuleCounts.shift();
if (!bundle) {
break;
}
assignBundle(worker, bundle);
}
}
return workers;
}
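
A minimal usage sketch (not part of the diff): the Bundle spec shape mirrors the one produced by getBundles() further down, and the ids/paths here are made up. Bundles without a cached module count are spread round-robin across the least-loaded workers.

import { Bundle } from '../common';
import { assignBundlesToWorkers } from './assign_bundles_to_workers';

// hypothetical bundles; fresh bundles have no cached module count yet
const bundles = ['foo', 'bar', 'baz'].map(
  id =>
    new Bundle({
      type: 'plugin',
      id,
      entry: './public/index',
      sourceRoot: '/repo',
      contextDir: `/repo/plugins/${id}`,
      outputDir: `/repo/plugins/${id}/target/public`,
    })
);

for (const worker of assignBundlesToWorkers(bundles, 2)) {
  // each Assignments entry tracks the workload expected for that worker
  console.log(worker.bundles.map(b => b.id), worker.moduleCount, worker.newBundles);
}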

View file

@ -0,0 +1,132 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { mergeAll } from 'rxjs/operators';
import { Bundle } from '../common';
import { OptimizerConfig } from './optimizer_config';
import { getMtimes } from './get_mtimes';
import { diffCacheKey } from './cache_keys';
export type BundleCacheEvent = BundleNotCachedEvent | BundleCachedEvent;
export interface BundleNotCachedEvent {
type: 'bundle not cached';
reason:
| 'missing optimizer cache key'
| 'optimizer cache key mismatch'
| 'missing cache key'
| 'cache key mismatch'
| 'cache disabled';
diff?: string;
bundle: Bundle;
}
export interface BundleCachedEvent {
type: 'bundle cached';
bundle: Bundle;
}
export function getBundleCacheEvent$(
config: OptimizerConfig,
optimizerCacheKey: unknown
): Rx.Observable<BundleCacheEvent> {
return Rx.defer(async () => {
const events: BundleCacheEvent[] = [];
const eligibleBundles: Bundle[] = [];
for (const bundle of config.bundles) {
if (!config.cache) {
events.push({
type: 'bundle not cached',
reason: 'cache disabled',
bundle,
});
continue;
}
const cachedOptimizerCacheKeys = bundle.cache.getOptimizerCacheKey();
if (!cachedOptimizerCacheKeys) {
events.push({
type: 'bundle not cached',
reason: 'missing optimizer cache key',
bundle,
});
continue;
}
const optimizerCacheKeyDiff = diffCacheKey(cachedOptimizerCacheKeys, optimizerCacheKey);
if (optimizerCacheKeyDiff !== undefined) {
events.push({
type: 'bundle not cached',
reason: 'optimizer cache key mismatch',
diff: optimizerCacheKeyDiff,
bundle,
});
continue;
}
if (!bundle.cache.getCacheKey()) {
events.push({
type: 'bundle not cached',
reason: 'missing cache key',
bundle,
});
continue;
}
eligibleBundles.push(bundle);
}
const mtimes = await getMtimes(
new Set<string>(
eligibleBundles.reduce(
(acc: string[], bundle) => [...acc, ...(bundle.cache.getReferencedFiles() || [])],
[]
)
)
);
for (const bundle of eligibleBundles) {
const diff = diffCacheKey(
bundle.cache.getCacheKey(),
bundle.createCacheKey(bundle.cache.getReferencedFiles() || [], mtimes)
);
if (diff) {
events.push({
type: 'bundle not cached',
reason: 'cache key mismatch',
diff,
bundle,
});
continue;
}
events.push({
type: 'bundle cached',
bundle,
});
}
return events;
}).pipe(mergeAll());
}
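
A rough consumption sketch (an assumption, not the PR's actual wiring): collect the cache events into two lists so a caller can see which bundles can be reused and which need a rebuild.

import { toArray } from 'rxjs/operators';
import { OptimizerConfig } from './optimizer_config';
import { getBundleCacheEvent$, BundleCacheEvent } from './bundle_cache';

async function splitByCacheState(config: OptimizerConfig, optimizerCacheKey: unknown) {
  const events: BundleCacheEvent[] = await getBundleCacheEvent$(config, optimizerCacheKey)
    .pipe(toArray())
    .toPromise();

  return {
    cached: events.filter(e => e.type === 'bundle cached').map(e => e.bundle),
    notCached: events.filter(e => e.type === 'bundle not cached').map(e => e.bundle),
  };
}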

View file

@ -0,0 +1,178 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import jestDiff from 'jest-diff';
import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
import { reformatJestDiff, getOptimizerCacheKey, diffCacheKey } from './cache_keys';
import { OptimizerConfig } from './optimizer_config';
jest.mock('./get_changes.ts');
jest.mock('execa');
expect.addSnapshotSerializer(createAbsolutePathSerializer());
jest.requireMock('execa').mockImplementation(async (cmd: string, args: string[], opts: object) => {
expect(cmd).toBe('git');
expect(args).toEqual([
'log',
'-n',
'1',
'--pretty=format:%H',
'--',
expect.stringContaining('kbn-optimizer'),
]);
expect(opts).toEqual({
cwd: REPO_ROOT,
});
return {
stdout: '<last commit sha>',
};
});
jest.requireMock('./get_changes.ts').getChanges.mockImplementation(
async () =>
new Map([
['/foo/bar/a', 'modified'],
['/foo/bar/b', 'modified'],
['/foo/bar/c', 'deleted'],
])
);
describe('getOptimizerCacheKey()', () => {
it('uses latest commit and changes files to create unique value', async () => {
const config = OptimizerConfig.create({
repoRoot: REPO_ROOT,
});
await expect(getOptimizerCacheKey(config)).resolves.toMatchInlineSnapshot(`
Object {
"deletedPaths": Array [
"/foo/bar/c",
],
"lastCommit": "<last commit sha>",
"modifiedPaths": Object {},
"workerConfig": Object {
"browserslistEnv": "dev",
"cache": true,
"dist": false,
"optimizerCacheKey": "♻",
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
},
}
`);
});
});
describe('diffCacheKey()', () => {
it('returns undefined if values are equal', () => {
expect(diffCacheKey('1', '1')).toBe(undefined);
expect(diffCacheKey(1, 1)).toBe(undefined);
expect(diffCacheKey(['1', '2', { a: 'b' }], ['1', '2', { a: 'b' }])).toBe(undefined);
expect(
diffCacheKey(
{
a: '1',
b: '2',
},
{
b: '2',
a: '1',
}
)
).toBe(undefined);
});
it('returns a diff if the values are different', () => {
expect(diffCacheKey(['1', '2', { a: 'b' }], ['1', '2', { b: 'a' }])).toMatchInlineSnapshot(`
"- Expected
+ Received
 Array [
 \\"1\\",
 \\"2\\",
 Object {
- \\"a\\": \\"b\\",
+ \\"b\\": \\"a\\",
 },
 ]"
`);
expect(
diffCacheKey(
{
a: '1',
b: '1',
},
{
b: '2',
a: '2',
}
)
).toMatchInlineSnapshot(`
"- Expected
+ Received
 Object {
- \\"a\\": \\"1\\",
- \\"b\\": \\"1\\",
+ \\"a\\": \\"2\\",
+ \\"b\\": \\"2\\",
 }"
`);
});
});
describe('reformatJestDiff()', () => {
it('reformats large jestDiff output to focus on the changed lines', () => {
const diff = jestDiff(
{
a: ['1', '1', '1', '1', '1', '1', '1', '2', '1', '1', '1', '1', '1', '1', '1', '1', '1'],
},
{
b: ['1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '2', '1', '1', '1', '1'],
}
);
expect(reformatJestDiff(diff)).toMatchInlineSnapshot(`
"- Expected
+ Received
 Object {
- \\"a\\": Array [
+ \\"b\\": Array [
 \\"1\\",
 \\"1\\",
 ...
 \\"1\\",
 \\"1\\",
- \\"2\\",
 \\"1\\",
 \\"1\\",
 ...
 \\"1\\",
 \\"1\\",
+ \\"2\\",
 \\"1\\",
 \\"1\\",
 ..."
`);
});
});

View file

@ -0,0 +1,155 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import Chalk from 'chalk';
import execa from 'execa';
import { REPO_ROOT } from '@kbn/dev-utils';
import stripAnsi from 'strip-ansi';
import jestDiff from 'jest-diff';
import jsonStable from 'json-stable-stringify';
import { ascending, WorkerConfig } from '../common';
import { getMtimes } from './get_mtimes';
import { getChanges } from './get_changes';
import { OptimizerConfig } from './optimizer_config';
const OPTIMIZER_DIR = Path.dirname(require.resolve('../../package.json'));
const RELATIVE_DIR = Path.relative(REPO_ROOT, OPTIMIZER_DIR);
export function diffCacheKey(expected?: unknown, actual?: unknown) {
if (jsonStable(expected) === jsonStable(actual)) {
return;
}
return reformatJestDiff(jestDiff(expected, actual));
}
export function reformatJestDiff(diff: string | null) {
const diffLines = diff?.split('\n') || [];
if (
diffLines.length < 4 ||
stripAnsi(diffLines[0]) !== '- Expected' ||
stripAnsi(diffLines[1]) !== '+ Received'
) {
throw new Error(`unexpected diff format: ${diff}`);
}
const outputLines = [diffLines.shift(), diffLines.shift(), diffLines.shift()];
/**
* buffer which holds between 0 and 5 lines from the diff which aren't additions or
* deletions. The first three are the first three lines seen since the buffer was last
* flushed and the last two are the most recent two lines seen.
*
* When the buffer is flushed before a change line we write its first two lines to the
* output, an ellipsis in place of the middle line if the buffer is full, and then the
* last two lines.
*
* At the very end we write the first two remaining context lines, followed by an
* ellipsis if there are any lines beyond them.
*/
const contextBuffer: string[] = [];
/**
* Convert a line to an empty line with an ellipsis placed where the text on that line starts
*/
const toElipses = (line: string) => {
return stripAnsi(line).replace(/^(\s*).*/, '$1...');
};
while (diffLines.length) {
const line = diffLines.shift()!;
const plainLine = stripAnsi(line);
if (plainLine.startsWith('+ ') || plainLine.startsWith('- ')) {
// write contextBuffer to the outputLines
if (contextBuffer.length) {
outputLines.push(
...contextBuffer.slice(0, 2),
...(contextBuffer.length === 5
? [Chalk.dim(toElipses(contextBuffer[2])), ...contextBuffer.slice(3, 5)]
: contextBuffer.slice(2, 4))
);
contextBuffer.length = 0;
}
// add this line to the outputLines
outputLines.push(line);
} else {
// update the contextBuffer with this line which doesn't represent a change
if (contextBuffer.length === 5) {
contextBuffer[3] = contextBuffer[4];
contextBuffer[4] = line;
} else {
contextBuffer.push(line);
}
}
}
if (contextBuffer.length) {
outputLines.push(
...contextBuffer.slice(0, 2),
...(contextBuffer.length > 2 ? [Chalk.dim(toElipses(contextBuffer[2]))] : [])
);
}
return outputLines.join('\n');
}
export interface OptimizerCacheKey {
readonly lastCommit: string | undefined;
readonly workerConfig: WorkerConfig;
readonly deletedPaths: string[];
readonly modifiedPaths: Record<string, number>;
}
async function getLastCommit() {
const { stdout } = await execa(
'git',
['log', '-n', '1', '--pretty=format:%H', '--', RELATIVE_DIR],
{
cwd: REPO_ROOT,
}
);
return stdout.trim() || undefined;
}
export async function getOptimizerCacheKey(config: OptimizerConfig) {
const changes = Array.from((await getChanges(OPTIMIZER_DIR)).entries());
const cacheKeys: OptimizerCacheKey = {
lastCommit: await getLastCommit(),
workerConfig: config.getWorkerConfig('♻'),
deletedPaths: changes.filter(e => e[1] === 'deleted').map(e => e[0]),
modifiedPaths: {} as Record<string, number>,
};
const modified = changes.filter(e => e[1] === 'modified').map(e => e[0]);
const mtimes = await getMtimes(modified);
for (const [path, mtime] of Array.from(mtimes.entries()).sort(ascending(e => e[0]))) {
if (typeof mtime === 'number') {
cacheKeys.modifiedPaths[path] = mtime;
}
}
return cacheKeys;
}

View file

@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
import { getBundles } from './get_bundles';
expect.addSnapshotSerializer(createAbsolutePathSerializer('/repo'));
it('returns a bundle for each plugin', () => {
expect(
getBundles(
[
{
directory: '/repo/plugins/foo',
id: 'foo',
isUiPlugin: true,
},
{
directory: '/repo/plugins/bar',
id: 'bar',
isUiPlugin: false,
},
{
directory: '/outside/of/repo/plugins/baz',
id: 'baz',
isUiPlugin: true,
},
],
'/repo'
).map(b => b.toSpec())
).toMatchInlineSnapshot(`
Array [
Object {
"contextDir": <absolute path>/plugins/foo,
"entry": "./public/index",
"id": "foo",
"outputDir": <absolute path>/plugins/foo/target/public,
"sourceRoot": <absolute path>,
"type": "plugin",
},
Object {
"contextDir": "/outside/of/repo/plugins/baz",
"entry": "./public/index",
"id": "baz",
"outputDir": "/outside/of/repo/plugins/baz/target/public",
"sourceRoot": <absolute path>,
"type": "plugin",
},
]
`);
});

View file

@ -17,18 +17,24 @@
* under the License.
*/
-import _ from 'lodash';
-const log = _.restParam(function(color, label, rest1) {
-console.log.apply(console, [color(` ${_.trim(label)} `)].concat(rest1));
-});
-import { green, yellow, red } from './color';
-export default class Log {
-constructor(quiet, silent) {
-this.good = quiet || silent ? _.noop : _.partial(log, green);
-this.warn = quiet || silent ? _.noop : _.partial(log, yellow);
-this.bad = silent ? _.noop : _.partial(log, red);
-}
+import Path from 'path';
+import { Bundle } from '../common';
+import { KibanaPlatformPlugin } from './kibana_platform_plugins';
+export function getBundles(plugins: KibanaPlatformPlugin[], repoRoot: string) {
+return plugins
+.filter(p => p.isUiPlugin)
+.map(
+p =>
+new Bundle({
+type: 'plugin',
+id: p.id,
+entry: './public/index',
+sourceRoot: repoRoot,
+contextDir: p.directory,
+outputDir: Path.resolve(p.directory, 'target/public'),
+})
+);
+}

View file

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
jest.mock('execa');
import { getChanges } from './get_changes';
const execa: jest.Mock = jest.requireMock('execa');
it('parses git ls-files output', async () => {
expect.assertions(4);
execa.mockImplementation((cmd, args, options) => {
expect(cmd).toBe('git');
expect(args).toEqual(['ls-files', '-dmt', '--', '/foo/bar/x']);
expect(options).toEqual({
cwd: '/foo/bar/x',
});
return {
stdout: [
'C kbn-optimizer/package.json',
'C kbn-optimizer/src/common/bundle.ts',
'R kbn-optimizer/src/common/bundles.ts',
'C kbn-optimizer/src/common/bundles.ts',
'R kbn-optimizer/src/get_bundle_definitions.test.ts',
'C kbn-optimizer/src/get_bundle_definitions.test.ts',
].join('\n'),
};
});
await expect(getChanges('/foo/bar/x')).resolves.toMatchInlineSnapshot(`
Map {
"/foo/bar/x/kbn-optimizer/package.json" => "modified",
"/foo/bar/x/kbn-optimizer/src/common/bundle.ts" => "modified",
"/foo/bar/x/kbn-optimizer/src/common/bundles.ts" => "deleted",
"/foo/bar/x/kbn-optimizer/src/get_bundle_definitions.test.ts" => "deleted",
}
`);
});

View file

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import execa from 'execa';
export type Changes = Map<string, 'modified' | 'deleted'>;
/**
* get the modified and deleted paths under the given directory, according to `git ls-files`
*/
export async function getChanges(dir: string) {
const { stdout } = await execa('git', ['ls-files', '-dmt', '--', dir], {
cwd: dir,
});
const changes: Changes = new Map();
const output = stdout.trim();
if (output) {
for (const line of output.split('\n')) {
const [tag, ...pathParts] = line.trim().split(' ');
const path = Path.resolve(dir, pathParts.join(' '));
switch (tag) {
case 'M':
case 'C':
// for some reason ls-files returns deleted files as both deleted
// and modified, so make sure not to overwrite changes already
// tracked as "deleted"
if (changes.get(path) !== 'deleted') {
changes.set(path, 'modified');
}
break;
case 'R':
changes.set(path, 'deleted');
break;
default:
throw new Error(`unexpected path status ${tag} for path ${path}`);
}
}
}
return changes;
}

View file

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
jest.mock('fs');
import { getMtimes } from './get_mtimes';
const { stat }: { stat: jest.Mock } = jest.requireMock('fs');
it('returns mtimes Map', async () => {
stat.mockImplementation((path, cb) => {
if (path.includes('missing')) {
const error = new Error('file not found');
(error as any).code = 'ENOENT';
cb(error);
} else {
cb(null, {
mtimeMs: 1234,
});
}
});
await expect(getMtimes(['/foo/bar', '/foo/missing', '/foo/baz', '/foo/bar'])).resolves
.toMatchInlineSnapshot(`
Map {
"/foo/bar" => 1234,
"/foo/baz" => 1234,
}
`);
});

View file

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Fs from 'fs';
import * as Rx from 'rxjs';
import { mergeMap, toArray, map, catchError } from 'rxjs/operators';
const stat$ = Rx.bindNodeCallback(Fs.stat);
/**
* get mtimes of referenced paths concurrently, limit concurrency to 100
*/
export async function getMtimes(paths: Iterable<string>) {
return await Rx.from(paths)
.pipe(
// map paths to [path, mtimeMs] entries with concurrency of
// 100 at a time, ignoring missing paths
mergeMap(
path =>
stat$(path).pipe(
map(stat => [path, stat.mtimeMs] as const),
catchError((error: any) => (error?.code === 'ENOENT' ? Rx.EMPTY : Rx.throwError(error)))
),
100
),
toArray(),
map(entries => new Map(entries))
)
.toPromise();
}
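
For comparison only (an assumption, not part of the PR): the same result without RxJS, using Promise.all and ignoring ENOENT. Note this version has no concurrency limit, which is the main reason the implementation above uses mergeMap with a limit of 100.

import Fs from 'fs';
import { promisify } from 'util';

const stat = promisify(Fs.stat);

export async function getMtimesNaive(paths: Iterable<string>) {
  const mtimes = new Map<string, number>();
  await Promise.all(
    Array.from(paths).map(async path => {
      try {
        mtimes.set(path, (await stat(path)).mtimeMs);
      } catch (error) {
        if (error?.code !== 'ENOENT') {
          throw error;
        }
        // missing paths are simply left out of the map
      }
    })
  );
  return mtimes;
}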

View file

@ -0,0 +1,26 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export * from './optimizer_config';
export { WorkerStdio } from './observe_worker';
export * from './optimizer_reducer';
export * from './cache_keys';
export * from './watch_bundles_for_changes';
export * from './run_workers';
export * from './bundle_cache';

View file

@ -0,0 +1,60 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
import { findKibanaPlatformPlugins } from './kibana_platform_plugins';
expect.addSnapshotSerializer(createAbsolutePathSerializer());
const FIXTURES_PATH = Path.resolve(__dirname, '../__fixtures__');
it('parses kibana.json files of plugins found in pluginDirs', () => {
expect(
findKibanaPlatformPlugins(
[Path.resolve(FIXTURES_PATH, 'mock_repo/plugins')],
[Path.resolve(FIXTURES_PATH, 'mock_repo/test_plugins/test_baz')]
)
).toMatchInlineSnapshot(`
Array [
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/plugins/bar,
"id": "bar",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/plugins/baz,
"id": "baz",
"isUiPlugin": false,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/plugins/foo,
"id": "foo",
"isUiPlugin": true,
},
Object {
"directory": <absolute path>/packages/kbn-optimizer/src/__fixtures__/mock_repo/test_plugins/test_baz,
"id": "test_baz",
"isUiPlugin": false,
},
]
`);
});

View file

@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import globby from 'globby';
import loadJsonFile from 'load-json-file';
export interface KibanaPlatformPlugin {
readonly directory: string;
readonly id: string;
readonly isUiPlugin: boolean;
}
/**
* Helper to find the new platform plugins.
*/
export function findKibanaPlatformPlugins(scanDirs: string[], paths: string[]) {
return globby
.sync(
Array.from(
new Set([
...scanDirs.map(dir => `${dir}/*/kibana.json`),
...paths.map(path => `${path}/kibana.json`),
])
),
{
absolute: true,
}
)
.map(path =>
// globby returns absolute paths with `/` separators, even on Windows, so
// pass them through Path.resolve to normalize them for the current platform
readKibanaPlatformPlugin(Path.resolve(path))
);
}
function readKibanaPlatformPlugin(manifestPath: string): KibanaPlatformPlugin {
if (!Path.isAbsolute(manifestPath)) {
throw new TypeError('expected new platform manifest path to be absolute');
}
const manifest = loadJsonFile.sync(manifestPath);
if (!manifest || typeof manifest !== 'object' || Array.isArray(manifest)) {
throw new TypeError('expected new platform plugin manifest to be a JSON encoded object');
}
if (typeof manifest.id !== 'string') {
throw new TypeError('expected new platform plugin manifest to have a string id');
}
return {
directory: Path.dirname(manifestPath),
id: manifest.id,
isUiPlugin: !!manifest.ui,
};
}
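
For context (not part of the diff), a hypothetical kibana.json that satisfies readKibanaPlatformPlugin; only the string id and the truthiness of ui matter to the optimizer.

// /repo/plugins/foo/kibana.json (hypothetical):
//   { "id": "foo", "ui": true }
import { findKibanaPlatformPlugins } from './kibana_platform_plugins';

const plugins = findKibanaPlatformPlugins(['/repo/plugins'], []);
// => [{ directory: '/repo/plugins/foo', id: 'foo', isUiPlugin: true }]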

View file

@ -0,0 +1,199 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { fork, ChildProcess } from 'child_process';
import { Readable } from 'stream';
import { inspect } from 'util';
import * as Rx from 'rxjs';
import { map, takeUntil } from 'rxjs/operators';
import { isWorkerMsg, WorkerConfig, WorkerMsg, Bundle } from '../common';
import { OptimizerConfig } from './optimizer_config';
export interface WorkerStdio {
type: 'worker stdio';
stream: 'stdout' | 'stderr';
chunk: Buffer;
}
export interface WorkerStarted {
type: 'worker started';
bundles: Bundle[];
}
export type WorkerStatus = WorkerStdio | WorkerStarted;
interface ProcResource extends Rx.Unsubscribable {
proc: ChildProcess;
}
const isNumeric = (input: any) => String(input).match(/^[0-9]+$/);
let inspectPortCounter = 9230;
const inspectFlagIndex = process.execArgv.findIndex(flag => flag.startsWith('--inspect'));
let inspectFlag: string | undefined;
if (inspectFlagIndex !== -1) {
const argv = process.execArgv[inspectFlagIndex];
if (argv.includes('=')) {
// --inspect=port
const [flag, port] = argv.split('=');
inspectFlag = flag;
inspectPortCounter = Number.parseInt(port, 10) + 1;
} else {
// --inspect
inspectFlag = argv;
if (isNumeric(process.execArgv[inspectFlagIndex + 1])) {
// --inspect port
inspectPortCounter = Number.parseInt(process.execArgv[inspectFlagIndex + 1], 10) + 1;
}
}
}
function usingWorkerProc<T>(
config: OptimizerConfig,
workerConfig: WorkerConfig,
bundles: Bundle[],
fn: (proc: ChildProcess) => Rx.Observable<T>
) {
return Rx.using(
(): ProcResource => {
const args = [JSON.stringify(workerConfig), JSON.stringify(bundles.map(b => b.toSpec()))];
const proc = fork(require.resolve('../worker/run_worker'), args, {
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
execArgv: [
...(inspectFlag && config.inspectWorkers
? [`${inspectFlag}=${inspectPortCounter++}`]
: []),
...(config.maxWorkerCount <= 3 ? ['--max-old-space-size=2048'] : []),
],
});
return {
proc,
unsubscribe() {
proc.kill('SIGKILL');
},
};
},
resource => {
const { proc } = resource as ProcResource;
return fn(proc);
}
);
}
function observeStdio$(stream: Readable, name: WorkerStdio['stream']) {
return Rx.fromEvent<Buffer>(stream, 'data').pipe(
takeUntil(
Rx.race(
Rx.fromEvent<void>(stream, 'end'),
Rx.fromEvent<Error>(stream, 'error').pipe(
map(error => {
throw error;
})
)
)
),
map(
(chunk): WorkerStdio => ({
type: 'worker stdio',
chunk,
stream: name,
})
)
);
}
/**
* Start a worker process with the specified `workerConfig` and
* `bundles` and return an observable of the events related to
* that worker, including the messages sent to us by that worker
* and the status of the process (stdio, started).
*/
export function observeWorker(
config: OptimizerConfig,
workerConfig: WorkerConfig,
bundles: Bundle[]
): Rx.Observable<WorkerMsg | WorkerStatus> {
return usingWorkerProc(config, workerConfig, bundles, proc => {
let lastMsg: WorkerMsg;
return Rx.merge(
Rx.of({
type: 'worker started',
bundles,
}),
observeStdio$(proc.stdout, 'stdout'),
observeStdio$(proc.stderr, 'stderr'),
Rx.fromEvent<[unknown]>(proc, 'message')
.pipe(
// validate the messages from the process
map(([msg]) => {
if (!isWorkerMsg(msg)) {
throw new Error(`unexpected message from worker: ${JSON.stringify(msg)}`);
}
lastMsg = msg;
return msg;
})
)
.pipe(
takeUntil(
Rx.race(
// throw into stream on error events
Rx.fromEvent<Error>(proc, 'error').pipe(
map(error => {
throw new Error(`worker failed to spawn: ${error.message}`);
})
),
// throw into stream on unexpected exits, or emit to trigger the stream to close
Rx.fromEvent<[number | void]>(proc, 'exit').pipe(
map(([code]) => {
const terminalMsgTypes: Array<WorkerMsg['type']> = [
'compiler error',
'worker error',
];
if (!config.watch) {
terminalMsgTypes.push('compiler issue', 'compiler success');
}
// verify that this is an expected exit state
if (code === 0 && lastMsg && terminalMsgTypes.includes(lastMsg.type)) {
// emit undefined so that takeUntil completes the observable
return;
}
throw new Error(
`worker exited unexpectedly with code ${code} [last message: ${inspect(
lastMsg
)}]`
);
})
)
)
)
)
);
});
}

View file

@ -0,0 +1,408 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
jest.mock('./assign_bundles_to_workers.ts');
jest.mock('./kibana_platform_plugins.ts');
jest.mock('./get_bundles.ts');
import Path from 'path';
import Os from 'os';
import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
import { OptimizerConfig } from './optimizer_config';
jest.spyOn(Os, 'cpus').mockReturnValue(['foo'] as any);
expect.addSnapshotSerializer(createAbsolutePathSerializer());
beforeEach(() => {
delete process.env.KBN_OPTIMIZER_MAX_WORKERS;
delete process.env.KBN_OPTIMIZER_NO_CACHE;
jest.clearAllMocks();
});
describe('OptimizerConfig::parseOptions()', () => {
it('validates that repoRoot is absolute', () => {
expect(() =>
OptimizerConfig.parseOptions({ repoRoot: 'foo/bar' })
).toThrowErrorMatchingInlineSnapshot(`"repoRoot must be an absolute path"`);
});
it('validates that pluginScanDirs are absolute', () => {
expect(() =>
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: ['foo/bar'],
})
).toThrowErrorMatchingInlineSnapshot(`"pluginScanDirs must all be absolute paths"`);
});
it('validates that pluginPaths are absolute', () => {
expect(() =>
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginPaths: ['foo/bar'],
})
).toThrowErrorMatchingInlineSnapshot(`"pluginPaths must all be absolute paths"`);
});
it('validates that extraPluginScanDirs are absolute', () => {
expect(() =>
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
extraPluginScanDirs: ['foo/bar'],
})
).toThrowErrorMatchingInlineSnapshot(`"extraPluginScanDirs must all be absolute paths"`);
});
it('validates that maxWorkerCount is a number', () => {
expect(() => {
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
maxWorkerCount: NaN,
});
}).toThrowErrorMatchingInlineSnapshot(`"worker count must be a number"`);
});
it('applies defaults', () => {
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
})
).toMatchInlineSnapshot(`
Object {
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 2,
"pluginPaths": Array [],
"pluginScanDirs": Array [
<absolute path>/src/plugins,
<absolute path>/x-pack/plugins,
<absolute path>/plugins,
<absolute path>-extra,
],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
cache: false,
})
).toMatchInlineSnapshot(`
Object {
"cache": false,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 2,
"pluginPaths": Array [],
"pluginScanDirs": Array [
<absolute path>/src/plugins,
<absolute path>/x-pack/plugins,
<absolute path>/plugins,
<absolute path>-extra,
],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
examples: true,
})
).toMatchInlineSnapshot(`
Object {
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 2,
"pluginPaths": Array [],
"pluginScanDirs": Array [
<absolute path>/src/plugins,
<absolute path>/x-pack/plugins,
<absolute path>/plugins,
<absolute path>/examples,
<absolute path>-extra,
],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
oss: true,
})
).toMatchInlineSnapshot(`
Object {
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 2,
"pluginPaths": Array [],
"pluginScanDirs": Array [
<absolute path>/src/plugins,
<absolute path>/plugins,
<absolute path>-extra,
],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: [Path.resolve(REPO_ROOT, 'x/y/z'), '/outside/of/repo'],
})
).toMatchInlineSnapshot(`
Object {
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 2,
"pluginPaths": Array [],
"pluginScanDirs": Array [
<absolute path>/x/y/z,
"/outside/of/repo",
],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
process.env.KBN_OPTIMIZER_MAX_WORKERS = '100';
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: [],
})
).toMatchInlineSnapshot(`
Object {
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 100,
"pluginPaths": Array [],
"pluginScanDirs": Array [],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
process.env.KBN_OPTIMIZER_NO_CACHE = '0';
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: [],
})
).toMatchInlineSnapshot(`
Object {
"cache": false,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 100,
"pluginPaths": Array [],
"pluginScanDirs": Array [],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
process.env.KBN_OPTIMIZER_NO_CACHE = '1';
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: [],
})
).toMatchInlineSnapshot(`
Object {
"cache": false,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 100,
"pluginPaths": Array [],
"pluginScanDirs": Array [],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
process.env.KBN_OPTIMIZER_NO_CACHE = '1';
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: [],
cache: true,
})
).toMatchInlineSnapshot(`
Object {
"cache": false,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 100,
"pluginPaths": Array [],
"pluginScanDirs": Array [],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
delete process.env.KBN_OPTIMIZER_NO_CACHE;
expect(
OptimizerConfig.parseOptions({
repoRoot: REPO_ROOT,
pluginScanDirs: [],
cache: true,
})
).toMatchInlineSnapshot(`
Object {
"cache": true,
"dist": false,
"inspectWorkers": false,
"maxWorkerCount": 100,
"pluginPaths": Array [],
"pluginScanDirs": Array [],
"profileWebpack": false,
"repoRoot": <absolute path>,
"watch": false,
}
`);
});
});
/**
* NOTE: OptimizerConfig.create() mostly just delegates to other functions, so we mock each
* of their return values (including OptimizerConfig.parseOptions) with a Symbol and simply
* verify that the arguments flow to the places we expect
*/
describe('OptimizerConfig::create()', () => {
const assignBundlesToWorkers: jest.Mock = jest.requireMock('./assign_bundles_to_workers.ts')
.assignBundlesToWorkers;
const findKibanaPlatformPlugins: jest.Mock = jest.requireMock('./kibana_platform_plugins.ts')
.findKibanaPlatformPlugins;
const getBundles: jest.Mock = jest.requireMock('./get_bundles.ts').getBundles;
beforeEach(() => {
if ('mock' in OptimizerConfig.parseOptions) {
(OptimizerConfig.parseOptions as jest.Mock).mockRestore();
}
assignBundlesToWorkers.mockReturnValue([
{ config: Symbol('worker config 1') },
{ config: Symbol('worker config 2') },
]);
findKibanaPlatformPlugins.mockReturnValue(Symbol('new platform plugins'));
getBundles.mockReturnValue(Symbol('bundles'));
jest.spyOn(OptimizerConfig, 'parseOptions').mockImplementation((): any => ({
cache: Symbol('parsed cache'),
dist: Symbol('parsed dist'),
maxWorkerCount: Symbol('parsed max worker count'),
pluginPaths: Symbol('parsed plugin paths'),
pluginScanDirs: Symbol('parsed plugin scan dirs'),
repoRoot: Symbol('parsed repo root'),
watch: Symbol('parsed watch'),
inspectWorkers: Symbol('parsed inspect workers'),
profileWebpack: Symbol('parsed profile webpack'),
}));
});
it('passes parsed options to findKibanaPlatformPlugins, getBundles, and assignBundlesToWorkers', () => {
const config = OptimizerConfig.create({
repoRoot: REPO_ROOT,
});
expect(config).toMatchInlineSnapshot(`
OptimizerConfig {
"bundles": Symbol(bundles),
"cache": Symbol(parsed cache),
"dist": Symbol(parsed dist),
"inspectWorkers": Symbol(parsed inspect workers),
"maxWorkerCount": Symbol(parsed max worker count),
"plugins": Symbol(new platform plugins),
"profileWebpack": Symbol(parsed profile webpack),
"repoRoot": Symbol(parsed repo root),
"watch": Symbol(parsed watch),
}
`);
expect(findKibanaPlatformPlugins.mock).toMatchInlineSnapshot(`
Object {
"calls": Array [
Array [
Symbol(parsed plugin scan dirs),
Symbol(parsed plugin paths),
],
],
"instances": Array [
[Window],
],
"invocationCallOrder": Array [
7,
],
"results": Array [
Object {
"type": "return",
"value": Symbol(new platform plugins),
},
],
}
`);
expect(getBundles.mock).toMatchInlineSnapshot(`
Object {
"calls": Array [
Array [
Symbol(new platform plugins),
Symbol(parsed repo root),
],
],
"instances": Array [
[Window],
],
"invocationCallOrder": Array [
8,
],
"results": Array [
Object {
"type": "return",
"value": Symbol(bundles),
},
],
}
`);
});
});

View file

@ -0,0 +1,172 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import Os from 'os';
import { Bundle, WorkerConfig } from '../common';
import { findKibanaPlatformPlugins, KibanaPlatformPlugin } from './kibana_platform_plugins';
import { getBundles } from './get_bundles';
interface Options {
/** absolute path to root of the repo/build */
repoRoot: string;
/** enable to run the optimizer in watch mode */
watch?: boolean;
/** the maximum number of workers that will be created */
maxWorkerCount?: number;
/** set to false to disable writing/reading of caches */
cache?: boolean;
/** build assets suitable for use in the distributable */
dist?: boolean;
/** enable webpack profiling, writes stats.json files to the root of each plugin's output dir */
profileWebpack?: boolean;
/** set to true to inspect workers when the parent process is being inspected */
inspectWorkers?: boolean;
/** include only oss plugins in default scan dirs */
oss?: boolean;
/** include examples in default scan dirs */
examples?: boolean;
/** absolute paths to specific plugins that should be built */
pluginPaths?: string[];
/** absolute paths to directories that should be built, overrides the default scan dirs */
pluginScanDirs?: string[];
/** absolute paths that should be added to the default scan dirs */
extraPluginScanDirs?: string[];
}
interface ParsedOptions {
repoRoot: string;
watch: boolean;
maxWorkerCount: number;
profileWebpack: boolean;
cache: boolean;
dist: boolean;
pluginPaths: string[];
pluginScanDirs: string[];
inspectWorkers: boolean;
}
export class OptimizerConfig {
static parseOptions(options: Options): ParsedOptions {
const watch = !!options.watch;
const oss = !!options.oss;
const dist = !!options.dist;
const examples = !!options.examples;
const profileWebpack = !!options.profileWebpack;
const inspectWorkers = !!options.inspectWorkers;
const cache = options.cache !== false && !process.env.KBN_OPTIMIZER_NO_CACHE;
const repoRoot = options.repoRoot;
if (!Path.isAbsolute(repoRoot)) {
throw new TypeError('repoRoot must be an absolute path');
}
/**
* BEWARE: this needs to stay roughly synchronized with
* `src/core/server/config/env.ts` which determines which paths
* should be searched for plugins to load
*/
const pluginScanDirs = options.pluginScanDirs || [
Path.resolve(repoRoot, 'src/plugins'),
...(oss ? [] : [Path.resolve(repoRoot, 'x-pack/plugins')]),
Path.resolve(repoRoot, 'plugins'),
...(examples ? [Path.resolve(repoRoot, 'examples')] : []),
Path.resolve(repoRoot, '../kibana-extra'),
];
if (!pluginScanDirs.every(p => Path.isAbsolute(p))) {
throw new TypeError('pluginScanDirs must all be absolute paths');
}
for (const extraPluginScanDir of options.extraPluginScanDirs || []) {
if (!Path.isAbsolute(extraPluginScanDir)) {
throw new TypeError('extraPluginScanDirs must all be absolute paths');
}
pluginScanDirs.push(extraPluginScanDir);
}
const pluginPaths = options.pluginPaths || [];
if (!pluginPaths.every(s => Path.isAbsolute(s))) {
throw new TypeError('pluginPaths must all be absolute paths');
}
const maxWorkerCount = process.env.KBN_OPTIMIZER_MAX_WORKERS
? parseInt(process.env.KBN_OPTIMIZER_MAX_WORKERS, 10)
: options.maxWorkerCount ?? Math.max(Math.ceil(Math.max(Os.cpus()?.length, 1) / 3), 2);
if (typeof maxWorkerCount !== 'number' || !Number.isFinite(maxWorkerCount)) {
throw new TypeError('worker count must be a number');
}
return {
watch,
dist,
repoRoot,
maxWorkerCount,
profileWebpack,
cache,
pluginScanDirs,
pluginPaths,
inspectWorkers,
};
}
static create(inputOptions: Options) {
const options = OptimizerConfig.parseOptions(inputOptions);
const plugins = findKibanaPlatformPlugins(options.pluginScanDirs, options.pluginPaths);
const bundles = getBundles(plugins, options.repoRoot);
return new OptimizerConfig(
bundles,
options.cache,
options.watch,
options.inspectWorkers,
plugins,
options.repoRoot,
options.maxWorkerCount,
options.dist,
options.profileWebpack
);
}
constructor(
public readonly bundles: Bundle[],
public readonly cache: boolean,
public readonly watch: boolean,
public readonly inspectWorkers: boolean,
public readonly plugins: KibanaPlatformPlugin[],
public readonly repoRoot: string,
public readonly maxWorkerCount: number,
public readonly dist: boolean,
public readonly profileWebpack: boolean
) {}
getWorkerConfig(optimizerCacheKey: unknown): WorkerConfig {
return {
cache: this.cache,
dist: this.dist,
profileWebpack: this.profileWebpack,
repoRoot: this.repoRoot,
watch: this.watch,
optimizerCacheKey,
browserslistEnv: this.dist ? 'production' : process.env.BROWSERSLIST_ENV || 'dev',
};
}
}
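
A quick sketch of how the defaults above resolve (assuming an 8-core machine; the values are illustrative):

import { REPO_ROOT } from '@kbn/dev-utils';
import { OptimizerConfig } from './optimizer_config';

const options = OptimizerConfig.parseOptions({ repoRoot: REPO_ROOT });

// with 8 CPUs: maxWorkerCount === Math.max(Math.ceil(8 / 3), 2) === 3
// cache === true unless KBN_OPTIMIZER_NO_CACHE is set (any value, even '0', disables it)
// KBN_OPTIMIZER_MAX_WORKERS, when set, always overrides the calculated worker count
console.log(options.maxWorkerCount, options.cache);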

View file

@ -0,0 +1,170 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { inspect } from 'util';
import { WorkerMsg, CompilerMsg, Bundle, Summarizer } from '../common';
import { ChangeEvent } from './watcher';
import { WorkerStatus } from './observe_worker';
import { BundleCacheEvent } from './bundle_cache';
import { OptimizerConfig } from './optimizer_config';
export interface OptimizerInitializedEvent {
type: 'optimizer initialized';
}
export type OptimizerEvent =
| OptimizerInitializedEvent
| ChangeEvent
| WorkerMsg
| WorkerStatus
| BundleCacheEvent;
export interface OptimizerState {
phase: 'initializing' | 'initialized' | 'running' | 'issue' | 'success' | 'reallocating';
startTime: number;
durSec: number;
compilerStates: CompilerMsg[];
onlineBundles: Bundle[];
offlineBundles: Bundle[];
}
const msToSec = (ms: number) => Math.round(ms / 100) / 10;
/**
* merge a state and some updates into a new optimizer state, apply some
* standard updates related to timing
*/
function createOptimizerState(
prevState: OptimizerState,
update?: Partial<Omit<OptimizerState, 'version' | 'durSec' | 'startTime'>>
): OptimizerState {
// reset start time if we are transitioning into running
const startTime =
(prevState.phase === 'success' || prevState.phase === 'issue') &&
(update?.phase === 'running' || update?.phase === 'reallocating')
? Date.now()
: prevState.startTime;
return {
...prevState,
...update,
startTime,
durSec: msToSec(Date.now() - startTime),
};
}
/**
* calculate the total state, given a set of compiler messages
*/
function getStatePhase(states: CompilerMsg[]) {
const types = states.map(s => s.type);
if (types.includes('running')) {
return 'running';
}
if (types.includes('compiler issue')) {
return 'issue';
}
if (types.every(s => s === 'compiler success')) {
return 'success';
}
throw new Error(`unable to summarize bundle states: ${JSON.stringify(states)}`);
}
export function createOptimizerReducer(
config: OptimizerConfig
): Summarizer<OptimizerEvent, OptimizerState> {
return (state, event) => {
if (event.type === 'optimizer initialized') {
return createOptimizerState(state, {
phase: 'initialized',
});
}
if (event.type === 'worker error' || event.type === 'compiler error') {
// unrecoverable error states
const error = new Error(event.errorMsg);
error.stack = event.errorStack;
throw error;
}
if (event.type === 'worker stdio' || event.type === 'worker started') {
// no state change, but emit an updated copy so the event is shared externally
return createOptimizerState(state);
}
if (event.type === 'changes detected') {
// switch to running early, before workers are started, so that the
// base path proxy can prevent requests in the delay between changes
// being detected and the workers starting
return createOptimizerState(state, {
phase: 'reallocating',
});
}
if (
event.type === 'changes' ||
event.type === 'bundle cached' ||
event.type === 'bundle not cached'
) {
const onlineBundles: Bundle[] = [...state.onlineBundles];
if (event.type === 'changes') {
onlineBundles.push(...event.bundles);
}
if (event.type === 'bundle not cached') {
onlineBundles.push(event.bundle);
}
const offlineBundles: Bundle[] = [];
for (const bundle of config.bundles) {
if (!onlineBundles.includes(bundle)) {
offlineBundles.push(bundle);
}
}
return createOptimizerState(state, {
phase: state.phase === 'initializing' ? 'initializing' : 'running',
onlineBundles,
offlineBundles,
});
}
if (
event.type === 'compiler issue' ||
event.type === 'compiler success' ||
event.type === 'running'
) {
const compilerStates: CompilerMsg[] = [
...state.compilerStates.filter(c => c.bundleId !== event.bundleId),
event,
];
return createOptimizerState(state, {
phase: getStatePhase(compilerStates),
compilerStates,
});
}
throw new Error(`unexpected optimizer event ${inspect(event)}`);
};
}

View file

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { mergeMap, toArray } from 'rxjs/operators';
import { maybeMap } from '../common';
import { OptimizerConfig } from './optimizer_config';
import { BundleCacheEvent } from './bundle_cache';
import { ChangeEvent } from './watcher';
import { assignBundlesToWorkers } from './assign_bundles_to_workers';
import { observeWorker } from './observe_worker';
/**
* Create a stream of all worker events, which include messages
* from workers and events about the status of workers. To get
* these events we assign the bundles to workers via
* `assignBundlesToWorkers()` and then start a worker for each
* assignment with `observeWorker()`.
*
* Subscribes to `changeEvent$` in order to determine when more
* bundles should be assigned to workers.
*
* Completes when all workers have exited. If we are running in
* watch mode this observable will never complete.
*/
export function runWorkers(
config: OptimizerConfig,
optimizerCacheKey: unknown,
bundleCache$: Rx.Observable<BundleCacheEvent>,
changeEvent$: Rx.Observable<ChangeEvent>
) {
return Rx.concat(
// first batch of bundles are based on how up-to-date the cache is
bundleCache$.pipe(
maybeMap(event => (event.type === 'bundle not cached' ? event.bundle : undefined)),
toArray()
),
// subsequent batches are defined by changeEvent$
changeEvent$.pipe(maybeMap(c => (c.type === 'changes' ? c.bundles : undefined)))
).pipe(
mergeMap(bundles =>
Rx.from(assignBundlesToWorkers(bundles, config.maxWorkerCount)).pipe(
mergeMap(assignment =>
observeWorker(config, config.getWorkerConfig(optimizerCacheKey), assignment.bundles)
)
)
)
);
}

View file

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { mergeMap, toArray } from 'rxjs/operators';
import { Bundle, maybeMap } from '../common';
import { BundleCacheEvent } from './bundle_cache';
import { Watcher } from './watcher';
/**
* Recursively call watcher.getNextChange$, passing it
* just the bundles that haven't been changed yet until
* all bundles have changed, then exit
*/
function recursiveGetNextChange$(
watcher: Watcher,
bundles: Bundle[],
startTime: number
): ReturnType<Watcher['getNextChange$']> {
return !bundles.length
? Rx.EMPTY
: watcher.getNextChange$(bundles, startTime).pipe(
mergeMap(event => {
if (event.type === 'changes detected') {
return Rx.of(event);
}
return Rx.concat(
Rx.of(event),
recursiveGetNextChange$(
watcher,
bundles.filter(b => !event.bundles.includes(b)),
Date.now()
)
);
})
);
}
/**
* Create an observable that emits change events for offline
* bundles.
*
* Once changes are seen in a bundle, that bundle's
* files will no longer be watched.
*
* Once changes have been seen in all bundles changeEvent$
* will complete.
*
* If there are no bundles to watch, or config.watch === false,
* the observable completes without sending any notifications.
*/
export function watchBundlesForChanges$(
bundleCacheEvent$: Rx.Observable<BundleCacheEvent>,
initialStartTime: number
) {
return bundleCacheEvent$.pipe(
maybeMap(event => (event.type === 'bundle cached' ? event.bundle : undefined)),
toArray(),
mergeMap(bundles =>
bundles.length
? Watcher.using(watcher => recursiveGetNextChange$(watcher, bundles, initialStartTime))
: Rx.EMPTY
)
);
}
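A hedged consumption sketch (hypothetical logging only; the real wiring happens in `runOptimizer()` further down this diff):

// log which cached ("offline") bundles report changes and need to come back online
watchBundlesForChanges$(bundleCacheEvent$, Date.now()).subscribe(event => {
  if (event.type === 'changes') {
    console.log('bundles to rebuild:', event.bundles.map(b => b.id));
  }
});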

View file

@ -0,0 +1,109 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { take, map, share } from 'rxjs/operators';
import Watchpack from 'watchpack';
import { debounceTimeBuffer, Bundle } from '../common';
export interface ChangesStarted {
type: 'changes detected';
}
export interface Changes {
type: 'changes';
bundles: Bundle[];
}
export type ChangeEvent = ChangesStarted | Changes;
export class Watcher {
/**
* Use watcher as an RxJS Resource, which is a special type of observable
* that calls unsubscribe on the resource (the Watcher instance in this case)
* when the observable is unsubscribed.
*/
static using<T>(fn: (watcher: Watcher) => Rx.Observable<T>) {
return Rx.using(
() => new Watcher(),
resource => fn(resource as Watcher)
);
}
private readonly watchpack = new Watchpack({
aggregateTimeout: 0,
ignored: /node_modules\/([^\/]+[\/])*(?!package.json)([^\/]+)$/,
});
private readonly change$ = Rx.fromEvent<[string]>(this.watchpack, 'change').pipe(share());
public getNextChange$(bundles: Bundle[], startTime: number) {
return Rx.merge(
// emit ChangesStarted as soon as we have been triggered
this.change$.pipe(
take(1),
map(
(): ChangesStarted => ({
type: 'changes detected',
})
)
),
// debounce and buffer change events for 1 second to create
// final change notification
this.change$.pipe(
map(event => event[0]),
debounceTimeBuffer(1000),
map(
(changes): Changes => ({
type: 'changes',
bundles: bundles.filter(bundle => {
const referencedFiles = bundle.cache.getReferencedFiles();
return changes.some(change => referencedFiles?.includes(change));
}),
})
),
take(1)
),
// call watchpack.watch after listeners are set up
Rx.defer(() => {
const watchPaths: string[] = [];
for (const bundle of bundles) {
for (const path of bundle.cache.getReferencedFiles() || []) {
watchPaths.push(path);
}
}
this.watchpack.watch(watchPaths, [], startTime);
return Rx.EMPTY;
})
);
}
/**
* Called automatically by RxJS when Watcher instances
* are used as resources
*/
unsubscribe() {
this.watchpack.close();
}
}
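`debounceTimeBuffer` is imported from `../common` and is not included in this diff. Conceptually it collects values until the source has been silent for the given interval, then emits them as one batch. A rough sketch under that assumption (not the shipped implementation):

import * as Rx from 'rxjs';
import { buffer, debounceTime, filter, share } from 'rxjs/operators';

const debounceTimeBuffer = <T>(ms: number) => (source$: Rx.Observable<T>) => {
  const shared$ = source$.pipe(share());
  return shared$.pipe(
    // flush the buffered values whenever the source has been quiet for `ms` milliseconds
    buffer(shared$.pipe(debounceTime(ms))),
    filter(batch => batch.length > 0)
  );
};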

View file

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { mergeMap, share, observeOn } from 'rxjs/operators';
import { summarizeEvent$, Update } from './common';
import {
OptimizerConfig,
OptimizerEvent,
OptimizerState,
getBundleCacheEvent$,
getOptimizerCacheKey,
watchBundlesForChanges$,
runWorkers,
OptimizerInitializedEvent,
createOptimizerReducer,
} from './optimizer';
export type OptimizerUpdate = Update<OptimizerEvent, OptimizerState>;
export type OptimizerUpdate$ = Rx.Observable<OptimizerUpdate>;
export function runOptimizer(config: OptimizerConfig) {
return Rx.defer(async () => ({
startTime: Date.now(),
cacheKey: await getOptimizerCacheKey(config),
})).pipe(
mergeMap(({ startTime, cacheKey }) => {
const bundleCacheEvent$ = getBundleCacheEvent$(config, cacheKey).pipe(
observeOn(Rx.asyncScheduler),
share()
);
// initialization completes once all bundle caches have been resolved
const init$ = Rx.concat(
bundleCacheEvent$,
Rx.of<OptimizerInitializedEvent>({
type: 'optimizer initialized',
})
);
// watch the offline bundles for changes, turning them online...
const changeEvent$ = config.watch
? watchBundlesForChanges$(bundleCacheEvent$, startTime).pipe(share())
: Rx.EMPTY;
// run workers to build all the online bundles, including the bundles turned online by changeEvent$
const workerEvent$ = runWorkers(config, cacheKey, bundleCacheEvent$, changeEvent$);
// create the stream that summarizes all the events into specific states
return summarizeEvent$<OptimizerEvent, OptimizerState>(
Rx.merge(init$, changeEvent$, workerEvent$),
{
phase: 'initializing',
compilerStates: [],
offlineBundles: [],
onlineBundles: [],
startTime,
durSec: 0,
},
createOptimizerReducer(config)
);
})
);
}
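A hedged usage sketch for the exported API, assuming `config` is an OptimizerConfig instance (plain console logging stands in for whatever reporter the caller uses):

// hypothetical consumer
runOptimizer(config).subscribe(update => {
  const { phase, offlineBundles, onlineBundles } = update.state;
  console.log(`optimizer ${phase}: ${offlineBundles.length} cached, ${onlineBundles.length} building`);
});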

View file

@ -0,0 +1,22 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
module.exports = {
plugins: [require('autoprefixer')()],
};

View file

@ -0,0 +1,210 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import 'source-map-support/register';
import Fs from 'fs';
import Path from 'path';
import { inspect } from 'util';
import webpack, { Stats } from 'webpack';
import * as Rx from 'rxjs';
import { mergeMap, map, mapTo, takeUntil } from 'rxjs/operators';
import { CompilerMsgs, CompilerMsg, maybeMap, Bundle, WorkerConfig } from '../common';
import { getWebpackConfig } from './webpack.config';
import { isFailureStats, failedStatsToErrorMessage } from './webpack_helpers';
import {
isExternalModule,
isNormalModule,
isIgnoredModule,
isConcatenatedModule,
WebpackNormalModule,
getModulePath,
} from './webpack_helpers';
const PLUGIN_NAME = '@kbn/optimizer';
/**
* Create an Observable<CompilerMsg> for a specific child compiler + bundle
*/
const observeCompiler = (
workerConfig: WorkerConfig,
bundle: Bundle,
compiler: webpack.Compiler
): Rx.Observable<CompilerMsg> => {
const compilerMsgs = new CompilerMsgs(bundle.id);
const done$ = new Rx.Subject();
const { beforeRun, watchRun, done } = compiler.hooks;
/**
* Called by webpack as a single run compilation is starting
*/
const started$ = Rx.merge(
Rx.fromEventPattern(cb => beforeRun.tap(PLUGIN_NAME, cb)),
Rx.fromEventPattern(cb => watchRun.tap(PLUGIN_NAME, cb))
).pipe(mapTo(compilerMsgs.running()));
/**
* Called by webpack as any compilation is complete. If the
* needAdditionalPass property is set then another compilation
* is about to be started, so we shouldn't send complete quite yet
*/
const complete$ = Rx.fromEventPattern<Stats>(cb => done.tap(PLUGIN_NAME, cb)).pipe(
maybeMap(stats => {
// @ts-ignore not included in types, but it is real https://github.com/webpack/webpack/blob/ab4fa8ddb3f433d286653cd6af7e3aad51168649/lib/Watching.js#L58
if (stats.compilation.needAdditionalPass) {
return undefined;
}
if (workerConfig.profileWebpack) {
Fs.writeFileSync(
Path.resolve(bundle.outputDir, 'stats.json'),
JSON.stringify(stats.toJson())
);
}
if (!workerConfig.watch) {
process.nextTick(() => done$.next());
}
if (isFailureStats(stats)) {
return compilerMsgs.compilerFailure({
failure: failedStatsToErrorMessage(stats),
});
}
const normalModules = stats.compilation.modules.filter(
(module): module is WebpackNormalModule => {
if (isNormalModule(module)) {
return true;
}
if (isExternalModule(module) || isIgnoredModule(module) || isConcatenatedModule(module)) {
return false;
}
throw new Error(`Unexpected module type: ${inspect(module)}`);
}
);
const referencedFiles = new Set<string>();
for (const module of normalModules) {
const path = getModulePath(module);
const parsedPath = Path.parse(path);
const dirSegments = parsedPath.dir.split(Path.sep);
if (!dirSegments.includes('node_modules')) {
referencedFiles.add(path);
continue;
}
const nmIndex = dirSegments.lastIndexOf('node_modules');
const isScoped = dirSegments[nmIndex + 1].startsWith('@');
referencedFiles.add(
Path.join(
parsedPath.root,
...dirSegments.slice(0, nmIndex + 1 + (isScoped ? 2 : 1)),
'package.json'
)
);
}
const files = Array.from(referencedFiles);
const mtimes = new Map(
files.map((path): [string, number | undefined] => {
try {
return [path, compiler.inputFileSystem.statSync(path)?.mtimeMs];
} catch (error) {
if (error?.code === 'ENOENT') {
return [path, undefined];
}
throw error;
}
})
);
bundle.cache.set({
optimizerCacheKey: workerConfig.optimizerCacheKey,
cacheKey: bundle.createCacheKey(files, mtimes),
moduleCount: normalModules.length,
files,
});
return compilerMsgs.compilerSuccess({
moduleCount: normalModules.length,
});
})
);
/**
* Called whenever the compilation results in an error that
* prevents assets from being emitted, and prevents watching
* from continuing.
*/
const error$ = Rx.fromEventPattern<Error>(cb => compiler.hooks.failed.tap(PLUGIN_NAME, cb)).pipe(
map(error => {
throw compilerMsgs.error(error);
})
);
/**
* Merge events into a single stream, if we're not watching
* complete the stream after our first complete$ event
*/
return Rx.merge(started$, complete$, error$).pipe(takeUntil(done$));
};
/**
* Run webpack compilers
*/
export const runCompilers = (workerConfig: WorkerConfig, bundles: Bundle[]) => {
const multiCompiler = webpack(bundles.map(def => getWebpackConfig(def, workerConfig)));
return Rx.merge(
/**
* convert each compiler into an event stream that represents
* the status of each compiler; if we aren't watching, the streams
* will complete after the compilers are complete.
*
* If a significant error occurs the stream will error
*/
Rx.from(multiCompiler.compilers.entries()).pipe(
mergeMap(([compilerIndex, compiler]) => {
const bundle = bundles[compilerIndex];
return observeCompiler(workerConfig, bundle, compiler);
})
),
/**
* compilers have been hooked up for their events, trigger run()/watch()
*/
Rx.defer(() => {
if (!workerConfig.watch) {
multiCompiler.run(() => {});
} else {
multiCompiler.watch({}, () => {});
}
return [];
})
);
};
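The referenced-files bookkeeping above collapses anything inside node_modules down to the owning package's package.json, so the cache tracks one file per dependency instead of every module in it. Worked example with hypothetical paths:

// '/repo/node_modules/@elastic/eui/lib/button.js'
//   -> '/repo/node_modules/@elastic/eui/package.json'   (scoped package: keep "@scope/name")
// '/repo/node_modules/lodash/map.js'
//   -> '/repo/node_modules/lodash/package.json'         (unscoped package: keep one segment)
// '/repo/src/plugins/foo/public/index.ts'
//   -> tracked as-is, since it is not inside node_modules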

View file

@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import * as Rx from 'rxjs';
import { parseBundles, parseWorkerConfig, WorkerMsg, isWorkerMsg, WorkerMsgs } from '../common';
import { runCompilers } from './run_compilers';
/**
**
**
** Entry file for optimizer workers. This hooks into the process, handles
** sending messages to the parent, makes sure the worker exits properly,
** and triggers all the compilers by calling runCompilers()
**
**
**/
const workerMsgs = new WorkerMsgs();
if (!process.send) {
throw new Error('worker process was not started with an IPC channel');
}
const send = (msg: WorkerMsg) => {
if (!process.send) {
// parent is gone
process.exit(0);
} else {
process.send(msg);
}
};
/**
* set the exitCode and wait for the process to exit; if it
* doesn't exit naturally, do so forcibly and fail.
*/
const exit = (code: number) => {
process.exitCode = code;
setTimeout(() => {
send(
workerMsgs.error(
new Error('process did not automatically exit within 5 seconds, forcing exit')
)
);
process.exit(1);
}, 5000).unref();
};
// check for connected parent on an unref'd timer rather than listening
// to "disconnect" since that listner prevents the process from exiting
setInterval(() => {
if (!process.connected) {
// parent is gone
process.exit(0);
}
}, 1000).unref();
Rx.defer(() => {
const workerConfig = parseWorkerConfig(process.argv[2]);
const bundles = parseBundles(process.argv[3]);
// set BROWSERSLIST_ENV so that style/babel loaders see it before running compilers
process.env.BROWSERSLIST_ENV = workerConfig.browserslistEnv;
return runCompilers(workerConfig, bundles);
}).subscribe(
msg => {
send(msg);
},
error => {
if (isWorkerMsg(error)) {
send(error);
} else {
send(workerMsgs.error(error));
}
exit(1);
},
() => {
exit(0);
}
);
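This entry file expects the serialized worker config in argv[2] and the serialized bundle list in argv[3]; the forking side lives in observe_worker.ts, which is not shown here. A hypothetical, simplified sketch of that handshake, assuming `workerConfigJson` and `bundlesJson` are the JSON strings the parent already holds:

import { fork } from 'child_process';

// hypothetical parent-side sketch; the real logic lives in observe_worker.ts
const proc = fork(require.resolve('./worker/run_worker'), [
  workerConfigJson, // argv[2]: read back by parseWorkerConfig()
  bundlesJson,      // argv[3]: read back by parseBundles()
]);

proc.on('message', msg => {
  // each message is a WorkerMsg describing compiler progress, success, or failure
  console.log('worker message:', msg);
});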

View file

@ -0,0 +1,32 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import webpack from 'webpack';
import { stringifyRequest } from 'loader-utils';
// eslint-disable-next-line import/no-default-export
export default function(this: webpack.loader.LoaderContext) {
return `
if (window.__kbnDarkMode__) {
require(${stringifyRequest(this, `${this.resourcePath}?dark`)})
} else {
require(${stringifyRequest(this, `${this.resourcePath}?light`)});
}
`;
}
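For a hypothetical resource such as src/plugins/foo/public/index.scss, the loader above emits roughly the following wrapper, so only the theme matching window.__kbnDarkMode__ is actually loaded (paths are illustrative; stringifyRequest turns them into relative requests):

if (window.__kbnDarkMode__) {
  require("./index.scss?dark")
} else {
  require("./index.scss?light");
}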

View file

@ -0,0 +1,240 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Path from 'path';
import { stringifyRequest } from 'loader-utils';
import webpack from 'webpack';
// @ts-ignore
import TerserPlugin from 'terser-webpack-plugin';
// @ts-ignore
import webpackMerge from 'webpack-merge';
// @ts-ignore
import { CleanWebpackPlugin } from 'clean-webpack-plugin';
import * as SharedDeps from '@kbn/ui-shared-deps';
import { Bundle, WorkerConfig } from '../common';
const PUBLIC_PATH_PLACEHOLDER = '__REPLACE_WITH_PUBLIC_PATH__';
const BABEL_PRESET_PATH = require.resolve('@kbn/babel-preset/webpack_preset');
export function getWebpackConfig(bundle: Bundle, worker: WorkerConfig) {
const commonConfig: webpack.Configuration = {
node: { fs: 'empty' },
context: bundle.contextDir,
cache: true,
entry: {
[bundle.id]: bundle.entry,
},
devtool: worker.dist ? false : '#cheap-source-map',
profile: worker.profileWebpack,
output: {
path: bundle.outputDir,
filename: '[name].plugin.js',
publicPath: PUBLIC_PATH_PLACEHOLDER,
devtoolModuleFilenameTemplate: info =>
`/${bundle.type}:${bundle.id}/${Path.relative(
bundle.sourceRoot,
info.absoluteResourcePath
)}${info.query}`,
jsonpFunction: `${bundle.id}_bundle_jsonpfunction`,
...(bundle.type === 'plugin'
? {
// When the entry point is loaded, assign its exported `plugin`
// value to a key on the global `__kbnBundles__` object.
library: ['__kbnBundles__', `plugin/${bundle.id}`],
libraryExport: 'plugin',
}
: {}),
},
optimization: {
noEmitOnErrors: true,
},
externals: {
...SharedDeps.externals,
},
plugins: [new CleanWebpackPlugin()],
module: {
// no parse rules for a few known large packages which have no require() statements
noParse: [
/[\///]node_modules[\///]elasticsearch-browser[\///]/,
/[\///]node_modules[\///]lodash[\///]index\.js/,
],
rules: [
{
test: /\.css$/,
include: /node_modules/,
use: [
{
loader: 'style-loader',
},
{
loader: 'css-loader',
options: {
sourceMap: !worker.dist,
},
},
],
},
{
test: /\.scss$/,
exclude: /node_modules/,
oneOf: [
{
resourceQuery: /dark|light/,
use: [
{
loader: 'style-loader',
},
{
loader: 'css-loader',
options: {
sourceMap: !worker.dist,
},
},
{
loader: 'postcss-loader',
options: {
sourceMap: !worker.dist,
config: {
path: require.resolve('./postcss.config'),
},
},
},
{
loader: 'sass-loader',
options: {
sourceMap: !worker.dist,
prependData(loaderContext: webpack.loader.LoaderContext) {
return `@import ${stringifyRequest(
loaderContext,
Path.resolve(
worker.repoRoot,
'src/legacy/ui/public/styles/_styling_constants.scss'
)
)};\n`;
},
webpackImporter: false,
implementation: require('node-sass'),
sassOptions(loaderContext: webpack.loader.LoaderContext) {
const darkMode = loaderContext.resourceQuery === '?dark';
return {
outputStyle: 'nested',
includePaths: [Path.resolve(worker.repoRoot, 'node_modules')],
sourceMapRoot: `/${bundle.type}:${bundle.id}`,
importer: (url: string) => {
if (darkMode && url.includes('eui_colors_light')) {
return { file: url.replace('eui_colors_light', 'eui_colors_dark') };
}
return { file: url };
},
};
},
},
},
],
},
{
loader: require.resolve('./theme_loader'),
},
],
},
{
test: /\.(woff|woff2|ttf|eot|svg|ico|png|jpg|gif|jpeg)(\?|$)/,
loader: 'url-loader',
options: {
limit: 8192,
},
},
{
test: /\.(js|tsx?)$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader',
options: {
babelrc: false,
presets: [BABEL_PRESET_PATH],
},
},
},
{
test: /\.(html|md|txt|tmpl)$/,
use: {
loader: 'raw-loader',
},
},
],
},
resolve: {
extensions: ['.js', '.ts', '.tsx', '.json'],
alias: {
tinymath: require.resolve('tinymath/lib/tinymath.es5.js'),
},
},
performance: {
// NOTE: we are disabling this as those hints
// are more tailored to the final bundle results
// and not to the performance of the webpack compilation itself
hints: false,
},
};
const nonDistributableConfig: webpack.Configuration = {
mode: 'development',
};
const distributableConfig: webpack.Configuration = {
mode: 'production',
plugins: [
new webpack.DefinePlugin({
'process.env': {
IS_KIBANA_DISTRIBUTABLE: `"true"`,
},
}),
],
optimization: {
minimizer: [
new TerserPlugin({
cache: false,
sourceMap: false,
extractComments: false,
terserOptions: {
compress: false,
mangle: false,
},
}),
],
},
};
return webpackMerge(commonConfig, worker.dist ? distributableConfig : nonDistributableConfig);
}
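Since plugin bundles are emitted with library: ['__kbnBundles__', 'plugin/<id>'] and libraryExport: 'plugin', the page that loads a bundle can pick the plugin initializer off a global map. A hedged sketch of that access (the bootstrap template itself is not part of this diff):

// hypothetical: grab the `plugin` export of a bundle with id "foo" after foo.plugin.js has loaded
const fooPlugin = (window as any).__kbnBundles__['plugin/foo'];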

View file

@ -0,0 +1,166 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import webpack from 'webpack';
import { defaults } from 'lodash';
// @ts-ignore
import Stats from 'webpack/lib/Stats';
export function isFailureStats(stats: webpack.Stats) {
if (stats.hasErrors()) {
return true;
}
const { warnings } = stats.toJson({ all: false, warnings: true });
// 1 - when typescript doesn't do a full type check, as we have the ts-loader
// configured here, it does not have enough information to determine
// whether an imported name is a type or not, so when the name is then
// exported, typescript has no choice but to emit the export. Fortunately,
// the extraneous export should not be harmful, so we just suppress these warnings
// https://github.com/TypeStrong/ts-loader#transpileonly-boolean-defaultfalse
//
// 2 - Mini Css Extract plugin tracks the order for each css import we have
// through the project (and its successive imports) since version 0.4.2.
// In case we have the same imports more than one time with different
// sequences, this plugin will throw a warning. This should not be harmful,
// but an issue was opened and can be followed at:
// https://github.com/webpack-contrib/mini-css-extract-plugin/issues/250#issuecomment-415345126
const filteredWarnings = Stats.filterWarnings(warnings, STATS_WARNINGS_FILTER);
return filteredWarnings.length > 0;
}
const STATS_WARNINGS_FILTER = new RegExp(
[
'(export .* was not found in)',
'|(chunk .* \\[mini-css-extract-plugin\\]\\\nConflicting order between:)',
].join('')
);
export function failedStatsToErrorMessage(stats: webpack.Stats) {
const details = stats.toString(
defaults(
{ colors: true, warningsFilter: STATS_WARNINGS_FILTER },
Stats.presetToOptions('minimal')
)
);
return `Optimizations failure.\n${details.split('\n').join('\n ')}`;
}
export interface WebpackResolveData {
/** compilation context */
context: string;
/** full request (with loaders) */
request: string;
dependencies: [
{
module: unknown;
weak: boolean;
optional: boolean;
loc: unknown;
request: string;
userRequest: string;
}
];
/** absolute path, but probably includes loaders in some cases */
userRequest: string;
/** string from source code */
rawRequest: string;
loaders: unknown;
/** absolute path to file, but probably includes loaders in some cases */
resource: string;
/** module type */
type: string | 'javascript/auto';
resourceResolveData: {
context: {
/** absolute path to the file that issued the request */
issuer: string;
};
/** absolute path to the resolved file */
path: string;
};
}
interface Dependency {
type: 'null' | 'cjs require';
module: unknown;
}
/** used for standard js/ts modules */
export interface WebpackNormalModule {
type: string;
/** absolute path to file on disk */
resource: string;
buildInfo: {
cacheable: boolean;
fileDependencies: Set<string>;
};
dependencies: Dependency[];
}
export function isNormalModule(module: any): module is WebpackNormalModule {
return module?.constructor?.name === 'NormalModule';
}
/** module used for ignored code */
export interface WebpackIgnoredModule {
type: string;
/** unique string to identify this module with (starts with `ignored`) */
identifierStr: string;
/** human readable identifier */
readableIdentifierStr: string;
}
export function isIgnoredModule(module: any): module is WebpackIgnoredModule {
return module?.constructor?.name === 'RawModule' && module.identifierStr?.startsWith('ignored ');
}
/** module replacing imports for webpack externals */
export interface WebpackExternalModule {
type: string;
id: string;
/** JS used to get instance of External */
request: string;
/** module name that is handled by externals */
userRequest: string;
}
export function isExternalModule(module: any): module is WebpackExternalModule {
return module?.constructor?.name === 'ExternalModule';
}
/** module replacing imports for webpack externals */
export interface WebpackConcatenatedModule {
type: string;
id: number;
dependencies: Dependency[];
usedExports: string[];
}
export function isConcatenatedModule(module: any): module is WebpackConcatenatedModule {
return module?.constructor?.name === 'ConcatenatedModule';
}
export function getModulePath(module: WebpackNormalModule) {
const queryIndex = module.resource.indexOf('?');
return queryIndex === -1 ? module.resource : module.resource.slice(0, queryIndex);
}
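Quick illustration of getModulePath: it strips any resource query, so theme variants map back to a single file on disk (hypothetical path):

const path = getModulePath({
  resource: '/repo/src/plugins/foo/public/index.scss?dark',
} as WebpackNormalModule);
// path === '/repo/src/plugins/foo/public/index.scss'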

View file

@ -0,0 +1,7 @@
{
"extends": "../../tsconfig.json",
"include": [
"index.d.ts",
"src/**/*"
]
}

View file

@ -0,0 +1 @@
../../yarn.lock

View file

@ -24,7 +24,7 @@ import util from 'util';
import { stat, readFileSync } from 'fs';
import { snakeCase } from 'lodash';
import del from 'del';
import { withProcRunner, ToolingLog } from '@kbn/dev-utils';
import { ProcRunner, ToolingLog } from '@kbn/dev-utils';
import { createLegacyEsTestCluster } from '@kbn/test';
import execa from 'execa';
@ -92,27 +92,30 @@ describe(`running the plugin-generator via 'node scripts/generate_plugin.js plug
});
describe('with es instance', () => {
const log = new ToolingLog();
const log = new ToolingLog({
level: 'verbose',
writeTo: process.stdout,
});
const pr = new ProcRunner(log);
const es = createLegacyEsTestCluster({ license: 'basic', log });
beforeAll(es.start);
afterAll(es.stop);
afterAll(() => pr.teardown());
it(`'yarn start' should result in the spec plugin being initialized on kibana's stdout`, async () => {
await withProcRunner(log, async proc => {
await proc.run('kibana', {
cmd: 'yarn',
args: [
'start',
'--optimize.enabled=false',
'--logging.json=false',
'--migrations.skip=true',
],
cwd: generatedPath,
wait: /ispec_plugin.+Status changed from uninitialized to green - Ready/,
});
await proc.stop('kibana');
await pr.run('kibana', {
cmd: 'yarn',
args: [
'start',
'--optimize.enabled=false',
'--logging.json=false',
'--migrations.skip=true',
],
cwd: generatedPath,
wait: /ispec_plugin.+Status changed from uninitialized to green - Ready/,
});
await pr.stop('kibana');
});
});

File diff suppressed because it is too large

View file

@ -39,7 +39,7 @@
"babel-loader": "^8.0.6",
"chalk": "^2.4.2",
"cmd-shim": "^2.1.0",
"cpy": "^7.3.0",
"cpy": "^8.0.0",
"dedent": "^0.7.0",
"del": "^5.1.0",
"execa": "^3.2.0",
@ -63,8 +63,8 @@
"tempy": "^0.3.0",
"typescript": "3.7.2",
"unlazy-loader": "^0.1.3",
"webpack": "^4.41.0",
"webpack-cli": "^3.3.9",
"webpack": "^4.41.5",
"webpack-cli": "^3.3.10",
"wrap-ansi": "^3.0.1",
"write-pkg": "^4.0.0"
},

View file

@ -27,6 +27,6 @@
"rxjs": "6.5.2",
"serve-static": "1.14.1",
"styled-components": "^3",
"webpack": "4.34.0"
"webpack": "^4.41.5"
}
}

View file

@ -59,6 +59,19 @@ const makeSuccessMessage = options => {
* @property {string} options.esFrom Optionally run from source instead of snapshot
*/
export async function runTests(options) {
if (!process.env.KBN_NP_PLUGINS_BUILT) {
const log = options.createLogger();
log.warning('❗️❗️❗️');
log.warning('❗️❗️❗️');
log.warning('❗️❗️❗️');
log.warning(
" Don't forget to use `node scripts/build_kibana_platform_plugins` to build plugins you plan on testing"
);
log.warning('❗️❗️❗️');
log.warning('❗️❗️❗️');
log.warning('❗️❗️❗️');
}
for (const configPath of options.configs) {
const log = options.createLogger();
const opts = {

View file

@ -33,13 +33,13 @@
"@babel/core": "^7.5.5",
"@elastic/eui": "0.0.55",
"@kbn/babel-preset": "1.0.0",
"autoprefixer": "9.6.1",
"autoprefixer": "^9.7.4",
"babel-loader": "^8.0.6",
"brace": "0.11.1",
"chalk": "^2.4.2",
"chokidar": "3.2.1",
"core-js": "^3.2.1",
"css-loader": "^2.1.1",
"css-loader": "^3.4.2",
"expose-loader": "^0.7.5",
"file-loader": "^4.2.0",
"grunt": "1.0.4",
@ -54,7 +54,7 @@
"keymirror": "0.1.1",
"moment": "^2.24.0",
"node-sass": "^4.13.1",
"postcss": "^7.0.5",
"postcss": "^7.0.26",
"postcss-loader": "^3.0.0",
"raw-loader": "^3.1.0",
"react-dom": "^16.12.0",
@ -64,10 +64,10 @@
"redux": "3.7.2",
"redux-thunk": "2.2.0",
"regenerator-runtime": "^0.13.3",
"sass-loader": "^7.3.1",
"sass-loader": "^8.0.2",
"sinon": "^7.4.2",
"style-loader": "^0.23.1",
"webpack": "^4.41.0",
"style-loader": "^1.1.3",
"webpack": "^4.41.5",
"webpack-dev-server": "^3.8.2",
"yeoman-generator": "1.1.1",
"yo": "2.0.6"

View file

@ -17,7 +17,7 @@
"abortcontroller-polyfill": "^1.3.0",
"angular": "^1.7.9",
"core-js": "^3.2.1",
"css-loader": "^2.1.1",
"css-loader": "^3.4.2",
"custom-event-polyfill": "^0.3.0",
"del": "^5.1.0",
"jquery": "^3.4.1",
@ -30,7 +30,7 @@
"read-pkg": "^5.2.0",
"regenerator-runtime": "^0.13.3",
"symbol-observable": "^1.2.0",
"webpack": "4.41.0",
"webpack": "^4.41.5",
"whatwg-fetch": "^3.0.0"
}
}

View file

@ -1,989 +0,0 @@
/**
* PLEASE DO NOT MODIFY
*
* This file is automatically generated by running `node scripts/build_renovate_config`
*
*/
{
extends: [
'config:base',
],
includePaths: [
'package.json',
'x-pack/package.json',
'x-pack/legacy/plugins/*/package.json',
'packages/*/package.json',
'examples/*/package.json',
'test/plugin_functional/plugins/*/package.json',
'test/interpreter_functional/plugins/*/package.json',
],
baseBranches: [
'master',
],
labels: [
'release_note:skip',
'renovate',
'v8.0.0',
'v7.5.0',
],
major: {
labels: [
'release_note:skip',
'renovate',
'v8.0.0',
'v7.5.0',
'renovate:major',
],
},
masterIssue: true,
masterIssueApproval: true,
rangeStrategy: 'bump',
npm: {
lockFileMaintenance: {
enabled: false,
},
packageRules: [
{
groupSlug: 'eslint',
groupName: 'eslint related packages',
packagePatterns: [
'(\\b|_)eslint(\\b|_)',
],
},
{
groupSlug: 'babel',
groupName: 'babel related packages',
packagePatterns: [
'(\\b|_)babel(\\b|_)',
],
packageNames: [
'core-js',
'@types/core-js',
'@babel/preset-react',
'@types/babel__preset-react',
'@babel/preset-typescript',
'@types/babel__preset-typescript',
],
},
{
groupSlug: 'jest',
groupName: 'jest related packages',
packagePatterns: [
'(\\b|_)jest(\\b|_)',
],
},
{
groupSlug: '@elastic/charts',
groupName: '@elastic/charts related packages',
packageNames: [
'@elastic/charts',
'@types/elastic__charts',
],
reviewers: [
'markov00',
],
},
{
groupSlug: 'mocha',
groupName: 'mocha related packages',
packagePatterns: [
'(\\b|_)mocha(\\b|_)',
],
},
{
groupSlug: 'karma',
groupName: 'karma related packages',
packagePatterns: [
'(\\b|_)karma(\\b|_)',
],
},
{
groupSlug: 'gulp',
groupName: 'gulp related packages',
packagePatterns: [
'(\\b|_)gulp(\\b|_)',
],
},
{
groupSlug: 'grunt',
groupName: 'grunt related packages',
packagePatterns: [
'(\\b|_)grunt(\\b|_)',
],
},
{
groupSlug: 'angular',
groupName: 'angular related packages',
packagePatterns: [
'(\\b|_)angular(\\b|_)',
],
},
{
groupSlug: 'd3',
groupName: 'd3 related packages',
packagePatterns: [
'(\\b|_)d3(\\b|_)',
],
},
{
groupSlug: 'react',
groupName: 'react related packages',
packagePatterns: [
'(\\b|_)react(\\b|_)',
'(\\b|_)redux(\\b|_)',
'(\\b|_)enzyme(\\b|_)',
],
packageNames: [
'ngreact',
'@types/ngreact',
'recompose',
'@types/recompose',
'prop-types',
'@types/prop-types',
'typescript-fsa-reducers',
'@types/typescript-fsa-reducers',
'reselect',
'@types/reselect',
],
},
{
groupSlug: 'moment',
groupName: 'moment related packages',
packagePatterns: [
'(\\b|_)moment(\\b|_)',
],
},
{
groupSlug: 'graphql',
groupName: 'graphql related packages',
packagePatterns: [
'(\\b|_)graphql(\\b|_)',
'(\\b|_)apollo(\\b|_)',
],
},
{
groupSlug: 'webpack',
groupName: 'webpack related packages',
packagePatterns: [
'(\\b|_)webpack(\\b|_)',
'(\\b|_)loader(\\b|_)',
'(\\b|_)acorn(\\b|_)',
'(\\b|_)terser(\\b|_)',
],
packageNames: [
'mini-css-extract-plugin',
'@types/mini-css-extract-plugin',
'chokidar',
'@types/chokidar',
],
},
{
groupSlug: 'vega',
groupName: 'vega related packages',
packagePatterns: [
'(\\b|_)vega(\\b|_)',
],
enabled: false,
},
{
groupSlug: 'language server',
groupName: 'language server related packages',
packageNames: [
'vscode-jsonrpc',
'@types/vscode-jsonrpc',
'vscode-languageserver',
'@types/vscode-languageserver',
'vscode-languageserver-types',
'@types/vscode-languageserver-types',
],
},
{
groupSlug: 'hapi',
groupName: 'hapi related packages',
packagePatterns: [
'(\\b|_)hapi(\\b|_)',
],
packageNames: [
'hapi',
'@types/hapi',
'joi',
'@types/joi',
'boom',
'@types/boom',
'hoek',
'@types/hoek',
'h2o2',
'@types/h2o2',
'@elastic/good',
'@types/elastic__good',
'good-squeeze',
'@types/good-squeeze',
'inert',
'@types/inert',
],
},
{
groupSlug: 'dragselect',
groupName: 'dragselect related packages',
packageNames: [
'dragselect',
'@types/dragselect',
],
labels: [
'release_note:skip',
'renovate',
'v8.0.0',
'v7.5.0',
':ml',
],
},
{
groupSlug: 'api-documenter',
groupName: 'api-documenter related packages',
packageNames: [
'@microsoft/api-documenter',
'@types/microsoft__api-documenter',
'@microsoft/api-extractor',
'@types/microsoft__api-extractor',
],
enabled: false,
},
{
groupSlug: 'jsts',
groupName: 'jsts related packages',
packageNames: [
'jsts',
'@types/jsts',
],
allowedVersions: '^1.6.2',
},
{
groupSlug: 'storybook',
groupName: 'storybook related packages',
packagePatterns: [
'(\\b|_)storybook(\\b|_)',
],
},
{
groupSlug: 'typescript',
groupName: 'typescript related packages',
packagePatterns: [
'(\\b|_)ts(\\b|_)',
'(\\b|_)typescript(\\b|_)',
],
packageNames: [
'tslib',
'@types/tslib',
],
},
{
groupSlug: 'json-stable-stringify',
groupName: 'json-stable-stringify related packages',
packageNames: [
'json-stable-stringify',
'@types/json-stable-stringify',
],
},
{
groupSlug: 'lodash.clonedeep',
groupName: 'lodash.clonedeep related packages',
packageNames: [
'lodash.clonedeep',
'@types/lodash.clonedeep',
],
},
{
groupSlug: 'bluebird',
groupName: 'bluebird related packages',
packageNames: [
'bluebird',
'@types/bluebird',
],
},
{
groupSlug: 'chance',
groupName: 'chance related packages',
packageNames: [
'chance',
'@types/chance',
],
},
{
groupSlug: 'cheerio',
groupName: 'cheerio related packages',
packageNames: [
'cheerio',
'@types/cheerio',
],
},
{
groupSlug: 'chromedriver',
groupName: 'chromedriver related packages',
packageNames: [
'chromedriver',
'@types/chromedriver',
],
},
{
groupSlug: 'classnames',
groupName: 'classnames related packages',
packageNames: [
'classnames',
'@types/classnames',
],
},
{
groupSlug: 'dedent',
groupName: 'dedent related packages',
packageNames: [
'dedent',
'@types/dedent',
],
},
{
groupSlug: 'deep-freeze-strict',
groupName: 'deep-freeze-strict related packages',
packageNames: [
'deep-freeze-strict',
'@types/deep-freeze-strict',
],
},
{
groupSlug: 'delete-empty',
groupName: 'delete-empty related packages',
packageNames: [
'delete-empty',
'@types/delete-empty',
],
},
{
groupSlug: 'elasticsearch',
groupName: 'elasticsearch related packages',
packageNames: [
'elasticsearch',
'@types/elasticsearch',
],
},
{
groupSlug: 'fetch-mock',
groupName: 'fetch-mock related packages',
packageNames: [
'fetch-mock',
'@types/fetch-mock',
],
},
{
groupSlug: 'flot',
groupName: 'flot related packages',
packageNames: [
'flot',
'@types/flot',
],
},
{
groupSlug: 'getopts',
groupName: 'getopts related packages',
packageNames: [
'getopts',
'@types/getopts',
],
},
{
groupSlug: 'glob',
groupName: 'glob related packages',
packageNames: [
'glob',
'@types/glob',
],
},
{
groupSlug: 'globby',
groupName: 'globby related packages',
packageNames: [
'globby',
'@types/globby',
],
},
{
groupSlug: 'has-ansi',
groupName: 'has-ansi related packages',
packageNames: [
'has-ansi',
'@types/has-ansi',
],
},
{
groupSlug: 'history',
groupName: 'history related packages',
packageNames: [
'history',
'@types/history',
],
},
{
groupSlug: 'jquery',
groupName: 'jquery related packages',
packageNames: [
'jquery',
'@types/jquery',
],
},
{
groupSlug: 'js-yaml',
groupName: 'js-yaml related packages',
packageNames: [
'js-yaml',
'@types/js-yaml',
],
},
{
groupSlug: 'json5',
groupName: 'json5 related packages',
packageNames: [
'json5',
'@types/json5',
],
},
{
groupSlug: 'license-checker',
groupName: 'license-checker related packages',
packageNames: [
'license-checker',
'@types/license-checker',
],
},
{
groupSlug: 'listr',
groupName: 'listr related packages',
packageNames: [
'listr',
'@types/listr',
],
},
{
groupSlug: 'lodash',
groupName: 'lodash related packages',
packageNames: [
'lodash',
'@types/lodash',
],
},
{
groupSlug: 'lru-cache',
groupName: 'lru-cache related packages',
packageNames: [
'lru-cache',
'@types/lru-cache',
],
},
{
groupSlug: 'markdown-it',
groupName: 'markdown-it related packages',
packageNames: [
'markdown-it',
'@types/markdown-it',
],
},
{
groupSlug: 'minimatch',
groupName: 'minimatch related packages',
packageNames: [
'minimatch',
'@types/minimatch',
],
},
{
groupSlug: 'mustache',
groupName: 'mustache related packages',
packageNames: [
'mustache',
'@types/mustache',
],
},
{
groupSlug: 'node',
groupName: 'node related packages',
packageNames: [
'node',
'@types/node',
],
},
{
groupSlug: 'opn',
groupName: 'opn related packages',
packageNames: [
'opn',
'@types/opn',
],
},
{
groupSlug: 'pegjs',
groupName: 'pegjs related packages',
packageNames: [
'pegjs',
'@types/pegjs',
],
},
{
groupSlug: 'pngjs',
groupName: 'pngjs related packages',
packageNames: [
'pngjs',
'@types/pngjs',
],
},
{
groupSlug: 'podium',
groupName: 'podium related packages',
packageNames: [
'podium',
'@types/podium',
],
},
{
groupSlug: 'request',
groupName: 'request related packages',
packageNames: [
'request',
'@types/request',
],
},
{
groupSlug: 'selenium-webdriver',
groupName: 'selenium-webdriver related packages',
packageNames: [
'selenium-webdriver',
'@types/selenium-webdriver',
],
},
{
groupSlug: 'semver',
groupName: 'semver related packages',
packageNames: [
'semver',
'@types/semver',
],
},
{
groupSlug: 'sinon',
groupName: 'sinon related packages',
packageNames: [
'sinon',
'@types/sinon',
],
},
{
groupSlug: 'strip-ansi',
groupName: 'strip-ansi related packages',
packageNames: [
'strip-ansi',
'@types/strip-ansi',
],
},
{
groupSlug: 'styled-components',
groupName: 'styled-components related packages',
packageNames: [
'styled-components',
'@types/styled-components',
],
},
{
groupSlug: 'supertest',
groupName: 'supertest related packages',
packageNames: [
'supertest',
'@types/supertest',
],
},
{
groupSlug: 'supertest-as-promised',
groupName: 'supertest-as-promised related packages',
packageNames: [
'supertest-as-promised',
'@types/supertest-as-promised',
],
},
{
groupSlug: 'type-detect',
groupName: 'type-detect related packages',
packageNames: [
'type-detect',
'@types/type-detect',
],
},
{
groupSlug: 'uuid',
groupName: 'uuid related packages',
packageNames: [
'uuid',
'@types/uuid',
],
},
{
groupSlug: 'vinyl-fs',
groupName: 'vinyl-fs related packages',
packageNames: [
'vinyl-fs',
'@types/vinyl-fs',
],
},
{
groupSlug: 'zen-observable',
groupName: 'zen-observable related packages',
packageNames: [
'zen-observable',
'@types/zen-observable',
],
},
{
groupSlug: 'archiver',
groupName: 'archiver related packages',
packageNames: [
'archiver',
'@types/archiver',
],
},
{
groupSlug: 'base64-js',
groupName: 'base64-js related packages',
packageNames: [
'base64-js',
'@types/base64-js',
],
},
{
groupSlug: 'chroma-js',
groupName: 'chroma-js related packages',
packageNames: [
'chroma-js',
'@types/chroma-js',
],
},
{
groupSlug: 'color',
groupName: 'color related packages',
packageNames: [
'color',
'@types/color',
],
},
{
groupSlug: 'cytoscape',
groupName: 'cytoscape related packages',
packageNames: [
'cytoscape',
'@types/cytoscape',
],
},
{
groupSlug: 'fancy-log',
groupName: 'fancy-log related packages',
packageNames: [
'fancy-log',
'@types/fancy-log',
],
},
{
groupSlug: 'file-saver',
groupName: 'file-saver related packages',
packageNames: [
'file-saver',
'@types/file-saver',
],
},
{
groupSlug: 'getos',
groupName: 'getos related packages',
packageNames: [
'getos',
'@types/getos',
],
},
{
groupSlug: 'git-url-parse',
groupName: 'git-url-parse related packages',
packageNames: [
'git-url-parse',
'@types/git-url-parse',
],
},
{
groupSlug: 'jsdom',
groupName: 'jsdom related packages',
packageNames: [
'jsdom',
'@types/jsdom',
],
},
{
groupSlug: 'jsonwebtoken',
groupName: 'jsonwebtoken related packages',
packageNames: [
'jsonwebtoken',
'@types/jsonwebtoken',
],
},
{
groupSlug: 'mapbox-gl',
groupName: 'mapbox-gl related packages',
packageNames: [
'mapbox-gl',
'@types/mapbox-gl',
],
},
{
groupSlug: 'memoize-one',
groupName: 'memoize-one related packages',
packageNames: [
'memoize-one',
'@types/memoize-one',
],
},
{
groupSlug: 'numeral',
groupName: 'numeral related packages',
packageNames: [
'numeral',
'@types/numeral',
],
},
{
groupSlug: 'mime',
groupName: 'mime related packages',
packageNames: [
'mime',
'@types/mime',
],
},
{
groupSlug: 'nock',
groupName: 'nock related packages',
packageNames: [
'nock',
'@types/nock',
],
},
{
groupSlug: 'node-fetch',
groupName: 'node-fetch related packages',
packageNames: [
'node-fetch',
'@types/node-fetch',
],
},
{
groupSlug: 'node-forge',
groupName: 'node-forge related packages',
packageNames: [
'node-forge',
'@types/node-forge',
],
},
{
groupSlug: 'nodemailer',
groupName: 'nodemailer related packages',
packageNames: [
'nodemailer',
'@types/nodemailer',
],
},
{
groupSlug: 'object-hash',
groupName: 'object-hash related packages',
packageNames: [
'object-hash',
'@types/object-hash',
],
},
{
groupSlug: 'papaparse',
groupName: 'papaparse related packages',
packageNames: [
'papaparse',
'@types/papaparse',
],
},
{
groupSlug: 'proper-lockfile',
groupName: 'proper-lockfile related packages',
packageNames: [
'proper-lockfile',
'@types/proper-lockfile',
],
},
{
groupSlug: 'puppeteer',
groupName: 'puppeteer related packages',
packageNames: [
'puppeteer',
'@types/puppeteer',
],
},
{
groupSlug: 'reduce-reducers',
groupName: 'reduce-reducers related packages',
packageNames: [
'reduce-reducers',
'@types/reduce-reducers',
],
},
{
groupSlug: 'tar-fs',
groupName: 'tar-fs related packages',
packageNames: [
'tar-fs',
'@types/tar-fs',
],
},
{
groupSlug: 'tinycolor2',
groupName: 'tinycolor2 related packages',
packageNames: [
'tinycolor2',
'@types/tinycolor2',
],
},
{
groupSlug: 'xml-crypto',
groupName: 'xml-crypto related packages',
packageNames: [
'xml-crypto',
'@types/xml-crypto',
],
},
{
groupSlug: 'xml2js',
groupName: 'xml2js related packages',
packageNames: [
'xml2js',
'@types/xml2js',
],
},
{
groupSlug: 'intl-relativeformat',
groupName: 'intl-relativeformat related packages',
packageNames: [
'intl-relativeformat',
'@types/intl-relativeformat',
],
},
{
groupSlug: 'cmd-shim',
groupName: 'cmd-shim related packages',
packageNames: [
'cmd-shim',
'@types/cmd-shim',
],
},
{
groupSlug: 'cpy',
groupName: 'cpy related packages',
packageNames: [
'cpy',
'@types/cpy',
],
},
{
groupSlug: 'indent-string',
groupName: 'indent-string related packages',
packageNames: [
'indent-string',
'@types/indent-string',
],
},
{
groupSlug: 'lodash.clonedeepwith',
groupName: 'lodash.clonedeepwith related packages',
packageNames: [
'lodash.clonedeepwith',
'@types/lodash.clonedeepwith',
],
},
{
groupSlug: 'log-symbols',
groupName: 'log-symbols related packages',
packageNames: [
'log-symbols',
'@types/log-symbols',
],
},
{
groupSlug: 'ncp',
groupName: 'ncp related packages',
packageNames: [
'ncp',
'@types/ncp',
],
},
{
groupSlug: 'ora',
groupName: 'ora related packages',
packageNames: [
'ora',
'@types/ora',
],
},
{
groupSlug: 'read-pkg',
groupName: 'read-pkg related packages',
packageNames: [
'read-pkg',
'@types/read-pkg',
],
},
{
groupSlug: 'strong-log-transformer',
groupName: 'strong-log-transformer related packages',
packageNames: [
'strong-log-transformer',
'@types/strong-log-transformer',
],
},
{
groupSlug: 'tempy',
groupName: 'tempy related packages',
packageNames: [
'tempy',
'@types/tempy',
],
},
{
groupSlug: 'wrap-ansi',
groupName: 'wrap-ansi related packages',
packageNames: [
'wrap-ansi',
'@types/wrap-ansi',
],
},
{
groupSlug: 'write-pkg',
groupName: 'write-pkg related packages',
packageNames: [
'write-pkg',
'@types/write-pkg',
],
},
{
groupSlug: 'parse-link-header',
groupName: 'parse-link-header related packages',
packageNames: [
'parse-link-header',
'@types/parse-link-header',
],
},
{
packagePatterns: [
'^@kbn/.*',
],
enabled: false,
},
],
},
prConcurrentLimit: 0,
vulnerabilityAlerts: {
enabled: false,
},
rebaseStalePrs: false,
semanticCommits: false,
}

View file

@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
require('@kbn/optimizer/target/cli');

View file

@ -17,7 +17,24 @@
* under the License.
*/
import * as Rx from 'rxjs';
import { mockCluster } from './cluster_manager.test.mocks';
jest.mock('./run_kbn_optimizer', () => {
// eslint-disable-next-line @typescript-eslint/no-var-requires,no-shadow
const Rx = require('rxjs');
return {
runKbnOptimizer: () =>
new Rx.BehaviorSubject({
type: 'compiler success',
durSec: 0,
bundles: [],
}),
};
});
jest.mock('readline', () => ({
createInterface: jest.fn(() => ({
on: jest.fn(),
@ -26,6 +43,13 @@ jest.mock('readline', () => ({
})),
}));
const mockConfig: any = {
get: (key: string) => {
expect(key).toBe('optimize.enabled');
return false;
},
};
import { sample } from 'lodash';
import { ClusterManager } from './cluster_manager';
@ -51,7 +75,7 @@ describe('CLI cluster manager', () => {
});
test('has two workers', () => {
const manager = new ClusterManager({}, {} as any);
const manager = new ClusterManager({}, mockConfig);
expect(manager.workers).toHaveLength(2);
for (const worker of manager.workers) expect(worker).toBeInstanceOf(Worker);
@ -61,7 +85,7 @@ describe('CLI cluster manager', () => {
});
test('delivers broadcast messages to other workers', () => {
const manager = new ClusterManager({}, {} as any);
const manager = new ClusterManager({}, mockConfig);
for (const worker of manager.workers) {
Worker.prototype.start.call(worker); // bypass the debounced start method
@ -86,92 +110,59 @@ describe('CLI cluster manager', () => {
test('correctly configures `BasePathProxy`.', async () => {
const basePathProxyMock = { start: jest.fn() };
new ClusterManager({}, {} as any, basePathProxyMock as any);
new ClusterManager({}, mockConfig, basePathProxyMock as any);
expect(basePathProxyMock.start).toHaveBeenCalledWith({
shouldRedirectFromOldBasePath: expect.any(Function),
blockUntil: expect.any(Function),
delayUntil: expect.any(Function),
});
});
describe('proxy is configured with the correct `shouldRedirectFromOldBasePath` and `blockUntil` functions.', () => {
describe('basePathProxy config', () => {
let clusterManager: ClusterManager;
let shouldRedirectFromOldBasePath: (path: string) => boolean;
let blockUntil: () => Promise<any>;
let delayUntil: () => Rx.Observable<undefined>;
beforeEach(async () => {
const basePathProxyMock = { start: jest.fn() };
clusterManager = new ClusterManager({}, {} as any, basePathProxyMock as any);
jest.spyOn(clusterManager.server, 'on');
jest.spyOn(clusterManager.server, 'off');
[[{ blockUntil, shouldRedirectFromOldBasePath }]] = basePathProxyMock.start.mock.calls;
clusterManager = new ClusterManager({}, mockConfig, basePathProxyMock as any);
[[{ delayUntil, shouldRedirectFromOldBasePath }]] = basePathProxyMock.start.mock.calls;
});
test('`shouldRedirectFromOldBasePath()` returns `false` for unknown paths.', () => {
expect(shouldRedirectFromOldBasePath('')).toBe(false);
expect(shouldRedirectFromOldBasePath('some-path/')).toBe(false);
expect(shouldRedirectFromOldBasePath('some-other-path')).toBe(false);
describe('shouldRedirectFromOldBasePath()', () => {
test('returns `false` for unknown paths.', () => {
expect(shouldRedirectFromOldBasePath('')).toBe(false);
expect(shouldRedirectFromOldBasePath('some-path/')).toBe(false);
expect(shouldRedirectFromOldBasePath('some-other-path')).toBe(false);
});
test('returns `true` for `app` and other known paths.', () => {
expect(shouldRedirectFromOldBasePath('app/')).toBe(true);
expect(shouldRedirectFromOldBasePath('login')).toBe(true);
expect(shouldRedirectFromOldBasePath('logout')).toBe(true);
expect(shouldRedirectFromOldBasePath('status')).toBe(true);
});
});
test('`shouldRedirectFromOldBasePath()` returns `true` for `app` and other known paths.', () => {
expect(shouldRedirectFromOldBasePath('app/')).toBe(true);
expect(shouldRedirectFromOldBasePath('login')).toBe(true);
expect(shouldRedirectFromOldBasePath('logout')).toBe(true);
expect(shouldRedirectFromOldBasePath('status')).toBe(true);
});
describe('delayUntil()', () => {
test('returns an observable which emits when the server and kbnOptimizer are ready and completes', async () => {
clusterManager.serverReady$.next(false);
clusterManager.optimizerReady$.next(false);
clusterManager.kbnOptimizerReady$.next(false);
test('`blockUntil()` resolves immediately if worker has already crashed.', async () => {
clusterManager.server.crashed = true;
const events: Array<string | Error> = [];
delayUntil().subscribe(
() => events.push('next'),
error => events.push(error),
() => events.push('complete')
);
await expect(blockUntil()).resolves.not.toBeDefined();
expect(clusterManager.server.on).not.toHaveBeenCalled();
expect(clusterManager.server.off).not.toHaveBeenCalled();
});
clusterManager.serverReady$.next(true);
expect(events).toEqual([]);
test('`blockUntil()` resolves immediately if worker is already listening.', async () => {
clusterManager.server.listening = true;
await expect(blockUntil()).resolves.not.toBeDefined();
expect(clusterManager.server.on).not.toHaveBeenCalled();
expect(clusterManager.server.off).not.toHaveBeenCalled();
});
test('`blockUntil()` resolves when worker crashes.', async () => {
const blockUntilPromise = blockUntil();
expect(clusterManager.server.on).toHaveBeenCalledTimes(2);
expect(clusterManager.server.on).toHaveBeenCalledWith('crashed', expect.any(Function));
const [, [eventName, onCrashed]] = (clusterManager.server.on as jest.Mock).mock.calls;
// Check event name to make sure we call the right callback,
// in Jest 23 we could use `toHaveBeenNthCalledWith` instead.
expect(eventName).toBe('crashed');
expect(clusterManager.server.off).not.toHaveBeenCalled();
onCrashed();
await expect(blockUntilPromise).resolves.not.toBeDefined();
expect(clusterManager.server.off).toHaveBeenCalledTimes(2);
});
test('`blockUntil()` resolves when worker starts listening.', async () => {
const blockUntilPromise = blockUntil();
expect(clusterManager.server.on).toHaveBeenCalledTimes(2);
expect(clusterManager.server.on).toHaveBeenCalledWith('listening', expect.any(Function));
const [[eventName, onListening]] = (clusterManager.server.on as jest.Mock).mock.calls;
// Check event name to make sure we call the right callback,
// in Jest 23 we could use `toHaveBeenNthCalledWith` instead.
expect(eventName).toBe('listening');
expect(clusterManager.server.off).not.toHaveBeenCalled();
onListening();
await expect(blockUntilPromise).resolves.not.toBeDefined();
expect(clusterManager.server.off).toHaveBeenCalledTimes(2);
clusterManager.kbnOptimizerReady$.next(true);
expect(events).toEqual(['next', 'complete']);
});
});
});
});

View file

@ -19,22 +19,29 @@
import { resolve } from 'path';
import { format as formatUrl } from 'url';
import opn from 'opn';
import { debounce, invoke, bindAll, once, uniq } from 'lodash';
import * as Rx from 'rxjs';
import { first, mapTo, filter, map, take } from 'rxjs/operators';
import { REPO_ROOT } from '@kbn/dev-utils';
import { FSWatcher } from 'chokidar';
import * as Rx from 'rxjs';
import { startWith, mapTo, filter, map, take, tap } from 'rxjs/operators';
import { runKbnOptimizer } from './run_kbn_optimizer';
import { LegacyConfig } from '../../core/server/legacy';
import { BasePathProxyServer } from '../../core/server/http';
// @ts-ignore
import Log from '../log';
import { Log } from './log';
import { Worker } from './worker';
process.env.kbnWorkerType = 'managr';
const firstAllTrue = (...sources: Array<Rx.Observable<boolean>>) =>
Rx.combineLatest(...sources).pipe(
filter(values => values.every(v => v === true)),
take(1),
mapTo(undefined)
);
export class ClusterManager {
public optimizer: Worker;
public server: Worker;
@ -42,10 +49,17 @@ export class ClusterManager {
private watcher: FSWatcher | null = null;
private basePathProxy: BasePathProxyServer | undefined;
private log: any;
private log: Log;
private addedCount = 0;
private inReplMode: boolean;
// exposed for testing
public readonly serverReady$ = new Rx.ReplaySubject<boolean>(1);
// exposed for testing
public readonly optimizerReady$ = new Rx.ReplaySubject<boolean>(1);
// exposed for testing
public readonly kbnOptimizerReady$ = new Rx.ReplaySubject<boolean>(1);
constructor(
opts: Record<string, any>,
config: LegacyConfig,
@ -55,6 +69,23 @@ export class ClusterManager {
this.inReplMode = !!opts.repl;
this.basePathProxy = basePathProxy;
if (config.get('optimize.enabled') !== false) {
// run @kbn/optimizer and write its state to kbnOptimizerReady$
runKbnOptimizer(opts, config)
.pipe(
map(({ state }) => state.phase === 'success' || state.phase === 'issue'),
tap({
error: error => {
this.log.bad('New platform optimizer error', error.stack);
process.exit(1);
},
})
)
.subscribe(this.kbnOptimizerReady$);
} else {
this.kbnOptimizerReady$.next(true);
}
const serverArgv = [];
const optimizerArgv = ['--plugins.initialize=false', '--server.autoListen=false'];
@ -86,6 +117,27 @@ export class ClusterManager {
})),
];
// write server status to the serverReady$ subject
Rx.merge(
Rx.fromEvent(this.server, 'starting').pipe(mapTo(false)),
Rx.fromEvent(this.server, 'listening').pipe(mapTo(true)),
Rx.fromEvent(this.server, 'crashed').pipe(mapTo(true))
)
.pipe(startWith(this.server.listening || this.server.crashed))
.subscribe(this.serverReady$);
// write optimizer status to the optimizerReady$ subject
Rx.merge(
Rx.fromEvent(this.optimizer, 'optimizeStatus'),
Rx.defer(() => {
if (this.optimizer.fork) {
this.optimizer.fork.send({ optimizeReady: '?' });
}
})
)
.pipe(map((msg: any) => msg && !!msg.success))
.subscribe(this.optimizerReady$);
// broker messages between workers
this.workers.forEach(worker => {
worker.on('broadcast', msg => {
@ -109,8 +161,6 @@ export class ClusterManager {
});
});
bindAll(this, 'onWatcherAdd', 'onWatcherError', 'onWatcherChange');
if (opts.open) {
this.setupOpen(
formatUrl({
@ -137,11 +187,11 @@ export class ClusterManager {
.reduce(
(acc, path) =>
acc.concat(
resolve(path, 'test'),
resolve(path, 'build'),
resolve(path, 'target'),
resolve(path, 'scripts'),
resolve(path, 'docs')
resolve(path, 'test/**'),
resolve(path, 'build/**'),
resolve(path, 'target/**'),
resolve(path, 'scripts/**'),
resolve(path, 'docs/**')
),
[] as string[]
);
@ -152,33 +202,36 @@ export class ClusterManager {
startCluster() {
this.setupManualRestart();
invoke(this.workers, 'start');
for (const worker of this.workers) {
worker.start();
}
if (this.basePathProxy) {
this.basePathProxy.start({
blockUntil: this.blockUntil.bind(this),
shouldRedirectFromOldBasePath: this.shouldRedirectFromOldBasePath.bind(this),
delayUntil: () => firstAllTrue(this.serverReady$, this.kbnOptimizerReady$),
shouldRedirectFromOldBasePath: (path: string) => {
// strip `s/{id}` prefix when checking for need to redirect
if (path.startsWith('s/')) {
path = path
.split('/')
.slice(2)
.join('/');
}
const isApp = path.startsWith('app/');
const isKnownShortPath = ['login', 'logout', 'status'].includes(path);
return isApp || isKnownShortPath;
},
});
}
}
setupOpen(openUrl: string) {
const serverListening$ = Rx.merge(
Rx.fromEvent(this.server, 'listening').pipe(mapTo(true)),
Rx.fromEvent(this.server, 'fork:exit').pipe(mapTo(false)),
Rx.fromEvent(this.server, 'crashed').pipe(mapTo(false))
);
const optimizeSuccess$ = Rx.fromEvent(this.optimizer, 'optimizeStatus').pipe(
map((msg: any) => !!msg.success)
);
Rx.combineLatest(serverListening$, optimizeSuccess$)
.pipe(
filter(([serverListening, optimizeSuccess]) => serverListening && optimizeSuccess),
take(1)
)
firstAllTrue(this.serverReady$, this.kbnOptimizerReady$, this.optimizerReady$)
.toPromise()
.then(() => opn(openUrl));
.then(() => {
opn(openUrl);
});
}
setupWatching(extraPaths: string[], pluginInternalDirsIgnore: string[]) {
@ -187,52 +240,51 @@ export class ClusterManager {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { fromRoot } = require('../../core/server/utils');
const watchPaths = [
fromRoot('src/core'),
fromRoot('src/legacy/core_plugins'),
fromRoot('src/legacy/server'),
fromRoot('src/legacy/ui'),
fromRoot('src/legacy/utils'),
fromRoot('x-pack/legacy/common'),
fromRoot('x-pack/legacy/plugins'),
fromRoot('x-pack/legacy/server'),
fromRoot('config'),
...extraPaths,
].map(path => resolve(path));
const watchPaths = Array.from(
new Set(
[
fromRoot('src/core'),
fromRoot('src/legacy/core_plugins'),
fromRoot('src/legacy/server'),
fromRoot('src/legacy/ui'),
fromRoot('src/legacy/utils'),
fromRoot('x-pack/legacy/common'),
fromRoot('x-pack/legacy/plugins'),
fromRoot('x-pack/legacy/server'),
fromRoot('config'),
...extraPaths,
].map(path => resolve(path))
)
);
const ignorePaths = [
/[\\\/](\..*|node_modules|bower_components|public|__[a-z0-9_]+__|coverage)[\\\/]/,
/\.test\.(js|ts)$/,
...pluginInternalDirsIgnore,
fromRoot('src/legacy/server/sass/__tmp__'),
fromRoot('x-pack/legacy/plugins/reporting/.chromium'),
fromRoot('x-pack/legacy/plugins/siem/cypress'),
fromRoot('x-pack/legacy/plugins/apm/cypress'),
fromRoot('x-pack/legacy/plugins/canvas/canvas_plugin_src'), // prevents server from restarting twice for Canvas plugin changes
fromRoot('x-pack/legacy/plugins/apm/scripts'),
fromRoot('x-pack/legacy/plugins/canvas/canvas_plugin_src'), // prevents server from restarting twice for Canvas plugin changes,
'plugins/java_languageserver',
];
this.watcher = chokidar.watch(uniq(watchPaths), {
this.watcher = chokidar.watch(watchPaths, {
cwd: fromRoot('.'),
ignored: [
/[\\\/](\..*|node_modules|bower_components|public|__[a-z0-9_]+__|coverage)[\\\/]/,
/\.test\.(js|ts)$/,
...pluginInternalDirsIgnore,
...ignorePaths,
'plugins/java_languageserver',
],
ignored: ignorePaths,
}) as FSWatcher;
this.watcher.on('add', this.onWatcherAdd);
this.watcher.on('error', this.onWatcherError);
this.watcher.once('ready', () => {
// start sending changes to workers
this.watcher!.removeListener('add', this.onWatcherAdd);
this.watcher!.on('all', this.onWatcherChange);
this.watcher.on(
'ready',
once(() => {
// start sending changes to workers
this.watcher!.removeListener('add', this.onWatcherAdd);
this.watcher!.on('all', this.onWatcherChange);
this.log.good('watching for changes', `(${this.addedCount} files)`);
this.startCluster();
})
);
this.log.good('watching for changes', `(${this.addedCount} files)`);
this.startCluster();
});
}
setupManualRestart() {
@ -248,7 +300,20 @@ export class ClusterManager {
let nls = 0;
const clear = () => (nls = 0);
const clearSoon = debounce(clear, 2000);
let clearTimer: number | undefined;
const clearSoon = () => {
clearSoon.cancel();
clearTimer = setTimeout(() => {
clearTimer = undefined;
clear();
}, 2000);
};
clearSoon.cancel = () => {
clearTimeout(clearTimer);
clearTimer = undefined;
};
rl.setPrompt('');
rl.prompt();
@ -273,41 +338,18 @@ export class ClusterManager {
});
}
onWatcherAdd() {
onWatcherAdd = () => {
this.addedCount += 1;
}
};
onWatcherChange(e: any, path: string) {
invoke(this.workers, 'onChange', path);
}
onWatcherChange = (e: any, path: string) => {
for (const worker of this.workers) {
worker.onChange(path);
}
};
onWatcherError(err: any) {
onWatcherError = (err: any) => {
this.log.bad('failed to watch files!\n', err.stack);
process.exit(1); // eslint-disable-line no-process-exit
}
shouldRedirectFromOldBasePath(path: string) {
// strip `s/{id}` prefix when checking for need to redirect
if (path.startsWith('s/')) {
path = path
.split('/')
.slice(2)
.join('/');
}
const isApp = path.startsWith('app/');
const isKnownShortPath = ['login', 'logout', 'status'].includes(path);
return isApp || isKnownShortPath;
}
blockUntil() {
// Wait until `server` worker either crashes or starts to listen.
if (this.server.listening || this.server.crashed) {
return Promise.resolve();
}
return Rx.race(Rx.fromEvent(this.server, 'listening'), Rx.fromEvent(this.server, 'crashed'))
.pipe(first())
.toPromise();
}
};
}

src/cli/cluster/log.ts

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Chalk from 'chalk';
export class Log {
constructor(private readonly quiet: boolean, private readonly silent: boolean) {}
good(label: string, ...args: any[]) {
if (this.quiet || this.silent) {
return;
}
// eslint-disable-next-line no-console
console.log(Chalk.black.bgGreen(` ${label.trim()} `), ...args);
}
warn(label: string, ...args: any[]) {
if (this.quiet || this.silent) {
return;
}
// eslint-disable-next-line no-console
console.log(Chalk.black.bgYellow(` ${label.trim()} `), ...args);
}
bad(label: string, ...args: any[]) {
if (this.silent) {
return;
}
// eslint-disable-next-line no-console
console.log(Chalk.white.bgRed(` ${label.trim()} `), ...args);
}
write(label: string, ...args: any[]) {
// eslint-disable-next-line no-console
console.log(` ${label.trim()} `, ...args);
}
}
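
A minimal usage sketch of the new Log class (an illustrative example, not part of the diff); the constructor flags here are hypothetical values rather than the real CLI options:

import { Log } from './log';

const log = new Log(false /* quiet */, false /* silent */);

log.good('watching for changes', '(1234 files)'); // suppressed when quiet or silent
log.warn('restarting server', 'due to file changes'); // suppressed when quiet or silent
log.bad('failed to watch files!', 'stack trace here'); // suppressed only when silent
log.write('optimizer', 'always printed regardless of flags');
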


@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Chalk from 'chalk';
import moment from 'moment';
import {
ToolingLog,
pickLevelFromFlags,
ToolingLogTextWriter,
parseLogLevel,
REPO_ROOT,
} from '@kbn/dev-utils';
import { runOptimizer, OptimizerConfig, logOptimizerState } from '@kbn/optimizer';
import { LegacyConfig } from '../../core/server/legacy';
export function runKbnOptimizer(opts: Record<string, any>, config: LegacyConfig) {
const optimizerConfig = OptimizerConfig.create({
repoRoot: REPO_ROOT,
watch: true,
oss: !!opts.oss,
examples: !!opts.runExamples,
pluginPaths: config.get('plugins.paths'),
});
const dim = Chalk.dim('np bld');
const name = Chalk.magentaBright('@kbn/optimizer');
const time = () => moment().format('HH:mm:ss.SSS');
const level = (msgType: string) => {
switch (msgType) {
case 'info':
return Chalk.green(msgType);
case 'success':
return Chalk.cyan(msgType);
case 'debug':
return Chalk.gray(msgType);
default:
return msgType;
}
};
const { flags: levelFlags } = parseLogLevel(pickLevelFromFlags(opts));
const toolingLog = new ToolingLog();
const has = <T extends object>(obj: T, x: any): x is keyof T => obj.hasOwnProperty(x);
toolingLog.setWriters([
{
write(msg) {
if (has(levelFlags, msg.type) && !levelFlags[msg.type]) {
return false;
}
ToolingLogTextWriter.write(
process.stdout,
`${dim} log [${time()}] [${level(msg.type)}][${name}] `,
msg
);
return true;
},
},
]);
return runOptimizer(optimizerConfig).pipe(logOptimizerState(toolingLog, optimizerConfig));
}
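
As a hedged sketch of how the returned observable might be consumed, mirroring the ClusterManager wiring shown earlier (the helper name below is illustrative, not an API from the PR):

import * as Rx from 'rxjs';
import { map } from 'rxjs/operators';
import { LegacyConfig } from '../../core/server/legacy';
import { runKbnOptimizer } from './run_kbn_optimizer';

// Treat both the 'success' and 'issue' phases as "ready", as the cluster manager does.
export function kbnOptimizerReady$(
  opts: Record<string, any>,
  config: LegacyConfig
): Rx.Observable<boolean> {
  return runKbnOptimizer(opts, config).pipe(
    map(({ state }) => state.phase === 'success' || state.phase === 'issue')
  );
}
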


@ -20,8 +20,8 @@
import { mockCluster } from './cluster_manager.test.mocks';
import { Worker, ClusterWorker } from './worker';
// @ts-ignore
import Log from '../log';
import { Log } from './log';
const workersToShutdown: Worker[] = [];


@ -199,6 +199,7 @@ export class Worker extends EventEmitter {
}
this.fork = cluster.fork(this.env) as ClusterWorker;
this.emit('starting');
this.forkBinder = new BinderFor(this.fork);
// when the fork sends a message, comes online, or loses its connection, then react


@ -18,17 +18,17 @@
*/
import _ from 'lodash';
import Chalk from 'chalk';
import help from './help';
import { Command } from 'commander';
import { red } from './color';
Command.prototype.error = function(err) {
if (err && err.message) err = err.message;
console.log(
`
${red(' ERROR ')} ${err}
${Chalk.white.bgRed(' ERROR ')} ${err}
${help(this, ' ')}
`


@ -195,7 +195,7 @@ export default function(program) {
[]
)
.option('--plugins <path>', 'an alias for --plugin-dir', pluginDirCollector)
.option('--optimize', 'Optimize and then stop the server');
.option('--optimize', 'Run the legacy plugin optimizer and then stop the server');
if (CAN_REPL) {
command.option('--repl', 'Run the server with a REPL prompt and access to the server object');


@ -62,7 +62,7 @@ test('`loadPluginBundles` creates a script tag and loads initializer', async ()
const fakeScriptTag = createdScriptTags[0];
expect(fakeScriptTag.setAttribute).toHaveBeenCalledWith(
'src',
'/bundles/plugin/plugin-a.bundle.js'
'/bundles/plugin/plugin-a/plugin-a.plugin.js'
);
expect(fakeScriptTag.setAttribute).toHaveBeenCalledWith('id', 'kbn-plugin-plugin-a');
expect(fakeScriptTag.onload).toBeInstanceOf(Function);
@ -85,7 +85,7 @@ test('`loadPluginBundles` includes the basePath', async () => {
const fakeScriptTag = createdScriptTags[0];
expect(fakeScriptTag.setAttribute).toHaveBeenCalledWith(
'src',
'/mybasepath/bundles/plugin/plugin-a.bundle.js'
'/mybasepath/bundles/plugin/plugin-a/plugin-a.plugin.js'
);
});
@ -96,7 +96,7 @@ test('`loadPluginBundles` rejects if script.onerror is called', async () => {
fakeScriptTag1.onerror(new Error('Whoa there!'));
await expect(loadPromise).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failed to load \\"plugin-a\\" bundle (/bundles/plugin/plugin-a.bundle.js)"`
`"Failed to load \\"plugin-a\\" bundle (/bundles/plugin/plugin-a/plugin-a.plugin.js)"`
);
});
@ -105,7 +105,7 @@ test('`loadPluginBundles` rejects if timeout is reached', async () => {
// Override the timeout to 1 ms for testing.
loadPluginBundle(addBasePath, 'plugin-a', { timeoutMs: 1 })
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Timeout reached when loading \\"plugin-a\\" bundle (/bundles/plugin/plugin-a.bundle.js)"`
`"Timeout reached when loading \\"plugin-a\\" bundle (/bundles/plugin/plugin-a/plugin-a.plugin.js)"`
);
});
@ -120,6 +120,6 @@ test('`loadPluginBundles` rejects if bundle does not attach an initializer to window
fakeScriptTag1.onload();
await expect(loadPromise).rejects.toThrowErrorMatchingInlineSnapshot(
`"Definition of plugin \\"plugin-a\\" should be a function (/bundles/plugin/plugin-a.bundle.js)."`
`"Definition of plugin \\"plugin-a\\" should be a function (/bundles/plugin/plugin-a/plugin-a.plugin.js)."`
);
});


@ -74,7 +74,7 @@ export const loadPluginBundle: LoadPluginBundle = <
const coreWindow = (window as unknown) as CoreWindow;
// Assumes that all plugin bundles get put into the bundles/plugins subdirectory
const bundlePath = addBasePath(`/bundles/plugin/${pluginName}.bundle.js`);
const bundlePath = addBasePath(`/bundles/plugin/${pluginName}/${pluginName}.plugin.js`);
script.setAttribute('src', bundlePath);
script.setAttribute('id', `kbn-plugin-${pluginName}`);
script.setAttribute('async', '');


@ -100,6 +100,11 @@ export class Env {
this.binDir = resolve(this.homeDir, 'bin');
this.logDir = resolve(this.homeDir, 'log');
/**
* BEWARE: this needs to stay roughly synchronized with the @kbn/optimizer;
* `packages/kbn-optimizer/src/optimizer_config.ts` determines the paths
* that should be searched for plugins to build.
*/
this.pluginSearchPaths = [
resolve(this.homeDir, 'src', 'plugins'),
...(options.cliArgs.oss ? [] : [resolve(this.homeDir, 'x-pack', 'plugins')]),


@ -17,13 +17,17 @@
* under the License.
*/
import apm from 'elastic-apm-node';
import { ByteSizeValue } from '@kbn/config-schema';
import { Server, Request } from 'hapi';
import Url from 'url';
import { Agent as HttpsAgent, ServerOptions as TlsOptions } from 'https';
import apm from 'elastic-apm-node';
import { ByteSizeValue } from '@kbn/config-schema';
import { Server, Request, ResponseToolkit } from 'hapi';
import { sample } from 'lodash';
import BrowserslistUserAgent from 'browserslist-useragent';
import * as Rx from 'rxjs';
import { take } from 'rxjs/operators';
import { DevConfig } from '../dev';
import { Logger } from '../logging';
import { HttpConfig } from './http_config';
@ -33,9 +37,37 @@ const alphabet = 'abcdefghijklmnopqrztuvwxyz'.split('');
export interface BasePathProxyServerOptions {
shouldRedirectFromOldBasePath: (path: string) => boolean;
blockUntil: () => Promise<void>;
delayUntil: () => Rx.Observable<void>;
}
// Before we proxy request to a target port we may want to wait until some
// condition is met (e.g. until target listener is ready).
const checkForBrowserCompat = (log: Logger) => async (request: Request, h: ResponseToolkit) => {
if (!request.headers['user-agent'] || process.env.BROWSERSLIST_ENV === 'production') {
return h.continue;
}
const matches = BrowserslistUserAgent.matchesUA(request.headers['user-agent'], {
env: 'dev',
allowHigherVersions: true,
ignoreMinor: true,
ignorePath: true,
});
if (!matches) {
log.warn(`
Request with user-agent [${request.headers['user-agent']}]
seems like it is coming from a browser that is not supported by the dev browserslist.
Please run Kibana with the environment variable BROWSERSLIST_ENV=production to enable
support for all production browsers (like IE).
`);
}
return h.continue;
};
export class BasePathProxyServer {
private server?: Server;
private httpsAgent?: HttpsAgent;
@ -108,7 +140,7 @@ export class BasePathProxyServer {
}
private setupRoutes({
blockUntil,
delayUntil,
shouldRedirectFromOldBasePath,
}: Readonly<BasePathProxyServerOptions>) {
if (this.server === undefined) {
@ -122,6 +154,9 @@ export class BasePathProxyServer {
},
method: 'GET',
path: '/',
options: {
pre: [checkForBrowserCompat(this.log)],
},
});
this.server.route({
@ -138,11 +173,14 @@ export class BasePathProxyServer {
method: '*',
options: {
pre: [
checkForBrowserCompat(this.log),
// Before we proxy request to a target port we may want to wait until some
// condition is met (e.g. until target listener is ready).
async (request, responseToolkit) => {
apm.setTransactionName(`${request.method.toUpperCase()} /{basePath}/{kbnPath*}`);
await blockUntil();
await delayUntil()
.pipe(take(1))
.toPromise();
return responseToolkit.continue;
},
],
@ -172,10 +210,13 @@ export class BasePathProxyServer {
method: '*',
options: {
pre: [
checkForBrowserCompat(this.log),
// Before we proxy request to a target port we may want to wait until some
// condition is met (e.g. until target listener is ready).
async (request, responseToolkit) => {
await blockUntil();
await delayUntil()
.pipe(take(1))
.toPromise();
return responseToolkit.continue;
},
],


@ -88,7 +88,7 @@ beforeEach(() => {
contracts: new Map([['plugin-id', 'plugin-value']]),
uiPlugins: {
public: new Map([['plugin-id', {} as DiscoveredPlugin]]),
internal: new Map([['plugin-id', { entryPointPath: 'path/to/plugin/public' }]]),
internal: new Map([['plugin-id', { publicTargetDir: 'path/to/target/public' }]]),
browserConfigs: new Map(),
},
},


@ -22,6 +22,7 @@ import { mockDiscover, mockPackage } from './plugins_service.test.mocks';
import { resolve, join } from 'path';
import { BehaviorSubject, from } from 'rxjs';
import { schema } from '@kbn/config-schema';
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
import { ConfigPath, ConfigService, Env } from '../config';
import { rawConfigServiceMock } from '../config/raw_config_service.mock';
@ -48,6 +49,8 @@ let mockPluginSystem: jest.Mocked<PluginsSystem>;
const setupDeps = coreMock.createInternalSetup();
const logger = loggingServiceMock.create();
expect.addSnapshotSerializer(createAbsolutePathSerializer());
['path-1', 'path-2', 'path-3', 'path-4', 'path-5'].forEach(path => {
jest.doMock(join(path, 'server'), () => ({}), {
virtual: true,
@ -540,10 +543,10 @@ describe('PluginsService', () => {
expect(uiPlugins.internal).toMatchInlineSnapshot(`
Map {
"plugin-1" => Object {
"entryPointPath": "path-1/public",
"publicTargetDir": <absolute path>/path-1/target/public,
},
"plugin-2" => Object {
"entryPointPath": "path-2/public",
"publicTargetDir": <absolute path>/path-2/target/public,
},
}
`);
