Increase prettier line width to 100 (#20535)
* Increase prettier line width to 100
* Fix packages JS prettier
* Change style guide to 100 width
* Fix line-width in latest master changes
parent 433f8a919c
commit cb5ee01c6a
126 changed files with 380 additions and 1272 deletions
.eslintrc.js (20 changes)

@@ -3,10 +3,7 @@ const { readdirSync } = require('fs');
 const dedent = require('dedent');

 module.exports = {
-  extends: [
-    '@elastic/eslint-config-kibana',
-    '@elastic/eslint-config-kibana/jest',
-  ],
+  extends: ['@elastic/eslint-config-kibana', '@elastic/eslint-config-kibana/jest'],

   settings: {
     'import/resolver': {
@@ -82,15 +79,12 @@ module.exports = {
       forceNode: false,
       rootPackageName: 'kibana',
       kibanaPath: '.',
-      pluginMap: readdirSync(resolve(__dirname, 'x-pack/plugins')).reduce(
-        (acc, name) => {
-          if (!name.startsWith('_')) {
-            acc[name] = `x-pack/plugins/${name}`;
-          }
-          return acc;
-        },
-        {}
-      ),
+      pluginMap: readdirSync(resolve(__dirname, 'x-pack/plugins')).reduce((acc, name) => {
+        if (!name.startsWith('_')) {
+          acc[name] = `x-pack/plugins/${name}`;
+        }
+        return acc;
+      }, {}),
     },
   },
 },
@@ -1,4 +1,5 @@
 {
   "singleQuote": true,
-  "trailingComma": "es5"
+  "trailingComma": "es5",
+  "printWidth": 100
 }
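For reference, a minimal sketch of what the bumped printWidth means when formatting through prettier's Node API (assuming prettier 1.x/2.x, where format() is synchronous; the parser name differs across releases):

    const prettier = require('prettier');

    // A line that is longer than 80 but shorter than 100 characters.
    const source =
      "const list = ['@elastic/eslint-config-kibana', '@elastic/eslint-config-kibana/jest'];\n";

    const formatted = prettier.format(source, {
      parser: 'babylon', // prettier 1.x name; later releases call it 'babel'
      singleQuote: true,
      trailingComma: 'es5',
      printWidth: 100, // the new limit: this line now stays on one line
    });

    console.log(formatted);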
@@ -33,10 +33,7 @@ const isValidDate = d => isDate(d) && !isNaN(d.valueOf());
  * will be done using this (and its locale settings) instead of the one bundled
  * with this library.
  */
-function parse(
-  text,
-  { roundUp = false, momentInstance = moment, forceNow } = {}
-) {
+function parse(text, { roundUp = false, momentInstance = moment, forceNow } = {}) {
   if (!text) return undefined;
   if (momentInstance.isMoment(text)) return text;
   if (isDate(text)) return momentInstance(text);
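The reflowed signature above is the datemath package's public parse API; a brief usage sketch (the require path is an assumption):

    const dateMath = require('@elastic/datemath'); // entry point assumed

    // Pin "now" so relative expressions resolve deterministically (handy in tests).
    const start = dateMath.parse('now-15m', {
      forceNow: new Date('2018-06-01T00:00:00.000Z'),
    });

    // roundUp rounds to the end of the unit, as a range end usually should be.
    const end = dateMath.parse('now/d', { roundUp: true });

    console.log(start.toISOString(), end.toISOString());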
@@ -54,12 +54,9 @@ describe('dateMath', function() {
     expect(dateMath.parse('now&1d')).to.be(undefined);
   });

-  it(
-    'should return undefined if I pass a unit besides' + spans.toString(),
-    function() {
-      expect(dateMath.parse('now+5f')).to.be(undefined);
-    }
-  );
+  it('should return undefined if I pass a unit besides' + spans.toString(), function() {
+    expect(dateMath.parse('now+5f')).to.be(undefined);
+  });

   it('should return undefined if rounding unit is not 1', function() {
     expect(dateMath.parse('now/2y')).to.be(undefined);
@@ -74,21 +71,16 @@ describe('dateMath', function() {

   describe('forceNow', function() {
     it('should throw an Error if passed a string', function() {
-      const fn = () =>
-        dateMath.parse('now', { forceNow: '2000-01-01T00:00:00.000Z' });
+      const fn = () => dateMath.parse('now', { forceNow: '2000-01-01T00:00:00.000Z' });
       expect(fn).to.throwError();
     });

     it('should throw an Error if passed a moment', function() {
-      expect(() =>
-        dateMath.parse('now', { forceNow: moment() })
-      ).to.throwError();
+      expect(() => dateMath.parse('now', { forceNow: moment() })).to.throwError();
     });

     it('should throw an Error if passed an invalid date', function() {
-      expect(() =>
-        dateMath.parse('now', { forceNow: new Date('foobar') })
-      ).to.throwError();
+      expect(() => dateMath.parse('now', { forceNow: new Date('foobar') })).to.throwError();
     });
   });
 });
@@ -128,9 +120,7 @@ describe('dateMath', function() {
     });

     it('should use the forceNow parameter when parsing now', function() {
-      expect(
-        dateMath.parse('now', { forceNow: anchoredDate }).valueOf()
-      ).to.eql(unix);
+      expect(dateMath.parse('now', { forceNow: anchoredDate }).valueOf()).to.eql(unix);
     });
   });

@@ -164,9 +154,7 @@ describe('dateMath', function() {
       });

       it('should return ' + len + span + ' before forceNow', function() {
-        const parsed = dateMath
-          .parse(nowEx, { forceNow: anchoredDate })
-          .valueOf();
+        const parsed = dateMath.parse(nowEx, { forceNow: anchoredDate }).valueOf();
         expect(parsed).to.eql(anchored.subtract(len, span).valueOf());
       });
     });
@@ -193,9 +181,7 @@ describe('dateMath', function() {
       const thenEx = `${anchor}||+${len}${span}`;

       it('should return ' + len + span + ' from now', function() {
-        expect(dateMath.parse(nowEx).format(format)).to.eql(
-          now.add(len, span).format(format)
-        );
+        expect(dateMath.parse(nowEx).format(format)).to.eql(now.add(len, span).format(format));
       });

       it('should return ' + len + span + ' after ' + anchor, function() {
@@ -205,9 +191,9 @@ describe('dateMath', function() {
       });

       it('should return ' + len + span + ' after forceNow', function() {
-        expect(
-          dateMath.parse(nowEx, { forceNow: anchoredDate }).valueOf()
-        ).to.eql(anchored.add(len, span).valueOf());
+        expect(dateMath.parse(nowEx, { forceNow: anchoredDate }).valueOf()).to.eql(
+          anchored.add(len, span).valueOf()
+        );
       });
     });
   });
@@ -235,22 +221,20 @@ describe('dateMath', function() {
       });

       it(`should round now to the beginning of forceNow's ${span}`, function() {
-        expect(
-          dateMath.parse('now/' + span, { forceNow: anchoredDate }).valueOf()
-        ).to.eql(anchored.startOf(span).valueOf());
+        expect(dateMath.parse('now/' + span, { forceNow: anchoredDate }).valueOf()).to.eql(
+          anchored.startOf(span).valueOf()
+        );
       });

       it(`should round now to the end of the ${span}`, function() {
-        expect(
-          dateMath.parse('now/' + span, { roundUp: true }).format(format)
-        ).to.eql(now.endOf(span).format(format));
+        expect(dateMath.parse('now/' + span, { roundUp: true }).format(format)).to.eql(
+          now.endOf(span).format(format)
+        );
       });

       it(`should round now to the end of forceNow's ${span}`, function() {
         expect(
-          dateMath
-            .parse('now/' + span, { roundUp: true, forceNow: anchoredDate })
-            .valueOf()
+          dateMath.parse('now/' + span, { roundUp: true, forceNow: anchoredDate }).valueOf()
         ).to.eql(anchored.endOf(span).valueOf());
       });
     });
@@ -336,9 +320,7 @@ describe('dateMath', function() {
     });

     it('should round relative to forceNow', function() {
-      const val = dateMath
-        .parse('now-0s/s', { forceNow: anchoredDate })
-        .valueOf();
+      const val = dateMath.parse('now-0s/s', { forceNow: anchoredDate }).valueOf();
       expect(val).to.eql(anchored.startOf('s').valueOf());
     });

@@ -402,29 +384,11 @@ describe('dateMath', function() {

   describe('units', function() {
     it('should have units descending for unitsDesc', function() {
-      expect(dateMath.unitsDesc).to.eql([
-        'y',
-        'M',
-        'w',
-        'd',
-        'h',
-        'm',
-        's',
-        'ms',
-      ]);
+      expect(dateMath.unitsDesc).to.eql(['y', 'M', 'w', 'd', 'h', 'm', 's', 'ms']);
     });

     it('should have units ascending for unitsAsc', function() {
-      expect(dateMath.unitsAsc).to.eql([
-        'ms',
-        's',
-        'm',
-        'h',
-        'd',
-        'w',
-        'M',
-        'y',
-      ]);
+      expect(dateMath.unitsAsc).to.eql(['ms', 's', 'm', 'h', 'd', 'w', 'M', 'y']);
     });
   });
 });
@@ -18,15 +18,7 @@
  */

 import * as Rx from 'rxjs';
-import {
-  scan,
-  takeUntil,
-  share,
-  materialize,
-  mergeMap,
-  last,
-  catchError,
-} from 'rxjs/operators';
+import { scan, takeUntil, share, materialize, mergeMap, last, catchError } from 'rxjs/operators';

 const SEP = /\r?\n/;

@@ -32,9 +32,6 @@ export function observeReadable(readable) {
   return Rx.race(
     Rx.fromEvent(readable, 'end').pipe(first(), ignoreElements()),

-    Rx.fromEvent(readable, 'error').pipe(
-      first(),
-      map(err => Rx.throwError(err))
-    )
+    Rx.fromEvent(readable, 'error').pipe(first(), map(err => Rx.throwError(err)))
   );
 }
@@ -21,14 +21,7 @@ import execa from 'execa';
 import { statSync } from 'fs';

 import * as Rx from 'rxjs';
-import {
-  tap,
-  share,
-  take,
-  mergeMap,
-  map,
-  ignoreElements,
-} from 'rxjs/operators';
+import { tap, share, take, mergeMap, map, ignoreElements } from 'rxjs/operators';
 import { gray } from 'chalk';

 import treeKill from 'tree-kill';
@@ -46,9 +39,7 @@ async function withTimeout(attempt, ms, onTimeout) {
   try {
     await Promise.race([
       attempt(),
-      new Promise((resolve, reject) =>
-        setTimeout(() => reject(TIMEOUT), STOP_TIMEOUT)
-      ),
+      new Promise((resolve, reject) => setTimeout(() => reject(TIMEOUT), STOP_TIMEOUT)),
     ]);
   } catch (error) {
     if (error === TIMEOUT) {
@@ -90,10 +81,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
   return new class Proc {
     name = name;

-    lines$ = Rx.merge(
-      observeLines(childProcess.stdout),
-      observeLines(childProcess.stderr)
-    ).pipe(
+    lines$ = Rx.merge(observeLines(childProcess.stdout), observeLines(childProcess.stderr)).pipe(
       tap(line => log.write(` ${gray('proc')} [${gray(name)}] ${line}`)),
       share()
     );
@@ -121,10 +109,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
       return Rx.race(exit$, error$);
     }).pipe(share());

-    _outcomePromise = Rx.merge(
-      this.lines$.pipe(ignoreElements()),
-      this.outcome$
-    ).toPromise();
+    _outcomePromise = Rx.merge(this.lines$.pipe(ignoreElements()), this.outcome$).toPromise();

     getOutcomePromise() {
       return this._outcomePromise;
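The withTimeout hunk above races the attempted work against a timer that rejects with a sentinel value; a self-contained sketch of the same pattern (names are illustrative, not the module's exports):

    // Reject with a unique sentinel so real errors can be told apart from timeouts.
    const TIMEOUT = Symbol('timeout');

    async function withTimeout(attempt, ms, onTimeout) {
      try {
        await Promise.race([
          attempt(),
          new Promise((resolve, reject) => setTimeout(() => reject(TIMEOUT), ms)),
        ]);
      } catch (error) {
        if (error !== TIMEOUT) throw error; // genuine failures propagate unchanged
        await onTimeout(); // only the sentinel triggers the timeout handler
      }
    }

    // Example: give a slow task one second before giving up.
    withTimeout(
      () => new Promise(resolve => setTimeout(resolve, 5000)),
      1000,
      async () => console.log('timed out')
    );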
@@ -97,9 +97,7 @@ export class ProcRunner {
           first(),
           catchError(err => {
             if (err.name !== 'EmptyError') {
-              throw createCliError(
-                `[${name}] exited without matching pattern: ${wait}`
-              );
+              throw createCliError(`[${name}] exited without matching pattern: ${wait}`);
             } else {
               throw err;
             }
@@ -191,12 +189,7 @@ export class ProcRunner {
     proc.outcome$.subscribe({
       next: code => {
         const duration = moment.duration(Date.now() - startMs);
-        this._log.info(
-          '[%s] exited with %s after %s',
-          name,
-          code,
-          duration.humanize()
-        );
+        this._log.info('[%s] exited with %s after %s', name, code, duration.humanize());
       },
       complete: () => {
         remove();
@@ -76,10 +76,7 @@ export async function createPromiseFromStreams(streams) {

   // wait (and rethrow) the first error, or for the last stream
   // to both finish writing and providing values to read
-  await Promise.race([
-    anyStreamFailure,
-    Promise.all([lastFinishedWriting, lastFinishedReading]),
-  ]);
+  await Promise.race([anyStreamFailure, Promise.all([lastFinishedWriting, lastFinishedReading])]);

   // return the final chunk read from the last stream
   return await lastFinishedReading;
@@ -58,10 +58,7 @@ describe('utils: createToolingLog(logLevel, output)', () => {
       log.info('Baz');
       log.end();

-      const output = await createPromiseFromStreams([
-        log,
-        createConcatStream(''),
-      ]);
+      const output = await createPromiseFromStreams([log, createConcatStream('')]);

       expect(output).to.contain('Foo');
       expect(output).to.contain('Bar');
@@ -31,8 +31,7 @@ export function parseLogLevel(name) {
   const i = LEVELS.indexOf(name);

   if (i === -1) {
-    const msg =
-      `Invalid log level "${name}" ` + `(expected one of ${LEVELS.join(',')})`;
+    const msg = `Invalid log level "${name}" ` + `(expected one of ${LEVELS.join(',')})`;
     throw new Error(msg);
   }

@@ -93,8 +93,7 @@ export function createToolingLog(initialLogLevelName = 'silent') {
         const subLineIndent = i === 0 ? '' : ' ';
         const indent = !indentString
           ? ''
-          : indentString.slice(0, -1) +
-            (i === 0 && line[0] === '-' ? '└' : '│');
+          : indentString.slice(0, -1) + (i === 0 && line[0] === '-' ? '└' : '│');
         super.write(`${indent}${subLineIndent}${line}\n`);
       });
     }
@@ -65,9 +65,7 @@ exports.run = async (defaults = {}) => {
   const command = commands[commandName];

   if (command === undefined) {
-    log.error(
-      chalk.red(`[${commandName}] is not a valid command, see 'es --help'`)
-    );
+    log.error(chalk.red(`[${commandName}] is not a valid command, see 'es --help'`));
     process.exitCode = 1;
     return;
   }
@@ -24,11 +24,7 @@ const { Cluster } = require('../cluster');
 exports.description = 'Downloads and run from a nightly snapshot';

 exports.help = (defaults = {}) => {
-  const {
-    license = 'basic',
-    password = 'changeme',
-    'base-path': basePath,
-  } = defaults;
+  const { license = 'basic', password = 'changeme', 'base-path': basePath } = defaults;

   return dedent`
     Options:
@@ -24,11 +24,7 @@ const { Cluster } = require('../cluster');
 exports.description = 'Build and run from source';

 exports.help = (defaults = {}) => {
-  const {
-    license = 'basic',
-    password = 'changeme',
-    'base-path': basePath,
-  } = defaults;
+  const { license = 'basic', password = 'changeme', 'base-path': basePath } = defaults;

   return dedent`
     Options:
@@ -186,9 +186,7 @@ exports.Cluster = class Cluster {
       lines.forEach(line => this._log.info(line.formattedMessage));
     });

-    this._process.stderr.on('data', data =>
-      this._log.error(chalk.red(data.toString()))
-    );
+    this._process.stderr.on('data', data => this._log.error(chalk.red(data.toString())));

     this._outcome = new Promise((resolve, reject) => {
       this._process.once('exit', code => {
@@ -53,11 +53,7 @@ exports.installArchive = async function installArchive(archive, options = {}) {
   log.info('extracted to %s', chalk.bold(installPath));

   if (license !== 'oss') {
-    await appendToConfig(
-      installPath,
-      'xpack.license.self_generated.type',
-      license
-    );
+    await appendToConfig(installPath, 'xpack.license.self_generated.type', license);

     await appendToConfig(installPath, 'xpack.security.enabled', 'true');
     await configureKeystore(installPath, password, log);
@@ -94,11 +90,7 @@ function rmrfSync(path) {
  * @param {String} value
  */
 async function appendToConfig(installPath, key, value) {
-  fs.appendFileSync(
-    path.resolve(installPath, ES_CONFIG),
-    `${key}: ${value}\n`,
-    'utf8'
-  );
+  fs.appendFileSync(path.resolve(installPath, ES_CONFIG), `${key}: ${value}\n`, 'utf8');
 }

 /**
@@ -84,10 +84,7 @@ function downloadFile(url, dest, log) {
       res =>
         new Promise((resolve, reject) => {
           if (res.status === 304) {
-            log.info(
-              'etags match, using cache from %s',
-              chalk.bold(cacheMeta.ts)
-            );
+            log.info('etags match, using cache from %s', chalk.bold(cacheMeta.ts));
             return resolve();
           }

@@ -118,9 +115,7 @@ function downloadFile(url, dest, log) {

 function getFilename(license, version) {
   const extension = os.platform().startsWith('win') ? 'zip' : 'tar.gz';
-  const basename = `elasticsearch${
-    license === 'oss' ? '-oss-' : '-'
-  }${version}`;
+  const basename = `elasticsearch${license === 'oss' ? '-oss-' : '-'}${version}`;

   return `${basename}-SNAPSHOT.${extension}`;
 }
@@ -27,15 +27,9 @@ const simpleGit = require('simple-git/promise');
 const { installArchive } = require('./archive');
 const { createCliError } = require('../errors');
 const { findMostRecentlyChanged, log: defaultLog, cache } = require('../utils');
-const {
-  GRADLE_BIN,
-  ES_ARCHIVE_PATTERN,
-  ES_OSS_ARCHIVE_PATTERN,
-  BASE_PATH,
-} = require('../paths');
+const { GRADLE_BIN, ES_ARCHIVE_PATTERN, ES_OSS_ARCHIVE_PATTERN, BASE_PATH } = require('../paths');

-const onceEvent = (emitter, event) =>
-  new Promise(resolve => emitter.once(event, resolve));
+const onceEvent = (emitter, event) => new Promise(resolve => emitter.once(event, resolve));

 /**
  * Installs ES from source
@@ -65,15 +59,10 @@ exports.installSource = async function installSource({

   const cacheMeta = cache.readMeta(dest);
   const isCached = cacheMeta.exists && cacheMeta.etag === metadata.etag;
-  const archive = isCached
-    ? dest
-    : await createSnapshot({ sourcePath, log, license });
+  const archive = isCached ? dest : await createSnapshot({ sourcePath, log, license });

   if (isCached) {
-    log.info(
-      'source path unchanged since %s, using cache',
-      chalk.bold(cacheMeta.ts)
-    );
+    log.info('source path unchanged since %s, using cache', chalk.bold(cacheMeta.ts));
   } else {
     cache.writeMeta(dest, metadata);
     fs.copyFileSync(archive, dest);
@@ -168,11 +157,8 @@ async function createSnapshot({ license, sourcePath, log = defaultLog }) {
     throw createCliError('unable to build ES');
   }

-  const archivePattern =
-    license === 'oss' ? ES_OSS_ARCHIVE_PATTERN : ES_ARCHIVE_PATTERN;
-  const esTarballPath = findMostRecentlyChanged(
-    path.resolve(sourcePath, archivePattern)
-  );
+  const archivePattern = license === 'oss' ? ES_OSS_ARCHIVE_PATTERN : ES_ARCHIVE_PATTERN;
+  const esTarballPath = findMostRecentlyChanged(path.resolve(sourcePath, archivePattern));

   if (!esTarballPath) {
     throw createCliError('could not locate ES distribution');
@@ -20,11 +20,7 @@
 const { createToolingLog } = require('@kbn/dev-utils');
 const execa = require('execa');
 const { Cluster } = require('../cluster');
-const {
-  installSource,
-  installSnapshot,
-  installArchive,
-} = require('../install');
+const { installSource, installSnapshot, installArchive } = require('../install');

 jest.mock('../install', () => ({
   installSource: jest.fn(),
@@ -55,9 +51,7 @@ async function ensureResolve(promise) {
   return await Promise.race([
     promise,
     sleep(100).then(() => {
-      throw new Error(
-        'promise was supposed to resolve with installSource() resolution'
-      );
+      throw new Error('promise was supposed to resolve with installSource() resolution');
     }),
   ]);
 }
@@ -203,33 +197,25 @@ describe('#start(installPath)', () => {
   it('rejects when bin/elasticsearch exists with 0 before starting', async () => {
     mockEsBin({ exitCode: 0, start: false });

-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited without starting'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited without starting');
   });

   it('rejects when bin/elasticsearch exists with 143 before starting', async () => {
     mockEsBin({ exitCode: 143, start: false });

-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited without starting'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited without starting');
   });

   it('rejects when bin/elasticsearch exists with 130 before starting', async () => {
     mockEsBin({ exitCode: 130, start: false });

-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited without starting'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited without starting');
   });

   it('rejects when bin/elasticsearch exists with 1 before starting', async () => {
     mockEsBin({ exitCode: 1, start: false });

-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited with code 1'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited with code 1');
   });

   it('resolves when bin/elasticsearch logs "started"', async () => {
@@ -243,9 +229,7 @@ describe('#start(installPath)', () => {

     const cluster = new Cluster(log);
     await cluster.start();
-    await expect(cluster.start()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.start()).rejects.toThrowError('ES has already been started');
   });

   it('rejects if #run() was called previously', async () => {
@@ -253,9 +237,7 @@ describe('#start(installPath)', () => {

     const cluster = new Cluster(log);
     await cluster.run();
-    await expect(cluster.start()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.start()).rejects.toThrowError('ES has already been started');
   });
 });

@@ -281,9 +263,7 @@ describe('#run()', () => {
   it('rejects when bin/elasticsearch exists with 1', async () => {
     mockEsBin({ exitCode: 1 });

-    await expect(new Cluster(log).run()).rejects.toThrowError(
-      'ES exited with code 1'
-    );
+    await expect(new Cluster(log).run()).rejects.toThrowError('ES exited with code 1');
   });

   it('rejects if #start() was called previously', async () => {
@@ -291,9 +271,7 @@ describe('#run()', () => {

     const cluster = new Cluster(log);
     await cluster.start();
-    await expect(cluster.run()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.run()).rejects.toThrowError('ES has already been started');
   });

   it('rejects if #run() was called previously', async () => {
@@ -301,18 +279,14 @@ describe('#run()', () => {

     const cluster = new Cluster(log);
     await cluster.run();
-    await expect(cluster.run()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.run()).rejects.toThrowError('ES has already been started');
   });
 });

 describe('#stop()', () => {
   it('rejects if #run() or #start() was not called', async () => {
     const cluster = new Cluster(log);
-    await expect(cluster.stop()).rejects.toThrowError(
-      'ES has not been started'
-    );
+    await expect(cluster.stop()).rejects.toThrowError('ES has not been started');
   });

   it('resolves when ES exits with 0', async () => {
@@ -25,12 +25,8 @@ const path = require('path');
 beforeEach(() => {
   mockFs({
     '/data': {
-      'snapshot.zip': fs.readFileSync(
-        path.resolve(__dirname, '__fixtures__/snapshot.zip')
-      ),
-      'snapshot.tar.gz': fs.readFileSync(
-        path.resolve(__dirname, '__fixtures__/snapshot.tar.gz')
-      ),
+      'snapshot.zip': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.zip')),
+      'snapshot.tar.gz': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.tar.gz')),
     },
     '/.es': {},
   });
@@ -29,11 +29,7 @@ const mkdirp = require('mkdirp');
  * @param {Array} config
  * @param {String} dest
  */
-exports.extractConfigFiles = function extractConfigFiles(
-  config,
-  dest,
-  options = {}
-) {
+exports.extractConfigFiles = function extractConfigFiles(config, dest, options = {}) {
   const originalConfig = typeof config === 'string' ? [config] : config;
   const localConfig = [];

@@ -54,10 +54,7 @@ test('ignores non-paths', () => {
 });

 test('ignores directories', () => {
-  const config = extractConfigFiles(
-    ['path=/data/foo.yml', 'foo.bar=/data/bar'],
-    '/es'
-  );
+  const config = extractConfigFiles(['path=/data/foo.yml', 'foo.bar=/data/bar'], '/es');

   expect(config).toEqual(['path=foo.yml', 'foo.bar=/data/bar']);
 });
@@ -23,9 +23,7 @@ const LRU = require('lru-cache');

 const DIR = Symbol('dir');
 const FILE = Symbol('file');
-const cache = process.env.KIBANA_RESOLVER_HARD_CACHE
-  ? new Map()
-  : new LRU({ maxAge: 1000 });
+const cache = process.env.KIBANA_RESOLVER_HARD_CACHE ? new Map() : new LRU({ maxAge: 1000 });

 function getPathType(path) {
   const cached = cache.get(path);
@@ -27,12 +27,8 @@ function getConfig(config) {
     projectRoot: true,
   };

-  if (!config || !config['@elastic/eslint-import-resolver-kibana'])
-    return defaults;
-  return Object.assign(
-    defaults,
-    config['@elastic/eslint-import-resolver-kibana']
-  );
+  if (!config || !config['@elastic/eslint-import-resolver-kibana']) return defaults;
+  return Object.assign(defaults, config['@elastic/eslint-import-resolver-kibana']);
 }

 function getRootPackageDir(dirRoot, dir, rootPackageName) {
@@ -54,8 +50,7 @@ function getRootPackageDir(dirRoot, dir, rootPackageName) {
     // recurse until a matching package.json is found
     return getRootPackageDir(dirRoot, dirname(dir), rootPackageName);
   } catch (e) {
-    if (e.code === 'ENOENT')
-      return getRootPackageDir(dirRoot, dirname(dir), rootPackageName);
+    if (e.code === 'ENOENT') return getRootPackageDir(dirRoot, dirname(dir), rootPackageName);
     throw e;
   }
 }
@@ -36,9 +36,7 @@ exports.getWebpackConfig = function(kibanaPath, projectRoot, config) {

     // Dev defaults for test bundle https://github.com/elastic/kibana/blob/6998f074542e8c7b32955db159d15661aca253d7/src/core_plugins/tests_bundle/index.js#L73-L78
     ng_mock$: fromKibana('src/test_utils/public/ng_mock'),
-    'angular-mocks$': fromKibana(
-      'src/core_plugins/tests_bundle/webpackShims/angular-mocks.js'
-    ),
+    'angular-mocks$': fromKibana('src/core_plugins/tests_bundle/webpackShims/angular-mocks.js'),
     fixtures: fromKibana('src/fixtures'),
     test_utils: fromKibana('src/test_utils/public'),
   };
@@ -24,9 +24,7 @@ const LRU = require('lru-cache');

 const { isDirectory } = require('./get_path_type');

-const cache = process.env.KIBANA_RESOLVER_HARD_CACHE
-  ? new Map()
-  : new LRU({ max: 1000 });
+const cache = process.env.KIBANA_RESOLVER_HARD_CACHE ? new Map() : new LRU({ max: 1000 });

 function readShimNames(shimDirectory) {
   if (!isDirectory(shimDirectory)) {
@@ -47,12 +45,9 @@ function findRelativeWebpackShims(directory) {
   const ownShims = readShimNames(join(directory, 'webpackShims'));

   const parent = dirname(directory);
-  const parentShims =
-    parent !== directory ? findRelativeWebpackShims(parent) : [];
+  const parentShims = parent !== directory ? findRelativeWebpackShims(parent) : [];

-  const allShims = !ownShims.length
-    ? parentShims
-    : ownShims.concat(parentShims);
+  const allShims = !ownShims.length ? parentShims : ownShims.concat(parentShims);

   cache.set(directory, allShims);
   return allShims;
@@ -27,12 +27,7 @@ export const unique = (arr = []) => [...new Set(arr)];

 const merge = (a, b) =>
   unique([...Object.keys(a), ...Object.keys(b)]).reduce((acc, key) => {
-    if (
-      isObject(a[key]) &&
-      isObject(b[key]) &&
-      !Array.isArray(a[key]) &&
-      !Array.isArray(b[key])
-    ) {
+    if (isObject(a[key]) && isObject(b[key]) && !Array.isArray(a[key]) && !Array.isArray(b[key])) {
       return {
         ...acc,
         [key]: merge(a[key], b[key]),
@@ -70,9 +70,7 @@ function normalizeLocale(locale) {
  */
 export function addMessages(newMessages = {}, locale = newMessages.locale) {
   if (!locale || !isString(locale)) {
-    throw new Error(
-      '[I18n] A `locale` must be a non-empty string to add messages.'
-    );
+    throw new Error('[I18n] A `locale` must be a non-empty string to add messages.');
   }

   const normalizedLocale = normalizeLocale(locale);
@@ -178,17 +176,13 @@ export function getRegisteredLocales() {
  */
 export function translate(id, { values = {}, defaultMessage = '' } = {}) {
   if (!id || !isString(id)) {
-    throw new Error(
-      '[I18n] An `id` must be a non-empty string to translate a message.'
-    );
+    throw new Error('[I18n] An `id` must be a non-empty string to translate a message.');
   }

   const message = getMessageById(id);

   if (!message && !defaultMessage) {
-    throw new Error(
-      `[I18n] Cannot format message: "${id}". Default message must be provided.`
-    );
+    throw new Error(`[I18n] Cannot format message: "${id}". Default message must be provided.`);
   }

   if (!hasValues(values)) {
@@ -208,17 +202,11 @@ export function translate(id, { values = {}, defaultMessage = '' } = {}) {
   }

   try {
-    const msg = getMessageFormat(
-      defaultMessage,
-      getDefaultLocale(),
-      getFormats()
-    );
+    const msg = getMessageFormat(defaultMessage, getDefaultLocale(), getFormats());

     return msg.format(values);
   } catch (e) {
-    throw new Error(
-      `[I18n] Error formatting the default message for: "${id}".\n${e}`
-    );
+    throw new Error(`[I18n] Error formatting the default message for: "${id}".\n${e}`);
   }
 }

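The addMessages/translate signatures reflowed above form the i18n package's message API; a hedged usage sketch (the import path is an assumption, and ICU-style placeholders are inferred from the getMessageFormat call):

    const { addMessages, translate } = require('@kbn/i18n'); // path assumed

    // Register messages; the locale can ride along on the messages object itself.
    addMessages({ locale: 'en', 'demo.greeting': 'Hello, {name}!' });

    // Translate by id; defaultMessage is the fallback when the id is unregistered.
    const text = translate('demo.greeting', {
      values: { name: 'Kibana' },
      defaultMessage: 'Hello, {name}!',
    });

    console.log(text); // "Hello, Kibana!"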
@@ -101,10 +101,7 @@ module.exports = function({ name }) {
       cwd: KBN_DIR,
       stdio: 'inherit',
     }).then(() => {
-      const dir = relative(
-        process.cwd(),
-        resolve(KBN_DIR, `../kibana-extra`, snakeCase(name))
-      );
+      const dir = relative(process.cwd(), resolve(KBN_DIR, `../kibana-extra`, snakeCase(name)));

       log.success(chalk`🎉

@@ -19,9 +19,7 @@
  * under the License.
  */

-const nodeMajorVersion = parseFloat(
-  process.version.replace(/^v(\d+)\..+/, '$1')
-);
+const nodeMajorVersion = parseFloat(process.version.replace(/^v(\d+)\..+/, '$1'));
 if (nodeMajorVersion < 6) {
   console.error('FATAL: kibana-plugin-helpers requires node 6+');
   process.exit(1);
@@ -42,19 +42,13 @@ program
   .command('build [files...]')
   .description('Build a distributable archive')
   .on('--help', docs('build'))
-  .option(
-    '--skip-archive',
-    "Don't create the zip file, leave the build path alone"
-  )
+  .option('--skip-archive', "Don't create the zip file, leave the build path alone")
   .option(
     '-d, --build-destination <path>',
    'Target path for the build output, absolute or relative to the plugin root'
   )
   .option('-b, --build-version <version>', 'Version for the build output')
-  .option(
-    '-k, --kibana-version <version>',
-    'Kibana version for the build output'
-  )
+  .option('-k, --kibana-version <version>', 'Kibana version for the build output')
   .action(
     createCommanderAction('build', (command, files) => ({
       buildDestination: command.buildDestination,
@@ -75,10 +69,7 @@ program
   .command('test:browser')
   .description('Run the browser tests in a real web browser')
   .option('--dev', 'Enable dev mode, keeps the test server running')
-  .option(
-    '-p, --plugins <plugin-ids>',
-    "Manually specify which plugins' test bundles to run"
-  )
+  .option('-p, --plugins <plugin-ids>', "Manually specify which plugins' test bundles to run")
   .on('--help', docs('test/browser'))
   .action(
     createCommanderAction('testBrowser', command => ({
@@ -19,19 +19,14 @@

 const run = require('./run');

-module.exports = function createCommanderAction(
-  taskName,
-  getOptions = () => {}
-) {
+module.exports = function createCommanderAction(taskName, getOptions = () => {}) {
   return async (...args) => {
     try {
       // command is the last arg passed by commander, but we move it to the front of the list
       const command = args.pop();
       await run(taskName, getOptions(command, ...args));
     } catch (error) {
-      process.stderr.write(
-        `Task "${taskName}" failed:\n\n${error.stack || error.message}\n`
-      );
+      process.stderr.write(`Task "${taskName}" failed:\n\n${error.stack || error.message}\n`);
       process.exit(1);
     }
   };
@@ -20,10 +20,7 @@
 const resolve = require('path').resolve;
 const readFileSync = require('fs').readFileSync;

-const configFiles = [
-  '.kibana-plugin-helpers.json',
-  '.kibana-plugin-helpers.dev.json',
-];
+const configFiles = ['.kibana-plugin-helpers.json', '.kibana-plugin-helpers.dev.json'];
 const configCache = {};

 module.exports = function(root) {
@@ -62,8 +59,7 @@ module.exports = function(root) {

   // use resolve to ensure correct resolution of paths
   const { includePlugins } = config;
-  if (includePlugins)
-    config.includePlugins = includePlugins.map(path => resolve(root, path));
+  if (includePlugins) config.includePlugins = includePlugins.map(path => resolve(root, path));

   return config;
 };
@@ -26,10 +26,7 @@ function indent(txt, n) {
 }

 module.exports = function docs(name) {
-  const md = readFileSync(
-    resolve(__dirname, '../tasks', name, 'README.md'),
-    'utf8'
-  );
+  const md = readFileSync(resolve(__dirname, '../tasks', name, 'README.md'), 'utf8');

   return function() {
     console.log('\n  Docs:');
@@ -38,10 +38,7 @@ module.exports = function(root) {
   return Object.assign(
     {
       root: root,
-      kibanaRoot:
-        pkg.name === 'x-pack'
-          ? resolve(root, '..')
-          : resolve(root, '../../kibana'),
+      kibanaRoot: pkg.name === 'x-pack' ? resolve(root, '..') : resolve(root, '../../kibana'),
       serverTestPatterns: ['server/**/__tests__/**/*.js'],
       buildSourcePatterns: buildSourcePatterns,
       skipInstallDependencies: false,
@@ -42,9 +42,11 @@ function resolveKibanaPath(path) {
 }

 function readFtrConfigFile(log, path, settingOverrides) {
-  return require(resolveKibanaPath(
-    'src/functional_test_runner'
-  )).readConfigFile(log, path, settingOverrides);
+  return require(resolveKibanaPath('src/functional_test_runner')).readConfigFile(
+    log,
+    path,
+    settingOverrides
+  );
 }

 module.exports = {
@@ -27,8 +27,7 @@ const createPackage = require('./create_package');
 module.exports = function(plugin, run, options) {
   options = options || {};
   let buildVersion = plugin.version;
-  let kibanaVersion =
-    (plugin.pkg.kibana && plugin.pkg.kibana.version) || plugin.pkg.version;
+  let kibanaVersion = (plugin.pkg.kibana && plugin.pkg.kibana.version) || plugin.pkg.version;
   let buildFiles = plugin.buildSourcePatterns;
   let buildTarget = join(plugin.root, 'build');

@@ -38,30 +37,17 @@ module.exports = function(plugin, run, options) {
   }

   // allow options to override plugin info
-  if (options.buildDestination)
-    buildTarget = resolve(plugin.root, options.buildDestination);
+  if (options.buildDestination) buildTarget = resolve(plugin.root, options.buildDestination);
   if (options.buildVersion) buildVersion = options.buildVersion;
   if (options.kibanaVersion) kibanaVersion = options.kibanaVersion;

   let buildStep;
   if (kibanaVersion === 'kibana') {
     buildStep = askForKibanaVersion().then(function(customKibanaVersion) {
-      return createBuild(
-        plugin,
-        buildTarget,
-        buildVersion,
-        customKibanaVersion,
-        buildFiles
-      );
+      return createBuild(plugin, buildTarget, buildVersion, customKibanaVersion, buildFiles);
     });
   } else {
-    buildStep = createBuild(
-      plugin,
-      buildTarget,
-      buildVersion,
-      kibanaVersion,
-      buildFiles
-    );
+    buildStep = createBuild(plugin, buildTarget, buildVersion, kibanaVersion, buildFiles);
   }

   return buildStep.then(function() {
@@ -21,10 +21,7 @@ const resolve = require('path').resolve;
 const fs = require('fs');
 const del = require('del');

-const PLUGIN_FIXTURE = resolve(
-  __dirname,
-  '__fixtures__/build_action_test_plugin'
-);
+const PLUGIN_FIXTURE = resolve(__dirname, '__fixtures__/build_action_test_plugin');
 const PLUGIN_BUILD_DIR = resolve(PLUGIN_FIXTURE, 'build');
 const PLUGIN = require('../../lib/plugin_config')(PLUGIN_FIXTURE);
 const noop = () => {};
@@ -38,10 +35,7 @@ describe('creating build zip', () => {
   it('creates a zip in the build directory', async () => {
     await buildAction(PLUGIN);

-    const buildFile = resolve(
-      PLUGIN_BUILD_DIR,
-      PLUGIN.id + '-' + PLUGIN.version + '.zip'
-    );
+    const buildFile = resolve(PLUGIN_BUILD_DIR, PLUGIN.id + '-' + PLUGIN.version + '.zip');
     if (!fs.existsSync(buildFile)) {
       throw new Error('Build file not found: ' + buildFile);
     }
@@ -50,10 +44,7 @@ describe('creating build zip', () => {
   it('skips zip creation based on flag', async () => {
     await buildAction(PLUGIN, noop, { skipArchive: true });

-    const buildFile = resolve(
-      PLUGIN_BUILD_DIR,
-      PLUGIN.id + '-' + PLUGIN.version + '.zip'
-    );
+    const buildFile = resolve(PLUGIN_BUILD_DIR, PLUGIN.id + '-' + PLUGIN.version + '.zip');
     if (fs.existsSync(buildFile)) {
       throw new Error('Build file not found: ' + buildFile);
     }
@@ -109,12 +100,7 @@ describe('calling create_build', () => {

   it('uses only files passed in', async () => {
     const options = {
-      files: [
-        'index.js',
-        'LICENSE.txt',
-        'plugins/**/*',
-        '{server,public}/**/*',
-      ],
+      files: ['index.js', 'LICENSE.txt', 'plugins/**/*', '{server,public}/**/*'],
     };

     await buildAction(PLUGIN, noop, options);
@@ -136,8 +122,6 @@ describe('calling create_build', () => {
       throw new Error('foo bar');
     });

-    await expect(
-      buildAction(PLUGIN, noop)
-    ).rejects.toThrowErrorMatchingSnapshot();
+    await expect(buildAction(PLUGIN, noop)).rejects.toThrowErrorMatchingSnapshot();
   });
 });
@@ -65,13 +65,7 @@ function parseTsconfig(pluginSourcePath, configPath) {
   return config;
 }

-module.exports = function createBuild(
-  plugin,
-  buildTarget,
-  buildVersion,
-  kibanaVersion,
-  files
-) {
+module.exports = function createBuild(plugin, buildTarget, buildVersion, kibanaVersion, files) {
   const buildSource = plugin.root;
   const buildRoot = path.join(buildTarget, 'kibana', plugin.id);

@@ -90,14 +84,8 @@ module.exports = function createBuild(
       // put all files inside the correct directories
       .pipe(
         rename(function nestFileInDir(filePath) {
-          const nonRelativeDirname = filePath.dirname.replace(
-            /^(\.\.\/?)+/g,
-            ''
-          );
-          filePath.dirname = path.join(
-            relative(buildTarget, buildRoot),
-            nonRelativeDirname
-          );
+          const nonRelativeDirname = filePath.dirname.replace(/^(\.\.\/?)+/g, '');
+          filePath.dirname = path.join(relative(buildTarget, buildRoot), nonRelativeDirname);
         })
       )

@@ -112,13 +100,9 @@ module.exports = function createBuild(
       }

       // install packages in build
-      execa.sync(
-        winCmd('yarn'),
-        ['install', '--production', '--pure-lockfile'],
-        {
-          cwd: buildRoot,
-        }
-      );
+      execa.sync(winCmd('yarn'), ['install', '--production', '--pure-lockfile'], {
+        cwd: buildRoot,
+      });
     })
     .then(function() {
       if (!plugin.styleSheetToCompile) {
@@ -127,17 +111,11 @@ module.exports = function createBuild(

       const file = path.resolve(plugin.root, plugin.styleSheetToCompile);
       if (!existsSync(file)) {
-        throw new Error(
-          `Path provided for styleSheetToCompile does not exist: ${file}`
-        );
+        throw new Error(`Path provided for styleSheetToCompile does not exist: ${file}`);
       }

       const outputFileName = path.basename(file, path.extname(file)) + '.css';
-      const output = path.join(
-        buildRoot,
-        path.dirname(plugin.styleSheetToCompile),
-        outputFileName
-      );
+      const output = path.join(buildRoot, path.dirname(plugin.styleSheetToCompile), outputFileName);

       const rendered = sass.renderSync({ file, output });
       writeFileSync(output, rendered.css);
@@ -161,10 +139,7 @@ module.exports = function createBuild(
       const buildConfig = parseTsconfig(buildSource, buildConfigPath);

       if (buildConfig.extends) {
-        buildConfig.extends = path.join(
-          relative(buildRoot, buildSource),
-          buildConfig.extends
-        );
+        buildConfig.extends = path.join(relative(buildRoot, buildSource), buildConfig.extends);

         writeFileSync(buildConfigPath, JSON.stringify(buildConfig));
       }
@@ -22,10 +22,7 @@ const { readdirSync, existsSync, unlink } = require('fs');
 const del = require('del');
 const createBuild = require('./create_build');

-const PLUGIN_FIXTURE = resolve(
-  __dirname,
-  '__fixtures__/create_build_test_plugin'
-);
+const PLUGIN_FIXTURE = resolve(__dirname, '__fixtures__/create_build_test_plugin');
 const PLUGIN = require('../../lib/plugin_config')(PLUGIN_FIXTURE);
 const PLUGIN_BUILD_DIR = resolve(PLUGIN_FIXTURE, 'build');
 const PLUGIN_BUILD_TARGET = resolve(PLUGIN_BUILD_DIR, 'kibana', PLUGIN.id);
@@ -43,13 +40,7 @@ describe('creating the build', () => {
     expect(PLUGIN.pkg.scripts).not.toBeUndefined();
     expect(PLUGIN.pkg.devDependencies).not.toBeUndefined();

-    await createBuild(
-      PLUGIN,
-      buildTarget,
-      buildVersion,
-      kibanaVersion,
-      buildFiles
-    );
+    await createBuild(PLUGIN, buildTarget, buildVersion, kibanaVersion, buildFiles);

     const pkg = require(resolve(PLUGIN_BUILD_TARGET, 'package.json'));
     expect(pkg).not.toHaveProperty('scripts');
@@ -59,13 +50,7 @@ describe('creating the build', () => {
   it('adds build metadata to package.json', async () => {
     expect(PLUGIN.pkg.build).toBeUndefined();

-    await createBuild(
-      PLUGIN,
-      buildTarget,
-      buildVersion,
-      kibanaVersion,
-      buildFiles
-    );
+    await createBuild(PLUGIN, buildTarget, buildVersion, kibanaVersion, buildFiles);

     const pkg = require(resolve(PLUGIN_BUILD_TARGET, 'package.json'));
     expect(pkg).toHaveProperty('build');
@@ -77,20 +62,10 @@ describe('creating the build', () => {
     it('installs node_modules as a part of build', async () => {
       expect(PLUGIN.skipInstallDependencies).toBe(false);

-      await createBuild(
-        PLUGIN,
-        buildTarget,
-        buildVersion,
-        kibanaVersion,
-        buildFiles
-      );
+      await createBuild(PLUGIN, buildTarget, buildVersion, kibanaVersion, buildFiles);

-      expect(readdirSync(resolve(PLUGIN_BUILD_TARGET))).toContain(
-        'node_modules'
-      );
-      expect(
-        readdirSync(resolve(PLUGIN_BUILD_TARGET, 'node_modules'))
-      ).toContain('noop3');
+      expect(readdirSync(resolve(PLUGIN_BUILD_TARGET))).toContain('node_modules');
+      expect(readdirSync(resolve(PLUGIN_BUILD_TARGET, 'node_modules'))).toContain('noop3');
     });
   });

@@ -103,17 +78,9 @@ describe('creating the build', () => {
     it('does not install node_modules as a part of build', async () => {
       expect(PLUGIN.skipInstallDependencies).toBe(true);

-      await createBuild(
-        PLUGIN,
-        buildTarget,
-        buildVersion,
-        kibanaVersion,
-        buildFiles
-      );
+      await createBuild(PLUGIN, buildTarget, buildVersion, kibanaVersion, buildFiles);

-      expect(readdirSync(resolve(PLUGIN_BUILD_TARGET))).not.toContain(
-        'node_modules'
-      );
+      expect(readdirSync(resolve(PLUGIN_BUILD_TARGET))).not.toContain('node_modules');
     });
   });

@@ -132,13 +99,7 @@ describe('creating the build', () => {
     it('produces CSS', async () => {
       expect(PLUGIN.styleSheetToCompile).toBe(sassPath);

-      await createBuild(
-        PLUGIN,
-        buildTarget,
-        buildVersion,
-        kibanaVersion,
-        buildFiles
-      );
+      await createBuild(PLUGIN, buildTarget, buildVersion, kibanaVersion, buildFiles);

       expect(existsSync(cssPath)).toBe(true);
     });
@@ -23,10 +23,7 @@ const del = require('del');
 const createBuild = require('./create_build');
 const createPackage = require('./create_package');

-const PLUGIN_FIXTURE = resolve(
-  __dirname,
-  '__fixtures__/create_package_test_plugin'
-);
+const PLUGIN_FIXTURE = resolve(__dirname, '__fixtures__/create_package_test_plugin');
 const PLUGIN = require('../../lib/plugin_config')(PLUGIN_FIXTURE);
 const PLUGIN_BUILD_DIR = resolve(PLUGIN_FIXTURE, 'build-custom');

@@ -40,13 +37,7 @@ afterAll(() => del(PLUGIN_BUILD_DIR));

 describe('creating the package', () => {
   it('creates zip file in build target path', async () => {
-    await createBuild(
-      PLUGIN,
-      PLUGIN_BUILD_DIR,
-      buildVersion,
-      kibanaVersion,
-      buildFiles
-    );
+    await createBuild(PLUGIN, PLUGIN_BUILD_DIR, buildVersion, kibanaVersion, buildFiles);
     await createPackage(PLUGIN, PLUGIN_BUILD_DIR, buildVersion);

     const zipFile = resolve(PLUGIN_BUILD_DIR, packageFile);
@@ -27,15 +27,11 @@ module.exports = function gitInfo(rootPath) {
     stdio: ['ignore', 'pipe', 'ignore'],
     encoding: 'utf8',
   });
-  const logLine = execFileSync(
-    'git',
-    ['log', '--pretty=%h' + LOG_SEPARATOR + '%cD', '-n', '1'],
-    {
-      cwd: rootPath,
-      stdio: ['ignore', 'pipe', 'ignore'],
-      encoding: 'utf8',
-    }
-  ).split(LOG_SEPARATOR);
+  const logLine = execFileSync('git', ['log', '--pretty=%h' + LOG_SEPARATOR + '%cD', '-n', '1'], {
+    cwd: rootPath,
+    stdio: ['ignore', 'pipe', 'ignore'],
+    encoding: 'utf8',
+  }).split(LOG_SEPARATOR);

   return {
     count: commitCount.trim(),
@@ -20,11 +20,7 @@
 const map = require('through2-map').obj;
 const gitInfo = require('./git_info');

-module.exports = function rewritePackage(
-  buildSource,
-  buildVersion,
-  kibanaVersion
-) {
+module.exports = function rewritePackage(buildSource, buildVersion, kibanaVersion) {
   return map(function(file) {
     if (file.basename === 'package.json' && file.dirname === buildSource) {
       const pkg = JSON.parse(file.contents.toString('utf8'));
@@ -28,12 +28,7 @@ module.exports = function(plugin, run, options) {
   const script = join('scripts', 'kibana.js');
   const nodeOptions = split(process.env.NODE_OPTIONS || '');

-  let args = nodeOptions.concat([
-    script,
-    '--dev',
-    '--plugin-path',
-    plugin.root,
-  ]);
+  let args = nodeOptions.concat([script, '--dev', '--plugin-path', plugin.root]);

   if (Array.isArray(plugin.includePlugins)) {
     plugin.includePlugins.forEach(path => {
@@ -55,11 +55,7 @@ export async function run(argv: string[]) {
   // starts forwarding the `--` directly to this script, see
   // https://github.com/yarnpkg/yarn/blob/b2d3e1a8fe45ef376b716d597cc79b38702a9320/src/cli/index.js#L174-L182
   if (argv.includes('--')) {
-    log.write(
-      chalk.red(
-        `Using "--" is not allowed, as it doesn't work with 'yarn kbn'.`
-      )
-    );
+    log.write(chalk.red(`Using "--" is not allowed, as it doesn't work with 'yarn kbn'.`));
     process.exit(1);
   }

@@ -89,9 +85,7 @@ export async function run(argv: string[]) {

   const command = commands[commandName];
   if (command === undefined) {
-    log.write(
-      chalk.red(`[${commandName}] is not a valid command, see 'kbn --help'`)
-    );
+    log.write(chalk.red(`[${commandName}] is not a valid command, see 'kbn --help'`));
     process.exit(1);
   }

@@ -22,10 +22,7 @@ jest.mock('../utils/link_project_executables');

 import { resolve } from 'path';

-import {
-  absolutePathSnapshotSerializer,
-  stripAnsiSnapshotSerializer,
-} from '../test_helpers';
+import { absolutePathSnapshotSerializer, stripAnsiSnapshotSerializer } from '../test_helpers';
 import { linkProjectExecutables } from '../utils/link_project_executables';
 import { IPackageJson } from '../utils/package_json';
 import { Project } from '../utils/project';
@@ -88,12 +85,7 @@ test('handles dependencies of dependencies', async () => {
     },
     'packages/baz'
   );
-  const projects = new Map([
-    ['kibana', kibana],
-    ['foo', foo],
-    ['bar', bar],
-    ['baz', baz],
-  ]);
+  const projects = new Map([['kibana', kibana], ['foo', foo], ['bar', bar], ['baz', baz]]);
   const projectGraph = buildProjectGraph(projects);

   const logMock = jest.spyOn(console, 'log').mockImplementation(noop);
@@ -45,9 +45,7 @@ export const BootstrapCommand: ICommand = {
       }
     }

-    log.write(
-      chalk.bold('\nInstalls completed, linking package executables:\n')
-    );
+    log.write(chalk.bold('\nInstalls completed, linking package executables:\n'));
     await linkProjectExecutables(projects, projectGraph);

     /**
@@ -56,11 +54,7 @@ export const BootstrapCommand: ICommand = {
      * transpiled before they can be used. Ideally we shouldn't do this unless we
      * have to, as it will slow down the bootstrapping process.
      */
-    log.write(
-      chalk.bold(
-        '\nLinking executables completed, running `kbn:bootstrap` scripts\n'
-      )
-    );
+    log.write(chalk.bold('\nLinking executables completed, running `kbn:bootstrap` scripts\n'));
     await parallelizeBatches(batchedProjects, async pkg => {
       if (pkg.hasScript('kbn:bootstrap')) {
         await pkg.runScriptStreaming('kbn:bootstrap');
@@ -27,8 +27,7 @@ import { log } from '../utils/log';
 import { ICommand } from './';

 export const CleanCommand: ICommand = {
-  description:
-    'Remove the node_modules and target directories from all projects.',
+  description: 'Remove the node_modules and target directories from all projects.',
   name: 'clean',

   async run(projects, projectGraph, { rootPath }) {
@@ -29,11 +29,7 @@ export interface ICommand {
   name: string;
   description: string;

-  run: (
-    projects: ProjectMap,
-    projectGraph: ProjectGraph,
-    config: ICommandConfig
-  ) => Promise<void>;
+  run: (projects: ProjectMap, projectGraph: ProjectGraph, config: ICommandConfig) => Promise<void>;
 }

 import { BootstrapCommand } from './bootstrap';
@@ -25,8 +25,7 @@ import { topologicallyBatchProjects } from '../utils/projects';
 import { ICommand } from './';

 export const RunCommand: ICommand = {
-  description:
-    'Run script defined in package.json in each package that contains that script.',
+  description: 'Run script defined in package.json in each package that contains that script.',
   name: 'run',

   async run(projects, projectGraph, { extraArgs }) {
@@ -41,11 +40,7 @@ export const RunCommand: ICommand = {
     const scriptArgs = extraArgs.slice(1);

     log.write(
-      chalk.bold(
-        `\nRunning script [${chalk.green(
-          scriptName
-        )}] in batched topological order\n`
-      )
+      chalk.bold(`\nRunning script [${chalk.green(scriptName)}] in batched topological order\n`)
     );

     await parallelizeBatches(batchedProjects, async pkg => {
@@ -69,9 +69,7 @@ export const WatchCommand: ICommand = {
     const projectNames = Array.from(projectsToWatch.keys());
     log.write(
       chalk.bold(
-        chalk.green(
-          `Running ${watchScriptName} scripts for [${projectNames.join(', ')}].`
-        )
+        chalk.green(`Running ${watchScriptName} scripts for [${projectNames.join(', ')}].`)
       )
     );

@@ -79,10 +77,7 @@ export const WatchCommand: ICommand = {
     // topological batching and push it to the last one-entry batch manually.
     const shouldWatchKibanaProject = projectsToWatch.delete(kibanaProjectName);

-    const batchedProjects = topologicallyBatchProjects(
-      projectsToWatch,
-      projectGraph
-    );
+    const batchedProjects = topologicallyBatchProjects(projectsToWatch, projectGraph);

     if (shouldWatchKibanaProject) {
       batchedProjects.push([projects.get(kibanaProjectName)!]);
@@ -94,11 +89,7 @@ export const WatchCommand: ICommand = {
       );

       log.write(
-        chalk.bold(
-          `[${chalk.green(
-            pkg.name
-          )}] Initial build completed (${completionHint}).`
-        )
+        chalk.bold(`[${chalk.green(pkg.name)}] Initial build completed (${completionHint}).`)
       );
     });
   },
@@ -27,10 +27,7 @@ export interface IProjectPathOptions {
 /**
  * Returns all the paths where plugins are located
  */
-export function getProjectPaths(
-  rootPath: string,
-  options: IProjectPathOptions
-) {
+export function getProjectPaths(rootPath: string, options: IProjectPathOptions) {
   const skipKibanaExtra = Boolean(options['skip-kibana-extra']);
   const ossOnly = Boolean(options.oss);

@@ -18,8 +18,5 @@
  */

 export { run } from './cli';
-export {
-  buildProductionProjects,
-  prepareExternalProjectDependencies,
-} from './production';
+export { buildProductionProjects, prepareExternalProjectDependencies } from './production';
 export { transformDependencies } from './utils/package_json';
@@ -70,11 +70,9 @@ async function getProductionProjects(rootPath: string) {
   const projectPaths = getProjectPaths(rootPath, {});
   const projects = await getProjects(rootPath, projectPaths);

-  const productionProjects = includeTransitiveProjects(
-    [projects.get('kibana')!],
-    projects,
-    { onlyProductionDependencies: true }
-  );
+  const productionProjects = includeTransitiveProjects([projects.get('kibana')!], projects, {
+    onlyProductionDependencies: true,
+  });

   // We remove Kibana, as we're already building Kibana
   productionProjects.delete('kibana');
@@ -107,11 +105,7 @@ async function buildProject(project: Project) {
  * manage dependencies is that it will "dedupe" them, so we don't include
  * unnecessary copies of dependencies.
  */
-async function copyToBuild(
-  project: Project,
-  kibanaRoot: string,
-  buildRoot: string
-) {
+async function copyToBuild(project: Project, kibanaRoot: string, buildRoot: string) {
   // We want the package to have the same relative location within the build
   const relativeProjectPath = relative(kibanaRoot, project.path);
   const buildProjectPath = resolve(buildRoot, relativeProjectPath);
@ -18,6 +18,4 @@
|
|||
*/
|
||||
|
||||
export { buildProductionProjects } from './build_production_projects';
|
||||
export {
|
||||
prepareExternalProjectDependencies,
|
||||
} from './prepare_project_dependencies';
|
||||
export { prepareExternalProjectDependencies } from './prepare_project_dependencies';
|
||||
|
|
|
@ -61,9 +61,7 @@ describe('kbn-pm production', () => {
|
|||
|
||||
for (const file of files) {
|
||||
if (file.endsWith('package.json')) {
|
||||
expect(await readPackageJson(join(buildRoot, file))).toMatchSnapshot(
|
||||
file
|
||||
);
|
||||
expect(await readPackageJson(join(buildRoot, file))).toMatchSnapshot(file);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -28,9 +28,7 @@ test('does nothing when Kibana `link:` dependencies', async () => {
|
|||
|
||||
// We're checking for undefined, but we don't really care about what's
|
||||
// returned, we only care about it resolving.
|
||||
await expect(
|
||||
prepareExternalProjectDependencies(projectPath)
|
||||
).resolves.toBeUndefined();
|
||||
await expect(prepareExternalProjectDependencies(projectPath)).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
test('throws if non-Kibana `link` dependencies', async () => {
|
||||
|
|
|
@ -24,8 +24,7 @@ import { Project } from '../utils/project';
|
|||
* All external projects are located within `../kibana-extra/{plugin}` relative
|
||||
* to Kibana itself.
|
||||
*/
|
||||
const isKibanaDep = (depVersion: string) =>
|
||||
depVersion.includes('../../kibana/');
|
||||
const isKibanaDep = (depVersion: string) => depVersion.includes('../../kibana/');
|
||||
|
||||
/**
|
||||
* This prepares the dependencies for an _external_ project.
|
||||
|
@ -49,9 +48,7 @@ export async function prepareExternalProjectDependencies(projectPath: string) {
|
|||
if (isLinkDependency(depVersion) && !isKibanaDep(depVersion)) {
|
||||
// For non-Kibana packages we need to set up symlinks during the
|
||||
// installation process, but this is not something we support yet.
|
||||
throw new Error(
|
||||
'This plugin is using `link:` dependencies for non-Kibana packages'
|
||||
);
|
||||
throw new Error('This plugin is using `link:` dependencies for non-Kibana packages');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,21 +25,14 @@ import { Project } from './utils/project';
|
|||
const rootPath = resolve(`${__dirname}/utils/__fixtures__/kibana`);
|
||||
|
||||
function getExpectedProjectsAndGraph(runMock: any) {
|
||||
const [fullProjects, fullProjectGraph] = (runMock as jest.Mock<
|
||||
any
|
||||
>).mock.calls[0];
|
||||
const [fullProjects, fullProjectGraph] = (runMock as jest.Mock<any>).mock.calls[0];
|
||||
|
||||
const projects = [...fullProjects.keys()].sort();
|
||||
|
||||
const graph = [...fullProjectGraph.entries()].reduce(
|
||||
(expected, [projectName, dependencies]) => {
|
||||
expected[projectName] = dependencies.map(
|
||||
(project: Project) => project.name
|
||||
);
|
||||
return expected;
|
||||
},
|
||||
{}
|
||||
);
|
||||
const graph = [...fullProjectGraph.entries()].reduce((expected, [projectName, dependencies]) => {
|
||||
expected[projectName] = dependencies.map((project: Project) => project.name);
|
||||
return expected;
|
||||
}, {});
|
||||
|
||||
return { projects, graph };
|
||||
}
|
||||
|
|
|
@ -32,16 +32,11 @@ export async function runCommand(command: ICommand, config: ICommandConfig) {
|
|||
try {
|
||||
log.write(
|
||||
chalk.bold(
|
||||
`Running [${chalk.green(command.name)}] command from [${chalk.yellow(
|
||||
config.rootPath
|
||||
)}]:\n`
|
||||
`Running [${chalk.green(command.name)}] command from [${chalk.yellow(config.rootPath)}]:\n`
|
||||
)
|
||||
);
|
||||
|
||||
const projectPaths = getProjectPaths(
|
||||
config.rootPath,
|
||||
config.options as IProjectPathOptions
|
||||
);
|
||||
const projectPaths = getProjectPaths(config.rootPath, config.options as IProjectPathOptions);
|
||||
|
||||
const projects = await getProjects(config.rootPath, projectPaths, {
|
||||
exclude: toArray(config.options.exclude),
|
||||
|
@ -59,9 +54,7 @@ export async function runCommand(command: ICommand, config: ICommandConfig) {
|
|||
|
||||
const projectGraph = buildProjectGraph(projects);
|
||||
|
||||
log.write(
|
||||
chalk.bold(`Found [${chalk.green(projects.size.toString())}] projects:\n`)
|
||||
);
|
||||
log.write(chalk.bold(`Found [${chalk.green(projects.size.toString())}] projects:\n`));
|
||||
log.write(renderProjectsTree(config.rootPath, projects));
|
||||
|
||||
await command.run(projects, projectGraph, config);
|
||||
|
|
|
@ -17,8 +17,6 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
export {
|
||||
absolutePathSnapshotSerializer,
|
||||
} from './absolute_path_snapshot_serializer';
|
||||
export { absolutePathSnapshotSerializer } from './absolute_path_snapshot_serializer';
|
||||
|
||||
export { stripAnsiSnapshotSerializer } from './strip_ansi_snapshot_serializer';
|
||||
|
|
|
@ -23,14 +23,7 @@ import logSymbols from 'log-symbols';
|
|||
import logTransformer from 'strong-log-transformer';
|
||||
|
||||
function generateColors() {
|
||||
const colorWheel = [
|
||||
chalk.cyan,
|
||||
chalk.magenta,
|
||||
chalk.blue,
|
||||
chalk.yellow,
|
||||
chalk.green,
|
||||
chalk.red,
|
||||
];
|
||||
const colorWheel = [chalk.cyan, chalk.magenta, chalk.blue, chalk.yellow, chalk.green, chalk.red];
|
||||
|
||||
const count = colorWheel.length;
|
||||
let children = 0;
|
||||
|
|
|
@ -21,10 +21,7 @@ jest.mock('./fs');
|
|||
|
||||
import { resolve } from 'path';
|
||||
|
||||
import {
|
||||
absolutePathSnapshotSerializer,
|
||||
stripAnsiSnapshotSerializer,
|
||||
} from '../test_helpers';
|
||||
import { absolutePathSnapshotSerializer, stripAnsiSnapshotSerializer } from '../test_helpers';
|
||||
import { linkProjectExecutables } from './link_project_executables';
|
||||
import { Project } from './project';
|
||||
import { buildProjectGraph } from './projects';
|
||||
|
|
|
@ -59,9 +59,7 @@ export async function linkProjectExecutables(
|
|||
.split(sep)
|
||||
.join('/');
|
||||
|
||||
log.write(
|
||||
chalk`{dim [${project.name}]} ${name} -> {dim ${projectRelativePath}}`
|
||||
);
|
||||
log.write(chalk`{dim [${project.name}]} ${name} -> {dim ${projectRelativePath}}`);
|
||||
|
||||
await mkdirp(dirname(dest));
|
||||
await createSymlink(srcPath, dest, 'exec');
|
||||
|
|
|
@ -43,8 +43,7 @@ export const createProductionPackageJson = (pkgJson: IPackageJson) => ({
|
|||
dependencies: transformDependencies(pkgJson.dependencies),
|
||||
});
|
||||
|
||||
export const isLinkDependency = (depVersion: string) =>
|
||||
depVersion.startsWith('link:');
|
||||
export const isLinkDependency = (depVersion: string) => depVersion.startsWith('link:');
|
||||
|
||||
/**
|
||||
* Replaces `link:` dependencies with `file:` dependencies. When installing
|
||||
|
|
|
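The truncated doc comment above describes replacing `link:` ranges with `file:` ranges for production installs. A minimal sketch of what such a transform can look like, reusing the reformatted `isLinkDependency` shape; the rewrite rule is an assumption about the behavior, not the verbatim kbn-pm implementation:

// Illustrative sketch, not part of this commit. Assumed behavior: package
// managers copy `file:` dependencies instead of symlinking them like `link:`.
const isLink = (depVersion: string) => depVersion.startsWith('link:');

export function transformDependenciesSketch(dependencies: { [name: string]: string } = {}) {
  const transformed: { [name: string]: string } = {};
  for (const [name, version] of Object.entries(dependencies)) {
    // Keep registry ranges as-is; rewrite only the link: protocol.
    transformed[name] = isLink(version) ? 'file:' + version.slice('link:'.length) : version;
  }
  return transformed;
}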
@@ -17,10 +17,7 @@
 * under the License.
 */

export async function parallelizeBatches<T>(
  batches: T[][],
  fn: (item: T) => Promise<void>
) {
export async function parallelizeBatches<T>(batches: T[][], fn: (item: T) => Promise<void>) {
  for (const batch of batches) {
    // We need to make sure the entire batch has completed before we can move on
    // to the next batch

@@ -28,11 +25,7 @@ export async function parallelizeBatches<T>(
  }
}

export async function parallelize<T>(
  items: T[],
  fn: (item: T) => Promise<void>,
  concurrency = 4
) {
export async function parallelize<T>(items: T[], fn: (item: T) => Promise<void>, concurrency = 4) {
  if (items.length === 0) {
    return;
  }
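For readers following this hunk, `parallelize` caps how many of the per-item promises run at once. A minimal illustrative sketch of a concurrency-limited runner with the same signature; the worker-pool body is an assumption, not the kbn-pm source:

// Illustrative sketch, not part of this commit: a worker-pool take on
// parallelize<T>(items, fn, concurrency). Assumes items are never undefined.
export async function parallelizeSketch<T>(
  items: T[],
  fn: (item: T) => Promise<void>,
  concurrency = 4
) {
  if (items.length === 0) {
    return;
  }

  const queue = [...items];
  // Spawn at most `concurrency` workers; each keeps pulling the next item
  // until the shared queue is drained, so at most `concurrency` run at once.
  const workers = Array.from({ length: Math.min(concurrency, queue.length) }, async () => {
    for (let item = queue.shift(); item !== undefined; item = queue.shift()) {
      await fn(item);
    }
  });

  await Promise.all(workers);
}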
@@ -96,9 +96,7 @@ describe('#ensureValidProjectDependency', () => {
      'packages/foo'
    );

    expect(() =>
      root.ensureValidProjectDependency(foo)
    ).toThrowErrorMatchingSnapshot();
    expect(() => root.ensureValidProjectDependency(foo)).toThrowErrorMatchingSnapshot();
  });

  test('using version instead of link:', () => {

@@ -118,9 +116,7 @@ describe('#ensureValidProjectDependency', () => {
      'packages/foo'
    );

    expect(() =>
      root.ensureValidProjectDependency(foo)
    ).toThrowErrorMatchingSnapshot();
    expect(() => root.ensureValidProjectDependency(foo)).toThrowErrorMatchingSnapshot();
  });
});

@@ -157,9 +153,7 @@ describe('#getExecutables()', () => {
  });

  test('throws CliError when bin is something strange', () => {
    expect(() =>
      createProjectWith({ bin: 1 }).getExecutables()
    ).toThrowErrorMatchingSnapshot();
    expect(() => createProjectWith({ bin: 1 }).getExecutables()).toThrowErrorMatchingSnapshot();
  });
});

@@ -30,11 +30,7 @@ import {
  isLinkDependency,
  readPackageJson,
} from './package_json';
import {
  installInDir,
  runScriptInPackage,
  runScriptInPackageStreaming,
} from './scripts';
import { installInDir, runScriptInPackage, runScriptInPackageStreaming } from './scripts';

interface IBuildConfig {
  skip?: boolean;

@@ -80,9 +76,7 @@ export class Project {
  }

  public ensureValidProjectDependency(project: Project) {
    const relativePathToProject = normalizePath(
      relative(this.path, project.path)
    );
    const relativePathToProject = normalizePath(relative(this.path, project.path));

    const versionInPackageJson = this.allDependencies[project.name];
    const expectedVersionInPackageJson = `link:${relativePathToProject}`;

@@ -125,10 +119,7 @@ export class Project {
   * instead of everything located in the project directory.
   */
  public getIntermediateBuildDirectory() {
    return resolvePath(
      this.path,
      this.getBuildConfig().intermediateBuildDirectory || '.'
    );
    return resolvePath(this.path, this.getBuildConfig().intermediateBuildDirectory || '.');
  }

  public hasScript(name: string) {

@@ -169,9 +160,7 @@ export class Project {
  public async runScript(scriptName: string, args: string[] = []) {
    log.write(
      chalk.bold(
        `\n\nRunning script [${chalk.green(scriptName)}] in [${chalk.green(
          this.name
        )}]:\n`
        `\n\nRunning script [${chalk.green(scriptName)}] in [${chalk.green(this.name)}]:\n`
      )
    );
    return runScriptInPackage(scriptName, args, this);

@@ -186,11 +175,7 @@ export class Project {
  }

  public async installDependencies({ extraArgs }: { extraArgs: string[] }) {
    log.write(
      chalk.bold(
        `\n\nInstalling dependencies in [${chalk.green(this.name)}]:\n`
      )
    );
    log.write(chalk.bold(`\n\nInstalling dependencies in [${chalk.green(this.name)}]:\n`));
    return installInDir(this.path, extraArgs);
  }
}

@@ -39,9 +39,7 @@ describe('#getProjects', () => {
    const expectedProjects = ['bar', 'foo'];

    expect(projects.size).toBe(2);
    expect([...projects.keys()]).toEqual(
      expect.arrayContaining(expectedProjects)
    );
    expect([...projects.keys()]).toEqual(expect.arrayContaining(expectedProjects));
  });

  test('can specify root as a separate project', async () => {

@@ -57,36 +55,22 @@ describe('#getProjects', () => {
    const expectedProjects = ['baz', 'quux'];

    expect(projects.size).toBe(2);
    expect([...projects.keys()]).toEqual(
      expect.arrayContaining(expectedProjects)
    );
    expect([...projects.keys()]).toEqual(expect.arrayContaining(expectedProjects));
  });

  test('throws if multiple projects has the same name', async () => {
    await expect(
      getProjects(rootPath, ['../plugins/*', '../other-plugins/*'])
    ).rejects.toHaveProperty(
      'message',
      'There are multiple projects with the same name [baz]'
    );
    ).rejects.toHaveProperty('message', 'There are multiple projects with the same name [baz]');
  });

  test('includes additional projects in package.json', async () => {
    const projectPaths = getProjectPaths(rootPath, {});
    const projects = await getProjects(rootPath, projectPaths);

    const expectedProjects = [
      'kibana',
      'bar',
      'foo',
      'with-additional-projects',
      'quux',
      'baz',
    ];
    const expectedProjects = ['kibana', 'bar', 'foo', 'with-additional-projects', 'quux', 'baz'];

    expect([...projects.keys()]).toEqual(
      expect.arrayContaining(expectedProjects)
    );
    expect([...projects.keys()]).toEqual(expect.arrayContaining(expectedProjects));
    expect(projects.size).toBe(expectedProjects.length);
  });

@@ -101,11 +85,7 @@ describe('#getProjects', () => {
      exclude: ['foo', 'bar', 'baz'],
    });

    expect([...projects.keys()].sort()).toEqual([
      'kibana',
      'quux',
      'with-additional-projects',
    ]);
    expect([...projects.keys()].sort()).toEqual(['kibana', 'quux', 'with-additional-projects']);
  });

  test('ignores unknown projects specified in `exclude` filter', async () => {

@@ -157,14 +137,7 @@ describe('#getProjects', () => {

  test('does not return any project if `exclude` filter is specified for all projects', async () => {
    const projects = await getProjects(rootPath, projectPaths, {
      exclude: [
        'kibana',
        'bar',
        'foo',
        'with-additional-projects',
        'quux',
        'baz',
      ],
      exclude: ['kibana', 'bar', 'foo', 'with-additional-projects', 'quux', 'baz'],
    });

    expect(projects.size).toBe(0);

@@ -183,11 +156,7 @@ describe('#getProjects', () => {

describe('#buildProjectGraph', () => {
  test('builds full project graph', async () => {
    const allProjects = await getProjects(rootPath, [
      '.',
      'packages/*',
      '../plugins/*',
    ]);
    const allProjects = await getProjects(rootPath, ['.', 'packages/*', '../plugins/*']);
    const graph = buildProjectGraph(allProjects);

    const expected: { [k: string]: string[] } = {};

@@ -210,9 +179,7 @@ describe('#topologicallyBatchProjects', () => {
  test('batches projects topologically based on their project dependencies', async () => {
    const batches = topologicallyBatchProjects(projects, graph);

    const expectedBatches = batches.map(batch =>
      batch.map(project => project.name)
    );
    const expectedBatches = batches.map(batch => batch.map(project => project.name));

    expect(expectedBatches).toMatchSnapshot();
  });

@@ -223,9 +190,7 @@ describe('#topologicallyBatchProjects', () => {

    const batches = topologicallyBatchProjects(projects, graph);

    const expectedBatches = batches.map(batch =>
      batch.map(project => project.name)
    );
    const expectedBatches = batches.map(batch => batch.map(project => project.name));

    expect(expectedBatches).toMatchSnapshot();
  });

@@ -261,11 +226,7 @@ describe('#includeTransitiveProjects', () => {
  });

  test('includes dependencies of dependencies', async () => {
    const projects = await getProjects(rootPath, [
      '.',
      'packages/*',
      '../plugins/*',
    ]);
    const projects = await getProjects(rootPath, ['.', 'packages/*', '../plugins/*']);

    const quux = projects.get('quux')!;
    const withTransitive = includeTransitiveProjects([quux], projects);

@@ -49,21 +49,17 @@ export async function getProjects(
    const project = await Project.fromPath(projectDir);

    const excludeProject =
      exclude.includes(project.name) ||
      (include.length > 0 && !include.includes(project.name));
      exclude.includes(project.name) || (include.length > 0 && !include.includes(project.name));

    if (excludeProject) {
      continue;
    }

    if (projects.has(project.name)) {
      throw new CliError(
        `There are multiple projects with the same name [${project.name}]`,
        {
          name: project.name,
          paths: [project.path, projects.get(project.name)!.path],
        }
      );
      throw new CliError(`There are multiple projects with the same name [${project.name}]`, {
        name: project.name,
        paths: [project.path, projects.get(project.name)!.path],
      });
    }

    projects.set(project.name, project);

@@ -73,13 +69,7 @@ export async function getProjects(
  return projects;
}

function packagesFromGlobPattern({
  pattern,
  rootPath,
}: {
  pattern: string;
  rootPath: string;
}) {
function packagesFromGlobPattern({ pattern, rootPath }: { pattern: string; rootPath: string }) {
  const globOptions = {
    cwd: rootPath,

@@ -141,9 +131,7 @@ export function topologicallyBatchProjects(
    const batch = [];
    for (const projectName of projectToBatchNames) {
      const projectDeps = projectGraph.get(projectName)!;
      const hasNotBatchedDependencies = projectDeps.some(dep =>
        projectToBatchNames.has(dep.name)
      );
      const hasNotBatchedDependencies = projectDeps.some(dep => projectToBatchNames.has(dep.name));

      if (!hasNotBatchedDependencies) {
        batch.push(projectsToBatch.get(projectName)!);
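The loop in this hunk keeps a project out of a batch while any of its dependencies is still unbatched. A self-contained sketch of that batching idea over a simplified name-to-dependencies graph; string names instead of `Project` objects are an assumption made for brevity:

// Illustrative sketch, not part of this commit: batch nodes so that every
// batch depends only on nodes from earlier batches.
function topologicallyBatchSketch(graph: Map<string, string[]>): string[][] {
  const remaining = new Set(graph.keys());
  const batches: string[][] = [];

  while (remaining.size > 0) {
    // A node is ready when none of its dependencies is still waiting.
    const batch = [...remaining].filter(name =>
      (graph.get(name) || []).every(dep => !remaining.has(dep))
    );

    if (batch.length === 0) {
      throw new Error('Circular dependency between projects');
    }

    batch.forEach(name => remaining.delete(name));
    batches.push(batch);
  }

  return batches;
}

// topologicallyBatchSketch(new Map([['a', []], ['b', ['a']], ['c', ['a']]]))
// => [['a'], ['b', 'c']] — 'b' and 'c' can run in parallel once 'a' is done.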
@@ -36,11 +36,7 @@ test('handles projects with root folder', async () => {
});

test('handles projects outside root folder', async () => {
  const projects = await getProjects(rootPath, [
    '.',
    'packages/*',
    '../plugins/*',
  ]);
  const projects = await getProjects(rootPath, ['.', 'packages/*', '../plugins/*']);

  const tree = await renderProjectsTree(rootPath, projects);
  expect(tree).toMatchSnapshot();

@@ -24,10 +24,7 @@ import { Project } from './project';

const projectKey = Symbol('__project');

export function renderProjectsTree(
  rootPath: string,
  projects: Map<string, Project>
) {
export function renderProjectsTree(rootPath: string, projects: Map<string, Project>) {
  const projectsTree = buildProjectsTree(rootPath, projects);
  return treeToString(createTreeStructure(projectsTree));
}

@@ -46,10 +43,7 @@ function treeToString(tree: ITree) {
  return [tree.name].concat(childrenToStrings(tree.children, '')).join('\n');
}

function childrenToStrings(
  tree: ITreeChildren | undefined,
  treePrefix: string
) {
function childrenToStrings(tree: ITreeChildren | undefined, treePrefix: string) {
  if (tree === undefined) {
    return [];
  }

@@ -149,11 +143,7 @@ function buildProjectsTree(rootPath: string, projects: Map<string, Project>) {
  return tree;
}

function addProjectToTree(
  tree: IProjectsTree,
  pathParts: string[],
  project: Project
) {
function addProjectToTree(tree: IProjectsTree, pathParts: string[], project: Project) {
  if (pathParts.length === 0) {
    tree.set(projectKey, project.name);
  } else {

@@ -23,16 +23,8 @@ import { Project } from './project';
/**
 * Install all dependencies in the given directory
 */
export async function installInDir(
  directory: string,
  extraArgs: string[] = []
) {
  const options = [
    'install',
    '--non-interactive',
    '--mutex file',
    ...extraArgs,
  ];
export async function installInDir(directory: string, extraArgs: string[] = []) {
  const options = ['install', '--non-interactive', '--mutex file', ...extraArgs];

  // We pass the mutex flag to ensure only one instance of yarn runs at any
  // given time (e.g. to avoid conflicts).

@@ -44,11 +36,7 @@ export async function installInDir(
/**
 * Run script in the given directory
 */
export async function runScriptInPackage(
  script: string,
  args: string[],
  pkg: Project
) {
export async function runScriptInPackage(script: string, args: string[], pkg: Project) {
  const execOpts = {
    cwd: pkg.path,
  };

@@ -59,11 +47,7 @@ export async function runScriptInPackage(
/**
 * Run script in the given directory
 */
export function runScriptInPackageStreaming(
  script: string,
  args: string[],
  pkg: Project
) {
export function runScriptInPackageStreaming(script: string, args: string[], pkg: Project) {
  const execOpts = {
    cwd: pkg.path,
  };

@@ -18,16 +18,7 @@
 */

import * as Rx from 'rxjs';
import {
  catchError,
  delay,
  finalize,
  first,
  map,
  mapTo,
  mergeMap,
  timeout,
} from 'rxjs/operators';
import { catchError, delay, finalize, first, map, mapTo, mergeMap, timeout } from 'rxjs/operators';

/**
 * Number of milliseconds we wait before we fall back to the default watch handler.

@@ -67,43 +58,26 @@ function getWatchHandlers(
  const typescriptHandler = buildOutput$.pipe(
    first(data => data.includes('$ tsc')),
    map(() =>
      buildOutput$.pipe(
        first(data => data.includes('Compilation complete.')),
        mapTo('tsc')
      )
      buildOutput$.pipe(first(data => data.includes('Compilation complete.')), mapTo('tsc'))
    )
  );

  const webpackHandler = buildOutput$.pipe(
    first(data => data.includes('$ webpack')),
    map(() =>
      buildOutput$.pipe(
        first(data => data.includes('Chunk Names')),
        mapTo('webpack')
      )
    )
    map(() => buildOutput$.pipe(first(data => data.includes('Chunk Names')), mapTo('webpack')))
  );

  const defaultHandler = Rx.of(undefined).pipe(
    delay(handlerReadinessTimeout),
    map(() =>
      buildOutput$.pipe(
        timeout(handlerDelay),
        catchError(() => Rx.of('timeout'))
      )
    )
    map(() => buildOutput$.pipe(timeout(handlerDelay), catchError(() => Rx.of('timeout'))))
  );

  return [typescriptHandler, webpackHandler, defaultHandler];
}

export function waitUntilWatchIsReady(
  stream: NodeJS.EventEmitter,
  opts: IWatchOptions = {}
) {
export function waitUntilWatchIsReady(stream: NodeJS.EventEmitter, opts: IWatchOptions = {}) {
  const buildOutput$ = new Rx.Subject<string>();
  const onDataListener = (data: Buffer) =>
    buildOutput$.next(data.toString('utf-8'));
  const onDataListener = (data: Buffer) => buildOutput$.next(data.toString('utf-8'));
  const onEndListener = () => buildOutput$.complete();
  const onErrorListener = (e: Error) => buildOutput$.error(e);
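`waitUntilWatchIsReady` races three handlers (tsc, webpack, and a timeout fallback) over a stream of build output and reports which one completed first. A hypothetical usage sketch, assuming the function resolves with that completion hint; the script name and import path are illustrative:

// Illustrative sketch, not part of this commit.
import { spawn } from 'child_process';
import { waitUntilWatchIsReady } from './watch'; // assumed path

async function startWatchSketch() {
  const proc = spawn('yarn', ['watch'], { stdio: ['ignore', 'pipe', 'inherit'] });
  // Resolves once the watcher's first build completes ('tsc' | 'webpack')
  // or the fallback timer fires ('timeout').
  const completionHint = await waitUntilWatchIsReady(proc.stdout!);
  console.log(`Initial build completed (${completionHint}).`);
}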
@@ -35,9 +35,7 @@ function toSortable(systems: Map<SystemName, System<any, any, any, any>>) {
/**
 * Sorts systems in topological order based on dependencies
 */
export function getSortedSystemNames(
  systems: Map<SystemName, System<any, any, any, any>>
) {
export function getSortedSystemNames(systems: Map<SystemName, System<any, any, any, any>>) {
  const sorted = topologicalSort(toSortable(systems));
  return [...sorted];
}

@@ -26,9 +26,7 @@ import {
} from './system_types';

function isPromise(obj: any) {
  return (
    obj != null && typeof obj === 'object' && typeof obj.then === 'function'
  );
  return obj != null && typeof obj === 'object' && typeof obj.then === 'function';
}

export class System<C, M extends SystemMetadata, D extends SystemsType, E> {

@@ -56,19 +54,14 @@ export class System<C, M extends SystemMetadata, D extends SystemsType, E> {

  public getExposedValues(): E {
    if (this.systemInstance === undefined) {
      throw new Error(
        'trying to get the exposed value of a system that is NOT running'
      );
      throw new Error('trying to get the exposed value of a system that is NOT running');
    }

    return this.exposedValues!;
  }

  public start(kibanaValues: C, dependenciesValues: D) {
    this.systemInstance = new this.systemClass(
      kibanaValues,
      dependenciesValues
    );
    this.systemInstance = new this.systemClass(kibanaValues, dependenciesValues);
    const exposedValues = this.systemInstance.start();

    if (isPromise(exposedValues)) {

@@ -79,8 +72,7 @@ export class System<C, M extends SystemMetadata, D extends SystemsType, E> {
      );
    }

    this.exposedValues =
      exposedValues === undefined ? ({} as E) : exposedValues;
    this.exposedValues = exposedValues === undefined ? ({} as E) : exposedValues;
  }

  public stop() {

@@ -91,9 +83,7 @@ export class System<C, M extends SystemMetadata, D extends SystemsType, E> {

    if (isPromise(stoppedResponse)) {
      throw new Error(
        `A promise was returned when stopping [${
          this.name
        }], but systems must stop synchronously.`
        `A promise was returned when stopping [${this.name}], but systems must stop synchronously.`
      );
    }
  }

@@ -59,10 +59,7 @@ test('starts system with core api', () => {
    },
  });

  const createSystemApi: KibanaSystemApiFactory<IKibanaCoreApi, IMetadata> = (
    name,
    metadata
  ) => {
  const createSystemApi: KibanaSystemApiFactory<IKibanaCoreApi, IMetadata> = (name, metadata) => {
    return {
      fromCore: true,
      metadata,

@@ -226,11 +223,7 @@ test('receives values from dependencies but not transitive dependencies', () =>
    };
  }

  class GrandchildSystem extends KibanaSystem<
    CoreType,
    {},
    IGrandchild['grandchild']
  > {
  class GrandchildSystem extends KibanaSystem<CoreType, {}, IGrandchild['grandchild']> {
    public start() {
      return {
        value: 'grandchild',

@@ -238,11 +231,7 @@ test('receives values from dependencies but not transitive dependencies', () =>
    }
  }

  class ChildSystem extends KibanaSystem<
    CoreType,
    IGrandchild,
    IChild['child']
  > {
  class ChildSystem extends KibanaSystem<CoreType, IGrandchild, IChild['child']> {
    public start() {
      expect(this.deps.grandchild).toEqual({ value: 'grandchild' });

@@ -21,10 +21,7 @@ import { getSortedSystemNames } from './sorted_systems';
import { System } from './system';
import { SystemMetadata, SystemName, SystemsType } from './system_types';

export type KibanaSystemApiFactory<C, M> = (
  name: SystemName,
  metadata?: M
) => C;
export type KibanaSystemApiFactory<C, M> = (name: SystemName, metadata?: M) => C;

export class SystemLoader<C, M extends SystemMetadata> {
  private readonly systems = new Map<SystemName, System<C, M, any, any>>();

@@ -45,9 +42,7 @@ export class SystemLoader<C, M extends SystemMetadata> {
    });
  }

  public addSystem<D extends SystemsType, E = void>(
    system: System<C, M, D, E>
  ) {
  public addSystem<D extends SystemsType, E = void>(system: System<C, M, D, E>) {
    if (this.systems.has(system.name)) {
      throw new Error(`a system named [${system.name}] has already been added`);
    }

@@ -92,21 +87,14 @@ export class SystemLoader<C, M extends SystemMetadata> {
    }
  }

  private startSystem<D extends SystemsType, E = void>(
    system: System<C, M, D, E>
  ) {
  private startSystem<D extends SystemsType, E = void>(system: System<C, M, D, E>) {
    const dependenciesValues = {} as D;

    for (const dependency of system.dependencies) {
      dependenciesValues[dependency] = this.systems
        .get(dependency)!
        .getExposedValues();
      dependenciesValues[dependency] = this.systems.get(dependency)!.getExposedValues();
    }

    const kibanaSystemApi = this.kibanaSystemApiFactory(
      system.name,
      system.metadata
    );
    const kibanaSystemApi = this.kibanaSystemApiFactory(system.name, system.metadata);

    system.start(kibanaSystemApi, dependenciesValues);
    this.startedSystems.push(system.name);

@@ -20,12 +20,7 @@
import { topologicalSort } from './topological_sort';

test('returns a topologically ordered sequence', () => {
  const nodes = new Map([
    ['a', []],
    ['b', ['a']],
    ['c', ['a', 'b']],
    ['d', ['a']],
  ]);
  const nodes = new Map([['a', []], ['b', ['a']], ['c', ['a', 'b']], ['d', ['a']]]);

  const sorted = topologicalSort(nodes);

@@ -35,12 +30,7 @@ test('returns a topologically ordered sequence', () => {
});

test('handles multiple "roots" with no deps', () => {
  const nodes = new Map([
    ['a', []],
    ['b', []],
    ['c', ['a', 'b']],
    ['d', ['a']],
  ]);
  const nodes = new Map([['a', []], ['b', []], ['c', ['a', 'b']], ['d', ['a']]]);

  const sorted = topologicalSort(nodes);

@@ -22,14 +22,8 @@ const expect = require('expect.js');

describe('testSubjSelector()', function() {
  it('converts subjectSelectors to cssSelectors', function() {
    expect(testSubjSelector('foo bar')).to.eql(
      '[data-test-subj~="foo"] [data-test-subj~="bar"]'
    );
    expect(testSubjSelector('foo&bar')).to.eql(
      '[data-test-subj~="foo"][data-test-subj~="bar"]'
    );
    expect(testSubjSelector('foo & bar')).to.eql(
      '[data-test-subj~="foo"][data-test-subj~="bar"]'
    );
    expect(testSubjSelector('foo bar')).to.eql('[data-test-subj~="foo"] [data-test-subj~="bar"]');
    expect(testSubjSelector('foo&bar')).to.eql('[data-test-subj~="foo"][data-test-subj~="bar"]');
    expect(testSubjSelector('foo & bar')).to.eql('[data-test-subj~="foo"][data-test-subj~="bar"]');
  });
});
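The three assertions above pin down `testSubjSelector`'s contract: whitespace becomes a descendant combinator, while `&` stacks attribute filters on a single element. A sketch consistent with those expectations; this is an assumption, not the actual package source:

// Illustrative sketch, not part of this commit.
function testSubjSelectorSketch(selector: string): string {
  return selector
    .split('&') // '&' keeps filters on the same element
    .map(term => term.trim())
    .map(term =>
      term
        .split(/\s+/) // whitespace means "descendant of"
        .map(part => `[data-test-subj~="${part}"]`)
        .join(' ')
    )
    .join(''); // adjacent attribute selectors match one element
}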
@@ -42,8 +42,7 @@ const options = {
    desc: 'Pattern to select which tests to run.',
  },
  updateBaselines: {
    desc:
      'Replace baseline screenshots with whatever is generated from the test.',
    desc: 'Replace baseline screenshots with whatever is generated from the test.',
  },
  verbose: { desc: 'Log everything.' },
  debug: { desc: 'Run in debug mode.' },

@@ -125,9 +124,7 @@ function validateOptions(userOptions) {
      // Validate enum flags
      (options[key].choices && !options[key].choices.includes(val))
    ) {
      throw new Error(
        `functional_tests: invalid argument [${val}] to option [${key}]`
      );
      throw new Error(`functional_tests: invalid argument [${val}] to option [${key}]`);
    }
  });
}

@@ -57,9 +57,7 @@ describe('process options for run tests CLI', () => {
  it('rejects boolean value for kibana-install-dir', () => {
    expect(() => {
      processOptions({ 'kibana-install-dir': true }, ['foo']);
    }).toThrow(
      'functional_tests: invalid argument [true] to option [kibana-install-dir]'
    );
    }).toThrow('functional_tests: invalid argument [true] to option [kibana-install-dir]');
  });

  it('accepts boolean value for updateBaselines', () => {

@@ -75,9 +73,7 @@ describe('process options for run tests CLI', () => {
  it('rejects non-enum value for esFrom', () => {
    expect(() => {
      processOptions({ esFrom: 'butter' }, ['foo']);
    }).toThrow(
      'functional_tests: invalid argument [butter] to option [esFrom]'
    );
    }).toThrow('functional_tests: invalid argument [butter] to option [esFrom]');
  });

  it('accepts value for grep', () => {

@@ -111,9 +111,7 @@ function validateOptions(userOptions) {
      // Validate enum flags
      (options[key].choices && !options[key].choices.includes(val))
    ) {
      throw new Error(
        `functional_tests_server: invalid argument [${val}] to option [${key}]`
      );
      throw new Error(`functional_tests_server: invalid argument [${val}] to option [${key}]`);
    }
  });
}

@@ -29,9 +29,7 @@ describe('process options for start servers CLI', () => {
  it('rejects boolean config value', () => {
    expect(() => {
      processOptions({ config: true });
    }).toThrow(
      'functional_tests_server: invalid argument [true] to option [config]'
    );
    }).toThrow('functional_tests_server: invalid argument [true] to option [config]');
  });

  it('rejects empty config value if no default passed', () => {

@@ -59,9 +57,7 @@ describe('process options for start servers CLI', () => {
  it('rejects boolean value for kibana-install-dir', () => {
    expect(() => {
      processOptions({ 'kibana-install-dir': true }, ['foo']);
    }).toThrow(
      'functional_tests_server: invalid argument [true] to option [kibana-install-dir]'
    );
    }).toThrow('functional_tests_server: invalid argument [true] to option [kibana-install-dir]');
  });

  it('accepts source value for esFrom', () => {

@@ -72,9 +68,7 @@ describe('process options for start servers CLI', () => {
  it('rejects non-enum value for esFrom', () => {
    expect(() => {
      processOptions({ esFrom: 'butter' }, ['foo']);
    }).toThrow(
      'functional_tests_server: invalid argument [butter] to option [esFrom]'
    );
    }).toThrow('functional_tests_server: invalid argument [butter] to option [esFrom]');
  });

  it('accepts debug option', () => {

@@ -62,9 +62,7 @@ async function updateCredentials(port, auth, username, password, retries = 10) {
    return await updateCredentials(port, auth, username, password, retries - 1);
  }

  throw new Error(
    `${statusCode} response, expected 200 -- ${JSON.stringify(body)}`
  );
  throw new Error(`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`);
}

export async function setupUsers(log, config) {

@@ -75,10 +73,7 @@ export async function setupUsers(log, config) {
  let auth = `elastic:${DEFAULT_SUPERUSER_PASS}`;

  // list of updates we need to apply
  const updates = [
    config.get('servers.elasticsearch'),
    config.get('servers.kibana'),
  ];
  const updates = [config.get('servers.elasticsearch'), config.get('servers.kibana')];

  for (const { username, password } of updates) {
    log.info('setting %j user password to %j', username, password);

@@ -20,9 +20,4 @@
export { runKibanaServer } from './run_kibana_server';
export { runElasticsearch } from './run_elasticsearch';
export { runFtr } from './run_ftr';
export {
  KIBANA_ROOT,
  KIBANA_FTR_SCRIPT,
  FUNCTIONAL_CONFIG_PATH,
  API_CONFIG_PATH,
} from './paths';
export { KIBANA_ROOT, KIBANA_FTR_SCRIPT, FUNCTIONAL_CONFIG_PATH, API_CONFIG_PATH } from './paths';

@@ -28,17 +28,8 @@ function resolveRelative(path) {
export const KIBANA_EXEC = 'node';
export const KIBANA_EXEC_PATH = resolveRelative('scripts/kibana');
export const KIBANA_ROOT = resolve(__dirname, '../../../../../');
export const KIBANA_FTR_SCRIPT = resolve(
  KIBANA_ROOT,
  'scripts/functional_test_runner'
);
export const KIBANA_FTR_SCRIPT = resolve(KIBANA_ROOT, 'scripts/functional_test_runner');
export const PROJECT_ROOT = resolve(__dirname, '../../../../../../');
export const FUNCTIONAL_CONFIG_PATH = resolve(
  KIBANA_ROOT,
  'test/functional/config'
);
export const API_CONFIG_PATH = resolve(
  KIBANA_ROOT,
  'test/api_integration/config'
);
export const FUNCTIONAL_CONFIG_PATH = resolve(KIBANA_ROOT, 'test/functional/config');
export const API_CONFIG_PATH = resolve(KIBANA_ROOT, 'test/api_integration/config');
export const OPTIMIZE_BUNDLE_DIR = resolve(KIBANA_ROOT, 'optimize/bundles');

@@ -29,9 +29,7 @@ export async function runElasticsearch({ config, options }) {

  const cluster = createEsTestCluster({
    port: config.get('servers.elasticsearch.port'),
    password: !isOss
      ? DEFAULT_SUPERUSER_PASS
      : config.get('servers.elasticsearch.password'),
    password: !isOss ? DEFAULT_SUPERUSER_PASS : config.get('servers.elasticsearch.password'),
    license: config.get('esTestCluster.license'),
    log,
    basePath: resolve(KIBANA_ROOT, '.es'),

@@ -59,9 +59,7 @@ function collectCliArgs(config, { installDir, extraKbnOpts }) {
    serverArgs,
    args => (installDir ? args.filter(a => a !== '--oss') : args),
    args => {
      return installDir
        ? [...args, ...buildArgs]
        : [KIBANA_EXEC_PATH, ...args, ...sourceArgs];
      return installDir ? [...args, ...buildArgs] : [KIBANA_EXEC_PATH, ...args, ...sourceArgs];
    },
    args => args.concat(extraKbnOpts || [])
  );

@@ -22,12 +22,7 @@ import * as Rx from 'rxjs';
import { startWith, switchMap, take } from 'rxjs/operators';
import { withProcRunner } from '@kbn/dev-utils';

import {
  runElasticsearch,
  runKibanaServer,
  runFtr,
  KIBANA_FTR_SCRIPT,
} from './lib';
import { runElasticsearch, runKibanaServer, runFtr, KIBANA_FTR_SCRIPT } from './lib';

import { readConfigFile } from '../../../../src/functional_test_runner/lib';

@@ -25,9 +25,4 @@ export { OPTIMIZE_BUNDLE_DIR, KIBANA_ROOT } from './functional_tests/lib/paths';

export { esTestConfig, createEsTestCluster } from './es';

export {
  kbnTestConfig,
  kibanaServerTestUser,
  kibanaTestUser,
  adminTestUser,
} from './kbn';
export { kbnTestConfig, kibanaServerTestUser, kibanaTestUser, adminTestUser } from './kbn';

@@ -39,10 +39,8 @@ export const kbnTestConfig = new class KbnTestConfig {
      };
    }

    const username =
      process.env.TEST_KIBANA_USERNAME || kibanaTestUser.username;
    const password =
      process.env.TEST_KIBANA_PASSWORD || kibanaTestUser.password;
    const username = process.env.TEST_KIBANA_USERNAME || kibanaTestUser.username;
    const password = process.env.TEST_KIBANA_PASSWORD || kibanaTestUser.password;
    return {
      protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
      hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',

@@ -41,17 +41,13 @@ describe('staged filters', () => {
  });

  test('can set a staged filter', () => {
    store.dispatch(
      setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo1' })
    );
    store.dispatch(setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo1' }));
    const stagedFilters = getStagedFilters(store.getState());
    expect(stagedFilters.length).toBe(1);
  });

  test('getStagedFilters returns filters for all embeddables', () => {
    store.dispatch(
      setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo2' })
    );
    store.dispatch(setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo2' }));
    const stagedFilters = getStagedFilters(store.getState());
    expect(stagedFilters.length).toBe(2);
  });

@@ -20,11 +20,7 @@
import _ from 'lodash';
import { Dispatch } from 'redux';
import { createAction } from 'redux-actions';
import {
  CoreKibanaState,
  getEmbeddableCustomization,
  getPanel,
} from '../../selectors';
import { CoreKibanaState, getEmbeddableCustomization, getPanel } from '../../selectors';
import { PanelId, PanelState } from '../selectors';
import { updatePanel } from './panels';

@@ -40,10 +36,7 @@ export enum EmbeddableActionTypeKeys {
}

export interface EmbeddableIsInitializingAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING,
    PanelId
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING, PanelId> {}

export interface EmbeddableIsInitializedActionPayload {
  panelId: PanelId;

@@ -62,16 +55,10 @@ export interface SetStagedFilterActionPayload {
}

export interface SetStagedFilterAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.SET_STAGED_FILTER,
    SetStagedFilterActionPayload
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.SET_STAGED_FILTER, SetStagedFilterActionPayload> {}

export interface ClearStagedFiltersAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS,
    undefined
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS, undefined> {}

export interface EmbeddableErrorActionPayload {
  error: string | object;

@@ -79,10 +66,7 @@ export interface EmbeddableErrorActionPayload {
}

export interface EmbeddableErrorAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.EMBEDDABLE_ERROR,
    EmbeddableErrorActionPayload
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.EMBEDDABLE_ERROR, EmbeddableErrorActionPayload> {}

export type EmbeddableActions =
  | EmbeddableIsInitializingAction

@@ -94,15 +78,13 @@ export type EmbeddableActions =
export const embeddableIsInitializing = createAction<PanelId>(
  EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING
);
export const embeddableIsInitialized = createAction<
  EmbeddableIsInitializedActionPayload
>(EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED);
export const embeddableIsInitialized = createAction<EmbeddableIsInitializedActionPayload>(
  EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED
);
export const setStagedFilter = createAction<SetStagedFilterActionPayload>(
  EmbeddableActionTypeKeys.SET_STAGED_FILTER
);
export const clearStagedFilters = createAction(
  EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS
);
export const clearStagedFilters = createAction(EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS);
export const embeddableError = createAction<EmbeddableErrorActionPayload>(
  EmbeddableActionTypeKeys.EMBEDDABLE_ERROR
);

@@ -120,10 +102,7 @@ export function embeddableStateChanged(changeData: {
  embeddableState: EmbeddableState;
}) {
  const { panelId, embeddableState } = changeData;
  return (
    dispatch: Dispatch<CoreKibanaState>,
    getState: () => CoreKibanaState
  ) => {
  return (dispatch: Dispatch<CoreKibanaState>, getState: () => CoreKibanaState) => {
    // Translate embeddableState to things redux cares about.
    const customization = getEmbeddableCustomization(getState(), panelId);
    if (!_.isEqual(embeddableState.customization, customization)) {

@@ -136,9 +115,7 @@ export function embeddableStateChanged(changeData: {
    }

    if (embeddableState.stagedFilter) {
      dispatch(
        setStagedFilter({ stagedFilter: embeddableState.stagedFilter, panelId })
      );
      dispatch(setStagedFilter({ stagedFilter: embeddableState.stagedFilter, panelId }));
    }
  };
}

@@ -28,24 +28,16 @@ export enum MetadataActionTypeKeys {
export type UpdateTitleActionPayload = string;

export interface UpdateTitleAction
  extends KibanaAction<
    MetadataActionTypeKeys.UPDATE_TITLE,
    UpdateTitleActionPayload
  > {}
  extends KibanaAction<MetadataActionTypeKeys.UPDATE_TITLE, UpdateTitleActionPayload> {}

export type UpdateDescriptionActionPayload = string;

export interface UpdateDescriptionAction
  extends KibanaAction<
    MetadataActionTypeKeys.UPDATE_DESCRIPTION,
    UpdateDescriptionActionPayload
  > {}
  extends KibanaAction<MetadataActionTypeKeys.UPDATE_DESCRIPTION, UpdateDescriptionActionPayload> {}

export type MetadataActions = UpdateDescriptionAction | UpdateTitleAction;

export const updateDescription = createAction<UpdateDescriptionAction>(
  MetadataActionTypeKeys.UPDATE_DESCRIPTION
);
export const updateTitle = createAction<UpdateTitleAction>(
  MetadataActionTypeKeys.UPDATE_TITLE
);
export const updateTitle = createAction<UpdateTitleAction>(MetadataActionTypeKeys.UPDATE_TITLE);

@@ -48,13 +48,9 @@ export interface SetPanelTitleActionPayload {
}

export interface SetPanelTitleAction
  extends KibanaAction<
    PanelActionTypeKeys.SET_PANEl_TITLE,
    SetPanelTitleActionPayload
  > {}
  extends KibanaAction<PanelActionTypeKeys.SET_PANEl_TITLE, SetPanelTitleActionPayload> {}

export interface SetPanelsAction
  extends KibanaAction<PanelActionTypeKeys.SET_PANELS, PanelsMap> {}
export interface SetPanelsAction extends KibanaAction<PanelActionTypeKeys.SET_PANELS, PanelsMap> {}

export type PanelActions =
  | DeletePanelAction

@@ -64,21 +60,11 @@ export type PanelActions =
  | SetPanelTitleAction
  | SetPanelsAction;

export const deletePanel = createAction<PanelId>(
  PanelActionTypeKeys.DELETE_PANEL
);
export const updatePanel = createAction<PanelState>(
  PanelActionTypeKeys.UPDATE_PANEL
);
export const resetPanelTitle = createAction<PanelId>(
  PanelActionTypeKeys.RESET_PANEl_TITLE
);
export const deletePanel = createAction<PanelId>(PanelActionTypeKeys.DELETE_PANEL);
export const updatePanel = createAction<PanelState>(PanelActionTypeKeys.UPDATE_PANEL);
export const resetPanelTitle = createAction<PanelId>(PanelActionTypeKeys.RESET_PANEl_TITLE);
export const setPanelTitle = createAction<SetPanelTitleActionPayload>(
  PanelActionTypeKeys.SET_PANEl_TITLE
);
export const updatePanels = createAction<PanelsMap>(
  PanelActionTypeKeys.UPDATE_PANELS
);
export const setPanels = createAction<PanelsMap>(
  PanelActionTypeKeys.SET_PANELS
);
export const updatePanels = createAction<PanelsMap>(PanelActionTypeKeys.UPDATE_PANELS);
export const setPanels = createAction<PanelsMap>(PanelActionTypeKeys.SET_PANELS);

@@ -37,16 +37,10 @@ export enum ViewActionTypeKeys {
}

export interface UpdateViewModeAction
  extends KibanaAction<
    ViewActionTypeKeys.UPDATE_VIEW_MODE,
    DashboardViewMode
  > {}
  extends KibanaAction<ViewActionTypeKeys.UPDATE_VIEW_MODE, DashboardViewMode> {}

export interface SetVisibleContextMenuPanelIdAction
  extends KibanaAction<
    ViewActionTypeKeys.SET_VISIBLE_CONTEXT_MENU_PANEL_ID,
    PanelId
  > {}
  extends KibanaAction<ViewActionTypeKeys.SET_VISIBLE_CONTEXT_MENU_PANEL_ID, PanelId> {}

export interface MaximizePanelAction
  extends KibanaAction<ViewActionTypeKeys.MAXIMIZE_PANEl, PanelId> {}

@@ -55,10 +49,7 @@ export interface MinimizePanelAction
  extends KibanaAction<ViewActionTypeKeys.MINIMIZE_PANEL, undefined> {}

export interface UpdateIsFullScreenModeAction
  extends KibanaAction<
    ViewActionTypeKeys.UPDATE_IS_FULL_SCREEN_MODE,
    boolean
  > {}
  extends KibanaAction<ViewActionTypeKeys.UPDATE_IS_FULL_SCREEN_MODE, boolean> {}

export interface UpdateUseMarginsAction
  extends KibanaAction<ViewActionTypeKeys.UPDATE_USE_MARGINS, boolean> {}

@@ -72,8 +63,7 @@ export interface UpdateTimeRangeAction
export interface UpdateFiltersAction
  extends KibanaAction<ViewActionTypeKeys.UPDATE_FILTERS, Filters> {}

export interface UpdateQueryAction
  extends KibanaAction<ViewActionTypeKeys.UPDATE_QUERY, Query> {}
export interface UpdateQueryAction extends KibanaAction<ViewActionTypeKeys.UPDATE_QUERY, Query> {}

export type ViewActions =
  | UpdateViewModeAction

@@ -87,29 +77,19 @@ export type ViewActions =
  | UpdateFiltersAction
  | UpdateQueryAction;

export const updateViewMode = createAction<string>(
  ViewActionTypeKeys.UPDATE_VIEW_MODE
);
export const updateViewMode = createAction<string>(ViewActionTypeKeys.UPDATE_VIEW_MODE);
export const setVisibleContextMenuPanelId = createAction<PanelId>(
  ViewActionTypeKeys.SET_VISIBLE_CONTEXT_MENU_PANEL_ID
);
export const maximizePanel = createAction<PanelId>(
  ViewActionTypeKeys.MAXIMIZE_PANEl
);
export const maximizePanel = createAction<PanelId>(ViewActionTypeKeys.MAXIMIZE_PANEl);
export const minimizePanel = createAction(ViewActionTypeKeys.MINIMIZE_PANEL);
export const updateIsFullScreenMode = createAction<boolean>(
  ViewActionTypeKeys.UPDATE_IS_FULL_SCREEN_MODE
);
export const updateUseMargins = createAction<boolean>(
  ViewActionTypeKeys.UPDATE_USE_MARGINS
);
export const updateUseMargins = createAction<boolean>(ViewActionTypeKeys.UPDATE_USE_MARGINS);
export const updateHidePanelTitles = createAction<boolean>(
  ViewActionTypeKeys.UPDATE_HIDE_PANEL_TITLES
);
export const updateTimeRange = createAction<TimeRange>(
  ViewActionTypeKeys.UPDATE_TIME_RANGE
);
export const updateFilters = createAction<Filters>(
  ViewActionTypeKeys.UPDATE_FILTERS
);
export const updateTimeRange = createAction<TimeRange>(ViewActionTypeKeys.UPDATE_TIME_RANGE);
export const updateFilters = createAction<Filters>(ViewActionTypeKeys.UPDATE_FILTERS);
export const updateQuery = createAction<Query>(ViewActionTypeKeys.UPDATE_QUERY);
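All of these one-line constants follow the same `redux-actions` pattern: `createAction<Payload>(typeKey)` returns a typed action creator. A small usage sketch; the enum value is an assumption mirroring the keys in this diff:

// Illustrative sketch, not part of this commit.
import { createAction } from 'redux-actions';

enum ViewKeys {
  UPDATE_VIEW_MODE = 'UPDATE_VIEW_MODE', // assumed string value
}

const updateViewModeSketch = createAction<string>(ViewKeys.UPDATE_VIEW_MODE);

// Calling the creator wraps the payload in a flux-standard action:
const action = updateViewModeSketch('edit');
// => { type: 'UPDATE_VIEW_MODE', payload: 'edit' }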
@@ -27,11 +27,7 @@ import {
  PanelActionTypeKeys,
  SetStagedFilterActionPayload,
} from '../actions';
import {
  EmbeddableReduxState,
  EmbeddablesMap,
  PanelId,
} from '../selectors/types';
import { EmbeddableReduxState, EmbeddablesMap, PanelId } from '../selectors/types';

const embeddableIsInitializing = (
  embeddables: EmbeddablesMap,

@@ -81,16 +77,12 @@ const embeddableError = (
});

const clearStagedFilters = (embeddables: EmbeddablesMap): EmbeddablesMap => {
  const omitStagedFilters = (
    embeddable: EmbeddableReduxState
  ): EmbeddablesMap => _.omit({ ...embeddable }, ['stagedFilter']);
  const omitStagedFilters = (embeddable: EmbeddableReduxState): EmbeddablesMap =>
    _.omit({ ...embeddable }, ['stagedFilter']);
  return _.mapValues<EmbeddablesMap>(embeddables, omitStagedFilters);
};

const deleteEmbeddable = (
  embeddables: EmbeddablesMap,
  panelId: PanelId
): EmbeddablesMap => {
const deleteEmbeddable = (embeddables: EmbeddablesMap, panelId: PanelId): EmbeddablesMap => {
  const embeddablesCopy = { ...embeddables };
  delete embeddablesCopy[panelId];
  return embeddablesCopy;

@@ -100,9 +92,7 @@ export const embeddablesReducer: Reducer<EmbeddablesMap> = (
  embeddables = {},
  action
): EmbeddablesMap => {
  switch (
    action.type as EmbeddableActionTypeKeys | PanelActionTypeKeys.DELETE_PANEL
  ) {
  switch (action.type as EmbeddableActionTypeKeys | PanelActionTypeKeys.DELETE_PANEL) {
    case EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING:
      return embeddableIsInitializing(embeddables, action.payload);
    case EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED:

@@ -26,10 +26,7 @@ import {
} from '../actions';
import { DashboardMetadata } from '../selectors';

const updateTitle = (
  metadata: DashboardMetadata,
  title: UpdateTitleActionPayload
) => ({
const updateTitle = (metadata: DashboardMetadata, title: UpdateTitleActionPayload) => ({
  ...metadata,
  title,
});
Some files were not shown because too many files have changed in this diff.