Mirror of https://github.com/elastic/kibana.git, synced 2025-04-23 17:28:26 -04:00
* Increase prettier line width to 100
* Fix packages JS prettier
* Change style guide to 100 width
* Fix line-width in latest master changes
commit de3e5b5cb6
parent e5223de945

107 changed files with 324 additions and 1060 deletions
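The substance of the commit is the one-line .prettierrc change in the hunks below; everything else is reformatting fallout. As a quick illustration (not part of the commit — the sample source string and the v2-era synchronous format() API are assumptions), the same option can be exercised through prettier's Node API:

// Minimal sketch, assuming a local prettier install whose format()
// returns a string synchronously (prettier 1.x/2.x behavior).
const prettier = require('prettier');

const source =
  "export { buildProductionProjects, prepareExternalProjectDependencies } from './production';\n";

// At the old 80-column width this export would be wrapped across several
// lines; at printWidth: 100 prettier keeps it on a single line.
console.log(prettier.format(source, { parser: 'babel', printWidth: 100 }));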
.eslintrc.js (20 changed lines)
@@ -3,10 +3,7 @@ const { readdirSync } = require('fs');
 const dedent = require('dedent');
 
 module.exports = {
-  extends: [
-    '@elastic/eslint-config-kibana',
-    '@elastic/eslint-config-kibana/jest',
-  ],
+  extends: ['@elastic/eslint-config-kibana', '@elastic/eslint-config-kibana/jest'],
 
   settings: {
     'import/resolver': {
@@ -77,15 +74,12 @@ module.exports = {
       forceNode: false,
       rootPackageName: 'kibana',
       kibanaPath: '.',
-      pluginMap: readdirSync(resolve(__dirname, 'x-pack/plugins')).reduce(
-        (acc, name) => {
-          if (!name.startsWith('_')) {
-            acc[name] = `x-pack/plugins/${name}`;
-          }
-          return acc;
-        },
-        {}
-      ),
+      pluginMap: readdirSync(resolve(__dirname, 'x-pack/plugins')).reduce((acc, name) => {
+        if (!name.startsWith('_')) {
+          acc[name] = `x-pack/plugins/${name}`;
+        }
+        return acc;
+      }, {}),
     },
   },
 },

@@ -1,4 +1,5 @@
 {
   "singleQuote": true,
-  "trailingComma": "es5"
+  "trailingComma": "es5",
+  "printWidth": 100
 }

@@ -33,10 +33,7 @@ const isValidDate = d => isDate(d) && !isNaN(d.valueOf());
  * will be done using this (and its locale settings) instead of the one bundled
  * with this library.
  */
-function parse(
-  text,
-  { roundUp = false, momentInstance = moment, forceNow } = {}
-) {
+function parse(text, { roundUp = false, momentInstance = moment, forceNow } = {}) {
   if (!text) return undefined;
   if (momentInstance.isMoment(text)) return text;
   if (isDate(text)) return momentInstance(text);

@@ -54,12 +54,9 @@ describe('dateMath', function() {
     expect(dateMath.parse('now&1d')).to.be(undefined);
   });
 
-  it(
-    'should return undefined if I pass a unit besides' + spans.toString(),
-    function() {
-      expect(dateMath.parse('now+5f')).to.be(undefined);
-    }
-  );
+  it('should return undefined if I pass a unit besides' + spans.toString(), function() {
+    expect(dateMath.parse('now+5f')).to.be(undefined);
+  });
 
   it('should return undefined if rounding unit is not 1', function() {
     expect(dateMath.parse('now/2y')).to.be(undefined);
@@ -74,21 +71,16 @@ describe('dateMath', function() {
 
   describe('forceNow', function() {
     it('should throw an Error if passed a string', function() {
-      const fn = () =>
-        dateMath.parse('now', { forceNow: '2000-01-01T00:00:00.000Z' });
+      const fn = () => dateMath.parse('now', { forceNow: '2000-01-01T00:00:00.000Z' });
       expect(fn).to.throwError();
     });
 
     it('should throw an Error if passed a moment', function() {
-      expect(() =>
-        dateMath.parse('now', { forceNow: moment() })
-      ).to.throwError();
+      expect(() => dateMath.parse('now', { forceNow: moment() })).to.throwError();
     });
 
     it('should throw an Error if passed an invalid date', function() {
-      expect(() =>
-        dateMath.parse('now', { forceNow: new Date('foobar') })
-      ).to.throwError();
+      expect(() => dateMath.parse('now', { forceNow: new Date('foobar') })).to.throwError();
     });
   });
 });
@@ -128,9 +120,7 @@ describe('dateMath', function() {
     });
 
     it('should use the forceNow parameter when parsing now', function() {
-      expect(
-        dateMath.parse('now', { forceNow: anchoredDate }).valueOf()
-      ).to.eql(unix);
+      expect(dateMath.parse('now', { forceNow: anchoredDate }).valueOf()).to.eql(unix);
     });
   });
 
@@ -164,9 +154,7 @@ describe('dateMath', function() {
     });
 
     it('should return ' + len + span + ' before forceNow', function() {
-      const parsed = dateMath
-        .parse(nowEx, { forceNow: anchoredDate })
-        .valueOf();
+      const parsed = dateMath.parse(nowEx, { forceNow: anchoredDate }).valueOf();
       expect(parsed).to.eql(anchored.subtract(len, span).valueOf());
     });
   });
@@ -193,9 +181,7 @@ describe('dateMath', function() {
     const thenEx = `${anchor}||+${len}${span}`;
 
     it('should return ' + len + span + ' from now', function() {
-      expect(dateMath.parse(nowEx).format(format)).to.eql(
-        now.add(len, span).format(format)
-      );
+      expect(dateMath.parse(nowEx).format(format)).to.eql(now.add(len, span).format(format));
     });
 
     it('should return ' + len + span + ' after ' + anchor, function() {
@@ -205,9 +191,9 @@ describe('dateMath', function() {
     });
 
     it('should return ' + len + span + ' after forceNow', function() {
-      expect(
-        dateMath.parse(nowEx, { forceNow: anchoredDate }).valueOf()
-      ).to.eql(anchored.add(len, span).valueOf());
+      expect(dateMath.parse(nowEx, { forceNow: anchoredDate }).valueOf()).to.eql(
+        anchored.add(len, span).valueOf()
+      );
     });
   });
 });
@@ -235,22 +221,20 @@ describe('dateMath', function() {
     });
 
     it(`should round now to the beginning of forceNow's ${span}`, function() {
-      expect(
-        dateMath.parse('now/' + span, { forceNow: anchoredDate }).valueOf()
-      ).to.eql(anchored.startOf(span).valueOf());
+      expect(dateMath.parse('now/' + span, { forceNow: anchoredDate }).valueOf()).to.eql(
+        anchored.startOf(span).valueOf()
+      );
     });
 
     it(`should round now to the end of the ${span}`, function() {
-      expect(
-        dateMath.parse('now/' + span, { roundUp: true }).format(format)
-      ).to.eql(now.endOf(span).format(format));
+      expect(dateMath.parse('now/' + span, { roundUp: true }).format(format)).to.eql(
+        now.endOf(span).format(format)
+      );
     });
 
     it(`should round now to the end of forceNow's ${span}`, function() {
       expect(
-        dateMath
-          .parse('now/' + span, { roundUp: true, forceNow: anchoredDate })
-          .valueOf()
+        dateMath.parse('now/' + span, { roundUp: true, forceNow: anchoredDate }).valueOf()
       ).to.eql(anchored.endOf(span).valueOf());
     });
   });
@@ -336,9 +320,7 @@ describe('dateMath', function() {
     });
 
     it('should round relative to forceNow', function() {
-      const val = dateMath
-        .parse('now-0s/s', { forceNow: anchoredDate })
-        .valueOf();
+      const val = dateMath.parse('now-0s/s', { forceNow: anchoredDate }).valueOf();
       expect(val).to.eql(anchored.startOf('s').valueOf());
     });
 
@@ -402,29 +384,11 @@ describe('dateMath', function() {
 
   describe('units', function() {
     it('should have units descending for unitsDesc', function() {
-      expect(dateMath.unitsDesc).to.eql([
-        'y',
-        'M',
-        'w',
-        'd',
-        'h',
-        'm',
-        's',
-        'ms',
-      ]);
+      expect(dateMath.unitsDesc).to.eql(['y', 'M', 'w', 'd', 'h', 'm', 's', 'ms']);
     });
 
     it('should have units ascending for unitsAsc', function() {
-      expect(dateMath.unitsAsc).to.eql([
-        'ms',
-        's',
-        'm',
-        'h',
-        'd',
-        'w',
-        'M',
-        'y',
-      ]);
+      expect(dateMath.unitsAsc).to.eql(['ms', 's', 'm', 'h', 'd', 'w', 'M', 'y']);
     });
   });
 });

@@ -21,14 +21,7 @@ import execa from 'execa';
 import { statSync } from 'fs';
 
 import * as Rx from 'rxjs';
-import {
-  tap,
-  share,
-  take,
-  mergeMap,
-  map,
-  ignoreElements,
-} from 'rxjs/operators';
+import { tap, share, take, mergeMap, map, ignoreElements } from 'rxjs/operators';
 import { gray } from 'chalk';
 
 import treeKill from 'tree-kill';
@@ -91,10 +84,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
   return new class Proc {
     name = name;
 
-    lines$ = Rx.merge(
-      observeLines(childProcess.stdout),
-      observeLines(childProcess.stderr)
-    ).pipe(
+    lines$ = Rx.merge(observeLines(childProcess.stdout), observeLines(childProcess.stderr)).pipe(
      tap(line => log.write(` ${gray('proc')} [${gray(name)}] ${line}`)),
      share()
    );
@@ -122,10 +112,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
       return Rx.race(exit$, error$);
     }).pipe(share());
 
-    _outcomePromise = Rx.merge(
-      this.lines$.pipe(ignoreElements()),
-      this.outcome$
-    ).toPromise();
+    _outcomePromise = Rx.merge(this.lines$.pipe(ignoreElements()), this.outcome$).toPromise();
 
     getOutcomePromise() {
       return this._outcomePromise;

@@ -97,9 +97,7 @@ export class ProcRunner {
         first(),
         catchError(err => {
           if (err.name !== 'EmptyError') {
-            throw createCliError(
-              `[${name}] exited without matching pattern: ${wait}`
-            );
+            throw createCliError(`[${name}] exited without matching pattern: ${wait}`);
           } else {
             throw err;
           }
@@ -195,12 +193,7 @@ export class ProcRunner {
     proc.outcome$.subscribe({
       next: (code) => {
         const duration = moment.duration(Date.now() - startMs);
-        this._log.info(
-          '[%s] exited with %s after %s',
-          name,
-          code,
-          duration.humanize()
-        );
+        this._log.info('[%s] exited with %s after %s', name, code, duration.humanize());
       },
       complete: () => {
         remove();

@@ -58,10 +58,7 @@ describe('utils: createToolingLog(logLevel, output)', () => {
     log.info('Baz');
     log.end();
 
-    const output = await createPromiseFromStreams([
-      log,
-      createConcatStream(''),
-    ]);
+    const output = await createPromiseFromStreams([log, createConcatStream('')]);
 
     expect(output).to.contain('Foo');
     expect(output).to.contain('Bar');

@@ -65,9 +65,7 @@ exports.run = async (defaults = {}) => {
   const command = commands[commandName];
 
   if (command === undefined) {
-    log.error(
-      chalk.red(`[${commandName}] is not a valid command, see 'es --help'`)
-    );
+    log.error(chalk.red(`[${commandName}] is not a valid command, see 'es --help'`));
     process.exitCode = 1;
     return;
   }

@@ -24,11 +24,7 @@ const { Cluster } = require('../cluster');
 exports.description = 'Downloads and run from a nightly snapshot';
 
 exports.help = (defaults = {}) => {
-  const {
-    license = 'basic',
-    password = 'changeme',
-    'base-path': basePath,
-  } = defaults;
+  const { license = 'basic', password = 'changeme', 'base-path': basePath } = defaults;
 
   return dedent`
     Options:

@@ -24,11 +24,7 @@ const { Cluster } = require('../cluster');
 exports.description = 'Build and run from source';
 
 exports.help = (defaults = {}) => {
-  const {
-    license = 'basic',
-    password = 'changeme',
-    'base-path': basePath,
-  } = defaults;
+  const { license = 'basic', password = 'changeme', 'base-path': basePath } = defaults;
 
   return dedent`
     Options:

@@ -186,9 +186,7 @@ exports.Cluster = class Cluster {
       lines.forEach(line => this._log.info(line.formattedMessage));
     });
 
-    this._process.stderr.on('data', data =>
-      this._log.error(chalk.red(data.toString()))
-    );
+    this._process.stderr.on('data', data => this._log.error(chalk.red(data.toString())));
 
     this._outcome = new Promise((resolve, reject) => {
       this._process.once('exit', code => {

@@ -53,11 +53,7 @@ exports.installArchive = async function installArchive(archive, options = {}) {
   log.info('extracted to %s', chalk.bold(installPath));
 
   if (license !== 'oss') {
-    await appendToConfig(
-      installPath,
-      'xpack.license.self_generated.type',
-      license
-    );
+    await appendToConfig(installPath, 'xpack.license.self_generated.type', license);
 
     await appendToConfig(installPath, 'xpack.security.enabled', 'true');
     await configureKeystore(installPath, password, log);
@@ -94,11 +90,7 @@ function rmrfSync(path) {
  * @param {String} value
  */
 async function appendToConfig(installPath, key, value) {
-  fs.appendFileSync(
-    path.resolve(installPath, ES_CONFIG),
-    `${key}: ${value}\n`,
-    'utf8'
-  );
+  fs.appendFileSync(path.resolve(installPath, ES_CONFIG), `${key}: ${value}\n`, 'utf8');
 }
 
 /**

@@ -84,10 +84,7 @@ function downloadFile(url, dest, log) {
       res =>
         new Promise((resolve, reject) => {
           if (res.status === 304) {
-            log.info(
-              'etags match, using cache from %s',
-              chalk.bold(cacheMeta.ts)
-            );
+            log.info('etags match, using cache from %s', chalk.bold(cacheMeta.ts));
             return resolve();
           }
 
@@ -118,9 +115,7 @@ function downloadFile(url, dest, log) {
 
 function getFilename(license, version) {
   const extension = os.platform().startsWith('win') ? 'zip' : 'tar.gz';
-  const basename = `elasticsearch${
-    license === 'oss' ? '-oss-' : '-'
-  }${version}`;
+  const basename = `elasticsearch${license === 'oss' ? '-oss-' : '-'}${version}`;
 
   return `${basename}-SNAPSHOT.${extension}`;
 }

@@ -27,15 +27,9 @@ const simpleGit = require('simple-git/promise');
 const { installArchive } = require('./archive');
 const { createCliError } = require('../errors');
 const { findMostRecentlyChanged, log: defaultLog, cache } = require('../utils');
-const {
-  GRADLE_BIN,
-  ES_ARCHIVE_PATTERN,
-  ES_OSS_ARCHIVE_PATTERN,
-  BASE_PATH,
-} = require('../paths');
+const { GRADLE_BIN, ES_ARCHIVE_PATTERN, ES_OSS_ARCHIVE_PATTERN, BASE_PATH } = require('../paths');
 
-const onceEvent = (emitter, event) =>
-  new Promise(resolve => emitter.once(event, resolve));
+const onceEvent = (emitter, event) => new Promise(resolve => emitter.once(event, resolve));
 
 /**
  * Installs ES from source
@@ -65,15 +59,10 @@ exports.installSource = async function installSource({
 
   const cacheMeta = cache.readMeta(dest);
   const isCached = cacheMeta.exists && cacheMeta.etag === metadata.etag;
-  const archive = isCached
-    ? dest
-    : await createSnapshot({ sourcePath, log, license });
+  const archive = isCached ? dest : await createSnapshot({ sourcePath, log, license });
 
   if (isCached) {
-    log.info(
-      'source path unchanged since %s, using cache',
-      chalk.bold(cacheMeta.ts)
-    );
+    log.info('source path unchanged since %s, using cache', chalk.bold(cacheMeta.ts));
   } else {
     cache.writeMeta(dest, metadata);
     fs.copyFileSync(archive, dest);
@@ -168,11 +157,8 @@ async function createSnapshot({ license, sourcePath, log = defaultLog }) {
     throw createCliError('unable to build ES');
   }
 
-  const archivePattern =
-    license === 'oss' ? ES_OSS_ARCHIVE_PATTERN : ES_ARCHIVE_PATTERN;
-  const esTarballPath = findMostRecentlyChanged(
-    path.resolve(sourcePath, archivePattern)
-  );
+  const archivePattern = license === 'oss' ? ES_OSS_ARCHIVE_PATTERN : ES_ARCHIVE_PATTERN;
+  const esTarballPath = findMostRecentlyChanged(path.resolve(sourcePath, archivePattern));
 
   if (!esTarballPath) {
     throw createCliError('could not locate ES distribution');

@@ -20,11 +20,7 @@
 const { createToolingLog } = require('@kbn/dev-utils');
 const execa = require('execa');
 const { Cluster } = require('../cluster');
-const {
-  installSource,
-  installSnapshot,
-  installArchive,
-} = require('../install');
+const { installSource, installSnapshot, installArchive } = require('../install');
 
 jest.mock('../install', () => ({
   installSource: jest.fn(),
@@ -55,9 +51,7 @@ async function ensureResolve(promise) {
   return await Promise.race([
     promise,
     sleep(100).then(() => {
-      throw new Error(
-        'promise was supposed to resolve with installSource() resolution'
-      );
+      throw new Error('promise was supposed to resolve with installSource() resolution');
     }),
   ]);
 }
@@ -203,33 +197,25 @@ describe('#start(installPath)', () => {
   it('rejects when bin/elasticsearch exists with 0 before starting', async () => {
     mockEsBin({ exitCode: 0, start: false });
 
-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited without starting'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited without starting');
   });
 
   it('rejects when bin/elasticsearch exists with 143 before starting', async () => {
     mockEsBin({ exitCode: 143, start: false });
 
-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited without starting'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited without starting');
   });
 
   it('rejects when bin/elasticsearch exists with 130 before starting', async () => {
     mockEsBin({ exitCode: 130, start: false });
 
-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited without starting'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited without starting');
   });
 
   it('rejects when bin/elasticsearch exists with 1 before starting', async () => {
     mockEsBin({ exitCode: 1, start: false });
 
-    await expect(new Cluster(log).start()).rejects.toThrowError(
-      'ES exited with code 1'
-    );
+    await expect(new Cluster(log).start()).rejects.toThrowError('ES exited with code 1');
   });
 
   it('resolves when bin/elasticsearch logs "started"', async () => {
@@ -243,9 +229,7 @@ describe('#start(installPath)', () => {
 
     const cluster = new Cluster(log);
     await cluster.start();
-    await expect(cluster.start()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.start()).rejects.toThrowError('ES has already been started');
   });
 
   it('rejects if #run() was called previously', async () => {
@@ -253,9 +237,7 @@ describe('#start(installPath)', () => {
 
     const cluster = new Cluster(log);
     await cluster.run();
-    await expect(cluster.start()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.start()).rejects.toThrowError('ES has already been started');
   });
 });
 
@@ -281,9 +263,7 @@ describe('#run()', () => {
   it('rejects when bin/elasticsearch exists with 1', async () => {
     mockEsBin({ exitCode: 1 });
 
-    await expect(new Cluster(log).run()).rejects.toThrowError(
-      'ES exited with code 1'
-    );
+    await expect(new Cluster(log).run()).rejects.toThrowError('ES exited with code 1');
   });
 
   it('rejects if #start() was called previously', async () => {
@@ -291,9 +271,7 @@ describe('#run()', () => {
 
     const cluster = new Cluster(log);
     await cluster.start();
-    await expect(cluster.run()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.run()).rejects.toThrowError('ES has already been started');
   });
 
   it('rejects if #run() was called previously', async () => {
@@ -301,18 +279,14 @@ describe('#run()', () => {
 
     const cluster = new Cluster(log);
     await cluster.run();
-    await expect(cluster.run()).rejects.toThrowError(
-      'ES has already been started'
-    );
+    await expect(cluster.run()).rejects.toThrowError('ES has already been started');
   });
 });
 
 describe('#stop()', () => {
   it('rejects if #run() or #start() was not called', async () => {
     const cluster = new Cluster(log);
-    await expect(cluster.stop()).rejects.toThrowError(
-      'ES has not been started'
-    );
+    await expect(cluster.stop()).rejects.toThrowError('ES has not been started');
   });
 
   it('resolves when ES exits with 0', async () => {

@@ -25,12 +25,8 @@ const path = require('path');
 beforeEach(() => {
   mockFs({
     '/data': {
-      'snapshot.zip': fs.readFileSync(
-        path.resolve(__dirname, '__fixtures__/snapshot.zip')
-      ),
-      'snapshot.tar.gz': fs.readFileSync(
-        path.resolve(__dirname, '__fixtures__/snapshot.tar.gz')
-      ),
+      'snapshot.zip': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.zip')),
+      'snapshot.tar.gz': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.tar.gz')),
     },
     '/.es': {},
   });

@@ -29,11 +29,7 @@ const mkdirp = require('mkdirp');
  * @param {Array} config
  * @param {String} dest
  */
-exports.extractConfigFiles = function extractConfigFiles(
-  config,
-  dest,
-  options = {}
-) {
+exports.extractConfigFiles = function extractConfigFiles(config, dest, options = {}) {
   const originalConfig = typeof config === 'string' ? [config] : config;
   const localConfig = [];
 
@@ -54,10 +54,7 @@ test('ignores non-paths', () => {
 });
 
 test('ignores directories', () => {
-  const config = extractConfigFiles(
-    ['path=/data/foo.yml', 'foo.bar=/data/bar'],
-    '/es'
-  );
+  const config = extractConfigFiles(['path=/data/foo.yml', 'foo.bar=/data/bar'], '/es');
 
   expect(config).toEqual(['path=foo.yml', 'foo.bar=/data/bar']);
 });

@@ -23,9 +23,7 @@ const LRU = require('lru-cache');
 
 const DIR = Symbol('dir');
 const FILE = Symbol('file');
-const cache = process.env.KIBANA_RESOLVER_HARD_CACHE
-  ? new Map()
-  : new LRU({ maxAge: 1000 });
+const cache = process.env.KIBANA_RESOLVER_HARD_CACHE ? new Map() : new LRU({ maxAge: 1000 });
 
 function getPathType(path) {
   const cached = cache.get(path);

@@ -27,12 +27,8 @@ function getConfig(config) {
     projectRoot: true,
   };
 
-  if (!config || !config['@elastic/eslint-import-resolver-kibana'])
-    return defaults;
-  return Object.assign(
-    defaults,
-    config['@elastic/eslint-import-resolver-kibana']
-  );
+  if (!config || !config['@elastic/eslint-import-resolver-kibana']) return defaults;
+  return Object.assign(defaults, config['@elastic/eslint-import-resolver-kibana']);
 }
 
 function getRootPackageDir(dirRoot, dir, rootPackageName) {
@@ -54,8 +50,7 @@ function getRootPackageDir(dirRoot, dir, rootPackageName) {
     // recurse until a matching package.json is found
     return getRootPackageDir(dirRoot, dirname(dir), rootPackageName);
   } catch (e) {
-    if (e.code === 'ENOENT')
-      return getRootPackageDir(dirRoot, dirname(dir), rootPackageName);
+    if (e.code === 'ENOENT') return getRootPackageDir(dirRoot, dirname(dir), rootPackageName);
     throw e;
   }
 }

@@ -36,9 +36,7 @@ exports.getWebpackConfig = function(kibanaPath, projectRoot, config) {
 
       // Dev defaults for test bundle https://github.com/elastic/kibana/blob/6998f074542e8c7b32955db159d15661aca253d7/src/core_plugins/tests_bundle/index.js#L73-L78
       ng_mock$: fromKibana('src/test_utils/public/ng_mock'),
-      'angular-mocks$': fromKibana(
-        'src/core_plugins/tests_bundle/webpackShims/angular-mocks.js'
-      ),
+      'angular-mocks$': fromKibana('src/core_plugins/tests_bundle/webpackShims/angular-mocks.js'),
       fixtures: fromKibana('src/fixtures'),
       test_utils: fromKibana('src/test_utils/public'),
     };

@@ -24,9 +24,7 @@ const LRU = require('lru-cache');
 
 const { isDirectory } = require('./get_path_type');
 
-const cache = process.env.KIBANA_RESOLVER_HARD_CACHE
-  ? new Map()
-  : new LRU({ max: 1000 });
+const cache = process.env.KIBANA_RESOLVER_HARD_CACHE ? new Map() : new LRU({ max: 1000 });
 
 function readShimNames(shimDirectory) {
   if (!isDirectory(shimDirectory)) {
@@ -47,12 +45,9 @@ function findRelativeWebpackShims(directory) {
   const ownShims = readShimNames(join(directory, 'webpackShims'));
 
   const parent = dirname(directory);
-  const parentShims =
-    parent !== directory ? findRelativeWebpackShims(parent) : [];
+  const parentShims = parent !== directory ? findRelativeWebpackShims(parent) : [];
 
-  const allShims = !ownShims.length
-    ? parentShims
-    : ownShims.concat(parentShims);
+  const allShims = !ownShims.length ? parentShims : ownShims.concat(parentShims);
 
   cache.set(directory, allShims);
   return allShims;

@@ -101,10 +101,7 @@ module.exports = function({ name }) {
     cwd: KBN_DIR,
     stdio: 'inherit',
   }).then(() => {
-    const dir = relative(
-      process.cwd(),
-      resolve(KBN_DIR, `../kibana-extra`, snakeCase(name))
-    );
+    const dir = relative(process.cwd(), resolve(KBN_DIR, `../kibana-extra`, snakeCase(name)));
 
     log.success(chalk`🎉
 
@@ -57,4 +57,4 @@ module.exports = {
   resolveKibanaPath: resolveKibanaPath,
   createToolingLog: createToolingLog,
   readFtrConfigFile: readFtrConfigFile,
-};
+};

@@ -65,13 +65,7 @@ function parseTsconfig(pluginSourcePath, configPath) {
   return config;
 }
 
-module.exports = function createBuild(
-  plugin,
-  buildTarget,
-  buildVersion,
-  kibanaVersion,
-  files
-) {
+module.exports = function createBuild(plugin, buildTarget, buildVersion, kibanaVersion, files) {
   const buildSource = plugin.root;
   const buildRoot = path.join(buildTarget, 'kibana', plugin.id);
 
@@ -104,13 +98,9 @@ module.exports = function createBuild(
       }
 
       // install packages in build
-      execa.sync(
-        winCmd('yarn'),
-        ['install', '--production', '--pure-lockfile'],
-        {
-          cwd: buildRoot,
-        }
-      );
+      execa.sync(winCmd('yarn'), ['install', '--production', '--pure-lockfile'], {
+        cwd: buildRoot,
+      });
     })
     .then(function () {
       if (!plugin.styleSheetToCompile) {
@@ -119,17 +109,11 @@ module.exports = function createBuild(
 
       const file = path.resolve(plugin.root, plugin.styleSheetToCompile);
       if (!existsSync(file)) {
-        throw new Error(
-          `Path provided for styleSheetToCompile does not exist: ${file}`
-        );
+        throw new Error(`Path provided for styleSheetToCompile does not exist: ${file}`);
       }
 
       const outputFileName = path.basename(file, path.extname(file)) + '.css';
-      const output = path.join(
-        buildRoot,
-        path.dirname(plugin.styleSheetToCompile),
-        outputFileName
-      );
+      const output = path.join(buildRoot, path.dirname(plugin.styleSheetToCompile), outputFileName);
 
       const rendered = sass.renderSync({ file, output });
       writeFileSync(output, rendered.css);
@@ -153,10 +137,7 @@ module.exports = function createBuild(
         const buildConfig = parseTsconfig(buildSource, buildConfigPath);
 
         if (buildConfig.extends) {
-          buildConfig.extends = path.join(
-            relative(buildRoot, buildSource),
-            buildConfig.extends
-          );
+          buildConfig.extends = path.join(relative(buildRoot, buildSource), buildConfig.extends);
 
           writeFileSync(buildConfigPath, JSON.stringify(buildConfig));
         }

@@ -99,13 +99,7 @@ describe('creating the build', () => {
   it('produces CSS', async () => {
     expect(PLUGIN.styleSheetToCompile).toBe(sassPath);
 
-    await createBuild(
-      PLUGIN,
-      buildTarget,
-      buildVersion,
-      kibanaVersion,
-      buildFiles
-    );
+    await createBuild(PLUGIN, buildTarget, buildVersion, kibanaVersion, buildFiles);
 
     expect(existsSync(cssPath)).toBe(true);
   });

@@ -56,4 +56,4 @@ function toBuffer(string) {
   // of Buffer.from(string, encoding)
   return new Buffer(string, 'utf8');
 }
-}
+}

@@ -55,11 +55,7 @@ export async function run(argv: string[]) {
  // starts forwarding the `--` directly to this script, see
  // https://github.com/yarnpkg/yarn/blob/b2d3e1a8fe45ef376b716d597cc79b38702a9320/src/cli/index.js#L174-L182
  if (argv.includes('--')) {
-    log.write(
-      chalk.red(
-        `Using "--" is not allowed, as it doesn't work with 'yarn kbn'.`
-      )
-    );
+    log.write(chalk.red(`Using "--" is not allowed, as it doesn't work with 'yarn kbn'.`));
    process.exit(1);
  }
 
@@ -89,9 +85,7 @@ export async function run(argv: string[]) {
 
   const command = commands[commandName];
   if (command === undefined) {
-    log.write(
-      chalk.red(`[${commandName}] is not a valid command, see 'kbn --help'`)
-    );
+    log.write(chalk.red(`[${commandName}] is not a valid command, see 'kbn --help'`));
     process.exit(1);
   }
 

@@ -22,10 +22,7 @@ jest.mock('../utils/link_project_executables');
 
 import { resolve } from 'path';
 
-import {
-  absolutePathSnapshotSerializer,
-  stripAnsiSnapshotSerializer,
-} from '../test_helpers';
+import { absolutePathSnapshotSerializer, stripAnsiSnapshotSerializer } from '../test_helpers';
 import { linkProjectExecutables } from '../utils/link_project_executables';
 import { IPackageJson } from '../utils/package_json';
 import { Project } from '../utils/project';
@@ -88,12 +85,7 @@ test('handles dependencies of dependencies', async () => {
     },
     'packages/baz'
   );
-  const projects = new Map([
-    ['kibana', kibana],
-    ['foo', foo],
-    ['bar', bar],
-    ['baz', baz],
-  ]);
+  const projects = new Map([['kibana', kibana], ['foo', foo], ['bar', bar], ['baz', baz]]);
   const projectGraph = buildProjectGraph(projects);
 
   const logMock = jest.spyOn(console, 'log').mockImplementation(noop);

@@ -45,9 +45,7 @@ export const BootstrapCommand: ICommand = {
       }
     }
 
-    log.write(
-      chalk.bold('\nInstalls completed, linking package executables:\n')
-    );
+    log.write(chalk.bold('\nInstalls completed, linking package executables:\n'));
     await linkProjectExecutables(projects, projectGraph);
 
     /**
@@ -56,11 +54,7 @@ export const BootstrapCommand: ICommand = {
      * transpiled before they can be used. Ideally we shouldn't do this unless we
      * have to, as it will slow down the bootstrapping process.
      */
-    log.write(
-      chalk.bold(
-        '\nLinking executables completed, running `kbn:bootstrap` scripts\n'
-      )
-    );
+    log.write(chalk.bold('\nLinking executables completed, running `kbn:bootstrap` scripts\n'));
     await parallelizeBatches(batchedProjects, async pkg => {
       if (pkg.hasScript('kbn:bootstrap')) {
         await pkg.runScriptStreaming('kbn:bootstrap');

@@ -27,8 +27,7 @@ import { log } from '../utils/log';
 import { ICommand } from './';
 
 export const CleanCommand: ICommand = {
-  description:
-    'Remove the node_modules and target directories from all projects.',
+  description: 'Remove the node_modules and target directories from all projects.',
   name: 'clean',
 
   async run(projects, projectGraph, { rootPath }) {

@@ -29,11 +29,7 @@ export interface ICommand {
   name: string;
   description: string;
 
-  run: (
-    projects: ProjectMap,
-    projectGraph: ProjectGraph,
-    config: ICommandConfig
-  ) => Promise<void>;
+  run: (projects: ProjectMap, projectGraph: ProjectGraph, config: ICommandConfig) => Promise<void>;
 }
 
 import { BootstrapCommand } from './bootstrap';

@@ -25,8 +25,7 @@ import { topologicallyBatchProjects } from '../utils/projects';
 import { ICommand } from './';
 
 export const RunCommand: ICommand = {
-  description:
-    'Run script defined in package.json in each package that contains that script.',
+  description: 'Run script defined in package.json in each package that contains that script.',
   name: 'run',
 
   async run(projects, projectGraph, { extraArgs }) {
@@ -41,11 +40,7 @@ export const RunCommand: ICommand = {
     const scriptArgs = extraArgs.slice(1);
 
     log.write(
-      chalk.bold(
-        `\nRunning script [${chalk.green(
-          scriptName
-        )}] in batched topological order\n`
-      )
+      chalk.bold(`\nRunning script [${chalk.green(scriptName)}] in batched topological order\n`)
     );
 
     await parallelizeBatches(batchedProjects, async pkg => {

@@ -69,9 +69,7 @@ export const WatchCommand: ICommand = {
     const projectNames = Array.from(projectsToWatch.keys());
     log.write(
       chalk.bold(
-        chalk.green(
-          `Running ${watchScriptName} scripts for [${projectNames.join(', ')}].`
-        )
+        chalk.green(`Running ${watchScriptName} scripts for [${projectNames.join(', ')}].`)
       )
     );
 
@@ -79,10 +77,7 @@ export const WatchCommand: ICommand = {
     // topological batching and push it to the last one-entry batch manually.
     const shouldWatchKibanaProject = projectsToWatch.delete(kibanaProjectName);
 
-    const batchedProjects = topologicallyBatchProjects(
-      projectsToWatch,
-      projectGraph
-    );
+    const batchedProjects = topologicallyBatchProjects(projectsToWatch, projectGraph);
 
     if (shouldWatchKibanaProject) {
       batchedProjects.push([projects.get(kibanaProjectName)!]);
@@ -94,11 +89,7 @@ export const WatchCommand: ICommand = {
       );
 
       log.write(
-        chalk.bold(
-          `[${chalk.green(
-            pkg.name
-          )}] Initial build completed (${completionHint}).`
-        )
+        chalk.bold(`[${chalk.green(pkg.name)}] Initial build completed (${completionHint}).`)
       );
     });
   },

@@ -27,10 +27,7 @@ export interface IProjectPathOptions {
 /**
  * Returns all the paths where plugins are located
  */
-export function getProjectPaths(
-  rootPath: string,
-  options: IProjectPathOptions
-) {
+export function getProjectPaths(rootPath: string, options: IProjectPathOptions) {
   const skipKibanaExtra = Boolean(options['skip-kibana-extra']);
   const ossOnly = Boolean(options.oss);
 

@@ -18,8 +18,5 @@
  */
 
 export { run } from './cli';
-export {
-  buildProductionProjects,
-  prepareExternalProjectDependencies,
-} from './production';
+export { buildProductionProjects, prepareExternalProjectDependencies } from './production';
 export { transformDependencies } from './utils/package_json';

@@ -70,11 +70,9 @@ async function getProductionProjects(rootPath: string) {
   const projectPaths = getProjectPaths(rootPath, {});
   const projects = await getProjects(rootPath, projectPaths);
 
-  const productionProjects = includeTransitiveProjects(
-    [projects.get('kibana')!],
-    projects,
-    { onlyProductionDependencies: true }
-  );
+  const productionProjects = includeTransitiveProjects([projects.get('kibana')!], projects, {
+    onlyProductionDependencies: true,
+  });
 
   // We remove Kibana, as we're already building Kibana
   productionProjects.delete('kibana');
@@ -107,11 +105,7 @@ async function buildProject(project: Project) {
  * manage dependencies is that it will "dedupe" them, so we don't include
  * unnecessary copies of dependencies.
  */
-async function copyToBuild(
-  project: Project,
-  kibanaRoot: string,
-  buildRoot: string
-) {
+async function copyToBuild(project: Project, kibanaRoot: string, buildRoot: string) {
   // We want the package to have the same relative location within the build
   const relativeProjectPath = relative(kibanaRoot, project.path);
   const buildProjectPath = resolve(buildRoot, relativeProjectPath);

@@ -18,6 +18,4 @@
  */
 
 export { buildProductionProjects } from './build_production_projects';
-export {
-  prepareExternalProjectDependencies,
-} from './prepare_project_dependencies';
+export { prepareExternalProjectDependencies } from './prepare_project_dependencies';

@@ -61,9 +61,7 @@ describe('kbn-pm production', () => {
 
     for (const file of files) {
       if (file.endsWith('package.json')) {
-        expect(await readPackageJson(join(buildRoot, file))).toMatchSnapshot(
-          file
-        );
+        expect(await readPackageJson(join(buildRoot, file))).toMatchSnapshot(file);
       }
     }
   },

@@ -28,9 +28,7 @@ test('does nothing when Kibana `link:` dependencies', async () => {
 
   // We're checking for undefined, but we don't really care about what's
   // returned, we only care about it resolving.
-  await expect(
-    prepareExternalProjectDependencies(projectPath)
-  ).resolves.toBeUndefined();
+  await expect(prepareExternalProjectDependencies(projectPath)).resolves.toBeUndefined();
 });
 
 test('throws if non-Kibana `link` dependencies', async () => {

@@ -24,8 +24,7 @@ import { Project } from '../utils/project';
 * All external projects are located within `../kibana-extra/{plugin}` relative
 * to Kibana itself.
 */
-const isKibanaDep = (depVersion: string) =>
-  depVersion.includes('../../kibana/');
+const isKibanaDep = (depVersion: string) => depVersion.includes('../../kibana/');
 
 /**
  * This prepares the dependencies for an _external_ project.
@@ -49,9 +48,7 @@ export async function prepareExternalProjectDependencies(projectPath: string) {
     if (isLinkDependency(depVersion) && !isKibanaDep(depVersion)) {
       // For non-Kibana packages we need to set up symlinks during the
       // installation process, but this is not something we support yet.
-      throw new Error(
-        'This plugin is using `link:` dependencies for non-Kibana packages'
-      );
+      throw new Error('This plugin is using `link:` dependencies for non-Kibana packages');
     }
   }
 }

@@ -25,21 +25,14 @@ import { Project } from './utils/project';
 const rootPath = resolve(`${__dirname}/utils/__fixtures__/kibana`);
 
 function getExpectedProjectsAndGraph(runMock: any) {
-  const [fullProjects, fullProjectGraph] = (runMock as jest.Mock<
-    any
-  >).mock.calls[0];
+  const [fullProjects, fullProjectGraph] = (runMock as jest.Mock<any>).mock.calls[0];
 
   const projects = [...fullProjects.keys()].sort();
 
-  const graph = [...fullProjectGraph.entries()].reduce(
-    (expected, [projectName, dependencies]) => {
-      expected[projectName] = dependencies.map(
-        (project: Project) => project.name
-      );
-      return expected;
-    },
-    {}
-  );
+  const graph = [...fullProjectGraph.entries()].reduce((expected, [projectName, dependencies]) => {
+    expected[projectName] = dependencies.map((project: Project) => project.name);
+    return expected;
+  }, {});
 
   return { projects, graph };
 }

@@ -32,16 +32,11 @@ export async function runCommand(command: ICommand, config: ICommandConfig) {
   try {
     log.write(
       chalk.bold(
-        `Running [${chalk.green(command.name)}] command from [${chalk.yellow(
-          config.rootPath
-        )}]:\n`
+        `Running [${chalk.green(command.name)}] command from [${chalk.yellow(config.rootPath)}]:\n`
       )
     );
 
-    const projectPaths = getProjectPaths(
-      config.rootPath,
-      config.options as IProjectPathOptions
-    );
+    const projectPaths = getProjectPaths(config.rootPath, config.options as IProjectPathOptions);
 
     const projects = await getProjects(config.rootPath, projectPaths, {
       exclude: toArray(config.options.exclude),
@@ -59,9 +54,7 @@ export async function runCommand(command: ICommand, config: ICommandConfig) {
 
     const projectGraph = buildProjectGraph(projects);
 
-    log.write(
-      chalk.bold(`Found [${chalk.green(projects.size.toString())}] projects:\n`)
-    );
+    log.write(chalk.bold(`Found [${chalk.green(projects.size.toString())}] projects:\n`));
     log.write(renderProjectsTree(config.rootPath, projects));
 
     await command.run(projects, projectGraph, config);

@@ -17,8 +17,6 @@
  * under the License.
  */
 
-export {
-  absolutePathSnapshotSerializer,
-} from './absolute_path_snapshot_serializer';
+export { absolutePathSnapshotSerializer } from './absolute_path_snapshot_serializer';
 
 export { stripAnsiSnapshotSerializer } from './strip_ansi_snapshot_serializer';

@@ -23,14 +23,7 @@ import logSymbols from 'log-symbols';
 import logTransformer from 'strong-log-transformer';
 
 function generateColors() {
-  const colorWheel = [
-    chalk.cyan,
-    chalk.magenta,
-    chalk.blue,
-    chalk.yellow,
-    chalk.green,
-    chalk.red,
-  ];
+  const colorWheel = [chalk.cyan, chalk.magenta, chalk.blue, chalk.yellow, chalk.green, chalk.red];
 
   const count = colorWheel.length;
   let children = 0;

@@ -21,10 +21,7 @@ jest.mock('./fs');
 
 import { resolve } from 'path';
 
-import {
-  absolutePathSnapshotSerializer,
-  stripAnsiSnapshotSerializer,
-} from '../test_helpers';
+import { absolutePathSnapshotSerializer, stripAnsiSnapshotSerializer } from '../test_helpers';
 import { linkProjectExecutables } from './link_project_executables';
 import { Project } from './project';
 import { buildProjectGraph } from './projects';

@@ -59,9 +59,7 @@ export async function linkProjectExecutables(
         .split(sep)
         .join('/');
 
-      log.write(
-        chalk`{dim [${project.name}]} ${name} -> {dim ${projectRelativePath}}`
-      );
+      log.write(chalk`{dim [${project.name}]} ${name} -> {dim ${projectRelativePath}}`);
 
       await mkdirp(dirname(dest));
       await createSymlink(srcPath, dest, 'exec');

@@ -43,8 +43,7 @@ export const createProductionPackageJson = (pkgJson: IPackageJson) => ({
   dependencies: transformDependencies(pkgJson.dependencies),
 });
 
-export const isLinkDependency = (depVersion: string) =>
-  depVersion.startsWith('link:');
+export const isLinkDependency = (depVersion: string) => depVersion.startsWith('link:');
 
 /**
  * Replaces `link:` dependencies with `file:` dependencies. When installing

@@ -17,10 +17,7 @@
  * under the License.
  */
 
-export async function parallelizeBatches<T>(
-  batches: T[][],
-  fn: (item: T) => Promise<void>
-) {
+export async function parallelizeBatches<T>(batches: T[][], fn: (item: T) => Promise<void>) {
   for (const batch of batches) {
     // We need to make sure the entire batch has completed before we can move on
     // to the next batch
@@ -28,11 +25,7 @@ export async function parallelizeBatches<T>(
   }
 }
 
-export async function parallelize<T>(
-  items: T[],
-  fn: (item: T) => Promise<void>,
-  concurrency = 4
-) {
+export async function parallelize<T>(items: T[], fn: (item: T) => Promise<void>, concurrency = 4) {
   if (items.length === 0) {
     return;
   }
 

@@ -96,9 +96,7 @@ describe('#ensureValidProjectDependency', () => {
       'packages/foo'
     );
 
-    expect(() =>
-      root.ensureValidProjectDependency(foo)
-    ).toThrowErrorMatchingSnapshot();
+    expect(() => root.ensureValidProjectDependency(foo)).toThrowErrorMatchingSnapshot();
   });
 
   test('using version instead of link:', () => {
@@ -118,9 +116,7 @@ describe('#ensureValidProjectDependency', () => {
       'packages/foo'
     );
 
-    expect(() =>
-      root.ensureValidProjectDependency(foo)
-    ).toThrowErrorMatchingSnapshot();
+    expect(() => root.ensureValidProjectDependency(foo)).toThrowErrorMatchingSnapshot();
   });
 });
 
@@ -157,9 +153,7 @@ describe('#getExecutables()', () => {
   });
 
   test('throws CliError when bin is something strange', () => {
-    expect(() =>
-      createProjectWith({ bin: 1 }).getExecutables()
-    ).toThrowErrorMatchingSnapshot();
+    expect(() => createProjectWith({ bin: 1 }).getExecutables()).toThrowErrorMatchingSnapshot();
   });
 });
 

@@ -30,11 +30,7 @@ import {
   isLinkDependency,
   readPackageJson,
 } from './package_json';
-import {
-  installInDir,
-  runScriptInPackage,
-  runScriptInPackageStreaming,
-} from './scripts';
+import { installInDir, runScriptInPackage, runScriptInPackageStreaming } from './scripts';
 
 interface IBuildConfig {
   skip?: boolean;
@@ -80,9 +76,7 @@ export class Project {
   }
 
   public ensureValidProjectDependency(project: Project) {
-    const relativePathToProject = normalizePath(
-      relative(this.path, project.path)
-    );
+    const relativePathToProject = normalizePath(relative(this.path, project.path));
 
     const versionInPackageJson = this.allDependencies[project.name];
     const expectedVersionInPackageJson = `link:${relativePathToProject}`;
@@ -125,10 +119,7 @@ export class Project {
    * instead of everything located in the project directory.
    */
   public getIntermediateBuildDirectory() {
-    return resolvePath(
-      this.path,
-      this.getBuildConfig().intermediateBuildDirectory || '.'
-    );
+    return resolvePath(this.path, this.getBuildConfig().intermediateBuildDirectory || '.');
   }
 
   public hasScript(name: string) {
@@ -169,9 +160,7 @@ export class Project {
   public async runScript(scriptName: string, args: string[] = []) {
     log.write(
       chalk.bold(
-        `\n\nRunning script [${chalk.green(scriptName)}] in [${chalk.green(
-          this.name
-        )}]:\n`
+        `\n\nRunning script [${chalk.green(scriptName)}] in [${chalk.green(this.name)}]:\n`
       )
     );
     return runScriptInPackage(scriptName, args, this);
@@ -186,11 +175,7 @@ export class Project {
   }
 
   public async installDependencies({ extraArgs }: { extraArgs: string[] }) {
-    log.write(
-      chalk.bold(
-        `\n\nInstalling dependencies in [${chalk.green(this.name)}]:\n`
-      )
-    );
+    log.write(chalk.bold(`\n\nInstalling dependencies in [${chalk.green(this.name)}]:\n`));
     return installInDir(this.path, extraArgs);
   }
 }

@@ -39,9 +39,7 @@ describe('#getProjects', () => {
    const expectedProjects = ['bar', 'foo'];

    expect(projects.size).toBe(2);
-    expect([...projects.keys()]).toEqual(
-      expect.arrayContaining(expectedProjects)
-    );
+    expect([...projects.keys()]).toEqual(expect.arrayContaining(expectedProjects));
  });

  test('can specify root as a separate project', async () => {
@@ -57,36 +55,22 @@ describe('#getProjects', () => {
    const expectedProjects = ['baz', 'quux'];

    expect(projects.size).toBe(2);
-    expect([...projects.keys()]).toEqual(
-      expect.arrayContaining(expectedProjects)
-    );
+    expect([...projects.keys()]).toEqual(expect.arrayContaining(expectedProjects));
  });

  test('throws if multiple projects has the same name', async () => {
    await expect(
      getProjects(rootPath, ['../plugins/*', '../other-plugins/*'])
-    ).rejects.toHaveProperty(
-      'message',
-      'There are multiple projects with the same name [baz]'
-    );
+    ).rejects.toHaveProperty('message', 'There are multiple projects with the same name [baz]');
  });

  test('includes additional projects in package.json', async () => {
    const projectPaths = getProjectPaths(rootPath, {});
    const projects = await getProjects(rootPath, projectPaths);

-    const expectedProjects = [
-      'kibana',
-      'bar',
-      'foo',
-      'with-additional-projects',
-      'quux',
-      'baz',
-    ];
+    const expectedProjects = ['kibana', 'bar', 'foo', 'with-additional-projects', 'quux', 'baz'];

-    expect([...projects.keys()]).toEqual(
-      expect.arrayContaining(expectedProjects)
-    );
+    expect([...projects.keys()]).toEqual(expect.arrayContaining(expectedProjects));
    expect(projects.size).toBe(expectedProjects.length);
  });

@@ -101,11 +85,7 @@ describe('#getProjects', () => {
      exclude: ['foo', 'bar', 'baz'],
    });

-    expect([...projects.keys()].sort()).toEqual([
-      'kibana',
-      'quux',
-      'with-additional-projects',
-    ]);
+    expect([...projects.keys()].sort()).toEqual(['kibana', 'quux', 'with-additional-projects']);
  });

  test('ignores unknown projects specified in `exclude` filter', async () => {
@@ -157,14 +137,7 @@ describe('#getProjects', () => {

  test('does not return any project if `exclude` filter is specified for all projects', async () => {
    const projects = await getProjects(rootPath, projectPaths, {
-      exclude: [
-        'kibana',
-        'bar',
-        'foo',
-        'with-additional-projects',
-        'quux',
-        'baz',
-      ],
+      exclude: ['kibana', 'bar', 'foo', 'with-additional-projects', 'quux', 'baz'],
    });

    expect(projects.size).toBe(0);
@@ -183,11 +156,7 @@ describe('#getProjects', () => {

describe('#buildProjectGraph', () => {
  test('builds full project graph', async () => {
-    const allProjects = await getProjects(rootPath, [
-      '.',
-      'packages/*',
-      '../plugins/*',
-    ]);
+    const allProjects = await getProjects(rootPath, ['.', 'packages/*', '../plugins/*']);
    const graph = buildProjectGraph(allProjects);

    const expected: { [k: string]: string[] } = {};
@@ -210,9 +179,7 @@ describe('#topologicallyBatchProjects', () => {
  test('batches projects topologically based on their project dependencies', async () => {
    const batches = topologicallyBatchProjects(projects, graph);

-    const expectedBatches = batches.map(batch =>
-      batch.map(project => project.name)
-    );
+    const expectedBatches = batches.map(batch => batch.map(project => project.name));

    expect(expectedBatches).toMatchSnapshot();
  });
@@ -223,9 +190,7 @@ describe('#topologicallyBatchProjects', () => {

    const batches = topologicallyBatchProjects(projects, graph);

-    const expectedBatches = batches.map(batch =>
-      batch.map(project => project.name)
-    );
+    const expectedBatches = batches.map(batch => batch.map(project => project.name));

    expect(expectedBatches).toMatchSnapshot();
  });
@@ -261,11 +226,7 @@ describe('#includeTransitiveProjects', () => {
  });

  test('includes dependencies of dependencies', async () => {
-    const projects = await getProjects(rootPath, [
-      '.',
-      'packages/*',
-      '../plugins/*',
-    ]);
+    const projects = await getProjects(rootPath, ['.', 'packages/*', '../plugins/*']);

    const quux = projects.get('quux')!;
    const withTransitive = includeTransitiveProjects([quux], projects);

@@ -49,21 +49,17 @@ export async function getProjects(
    const project = await Project.fromPath(projectDir);

    const excludeProject =
-      exclude.includes(project.name) ||
-      (include.length > 0 && !include.includes(project.name));
+      exclude.includes(project.name) || (include.length > 0 && !include.includes(project.name));

    if (excludeProject) {
      continue;
    }

    if (projects.has(project.name)) {
-      throw new CliError(
-        `There are multiple projects with the same name [${project.name}]`,
-        {
-          name: project.name,
-          paths: [project.path, projects.get(project.name)!.path],
-        }
-      );
+      throw new CliError(`There are multiple projects with the same name [${project.name}]`, {
+        name: project.name,
+        paths: [project.path, projects.get(project.name)!.path],
+      });
    }

    projects.set(project.name, project);
@@ -73,13 +69,7 @@ export async function getProjects(
  return projects;
}

-function packagesFromGlobPattern({
-  pattern,
-  rootPath,
-}: {
-  pattern: string;
-  rootPath: string;
-}) {
+function packagesFromGlobPattern({ pattern, rootPath }: { pattern: string; rootPath: string }) {
  const globOptions = {
    cwd: rootPath,

@@ -141,9 +131,7 @@ export function topologicallyBatchProjects(
    const batch = [];
    for (const projectName of projectToBatchNames) {
      const projectDeps = projectGraph.get(projectName)!;
-      const hasNotBatchedDependencies = projectDeps.some(dep =>
-        projectToBatchNames.has(dep.name)
-      );
+      const hasNotBatchedDependencies = projectDeps.some(dep => projectToBatchNames.has(dep.name));

      if (!hasNotBatchedDependencies) {
        batch.push(projectsToBatch.get(projectName)!);

@@ -36,11 +36,7 @@ test('handles projects with root folder', async () => {
});

test('handles projects outside root folder', async () => {
-  const projects = await getProjects(rootPath, [
-    '.',
-    'packages/*',
-    '../plugins/*',
-  ]);
+  const projects = await getProjects(rootPath, ['.', 'packages/*', '../plugins/*']);

  const tree = await renderProjectsTree(rootPath, projects);
  expect(tree).toMatchSnapshot();

@@ -24,10 +24,7 @@ import { Project } from './project';

const projectKey = Symbol('__project');

-export function renderProjectsTree(
-  rootPath: string,
-  projects: Map<string, Project>
-) {
+export function renderProjectsTree(rootPath: string, projects: Map<string, Project>) {
  const projectsTree = buildProjectsTree(rootPath, projects);
  return treeToString(createTreeStructure(projectsTree));
}
@@ -46,10 +43,7 @@ function treeToString(tree: ITree) {
  return [tree.name].concat(childrenToStrings(tree.children, '')).join('\n');
}

-function childrenToStrings(
-  tree: ITreeChildren | undefined,
-  treePrefix: string
-) {
+function childrenToStrings(tree: ITreeChildren | undefined, treePrefix: string) {
  if (tree === undefined) {
    return [];
  }
@@ -149,11 +143,7 @@ function buildProjectsTree(rootPath: string, projects: Map<string, Project>) {
  return tree;
}

-function addProjectToTree(
-  tree: IProjectsTree,
-  pathParts: string[],
-  project: Project
-) {
+function addProjectToTree(tree: IProjectsTree, pathParts: string[], project: Project) {
  if (pathParts.length === 0) {
    tree.set(projectKey, project.name);
  } else {

@@ -23,16 +23,8 @@ import { Project } from './project';
/**
 * Install all dependencies in the given directory
 */
export async function installInDir(
  directory: string,
  extraArgs: string[] = []
) {
  const options = [
    'install',
    '--non-interactive',
    '--mutex file',
    ...extraArgs,
  ];
export async function installInDir(directory: string, extraArgs: string[] = []) {
  const options = ['install', '--non-interactive', '--mutex file', ...extraArgs];

  // We pass the mutex flag to ensure only one instance of yarn runs at any
  // given time (e.g. to avoid conflicts).
@@ -44,11 +36,7 @@ export async function installInDir(
/**
 * Run script in the given directory
 */
export async function runScriptInPackage(
  script: string,
  args: string[],
  pkg: Project
) {
export async function runScriptInPackage(script: string, args: string[], pkg: Project) {
  const execOpts = {
    cwd: pkg.path,
  };
@@ -59,11 +47,7 @@ export async function runScriptInPackage(
/**
 * Run script in the given directory
 */
export function runScriptInPackageStreaming(
  script: string,
  args: string[],
  pkg: Project
) {
export function runScriptInPackageStreaming(script: string, args: string[], pkg: Project) {
  const execOpts = {
    cwd: pkg.path,
  };

@@ -18,16 +18,7 @@
 */

import * as Rx from 'rxjs';
import {
  catchError,
  delay,
  finalize,
  first,
  map,
  mapTo,
  mergeMap,
  timeout,
} from 'rxjs/operators';
import { catchError, delay, finalize, first, map, mapTo, mergeMap, timeout } from 'rxjs/operators';

/**
 * Number of milliseconds we wait before we fall back to the default watch handler.
@@ -67,43 +58,26 @@ function getWatchHandlers(
  const typescriptHandler = buildOutput$.pipe(
    first(data => data.includes('$ tsc')),
    map(() =>
      buildOutput$.pipe(
        first(data => data.includes('Compilation complete.')),
        mapTo('tsc')
      )
      buildOutput$.pipe(first(data => data.includes('Compilation complete.')), mapTo('tsc'))
    )
  );

  const webpackHandler = buildOutput$.pipe(
    first(data => data.includes('$ webpack')),
    map(() =>
      buildOutput$.pipe(
        first(data => data.includes('Chunk Names')),
        mapTo('webpack')
      )
    )
    map(() => buildOutput$.pipe(first(data => data.includes('Chunk Names')), mapTo('webpack')))
  );

  const defaultHandler = Rx.of(undefined).pipe(
    delay(handlerReadinessTimeout),
    map(() =>
      buildOutput$.pipe(
        timeout(handlerDelay),
        catchError(() => Rx.of('timeout'))
      )
    )
    map(() => buildOutput$.pipe(timeout(handlerDelay), catchError(() => Rx.of('timeout'))))
  );

  return [typescriptHandler, webpackHandler, defaultHandler];
}

export function waitUntilWatchIsReady(
  stream: NodeJS.EventEmitter,
  opts: IWatchOptions = {}
) {
export function waitUntilWatchIsReady(stream: NodeJS.EventEmitter, opts: IWatchOptions = {}) {
  const buildOutput$ = new Rx.Subject<string>();
  const onDataListener = (data: Buffer) =>
    buildOutput$.next(data.toString('utf-8'));
  const onDataListener = (data: Buffer) => buildOutput$.next(data.toString('utf-8'));
  const onEndListener = () => buildOutput$.complete();
  const onErrorListener = (e: Error) => buildOutput$.error(e);

@@ -35,9 +35,7 @@ function toSortable(systems: Map<SystemName, System<any, any, any, any>>) {
/**
 * Sorts systems in topological order based on dependencies
 */
export function getSortedSystemNames(
  systems: Map<SystemName, System<any, any, any, any>>
) {
export function getSortedSystemNames(systems: Map<SystemName, System<any, any, any, any>>) {
  const sorted = topologicalSort(toSortable(systems));
  return [...sorted];
}

@@ -26,9 +26,7 @@ import {
} from './system_types';

function isPromise(obj: any) {
  return (
    obj != null && typeof obj === 'object' && typeof obj.then === 'function'
  );
  return obj != null && typeof obj === 'object' && typeof obj.then === 'function';
}

export class System<C, M extends SystemMetadata, D extends SystemsType, E> {
@@ -56,19 +54,14 @@ export class System<C, M extends SystemMetadata, D extends SystemsType, E> {

  public getExposedValues(): E {
    if (this.systemInstance === undefined) {
      throw new Error(
        'trying to get the exposed value of a system that is NOT running'
      );
      throw new Error('trying to get the exposed value of a system that is NOT running');
    }

    return this.exposedValues!;
  }

  public start(kibanaValues: C, dependenciesValues: D) {
    this.systemInstance = new this.systemClass(
      kibanaValues,
      dependenciesValues
    );
    this.systemInstance = new this.systemClass(kibanaValues, dependenciesValues);
    const exposedValues = this.systemInstance.start();

    if (isPromise(exposedValues)) {
@@ -79,8 +72,7 @@ export class System<C, M extends SystemMetadata, D extends SystemsType, E> {
      );
    }

    this.exposedValues =
      exposedValues === undefined ? ({} as E) : exposedValues;
    this.exposedValues = exposedValues === undefined ? ({} as E) : exposedValues;
  }

  public stop() {
@@ -91,9 +83,7 @@ export class System<C, M extends SystemMetadata, D extends SystemsType, E> {

    if (isPromise(stoppedResponse)) {
      throw new Error(
        `A promise was returned when stopping [${
          this.name
        }], but systems must stop synchronously.`
        `A promise was returned when stopping [${this.name}], but systems must stop synchronously.`
      );
    }
  }

@@ -59,10 +59,7 @@ test('starts system with core api', () => {
    },
  });

  const createSystemApi: KibanaSystemApiFactory<IKibanaCoreApi, IMetadata> = (
    name,
    metadata
  ) => {
  const createSystemApi: KibanaSystemApiFactory<IKibanaCoreApi, IMetadata> = (name, metadata) => {
    return {
      fromCore: true,
      metadata,
@@ -226,11 +223,7 @@ test('receives values from dependencies but not transitive dependencies', () =>
    };
  }

  class GrandchildSystem extends KibanaSystem<
    CoreType,
    {},
    IGrandchild['grandchild']
  > {
  class GrandchildSystem extends KibanaSystem<CoreType, {}, IGrandchild['grandchild']> {
    public start() {
      return {
        value: 'grandchild',
@@ -238,11 +231,7 @@ test('receives values from dependencies but not transitive dependencies', () =>
    }
  }

  class ChildSystem extends KibanaSystem<
    CoreType,
    IGrandchild,
    IChild['child']
  > {
  class ChildSystem extends KibanaSystem<CoreType, IGrandchild, IChild['child']> {
    public start() {
      expect(this.deps.grandchild).toEqual({ value: 'grandchild' });

@@ -21,10 +21,7 @@ import { getSortedSystemNames } from './sorted_systems';
import { System } from './system';
import { SystemMetadata, SystemName, SystemsType } from './system_types';

export type KibanaSystemApiFactory<C, M> = (
  name: SystemName,
  metadata?: M
) => C;
export type KibanaSystemApiFactory<C, M> = (name: SystemName, metadata?: M) => C;

export class SystemLoader<C, M extends SystemMetadata> {
  private readonly systems = new Map<SystemName, System<C, M, any, any>>();
@@ -45,9 +42,7 @@ export class SystemLoader<C, M extends SystemMetadata> {
    });
  }

  public addSystem<D extends SystemsType, E = void>(
    system: System<C, M, D, E>
  ) {
  public addSystem<D extends SystemsType, E = void>(system: System<C, M, D, E>) {
    if (this.systems.has(system.name)) {
      throw new Error(`a system named [${system.name}] has already been added`);
    }
@@ -92,21 +87,14 @@ export class SystemLoader<C, M extends SystemMetadata> {
    }
  }

  private startSystem<D extends SystemsType, E = void>(
    system: System<C, M, D, E>
  ) {
  private startSystem<D extends SystemsType, E = void>(system: System<C, M, D, E>) {
    const dependenciesValues = {} as D;

    for (const dependency of system.dependencies) {
      dependenciesValues[dependency] = this.systems
        .get(dependency)!
        .getExposedValues();
      dependenciesValues[dependency] = this.systems.get(dependency)!.getExposedValues();
    }

    const kibanaSystemApi = this.kibanaSystemApiFactory(
      system.name,
      system.metadata
    );
    const kibanaSystemApi = this.kibanaSystemApiFactory(system.name, system.metadata);

    system.start(kibanaSystemApi, dependenciesValues);
    this.startedSystems.push(system.name);

@@ -20,12 +20,7 @@
import { topologicalSort } from './topological_sort';

test('returns a topologically ordered sequence', () => {
  const nodes = new Map([
    ['a', []],
    ['b', ['a']],
    ['c', ['a', 'b']],
    ['d', ['a']],
  ]);
  const nodes = new Map([['a', []], ['b', ['a']], ['c', ['a', 'b']], ['d', ['a']]]);

  const sorted = topologicalSort(nodes);
@@ -35,12 +30,7 @@ test('returns a topologically ordered sequence', () => {
});

test('handles multiple "roots" with no deps', () => {
  const nodes = new Map([
    ['a', []],
    ['b', []],
    ['c', ['a', 'b']],
    ['d', ['a']],
  ]);
  const nodes = new Map([['a', []], ['b', []], ['c', ['a', 'b']], ['d', ['a']]]);

  const sorted = topologicalSort(nodes);

@@ -42,8 +42,7 @@ const options = {
    desc: 'Pattern to select which tests to run.',
  },
  updateBaselines: {
    desc:
      'Replace baseline screenshots with whatever is generated from the test.',
    desc: 'Replace baseline screenshots with whatever is generated from the test.',
  },
  verbose: { desc: 'Log everything.' },
  debug: { desc: 'Run in debug mode.' },
@@ -125,9 +124,7 @@ function validateOptions(userOptions) {
      // Validate enum flags
      (options[key].choices && !options[key].choices.includes(val))
    ) {
      throw new Error(
        `functional_tests: invalid argument [${val}] to option [${key}]`
      );
      throw new Error(`functional_tests: invalid argument [${val}] to option [${key}]`);
    }
  });
}

@@ -57,9 +57,7 @@ describe('process options for run tests CLI', () => {
  it('rejects boolean value for kibana-install-dir', () => {
    expect(() => {
      processOptions({ 'kibana-install-dir': true }, ['foo']);
    }).toThrow(
      'functional_tests: invalid argument [true] to option [kibana-install-dir]'
    );
    }).toThrow('functional_tests: invalid argument [true] to option [kibana-install-dir]');
  });

  it('accepts boolean value for updateBaselines', () => {
@@ -75,9 +73,7 @@ describe('process options for run tests CLI', () => {
  it('rejects non-enum value for esFrom', () => {
    expect(() => {
      processOptions({ esFrom: 'butter' }, ['foo']);
    }).toThrow(
      'functional_tests: invalid argument [butter] to option [esFrom]'
    );
    }).toThrow('functional_tests: invalid argument [butter] to option [esFrom]');
  });

  it('accepts value for grep', () => {

@@ -111,9 +111,7 @@ function validateOptions(userOptions) {
      // Validate enum flags
      (options[key].choices && !options[key].choices.includes(val))
    ) {
      throw new Error(
        `functional_tests_server: invalid argument [${val}] to option [${key}]`
      );
      throw new Error(`functional_tests_server: invalid argument [${val}] to option [${key}]`);
    }
  });
}

@@ -29,9 +29,7 @@ describe('process options for start servers CLI', () => {
  it('rejects boolean config value', () => {
    expect(() => {
      processOptions({ config: true });
    }).toThrow(
      'functional_tests_server: invalid argument [true] to option [config]'
    );
    }).toThrow('functional_tests_server: invalid argument [true] to option [config]');
  });

  it('rejects empty config value if no default passed', () => {
@@ -59,9 +57,7 @@ describe('process options for start servers CLI', () => {
  it('rejects boolean value for kibana-install-dir', () => {
    expect(() => {
      processOptions({ 'kibana-install-dir': true }, ['foo']);
    }).toThrow(
      'functional_tests_server: invalid argument [true] to option [kibana-install-dir]'
    );
    }).toThrow('functional_tests_server: invalid argument [true] to option [kibana-install-dir]');
  });

  it('accepts source value for esFrom', () => {
@@ -72,9 +68,7 @@ describe('process options for start servers CLI', () => {
  it('rejects non-enum value for esFrom', () => {
    expect(() => {
      processOptions({ esFrom: 'butter' }, ['foo']);
    }).toThrow(
      'functional_tests_server: invalid argument [butter] to option [esFrom]'
    );
    }).toThrow('functional_tests_server: invalid argument [butter] to option [esFrom]');
  });

  it('accepts debug option', () => {

@@ -62,9 +62,7 @@ async function updateCredentials(port, auth, username, password, retries = 10) {
    return await updateCredentials(port, auth, username, password, retries - 1);
  }

  throw new Error(
    `${statusCode} response, expected 200 -- ${JSON.stringify(body)}`
  );
  throw new Error(`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`);
}

export async function setupUsers(log, config) {
@@ -75,10 +73,7 @@ export async function setupUsers(log, config) {
  let auth = `elastic:${DEFAULT_SUPERUSER_PASS}`;

  // list of updates we need to apply
  const updates = [
    config.get('servers.elasticsearch'),
    config.get('servers.kibana'),
  ];
  const updates = [config.get('servers.elasticsearch'), config.get('servers.kibana')];

  for (const { username, password } of updates) {
    log.info('setting %j user password to %j', username, password);

@@ -20,9 +20,4 @@
export { runKibanaServer } from './run_kibana_server';
export { runElasticsearch } from './run_elasticsearch';
export { runFtr } from './run_ftr';
export {
  KIBANA_ROOT,
  KIBANA_FTR_SCRIPT,
  FUNCTIONAL_CONFIG_PATH,
  API_CONFIG_PATH,
} from './paths';
export { KIBANA_ROOT, KIBANA_FTR_SCRIPT, FUNCTIONAL_CONFIG_PATH, API_CONFIG_PATH } from './paths';

@@ -28,17 +28,8 @@ function resolveRelative(path) {
export const KIBANA_EXEC = 'node';
export const KIBANA_EXEC_PATH = resolveRelative('scripts/kibana');
export const KIBANA_ROOT = resolve(__dirname, '../../../../../');
export const KIBANA_FTR_SCRIPT = resolve(
  KIBANA_ROOT,
  'scripts/functional_test_runner'
);
export const KIBANA_FTR_SCRIPT = resolve(KIBANA_ROOT, 'scripts/functional_test_runner');
export const PROJECT_ROOT = resolve(__dirname, '../../../../../../');
export const FUNCTIONAL_CONFIG_PATH = resolve(
  KIBANA_ROOT,
  'test/functional/config'
);
export const API_CONFIG_PATH = resolve(
  KIBANA_ROOT,
  'test/api_integration/config'
);
export const FUNCTIONAL_CONFIG_PATH = resolve(KIBANA_ROOT, 'test/functional/config');
export const API_CONFIG_PATH = resolve(KIBANA_ROOT, 'test/api_integration/config');
export const OPTIMIZE_BUNDLE_DIR = resolve(KIBANA_ROOT, 'optimize/bundles');

@@ -29,9 +29,7 @@ export async function runElasticsearch({ config, options }) {

  const cluster = createEsTestCluster({
    port: config.get('servers.elasticsearch.port'),
    password: !isOss
      ? DEFAULT_SUPERUSER_PASS
      : config.get('servers.elasticsearch.password'),
    password: !isOss ? DEFAULT_SUPERUSER_PASS : config.get('servers.elasticsearch.password'),
    license: config.get('esTestCluster.license'),
    log,
    basePath: resolve(KIBANA_ROOT, '.es'),

@@ -59,9 +59,7 @@ function collectCliArgs(config, { installDir, extraKbnOpts }) {
    serverArgs,
    args => (installDir ? args.filter(a => a !== '--oss') : args),
    args => {
      return installDir
        ? [...args, ...buildArgs]
        : [KIBANA_EXEC_PATH, ...args, ...sourceArgs];
      return installDir ? [...args, ...buildArgs] : [KIBANA_EXEC_PATH, ...args, ...sourceArgs];
    },
    args => args.concat(extraKbnOpts || [])
  );

@@ -22,12 +22,7 @@ import * as Rx from 'rxjs';
import { startWith, switchMap, take } from 'rxjs/operators';
import { withProcRunner } from '@kbn/dev-utils';

import {
  runElasticsearch,
  runKibanaServer,
  runFtr,
  KIBANA_FTR_SCRIPT,
} from './lib';
import { runElasticsearch, runKibanaServer, runFtr, KIBANA_FTR_SCRIPT } from './lib';

import { readConfigFile } from '../../../../src/functional_test_runner/lib';

@@ -25,9 +25,4 @@ export { OPTIMIZE_BUNDLE_DIR, KIBANA_ROOT } from './functional_tests/lib/paths';

export { esTestConfig, createEsTestCluster } from './es';

export {
  kbnTestConfig,
  kibanaServerTestUser,
  kibanaTestUser,
  adminTestUser,
} from './kbn';
export { kbnTestConfig, kibanaServerTestUser, kibanaTestUser, adminTestUser } from './kbn';

@@ -39,10 +39,8 @@ export const kbnTestConfig = new class KbnTestConfig {
      };
    }

    const username =
      process.env.TEST_KIBANA_USERNAME || kibanaTestUser.username;
    const password =
      process.env.TEST_KIBANA_PASSWORD || kibanaTestUser.password;
    const username = process.env.TEST_KIBANA_USERNAME || kibanaTestUser.username;
    const password = process.env.TEST_KIBANA_PASSWORD || kibanaTestUser.password;
    return {
      protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
      hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',

@@ -41,17 +41,13 @@ describe('staged filters', () => {
  });

  test('can set a staged filter', () => {
    store.dispatch(
      setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo1' })
    );
    store.dispatch(setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo1' }));
    const stagedFilters = getStagedFilters(store.getState());
    expect(stagedFilters.length).toBe(1);
  });

  test('getStagedFilters returns filters for all embeddables', () => {
    store.dispatch(
      setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo2' })
    );
    store.dispatch(setStagedFilter({ stagedFilter: ['imafilter'], panelId: 'foo2' }));
    const stagedFilters = getStagedFilters(store.getState());
    expect(stagedFilters.length).toBe(2);
  });

@@ -20,11 +20,7 @@
import _ from 'lodash';
import { Dispatch } from 'redux';
import { createAction } from 'redux-actions';
import {
  CoreKibanaState,
  getEmbeddableCustomization,
  getPanel,
} from '../../selectors';
import { CoreKibanaState, getEmbeddableCustomization, getPanel } from '../../selectors';
import { PanelId, PanelState } from '../selectors';
import { updatePanel } from './panels';
@@ -40,10 +36,7 @@ export enum EmbeddableActionTypeKeys {
}

export interface EmbeddableIsInitializingAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING,
    PanelId
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING, PanelId> {}

export interface EmbeddableIsInitializedActionPayload {
  panelId: PanelId;
@@ -62,16 +55,10 @@ export interface SetStagedFilterActionPayload {
}

export interface SetStagedFilterAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.SET_STAGED_FILTER,
    SetStagedFilterActionPayload
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.SET_STAGED_FILTER, SetStagedFilterActionPayload> {}

export interface ClearStagedFiltersAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS,
    undefined
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS, undefined> {}

export interface EmbeddableErrorActionPayload {
  error: string | object;
@@ -79,10 +66,7 @@ export interface EmbeddableErrorActionPayload {
}

export interface EmbeddableErrorAction
  extends KibanaAction<
    EmbeddableActionTypeKeys.EMBEDDABLE_ERROR,
    EmbeddableErrorActionPayload
  > {}
  extends KibanaAction<EmbeddableActionTypeKeys.EMBEDDABLE_ERROR, EmbeddableErrorActionPayload> {}

export type EmbeddableActions =
  | EmbeddableIsInitializingAction
@@ -94,15 +78,13 @@ export type EmbeddableActions =
export const embeddableIsInitializing = createAction<PanelId>(
  EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING
);
export const embeddableIsInitialized = createAction<
  EmbeddableIsInitializedActionPayload
>(EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED);
export const embeddableIsInitialized = createAction<EmbeddableIsInitializedActionPayload>(
  EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED
);
export const setStagedFilter = createAction<SetStagedFilterActionPayload>(
  EmbeddableActionTypeKeys.SET_STAGED_FILTER
);
export const clearStagedFilters = createAction(
  EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS
);
export const clearStagedFilters = createAction(EmbeddableActionTypeKeys.CLEAR_STAGED_FILTERS);
export const embeddableError = createAction<EmbeddableErrorActionPayload>(
  EmbeddableActionTypeKeys.EMBEDDABLE_ERROR
);
@@ -120,10 +102,7 @@ export function embeddableStateChanged(changeData: {
  embeddableState: EmbeddableState;
}) {
  const { panelId, embeddableState } = changeData;
  return (
    dispatch: Dispatch<CoreKibanaState>,
    getState: () => CoreKibanaState
  ) => {
  return (dispatch: Dispatch<CoreKibanaState>, getState: () => CoreKibanaState) => {
    // Translate embeddableState to things redux cares about.
    const customization = getEmbeddableCustomization(getState(), panelId);
    if (!_.isEqual(embeddableState.customization, customization)) {
@@ -136,9 +115,7 @@ export function embeddableStateChanged(changeData: {
    }

    if (embeddableState.stagedFilter) {
      dispatch(
        setStagedFilter({ stagedFilter: embeddableState.stagedFilter, panelId })
      );
      dispatch(setStagedFilter({ stagedFilter: embeddableState.stagedFilter, panelId }));
    }
  };
}

@@ -28,24 +28,16 @@ export enum MetadataActionTypeKeys {
export type UpdateTitleActionPayload = string;

export interface UpdateTitleAction
  extends KibanaAction<
    MetadataActionTypeKeys.UPDATE_TITLE,
    UpdateTitleActionPayload
  > {}
  extends KibanaAction<MetadataActionTypeKeys.UPDATE_TITLE, UpdateTitleActionPayload> {}

export type UpdateDescriptionActionPayload = string;

export interface UpdateDescriptionAction
  extends KibanaAction<
    MetadataActionTypeKeys.UPDATE_DESCRIPTION,
    UpdateDescriptionActionPayload
  > {}
  extends KibanaAction<MetadataActionTypeKeys.UPDATE_DESCRIPTION, UpdateDescriptionActionPayload> {}

export type MetadataActions = UpdateDescriptionAction | UpdateTitleAction;

export const updateDescription = createAction<UpdateDescriptionAction>(
  MetadataActionTypeKeys.UPDATE_DESCRIPTION
);
export const updateTitle = createAction<UpdateTitleAction>(
  MetadataActionTypeKeys.UPDATE_TITLE
);
export const updateTitle = createAction<UpdateTitleAction>(MetadataActionTypeKeys.UPDATE_TITLE);

@@ -48,13 +48,9 @@ export interface SetPanelTitleActionPayload {
}

export interface SetPanelTitleAction
  extends KibanaAction<
    PanelActionTypeKeys.SET_PANEl_TITLE,
    SetPanelTitleActionPayload
  > {}
  extends KibanaAction<PanelActionTypeKeys.SET_PANEl_TITLE, SetPanelTitleActionPayload> {}

export interface SetPanelsAction
  extends KibanaAction<PanelActionTypeKeys.SET_PANELS, PanelsMap> {}
export interface SetPanelsAction extends KibanaAction<PanelActionTypeKeys.SET_PANELS, PanelsMap> {}

export type PanelActions =
  | DeletePanelAction
@@ -64,21 +60,11 @@ export type PanelActions =
  | SetPanelTitleAction
  | SetPanelsAction;

export const deletePanel = createAction<PanelId>(
  PanelActionTypeKeys.DELETE_PANEL
);
export const updatePanel = createAction<PanelState>(
  PanelActionTypeKeys.UPDATE_PANEL
);
export const resetPanelTitle = createAction<PanelId>(
  PanelActionTypeKeys.RESET_PANEl_TITLE
);
export const deletePanel = createAction<PanelId>(PanelActionTypeKeys.DELETE_PANEL);
export const updatePanel = createAction<PanelState>(PanelActionTypeKeys.UPDATE_PANEL);
export const resetPanelTitle = createAction<PanelId>(PanelActionTypeKeys.RESET_PANEl_TITLE);
export const setPanelTitle = createAction<SetPanelTitleActionPayload>(
  PanelActionTypeKeys.SET_PANEl_TITLE
);
export const updatePanels = createAction<PanelsMap>(
  PanelActionTypeKeys.UPDATE_PANELS
);
export const setPanels = createAction<PanelsMap>(
  PanelActionTypeKeys.SET_PANELS
);
export const updatePanels = createAction<PanelsMap>(PanelActionTypeKeys.UPDATE_PANELS);
export const setPanels = createAction<PanelsMap>(PanelActionTypeKeys.SET_PANELS);

@@ -37,16 +37,10 @@ export enum ViewActionTypeKeys {
}

export interface UpdateViewModeAction
  extends KibanaAction<
    ViewActionTypeKeys.UPDATE_VIEW_MODE,
    DashboardViewMode
  > {}
  extends KibanaAction<ViewActionTypeKeys.UPDATE_VIEW_MODE, DashboardViewMode> {}

export interface SetVisibleContextMenuPanelIdAction
  extends KibanaAction<
    ViewActionTypeKeys.SET_VISIBLE_CONTEXT_MENU_PANEL_ID,
    PanelId
  > {}
  extends KibanaAction<ViewActionTypeKeys.SET_VISIBLE_CONTEXT_MENU_PANEL_ID, PanelId> {}

export interface MaximizePanelAction
  extends KibanaAction<ViewActionTypeKeys.MAXIMIZE_PANEl, PanelId> {}
@@ -55,10 +49,7 @@ export interface MinimizePanelAction
  extends KibanaAction<ViewActionTypeKeys.MINIMIZE_PANEL, undefined> {}

export interface UpdateIsFullScreenModeAction
  extends KibanaAction<
    ViewActionTypeKeys.UPDATE_IS_FULL_SCREEN_MODE,
    boolean
  > {}
  extends KibanaAction<ViewActionTypeKeys.UPDATE_IS_FULL_SCREEN_MODE, boolean> {}

export interface UpdateUseMarginsAction
  extends KibanaAction<ViewActionTypeKeys.UPDATE_USE_MARGINS, boolean> {}
@@ -72,8 +63,7 @@ export interface UpdateTimeRangeAction
export interface UpdateFiltersAction
  extends KibanaAction<ViewActionTypeKeys.UPDATE_FILTERS, Filters> {}

export interface UpdateQueryAction
  extends KibanaAction<ViewActionTypeKeys.UPDATE_QUERY, Query> {}
export interface UpdateQueryAction extends KibanaAction<ViewActionTypeKeys.UPDATE_QUERY, Query> {}

export type ViewActions =
  | UpdateViewModeAction
@@ -87,29 +77,19 @@ export type ViewActions =
  | UpdateFiltersAction
  | UpdateQueryAction;

export const updateViewMode = createAction<string>(
  ViewActionTypeKeys.UPDATE_VIEW_MODE
);
export const updateViewMode = createAction<string>(ViewActionTypeKeys.UPDATE_VIEW_MODE);
export const setVisibleContextMenuPanelId = createAction<PanelId>(
  ViewActionTypeKeys.SET_VISIBLE_CONTEXT_MENU_PANEL_ID
);
export const maximizePanel = createAction<PanelId>(
  ViewActionTypeKeys.MAXIMIZE_PANEl
);
export const maximizePanel = createAction<PanelId>(ViewActionTypeKeys.MAXIMIZE_PANEl);
export const minimizePanel = createAction(ViewActionTypeKeys.MINIMIZE_PANEL);
export const updateIsFullScreenMode = createAction<boolean>(
  ViewActionTypeKeys.UPDATE_IS_FULL_SCREEN_MODE
);
export const updateUseMargins = createAction<boolean>(
  ViewActionTypeKeys.UPDATE_USE_MARGINS
);
export const updateUseMargins = createAction<boolean>(ViewActionTypeKeys.UPDATE_USE_MARGINS);
export const updateHidePanelTitles = createAction<boolean>(
  ViewActionTypeKeys.UPDATE_HIDE_PANEL_TITLES
);
export const updateTimeRange = createAction<TimeRange>(
  ViewActionTypeKeys.UPDATE_TIME_RANGE
);
export const updateFilters = createAction<Filters>(
  ViewActionTypeKeys.UPDATE_FILTERS
);
export const updateTimeRange = createAction<TimeRange>(ViewActionTypeKeys.UPDATE_TIME_RANGE);
export const updateFilters = createAction<Filters>(ViewActionTypeKeys.UPDATE_FILTERS);
export const updateQuery = createAction<Query>(ViewActionTypeKeys.UPDATE_QUERY);

@@ -27,11 +27,7 @@ import {
  PanelActionTypeKeys,
  SetStagedFilterActionPayload,
} from '../actions';
import {
  EmbeddableReduxState,
  EmbeddablesMap,
  PanelId,
} from '../selectors/types';
import { EmbeddableReduxState, EmbeddablesMap, PanelId } from '../selectors/types';

const embeddableIsInitializing = (
  embeddables: EmbeddablesMap,
@@ -81,16 +77,12 @@ const embeddableError = (
});

const clearStagedFilters = (embeddables: EmbeddablesMap): EmbeddablesMap => {
  const omitStagedFilters = (
    embeddable: EmbeddableReduxState
  ): EmbeddablesMap => _.omit({ ...embeddable }, ['stagedFilter']);
  const omitStagedFilters = (embeddable: EmbeddableReduxState): EmbeddablesMap =>
    _.omit({ ...embeddable }, ['stagedFilter']);
  return _.mapValues<EmbeddablesMap>(embeddables, omitStagedFilters);
};

const deleteEmbeddable = (
  embeddables: EmbeddablesMap,
  panelId: PanelId
): EmbeddablesMap => {
const deleteEmbeddable = (embeddables: EmbeddablesMap, panelId: PanelId): EmbeddablesMap => {
  const embeddablesCopy = { ...embeddables };
  delete embeddablesCopy[panelId];
  return embeddablesCopy;
@@ -100,9 +92,7 @@ export const embeddablesReducer: Reducer<EmbeddablesMap> = (
  embeddables = {},
  action
): EmbeddablesMap => {
  switch (
    action.type as EmbeddableActionTypeKeys | PanelActionTypeKeys.DELETE_PANEL
  ) {
  switch (action.type as EmbeddableActionTypeKeys | PanelActionTypeKeys.DELETE_PANEL) {
    case EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZING:
      return embeddableIsInitializing(embeddables, action.payload);
    case EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED:

@@ -26,10 +26,7 @@ import {
} from '../actions';
import { DashboardMetadata } from '../selectors';

const updateTitle = (
  metadata: DashboardMetadata,
  title: UpdateTitleActionPayload
) => ({
const updateTitle = (metadata: DashboardMetadata, title: UpdateTitleActionPayload) => ({
  ...metadata,
  title,
});

@@ -19,11 +19,7 @@

import _ from 'lodash';
import { Reducer } from 'redux';
import {
  PanelActions,
  PanelActionTypeKeys,
  SetPanelTitleActionPayload,
} from '../actions';
import { PanelActions, PanelActionTypeKeys, SetPanelTitleActionPayload } from '../actions';
import { PanelId, PanelsMap, PanelState } from '../selectors';

/**
@@ -47,10 +43,7 @@ const updatePanel = (panels: PanelsMap, panelState: PanelState): PanelsMap => ({
  [panelState.panelIndex]: mergePanelData(panelState, panels),
});

const updatePanels = (
  panels: PanelsMap,
  updatedPanels: PanelsMap
): PanelsMap => {
const updatePanels = (panels: PanelsMap, updatedPanels: PanelsMap): PanelsMap => {
  const panelsCopy = { ...panels };
  Object.values(updatedPanels).forEach(panel => {
    panelsCopy[panel.panelIndex] = mergePanelData(panel, panels);
@@ -66,10 +59,7 @@ const resetPanelTitle = (panels: PanelsMap, panelId: PanelId) => ({
  },
});

const setPanelTitle = (
  panels: PanelsMap,
  payload: SetPanelTitleActionPayload
) => ({
const setPanelTitle = (panels: PanelsMap, payload: SetPanelTitleActionPayload) => ({
  ...panels,
  [payload.panelId]: {
    ...panels[payload.panelId],
@@ -77,13 +67,9 @@ const setPanelTitle = (
  },
});

const setPanels = (panels: PanelsMap, newPanels: PanelsMap) =>
  _.cloneDeep(newPanels);
const setPanels = (panels: PanelsMap, newPanels: PanelsMap) => _.cloneDeep(newPanels);

export const panelsReducer: Reducer<PanelsMap> = (
  panels = {},
  action
): PanelsMap => {
export const panelsReducer: Reducer<PanelsMap> = (panels = {}, action): PanelsMap => {
  switch ((action as PanelActions).type) {
    case PanelActionTypeKeys.DELETE_PANEL:
      return deletePanel(panels, action.payload);

@@ -18,18 +18,9 @@
 */

import { store } from '../../store';
import {
  maximizePanel,
  minimizePanel,
  updateIsFullScreenMode,
  updateViewMode,
} from '../actions';
import { maximizePanel, minimizePanel, updateIsFullScreenMode, updateViewMode } from '../actions';

import {
  getFullScreenMode,
  getMaximizedPanelId,
  getViewMode,
} from '../../selectors';
import { getFullScreenMode, getMaximizedPanelId, getViewMode } from '../../selectors';

import { DashboardViewMode } from '../dashboard_view_mode';

@@ -46,10 +46,7 @@ const maximizePanel = (view: ViewState, panelId: PanelId) => ({
  maximizedPanelId: panelId,
});

const updateIsFullScreenMode = (
  view: ViewState,
  isFullScreenMode: boolean
) => ({
const updateIsFullScreenMode = (view: ViewState, isFullScreenMode: boolean) => ({
  ...view,
  isFullScreenMode,
});

@@ -18,13 +18,7 @@
 */

import _ from 'lodash';
import {
  ContainerState,
  EmbeddableMetadata,
  Filters,
  Query,
  TimeRange,
} from 'ui/embeddable';
import { ContainerState, EmbeddableMetadata, Filters, Query, TimeRange } from 'ui/embeddable';
import { DashboardViewMode } from '../dashboard_view_mode';
import {
  DashboardMetadata,
@@ -36,33 +30,23 @@ import {
  PanelState,
} from './types';

export const getPanels = (dashboard: DashboardState): PanelsMap =>
  dashboard.panels;
export const getPanels = (dashboard: DashboardState): PanelsMap => dashboard.panels;

export const getPanel = (
  dashboard: DashboardState,
  panelId: PanelId
): PanelState => getPanels(dashboard)[panelId];
export const getPanel = (dashboard: DashboardState, panelId: PanelId): PanelState =>
  getPanels(dashboard)[panelId];

export const getPanelType = (
  dashboard: DashboardState,
  panelId: PanelId
): string => getPanel(dashboard, panelId).type;
export const getPanelType = (dashboard: DashboardState, panelId: PanelId): string =>
  getPanel(dashboard, panelId).type;

export const getEmbeddables = (dashboard: DashboardState): EmbeddablesMap =>
  dashboard.embeddables;
export const getEmbeddables = (dashboard: DashboardState): EmbeddablesMap => dashboard.embeddables;

// TODO: rename panel.embeddableConfig to embeddableCustomization. Because it's on the panel that's stored on a
// dashboard, renaming this will require a migration step.
export const getEmbeddableCustomization = (
  dashboard: DashboardState,
  panelId: PanelId
): object => getPanel(dashboard, panelId).embeddableConfig;
export const getEmbeddableCustomization = (dashboard: DashboardState, panelId: PanelId): object =>
  getPanel(dashboard, panelId).embeddableConfig;

export const getEmbeddable = (
  dashboard: DashboardState,
  panelId: PanelId
): EmbeddableReduxState => dashboard.embeddables[panelId];
export const getEmbeddable = (dashboard: DashboardState, panelId: PanelId): EmbeddableReduxState =>
  dashboard.embeddables[panelId];

export const getEmbeddableError = (
  dashboard: DashboardState,
@@ -79,10 +63,8 @@ export const getEmbeddableTitle = (
    : '';
};

export const getEmbeddableInitialized = (
  dashboard: DashboardState,
  panelId: PanelId
): boolean => getEmbeddable(dashboard, panelId).initialized;
export const getEmbeddableInitialized = (dashboard: DashboardState, panelId: PanelId): boolean =>
  getEmbeddable(dashboard, panelId).initialized;

export const getEmbeddableStagedFilter = (
  dashboard: DashboardState,
@@ -104,12 +86,10 @@ export const getEmbeddableEditUrl = (
    : '';
};

export const getVisibleContextMenuPanelId = (
  dashboard: DashboardState
): PanelId | undefined => dashboard.view.visibleContextMenuPanelId;
export const getVisibleContextMenuPanelId = (dashboard: DashboardState): PanelId | undefined =>
  dashboard.view.visibleContextMenuPanelId;

export const getUseMargins = (dashboard: DashboardState): boolean =>
  dashboard.view.useMargins;
export const getUseMargins = (dashboard: DashboardState): boolean => dashboard.view.useMargins;

export const getViewMode = (dashboard: DashboardState): DashboardViewMode =>
  dashboard.view.viewMode;
@@ -120,38 +100,27 @@ export const getFullScreenMode = (dashboard: DashboardState): boolean =>
export const getHidePanelTitles = (dashboard: DashboardState): boolean =>
  dashboard.view.hidePanelTitles;

export const getMaximizedPanelId = (
  dashboard: DashboardState
): PanelId | undefined => dashboard.view.maximizedPanelId;
export const getMaximizedPanelId = (dashboard: DashboardState): PanelId | undefined =>
  dashboard.view.maximizedPanelId;

export const getTimeRange = (dashboard: DashboardState): TimeRange =>
  dashboard.view.timeRange;
export const getTimeRange = (dashboard: DashboardState): TimeRange => dashboard.view.timeRange;

export const getFilters = (dashboard: DashboardState): Filters =>
  dashboard.view.filters;
export const getFilters = (dashboard: DashboardState): Filters => dashboard.view.filters;

export const getQuery = (dashboard: DashboardState): Query =>
  dashboard.view.query;
export const getQuery = (dashboard: DashboardState): Query => dashboard.view.query;

export const getMetadata = (dashboard: DashboardState): DashboardMetadata =>
  dashboard.metadata;
export const getMetadata = (dashboard: DashboardState): DashboardMetadata => dashboard.metadata;

export const getTitle = (dashboard: DashboardState): string =>
  dashboard.metadata.title;
export const getTitle = (dashboard: DashboardState): string => dashboard.metadata.title;

export const getDescription = (dashboard: DashboardState): string | undefined =>
  dashboard.metadata.description;

export const getContainerState = (
  dashboard: DashboardState,
  panelId: PanelId
): ContainerState => {
export const getContainerState = (dashboard: DashboardState, panelId: PanelId): ContainerState => {
  const time = getTimeRange(dashboard);
  return {
    customTitle: getPanel(dashboard, panelId).title,
    embeddableCustomization: _.cloneDeep(
      getEmbeddableCustomization(dashboard, panelId) || {}
    ),
    embeddableCustomization: _.cloneDeep(getEmbeddableCustomization(dashboard, panelId) || {}),
    filters: getFilters(dashboard),
    hidePanelTitles: getHidePanelTitles(dashboard),
    isPanelExpanded: getMaximizedPanelId(dashboard) === panelId,

@@ -23,9 +23,8 @@ import * as DashboardSelectors from '../dashboard/selectors';
import { PanelId } from '../dashboard/selectors/types';
import { CoreKibanaState } from './types';

export const getDashboard = (
  state: CoreKibanaState
): DashboardSelectors.DashboardState => state.dashboard;
export const getDashboard = (state: CoreKibanaState): DashboardSelectors.DashboardState =>
  state.dashboard;

export const getPanels = (state: CoreKibanaState) =>
  DashboardSelectors.getPanels(getDashboard(state));
@@ -38,23 +37,14 @@ export const getEmbeddables = (state: CoreKibanaState) =>
  DashboardSelectors.getEmbeddables(getDashboard(state));
export const getEmbeddableError = (state: CoreKibanaState, panelId: PanelId) =>
  DashboardSelectors.getEmbeddableError(getDashboard(state), panelId);
export const getEmbeddableInitialized = (
  state: CoreKibanaState,
  panelId: PanelId
) => DashboardSelectors.getEmbeddableInitialized(getDashboard(state), panelId);
export const getEmbeddableCustomization = (
  state: CoreKibanaState,
  panelId: PanelId
) =>
export const getEmbeddableInitialized = (state: CoreKibanaState, panelId: PanelId) =>
  DashboardSelectors.getEmbeddableInitialized(getDashboard(state), panelId);
export const getEmbeddableCustomization = (state: CoreKibanaState, panelId: PanelId) =>
  DashboardSelectors.getEmbeddableCustomization(getDashboard(state), panelId);
export const getEmbeddableStagedFilter = (
  state: CoreKibanaState,
  panelId: PanelId
) => DashboardSelectors.getEmbeddableStagedFilter(getDashboard(state), panelId);
export const getEmbeddableMetadata = (
  state: CoreKibanaState,
  panelId: PanelId
) => DashboardSelectors.getEmbeddableMetadata(getDashboard(state), panelId);
export const getEmbeddableStagedFilter = (state: CoreKibanaState, panelId: PanelId) =>
  DashboardSelectors.getEmbeddableStagedFilter(getDashboard(state), panelId);
export const getEmbeddableMetadata = (state: CoreKibanaState, panelId: PanelId) =>
  DashboardSelectors.getEmbeddableMetadata(getDashboard(state), panelId);

export const getStagedFilters = (state: CoreKibanaState): Filters =>
  DashboardSelectors.getStagedFilters(getDashboard(state));
@@ -62,9 +52,7 @@ export const getViewMode = (state: CoreKibanaState): DashboardViewMode =>
  DashboardSelectors.getViewMode(getDashboard(state));
export const getFullScreenMode = (state: CoreKibanaState): boolean =>
  DashboardSelectors.getFullScreenMode(getDashboard(state));
export const getMaximizedPanelId = (
  state: CoreKibanaState
): PanelId | undefined =>
export const getMaximizedPanelId = (state: CoreKibanaState): PanelId | undefined =>
  DashboardSelectors.getMaximizedPanelId(getDashboard(state));
export const getUseMargins = (state: CoreKibanaState): boolean =>
  DashboardSelectors.getUseMargins(getDashboard(state));

@@ -72,11 +72,7 @@ export async function lintFiles(log: ToolingLog, files: File[]) {
    if (exitCode > 0) {
      throw createFailError(`[tslint] failure`, 1);
    } else {
      log.success(
        '[tslint/%s] %d files linted successfully',
        project.name,
        filesInProject.length
      );
      log.success('[tslint/%s] %d files linted successfully', project.name, filesInProject.length);
    }
  }
}

@@ -50,15 +50,11 @@ export function runTslintCli() {
      return resolve(opts.project) === project.tsConfigPath;
    }).map(project => ({
      task: () =>
        execa(
          'tslint',
          [...process.argv.slice(2), '--project', project.tsConfigPath],
          {
            cwd: project.directory,
            env: chalk.enabled ? { FORCE_COLOR: 'true' } : {},
            stdio: ['ignore', 'pipe', 'pipe'],
          }
        ).catch(error => {
        execa('tslint', [...process.argv.slice(2), '--project', project.tsConfigPath], {
          cwd: project.directory,
          env: chalk.enabled ? { FORCE_COLOR: 'true' } : {},
          stdio: ['ignore', 'pipe', 'pipe'],
        }).catch(error => {
          throw new LintFailure(project, error);
        }),
      title: project.name,

@@ -42,9 +42,7 @@ export function getTsProjectForAbsolutePath(path: string) {
  }

  if (projects.length !== 1) {
    const configPaths = projects.map(
      p => `"${relative(REPO_ROOT, p.tsConfigPath)}"`
    );
    const configPaths = projects.map(p => `"${relative(REPO_ROOT, p.tsConfigPath)}"`);

    const pathsMsg = `${configPaths.slice(0, -1).join(', ')} or ${
      configPaths[configPaths.length - 1]

@@ -19,6 +19,4 @@

export { Project } from './project';
export { PROJECTS } from './projects';
export {
  getTsProjectForAbsolutePath,
} from './get_ts_project_for_absolute_path';
export { getTsProjectForAbsolutePath } from './get_ts_project_for_absolute_path';

@@ -35,10 +35,7 @@ function makeMatchers(directory: string, patterns: string[]) {
}

function parseTsConfig(path: string) {
  const { error, config } = parseConfigFileTextToJson(
    path,
    readFileSync(path, 'utf8')
  );
  const { error, config } = parseConfigFileTextToJson(path, readFileSync(path, 'utf8'));

  if (error) {
    throw error;
@@ -76,8 +73,6 @@ export class Project {
  }

  public isAbsolutePathSelected(path: string) {
    return testMatchers(this.exclude, path)
      ? false
      : testMatchers(this.include, path);
    return testMatchers(this.exclude, path) ? false : testMatchers(this.include, path);
  }
}

@@ -50,16 +50,10 @@ class AggTypeFilters {
   * @param aggConfig The aggConfig for which the returning list will be used.
   * @return A filtered list of the passed aggTypes.
   */
  public filter(
    aggTypes: AggType[],
    indexPattern: IndexPattern,
    aggConfig: AggConfig
  ) {
  public filter(aggTypes: AggType[], indexPattern: IndexPattern, aggConfig: AggConfig) {
    const allFilters = Array.from(this.filters);
    const allowedAggTypes = aggTypes.filter(aggType => {
      const isAggTypeAllowed = allFilters.every(filter =>
        filter(aggType, indexPattern, aggConfig)
      );
      const isAggTypeAllowed = allFilters.every(filter => filter(aggType, indexPattern, aggConfig));
      return isAggTypeAllowed;
    });
    return allowedAggTypes;

@@ -19,6 +19,4 @@

export { DashboardContextMenuPanel } from './dashboard_context_menu_panel';
export { DashboardPanelAction } from './dashboard_panel_action';
export {
  DashboardPanelActionsRegistryProvider,
} from './dashboard_panel_actions_registry';
export { DashboardPanelActionsRegistryProvider } from './dashboard_panel_actions_registry';

@@ -85,10 +85,7 @@ export abstract class Embeddable {
  /**
   * Embeddable should render itself at the given domNode.
   */
  public abstract render(
    domNode: HTMLElement,
    containerState: ContainerState
  ): void;
  public abstract render(domNode: HTMLElement, containerState: ContainerState): void;

  /**
   * An embeddable can return inspector adapters if it want the inspector to be

@@ -19,13 +19,5 @@

export { EmbeddableFactory } from './embeddable_factory';
export * from './embeddable';
export {
  EmbeddableFactoriesRegistryProvider,
} from './embeddable_factories_registry';
export {
  ContainerState,
  EmbeddableState,
  Query,
  Filters,
  TimeRange,
} from './types';
export { EmbeddableFactoriesRegistryProvider } from './embeddable_factories_registry';
export { ContainerState, EmbeddableState, Query, Filters, TimeRange } from './types';

@@ -36,10 +36,7 @@ class DataAdapter extends EventEmitter {
  private tabular?: TabularCallback;
  private tabularOptions?: TabularLoaderOptions;

  public setTabularLoader(
    callback: TabularCallback,
    options: TabularLoaderOptions = {}
  ): void {
  public setTabularLoader(callback: TabularCallback, options: TabularLoaderOptions = {}): void {
    this.tabular = callback;
    this.tabularOptions = options;
    this.emit('change', 'tabular');

@@ -109,10 +109,7 @@ interface InspectorOptions {
 * @param {InspectorOptions} options - Options that configure the inspector. See InspectorOptions type.
 * @return {InspectorSession} The session instance for the opened inspector.
 */
function open(
  adapters: Adapters,
  options: InspectorOptions = {}
): InspectorSession {
function open(adapters: Adapters, options: InspectorOptions = {}): InspectorSession {
  // If there is an active inspector session close it before opening a new one.
  if (activeSession) {
    activeSession.close();

@@ -33,10 +33,7 @@ import { EuiFlyoutBody } from '@elastic/eui';
 * inspector view. It makes sure, that the appropriate stylings are applied to the
 * view.
 */
const InspectorView: React.SFC<{ useFlex?: boolean }> = ({
  useFlex,
  children,
}) => {
const InspectorView: React.SFC<{ useFlex?: boolean }> = ({ useFlex, children }) => {
  const classes = classNames({
    'inspector-view__flex': Boolean(useFlex),
  });

@@ -17,10 +17,7 @@
 * under the License.
 */

import {
  InspectorViewDescription,
  InspectorViewRegistry,
} from './view_registry';
import { InspectorViewDescription, InspectorViewRegistry } from './view_registry';

import { Adapters } from './types';

@@ -46,9 +46,7 @@ class InspectorViewRegistry extends EventEmitter {
    }
    this.views.push(view);
    // Keep registry sorted by the order property
    this.views.sort(
      (a, b) => (a.order || Number.MAX_VALUE) - (b.order || Number.MAX_VALUE)
    );
    this.views.sort((a, b) => (a.order || Number.MAX_VALUE) - (b.order || Number.MAX_VALUE));
    this.emit('change');
  }
@@ -71,9 +69,7 @@ class InspectorViewRegistry extends EventEmitter {
    if (!adapters) {
      return [];
    }
    return this.views.filter(
      view => !view.shouldShow || view.shouldShow(adapters)
    );
    return this.views.filter(view => !view.shouldShow || view.shouldShow(adapters));
  }
}

Some files were not shown because too many files have changed in this diff.