Mirror of https://github.com/elastic/kibana.git
Refactor the FTR to live under @kbn/test; move any files the FTR needs under @kbn/test as needed.
commit a1df3e8b07 (parent ddfe6e8dea)
60 changed files with 228 additions and 223 deletions
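For consumers, the practical effect of this refactor is an import-path change: helpers that previously had to be resolved out of src/functional_test_runner are now exported from the @kbn/test package. A minimal before/after sketch (the consuming file itself is hypothetical; the export names readConfigFile and runFtrCli are taken from this diff):

// Before: the FTR had to be resolved out of the Kibana source tree, e.g.
//   require(resolveKibanaPath('src/functional_test_runner')).readConfigFile(log, path);
// After: the same helpers are exported from the @kbn/test package.
const { readConfigFile, runFtrCli } = require('@kbn/test');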
@@ -25,6 +25,8 @@ bower_components
/packages/kbn-ui-framework/dist
/packages/kbn-ui-framework/doc_site/build
/packages/kbn-ui-framework/generator-kui/*/templates/
/packages/kbn-test/src/functional_test_runner/__tests__/fixtures/
/packages/kbn-test/src/functional_test_runner/lib/config/__tests__/fixtures/
/x-pack/legacy/plugins/maps/public/vendor/**
/x-pack/coverage
/x-pack/build
@@ -42,7 +42,7 @@ function resolveKibanaPath(path) {
}

function readFtrConfigFile(log, path, settingOverrides) {
  return require(resolveKibanaPath('src/functional_test_runner')) // eslint-disable-line import/no-dynamic-require
  return require('@kbn/test') // eslint-disable-line import/no-dynamic-require
    .readConfigFile(log, path, settingOverrides);
}

@@ -14,6 +14,7 @@
  },
  "dependencies": {
    "@babel/core": "^7.4.4",
    "@kbn/test": "1.0.0",
    "argv-split": "^2.0.1",
    "commander": "^2.9.0",
    "del": "^4.0.0",
@@ -25,7 +25,7 @@ import expect from '@kbn/expect';
const SCRIPT = resolve(__dirname, '../../../../scripts/functional_test_runner.js');
const BASIC_CONFIG = resolve(__dirname, '../fixtures/simple_project/config.js');

describe('basic config file with a single app and test', function () {
describe('basic config file with a single app and test', function() {
  this.timeout(60 * 1000);

  it('runs and prints expected output', () => {
@@ -26,7 +26,7 @@ import expect from '@kbn/expect';
const SCRIPT = resolve(__dirname, '../../../../scripts/functional_test_runner.js');
const FAILURE_HOOKS_CONFIG = resolve(__dirname, '../fixtures/failure_hooks/config.js');

describe('failure hooks', function () {
describe('failure hooks', function() {
  this.timeout(60 * 1000);

  it('runs and prints expected output', () => {
@@ -37,8 +37,10 @@ describe('failure hooks', function () {
      flag: '$FAILING_BEFORE_HOOK$',
      assert(lines) {
        expect(lines.shift()).to.match(/info\s+testHookFailure\s+\$FAILING_BEFORE_ERROR\$/);
        expect(lines.shift()).to.match(/info\s+testHookFailureAfterDelay\s+\$FAILING_BEFORE_ERROR\$/);
      }
        expect(lines.shift()).to.match(
          /info\s+testHookFailureAfterDelay\s+\$FAILING_BEFORE_ERROR\$/
        );
      },
    },
    {
      flag: '$FAILING_TEST$',
@@ -46,14 +48,16 @@ describe('failure hooks', function () {
        expect(lines.shift()).to.match(/global before each/);
        expect(lines.shift()).to.match(/info\s+testFailure\s+\$FAILING_TEST_ERROR\$/);
        expect(lines.shift()).to.match(/info\s+testFailureAfterDelay\s+\$FAILING_TEST_ERROR\$/);
      }
      },
    },
    {
      flag: '$FAILING_AFTER_HOOK$',
      assert(lines) {
        expect(lines.shift()).to.match(/info\s+testHookFailure\s+\$FAILING_AFTER_ERROR\$/);
        expect(lines.shift()).to.match(/info\s+testHookFailureAfterDelay\s+\$FAILING_AFTER_ERROR\$/);
      }
        expect(lines.shift()).to.match(
          /info\s+testHookFailureAfterDelay\s+\$FAILING_AFTER_ERROR\$/
        );
      },
    },
  ];
packages/kbn-test/src/functional_test_runner/cli.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { resolve } from 'path';
import { run } from '../../../../src/dev/run';
import { FunctionalTestRunner } from './functional_test_runner';

export function runFtrCli() {
  run(
    async ({ flags, log }) => {
      const resolveConfigPath = (v: string) => resolve(process.cwd(), v);
      const toArray = (v: string | string[]) => ([] as string[]).concat(v || []);

      const functionalTestRunner = new FunctionalTestRunner(
        log,
        resolveConfigPath(flags.config as string),
        {
          mochaOpts: {
            bail: flags.bail,
            grep: flags.grep || undefined,
            invert: flags.invert,
          },
          suiteTags: {
            include: toArray(flags['include-tag'] as string | string[]),
            exclude: toArray(flags['exclude-tag'] as string | string[]),
          },
          updateBaselines: flags.updateBaselines,
          excludeTestFiles: flags.exclude || undefined,
        }
      );

      let teardownRun = false;
      const teardown = async (err?: Error) => {
        if (teardownRun) return;

        teardownRun = true;
        if (err) {
          log.indent(-log.indent());
          log.error(err);
          process.exitCode = 1;
        }

        try {
          await functionalTestRunner.close();
        } finally {
          process.exit();
        }
      };

      process.on('unhandledRejection', err => teardown(err));
      process.on('SIGTERM', () => teardown());
      process.on('SIGINT', () => teardown());

      try {
        if (flags['test-stats']) {
          process.stderr.write(
            JSON.stringify(await functionalTestRunner.getTestStats(), null, 2) + '\n'
          );
        } else {
          const failureCount = await functionalTestRunner.run();
          process.exitCode = failureCount ? 1 : 0;
        }
      } catch (err) {
        await teardown(err);
      } finally {
        await teardown();
      }
    },
    {
      flags: {
        string: ['config', 'grep', 'exclude', 'include-tag', 'exclude-tag'],
        boolean: ['bail', 'invert', 'test-stats', 'updateBaselines'],
        default: {
          config: 'test/functional/config.js',
          debug: true,
        },
        help: `
          --config=path      path to a config file
          --bail             stop tests after the first failure
          --grep <pattern>   pattern used to select which tests to run
          --invert           invert grep to exclude tests
          --exclude=file     path to a test file that should not be loaded
          --include-tag=tag  a tag to be included, pass multiple times for multiple tags
          --exclude-tag=tag  a tag to be excluded, pass multiple times for multiple tags
          --test-stats       print the number of tests (included and excluded) to STDERR
          --updateBaselines  replace baseline screenshots with whatever is generated from the test
        `,
      },
    }
  );
}
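The runFtrCli() entry point above replaces the top-level CLI module that is deleted at the end of this diff; the repo-level script becomes a thin shim around it (see scripts/functional_test_runner.js further down). Roughly:

require('../src/setup_node_env');
require('@kbn/test').runFtrCli();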
@@ -19,3 +19,4 @@

export { FunctionalTestRunner } from './functional_test_runner';
export { readConfigFile } from './lib';
export { runFtrCli } from './cli';
@@ -29,16 +29,14 @@ describe('readConfigFile()', () => {
  it('reads config from a file, returns an instance of Config class', async () => {
    const config = await readConfigFile(log, require.resolve('./fixtures/config.1'));
    expect(config).to.be.a(Config);
    expect(config.get('testFiles')).to.eql([
      'config.1'
    ]);
    expect(config.get('testFiles')).to.eql(['config.1']);
  });

  it('merges setting overrides into log', async () => {
    const config = await readConfigFile(log, require.resolve('./fixtures/config.1'), {
      screenshots: {
        directory: 'foo.bar'
      }
        directory: 'foo.bar',
      },
    });

    expect(config.get('screenshots.directory')).to.be('foo.bar');

@@ -46,10 +44,7 @@ describe('readConfigFile()', () => {

  it('supports loading config files from within config files', async () => {
    const config = await readConfigFile(log, require.resolve('./fixtures/config.2'));
    expect(config.get('testFiles')).to.eql([
      'config.1',
      'config.2',
    ]);
    expect(config.get('testFiles')).to.eql(['config.1', 'config.2']);
  });

  it('throws if settings are invalid', async () => {
@@ -18,7 +18,7 @@
 */

// @ts-ignore
import { createTransform, Deprecations } from '../../../legacy/deprecation';
import { createTransform, Deprecations } from '../../../../../../src/legacy/deprecation';

type DeprecationTransformer = (
  settings: object,
@@ -31,7 +31,7 @@ export function createAssignmentProxy(object, interceptor) {

    get(target, property) {
      if (property === 'revertProxiedAssignments') {
        return function () {
        return function() {
          for (const [property, value] of originalValues) {
            object[property] = value;
          }

@@ -39,6 +39,6 @@ export function createAssignmentProxy(object, interceptor) {
      }

      return Reflect.get(target, property);
    }
    },
  });
}
@@ -56,12 +56,12 @@ export function decorateMochaUi(lifecycle, context) {
        throw new Error(`Unexpected arguments to ${name}(${argumentsList.join(', ')})`);
      }

      argumentsList[1] = function () {
      argumentsList[1] = function() {
        before(async () => {
          await lifecycle.trigger('beforeTestSuite', this);
        });

        this.tags = (tags) => {
        this.tags = tags => {
          this._tags = [].concat(this._tags || [], tags);
        };

@@ -77,7 +77,7 @@ export function decorateMochaUi(lifecycle, context) {
      },
      after() {
        suiteLevel -= 1;
      }
      },
    });
  }

@@ -91,9 +91,12 @@ export function decorateMochaUi(lifecycle, context) {
   * @return {Function}
   */
  function wrapTestFunction(name, fn) {
    return wrapNonSuiteFunction(name, wrapRunnableArgsWithErrorHandler(fn, async (err, test) => {
      await lifecycle.trigger('testFailure', err, test);
    }));
    return wrapNonSuiteFunction(
      name,
      wrapRunnableArgsWithErrorHandler(fn, async (err, test) => {
        await lifecycle.trigger('testFailure', err, test);
      })
    );
  }

  /**

@@ -106,9 +109,12 @@ export function decorateMochaUi(lifecycle, context) {
   * @return {Function}
   */
  function wrapTestHookFunction(name, fn) {
    return wrapNonSuiteFunction(name, wrapRunnableArgsWithErrorHandler(fn, async (err, test) => {
      await lifecycle.trigger('testHookFailure', err, test);
    }));
    return wrapNonSuiteFunction(
      name,
      wrapRunnableArgsWithErrorHandler(fn, async (err, test) => {
        await lifecycle.trigger('testHookFailure', err, test);
      })
    );
  }

  /**

@@ -127,7 +133,7 @@ export function decorateMochaUi(lifecycle, context) {
        All ${name}() calls in test files must be within a describe() call.
      `);
      }
    }
    },
  });
}
@@ -30,18 +30,15 @@
export function filterSuitesByTags({ log, mocha, include, exclude }) {
  mocha.excludedTests = [];
  // collect all the tests from some suite, including it's children
  const collectTests = (suite) =>
    suite.suites.reduce(
      (acc, s) => acc.concat(collectTests(s)),
      suite.tests
    );
  const collectTests = suite =>
    suite.suites.reduce((acc, s) => acc.concat(collectTests(s)), suite.tests);

  // if include tags were provided, filter the tree once to
  // only include branches that are included at some point
  if (include.length) {
    log.info('Only running suites (and their sub-suites) if they include the tag(s):', include);

    const isIncluded = suite => !suite._tags ? false : suite._tags.some(t => include.includes(t));
    const isIncluded = suite => (!suite._tags ? false : suite._tags.some(t => include.includes(t)));
    const isChildIncluded = suite => suite.suites.some(s => isIncluded(s) || isChildIncluded(s));

    (function recurse(parentSuite) {

@@ -55,7 +52,6 @@ export function filterSuitesByTags({ log, mocha, include, exclude }) {
          continue;
        }


        // this suite has an included child but is not included
        // itself, so strip out its tests and recurse to filter
        // out child suites which are not included

@@ -69,10 +65,9 @@ export function filterSuitesByTags({ log, mocha, include, exclude }) {
          mocha.excludedTests = mocha.excludedTests.concat(collectTests(child));
        }
      }
    }(mocha.suite));
    })(mocha.suite);
  }


  // if exclude tags were provided, filter the possibly already
  // filtered tree to remove branches that are excluded
  if (exclude.length) {

@@ -94,6 +89,6 @@ export function filterSuitesByTags({ log, mocha, include, exclude }) {
        mocha.excludedTests = mocha.excludedTests.concat(collectTests(child));
      }
    }
    }(mocha.suite));
    })(mocha.suite);
  }
}
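The include/exclude logic above works against the _tags that decorateMochaUi() records when a suite calls this.tags(); the tags themselves arrive via the --include-tag / --exclude-tag flags handled by runFtrCli(). A hypothetical test file, for illustration only, showing how a suite would opt in:

// Hypothetical FTR test file (illustration only): the default-export provider shape
// matches what load_test_files.js expects, and this.tags() is the helper added by
// decorateMochaUi(). Running with --include-tag smoke keeps this suite, while
// --exclude-tag smoke drops it and its tests end up in mocha.excludedTests.
export default function ({ getService }) {
  const log = getService('log');

  describe('dashboard smoke tests', function () {
    this.tags(['smoke']);

    it('logs a message', async () => {
      log.debug('running a smoke-tagged test');
    });
  });
}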
@@ -31,10 +31,18 @@ import { decorateMochaUi } from './decorate_mocha_ui';
 * @param {String} path
 * @return {undefined} - mutates mocha, no return value
 */
export const loadTestFiles = ({ mocha, log, lifecycle, providers, paths, excludePaths, updateBaselines }) => {
export const loadTestFiles = ({
  mocha,
  log,
  lifecycle,
  providers,
  paths,
  excludePaths,
  updateBaselines,
}) => {
  const pendingExcludes = new Set(excludePaths.slice(0));

  const innerLoadTestFile = (path) => {
  const innerLoadTestFile = path => {
    if (typeof path !== 'string' || !isAbsolute(path)) {
      throw new TypeError('loadTestFile() only accepts absolute paths');
    }

@@ -49,9 +57,7 @@ export const loadTestFiles = ({ mocha, log, lifecycle, providers, paths, exclude
      log.verbose('Loading test file %s', path);

      const testModule = require(path); // eslint-disable-line import/no-dynamic-require
      const testProvider = testModule.__esModule
        ? testModule.default
        : testModule;
      const testProvider = testModule.__esModule ? testModule.default : testModule;

      runTestProvider(testProvider, path); // eslint-disable-line
    });

@@ -90,6 +96,11 @@ export const loadTestFiles = ({ mocha, log, lifecycle, providers, paths, exclude
  paths.forEach(innerLoadTestFile);

  if (pendingExcludes.size) {
    throw new Error(`After loading all test files some exclude paths were not consumed:${['', ...pendingExcludes].join('\n -')}`);
    throw new Error(
      `After loading all test files some exclude paths were not consumed:${[
        '',
        ...pendingExcludes,
      ].join('\n -')}`
    );
  }
};
@@ -23,12 +23,12 @@ import Mocha from 'mocha';
import { ToolingLogTextWriter } from '@kbn/dev-utils';
import moment from 'moment';

import { setupJUnitReportGeneration } from '../../../../dev';
import { setupJUnitReportGeneration } from '../../../../../../../src/dev';
import * as colors from './colors';
import * as symbols from './symbols';
import { ms } from './ms';
import { writeEpilogue } from './write_epilogue';
import { recordLog, snapshotLogsForRunnable } from '../../../../dev/mocha/log_cache';
import { recordLog, snapshotLogsForRunnable } from '../../../../../../../src/dev/mocha/log_cache';

export function MochaReporterProvider({ getService }) {
  const log = getService('log');

@@ -60,7 +60,9 @@ export function MochaReporterProvider({ getService }) {

    onStart = () => {
      if (config.get('mochaReporter.captureLogOutput')) {
        log.warning('debug logs are being captured, only error logs will be written to the console');
        log.warning(
          'debug logs are being captured, only error logs will be written to the console'
        );

        reporterCaptureStartTime = moment();
        originalLogWriters = log.getWriters();

@@ -68,47 +70,47 @@ export function MochaReporterProvider({ getService }) {
        log.setWriters([
          new ToolingLogTextWriter({
            level: 'error',
            writeTo: process.stdout
            writeTo: process.stdout,
          }),
          new ToolingLogTextWriter({
            level: 'debug',
            writeTo: {
              write: (line) => {
              write: line => {
                // if the current runnable is a beforeEach hook then
                // `runner.suite` is set to the suite that defined the
                // hook, rather than the suite executing, so instead we
                // grab the suite from the test, but that's only available
                // when we are doing something test specific, so for global
                // hooks we fallback to `runner.suite`
                const currentSuite = this.runner.test
                  ? this.runner.test.parent
                  : this.runner.suite;
                const currentSuite = this.runner.test ? this.runner.test.parent : this.runner.suite;

                // We are computing the difference between the time when this
                // reporter has started and the time when each line are being
                // logged in order to be able to label the test results log lines
                // with this relative time information
                const diffTimeSinceStart = moment().diff(reporterCaptureStartTime);
                const readableDiffTimeSinceStart = `[${moment(diffTimeSinceStart).format('HH:mm:ss')}] `;
                const readableDiffTimeSinceStart = `[${moment(diffTimeSinceStart).format(
                  'HH:mm:ss'
                )}] `;

                recordLog(currentSuite, `${readableDiffTimeSinceStart} ${line}`);
              }
            }
          })
              },
            },
          }),
        ]);
      }

      log.write('');
    }
    };

    onHookStart = hook => {
      log.write(`-> ${colors.suite(hook.title)}`);
      log.indent(2);
    }
    };

    onHookEnd = () => {
      log.indent(-2);
    }
    };

    onSuiteStart = suite => {
      if (!suite.root) {

@@ -116,34 +118,34 @@ export function MochaReporterProvider({ getService }) {
      }

      log.indent(2);
    }
    };

    onSuiteEnd = () => {
      if (log.indent(-2) === 0) {
        log.write();
      }
    }
    };

    onTestStart = test => {
      log.write(`-> ${test.title}`);
      log.indent(2);
    }
    };

    onTestEnd = (test) => {
    onTestEnd = test => {
      snapshotLogsForRunnable(test);
      log.indent(-2);
    }
    };

    onPending = test => {
      log.write('-> ' + colors.pending(test.title));
      log.indent(2);
    }
    };

    onPass = test => {
      const time = colors.speed(test.speed, ` (${ms(test.duration)})`);
      const pass = colors.pass(`${symbols.ok} pass`);
      log.write(`- ${pass} ${time} "${test.fullTitle()}"`);
    }
    };

    onFail = runnable => {
      // NOTE: this is super gross

@@ -155,7 +157,7 @@ export function MochaReporterProvider({ getService }) {
      //
      let output = '';
      const realLog = console.log;
      console.log = (...args) => output += `${format(...args)}\n`;
      console.log = (...args) => (output += `${format(...args)}\n`);
      try {
        Mocha.reporters.Base.list([runnable]);
      } finally {

@@ -164,21 +166,21 @@ export function MochaReporterProvider({ getService }) {

      log.write(
        `- ${colors.fail(`${symbols.err} fail: "${runnable.fullTitle()}"`)}` +
        '\n' +
        output
          .split('\n')
          // drop the first two lines, (empty + test title)
          .slice(2)
          // move leading colors behind leading spaces
          .map(line => line.replace(/^((?:\[.+m)+)(\s+)/, '$2$1'))
          .map(line => ` ${line}`)
          .join('\n')
          '\n' +
          output
            .split('\n')
            // drop the first two lines, (empty + test title)
            .slice(2)
            // move leading colors behind leading spaces
            .map(line => line.replace(/^((?:\[.+m)+)(\s+)/, '$2$1'))
            .map(line => ` ${line}`)
            .join('\n')
      );

      // failed hooks trigger the `onFail(runnable)` callback, so we snapshot the logs for
      // them here. Tests will re-capture the snapshot in `onTestEnd()`
      snapshotLogsForRunnable(runnable);
    }
    };

    onEnd = () => {
      if (originalLogWriters) {

@@ -186,6 +188,6 @@ export function MochaReporterProvider({ getService }) {
      }

      writeEpilogue(log, this.stats);
    }
    };
  };
}
@@ -19,10 +19,6 @@

// originally extracted from mocha https://git.io/v1PGh

export const ok = process.platform === 'win32'
  ? '\u221A'
  : '✓';
export const ok = process.platform === 'win32' ? '\u221A' : '✓';

export const err = process.platform === 'win32'
  ? '\u00D7'
  : '✖';
export const err = process.platform === 'win32' ? '\u00D7' : '✖';
@@ -25,26 +25,16 @@ export function writeEpilogue(log, stats) {
  log.write();

  // passes
  log.write(
    `${colors.pass('%d passing')} (%s)`,
    stats.passes || 0,
    ms(stats.duration)
  );
  log.write(`${colors.pass('%d passing')} (%s)`, stats.passes || 0, ms(stats.duration));

  // pending
  if (stats.pending) {
    log.write(
      colors.pending('%d pending'),
      stats.pending
    );
    log.write(colors.pending('%d pending'), stats.pending);
  }

  // failures
  if (stats.failures) {
    log.write(
      '%d failing',
      stats.failures
    );
    log.write('%d failing', stats.failures);
  }

  // footer
@@ -36,14 +36,11 @@ export async function setupMocha(lifecycle, log, config, providers) {
  // configure mocha
  const mocha = new Mocha({
    ...config.get('mochaOpts'),
    reporter: await providers.loadExternalService(
      'mocha reporter',
      MochaReporterProvider
    )
    reporter: await providers.loadExternalService('mocha reporter', MochaReporterProvider),
  });

  // global beforeEach hook in root suite triggers before all others
  mocha.suite.beforeEach('global before each', async function () {
  mocha.suite.beforeEach('global before each', async function() {
    await lifecycle.trigger('beforeEachTest', this.currentTest);
  });
@@ -17,7 +17,6 @@
 * under the License.
 */


/**
 * Get handler that will intercept calls to `toString`
 * on the function, since Function.prototype.toString()

@@ -59,7 +58,7 @@ export function wrapFunction(fn, hooks = {}) {
          hooks.after(target, thisArg, argumentsList);
        }
      }
    }
    },
  });
}

@@ -94,6 +93,6 @@ export function wrapAsyncFunction(fn, hooks = {}) {
          await hooks.after(target, thisArg, argumentsList);
        }
      }
    }
    },
  });
}
@@ -36,7 +36,7 @@ export function wrapRunnableArgsWithErrorHandler(fn, handler) {
          argumentsList[i] = wrapRunnableError(argumentsList[i], handler);
        }
      }
    }
    },
  });
}

@@ -45,6 +45,6 @@ function wrapRunnableError(runnable, handler) {
    async handleError(target, thisArg, argumentsList, err) {
      await handler(err, thisArg.test);
      throw err;
    }
    },
  });
}
@@ -17,7 +17,7 @@
 * under the License.
 */

import { FunctionalTestRunner, readConfigFile } from '../../../../../src/functional_test_runner';
import { FunctionalTestRunner, readConfigFile } from '../../functional_test_runner';
import { CliError } from './run_cli';

async function createFtr({ configPath, options: { log, bail, grep, updateBaselines, suiteTags } }) {
@@ -31,7 +31,7 @@ import {
  KIBANA_FTR_SCRIPT,
} from './lib';

import { readConfigFile } from '../../../../src/functional_test_runner/lib';
import { readConfigFile } from '../functional_test_runner/lib';

const SUCCESS_MESSAGE = `
@@ -28,3 +28,7 @@ export { esTestConfig, createEsTestCluster } from './es';
export { kbnTestConfig, kibanaServerTestUser, kibanaTestUser, adminTestUser } from './kbn';

export { setupUsers, DEFAULT_SUPERUSER_PASS } from './functional_tests/lib/auth';

export { readConfigFile } from './functional_test_runner/lib/config/read_config_file';

export { runFtrCli } from './functional_test_runner/cli';
@@ -1,6 +1,7 @@
{
  "extends": "../../tsconfig.json",
  "include": [
    "types/**/*"
    "types/**/*",
    "src/functional_test_runner/**/*"
  ]
}
packages/kbn-test/types/ftr.d.ts (vendored, 2 lines changed)
@@ -18,7 +18,7 @@
 */

import { ToolingLog } from '@kbn/dev-utils';
import { Config, Lifecycle } from '../../../src/functional_test_runner/lib';
import { Config, Lifecycle } from '../src/functional_test_runner/lib';

interface AsyncInstance<T> {
  /**
|
|||
*/
|
||||
|
||||
require('../src/setup_node_env');
|
||||
require('../src/functional_test_runner/cli');
|
||||
require('@kbn/test').runFtrCli();
|
||||
|
|
|
@ -18,6 +18,6 @@
|
|||
*/
|
||||
|
||||
require('../src/setup_node_env');
|
||||
require('../packages/kbn-test').startServersCli(
|
||||
require('@kbn/test').startServersCli(
|
||||
require.resolve('../test/functional/config.js'),
|
||||
);
|
||||
|
|
|
@@ -30,7 +30,7 @@ export default {
    '<rootDir>/src/cli',
    '<rootDir>/src/cli_keystore',
    '<rootDir>/src/cli_plugin',
    '<rootDir>/src/functional_test_runner',
    '<rootDir>/packages/kbn-test/target/functional_test_runner',
    '<rootDir>/src/dev',
    '<rootDir>/src/legacy/utils',
    '<rootDir>/src/setup_node_env',
@@ -33,7 +33,7 @@ import elasticsearch from 'elasticsearch';

import { EsArchiver } from './es_archiver';
import { ToolingLog } from '@kbn/dev-utils';
import { readConfigFile } from '../functional_test_runner';
import { readConfigFile } from '@kbn/test';

const cmd = new Command('node scripts/es_archiver');
@@ -1,106 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { resolve } from 'path';

import { run } from '../dev/run';
import { FunctionalTestRunner } from './functional_test_runner';

run(
  async ({ flags, log }) => {
    const resolveConfigPath = (v: string) => resolve(process.cwd(), v);
    const toArray = (v: string | string[]) => ([] as string[]).concat(v || []);

    const functionalTestRunner = new FunctionalTestRunner(
      log,
      resolveConfigPath(flags.config as string),
      {
        mochaOpts: {
          bail: flags.bail,
          grep: flags.grep || undefined,
          invert: flags.invert,
        },
        suiteTags: {
          include: toArray(flags['include-tag'] as string | string[]),
          exclude: toArray(flags['exclude-tag'] as string | string[]),
        },
        updateBaselines: flags.updateBaselines,
        excludeTestFiles: flags.exclude || undefined,
      }
    );

    let teardownRun = false;
    const teardown = async (err?: Error) => {
      if (teardownRun) return;

      teardownRun = true;
      if (err) {
        log.indent(-log.indent());
        log.error(err);
        process.exitCode = 1;
      }

      try {
        await functionalTestRunner.close();
      } finally {
        process.exit();
      }
    };

    process.on('unhandledRejection', err => teardown(err));
    process.on('SIGTERM', () => teardown());
    process.on('SIGINT', () => teardown());

    try {
      if (flags['test-stats']) {
        process.stderr.write(
          JSON.stringify(await functionalTestRunner.getTestStats(), null, 2) + '\n'
        );
      } else {
        const failureCount = await functionalTestRunner.run();
        process.exitCode = failureCount ? 1 : 0;
      }
    } catch (err) {
      await teardown(err);
    } finally {
      await teardown();
    }
  },
  {
    flags: {
      string: ['config', 'grep', 'exclude', 'include-tag', 'exclude-tag'],
      boolean: ['bail', 'invert', 'test-stats', 'updateBaselines'],
      default: {
        config: 'test/functional/config.js',
        debug: true,
      },
      help: `
        --config=path      path to a config file
        --bail             stop tests after the first failure
        --grep <pattern>   pattern used to select which tests to run
        --invert           invert grep to exclude tests
        --exclude=file     path to a test file that should not be loaded
        --include-tag=tag  a tag to be included, pass multiple times for multiple tags
        --exclude-tag=tag  a tag to be excluded, pass multiple times for multiple tags
        --test-stats       print the number of tests (included and excluded) to STDERR
        --updateBaselines  replace baseline screenshots with whatever is generated from the test
      `,
    },
  }
);