Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
Functional test setup with kbn-test package (#18568)
Restructure testing with the kbn-test package:

- Run with multiple configs; move CLI options into the config
- Package-ify kbn-test
  - Eventually functional_test_runner will live in a package of its own, and then kbn-test will use that as a dependency, probably still as a devDependency.
- Implement functional_tests_server
- Collapse the single- and multiple-config APIs into one command
- Use kbn-es
- Replace es_test_cluster + es_test_config with @kbn/test utils
- Implement the new createEsTestCluster
- Improve scripts, JSDocs, and CLI top-level tools
- Lift error handling to the top level
This commit is contained in: parent c8cd547dca, commit 079a01fe6f

66 changed files with 3577 additions and 1763 deletions
@@ -23,6 +23,7 @@ module.exports = {
      'packages/kbn-es/**/*',
      'packages/kbn-datemath/**/*.js',
      'packages/kbn-plugin-generator/**/*',
      'packages/kbn-test/**/*',
      'packages/kbn-eslint-import-resolver-kibana/**/*',
      'x-pack/plugins/apm/**/*',
    ],
@@ -337,6 +337,7 @@ yarn test:browser --dev # remove the --dev flag to run them once and close

[Read about the `FunctionalTestRunner`](https://www.elastic.co/guide/en/kibana/current/development-functional-tests.html) to learn more about how you can run and develop functional tests for Kibana core and plugins.

You can also look into the [Scripts README.md](./scripts/README.md) to learn more about using the node scripts we provide for building Kibana, running integration tests, and starting up Kibana and Elasticsearch while you develop.

### Building OS packages
@@ -221,6 +221,7 @@
    "@kbn/eslint-import-resolver-kibana": "link:packages/kbn-eslint-import-resolver-kibana",
    "@kbn/eslint-plugin-license-header": "link:packages/kbn-eslint-plugin-license-header",
    "@kbn/plugin-generator": "link:packages/kbn-plugin-generator",
    "@kbn/test": "link:packages/kbn-test",
    "angular-mocks": "1.4.7",
    "babel-eslint": "8.1.2",
    "babel-jest": "^22.4.3",
@@ -78,7 +78,7 @@ export function createProc(name, { cmd, args, cwd, env, stdin, log }) {
    .map(code => {
      // JVM exits with 143 on SIGTERM and 130 on SIGINT, don't treat them as errors
      if (code > 0 && !(code === 143 || code === 130)) {
        throw createCliError(`[${name}] exitted with code ${code}`);
        throw createCliError(`[${name}] exited with code ${code}`);
      }

      return code;
@@ -76,7 +76,9 @@ export class ProcRunner {
        .first()
        .catch(err => {
          if (err.name !== 'EmptyError') {
            throw createCliError(`[${name}] exitted without matching pattern: ${wait}`);
            throw createCliError(
              `[${name}] exited without matching pattern: ${wait}`
            );
          } else {
            throw err;
          }

@@ -171,7 +173,12 @@ export class ProcRunner {
      proc.outcome$.subscribe({
        next: (code) => {
          const duration = moment.duration(Date.now() - startMs);
          this._log.info('[%s] exitted with %s after %s', name, code, duration.humanize());
          this._log.info(
            '[%s] exited with %s after %s',
            name,
            code,
            duration.humanize()
          );
        },
        complete: () => {
          remove();
packages/kbn-test/.babelrc (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "presets": ["@kbn/babel-preset/node"]
}
packages/kbn-test/README.md (new file, 39 lines)
@@ -0,0 +1,39 @@
Kibana Testing Library
======================

The @kbn/test package provides ways to run tests. Currently only functional testing is provided by this library; unit testing and other kinds of testing may be added here later.

Functional Testing
------------------

### Dependencies

Functional testing methods live in the `src/functional_tests` directory. They depend on the Functional Test Runner, which is found in [`{KIBANA_ROOT}/src/functional_test_runner`](../../src/functional_test_runner). Ideally, libraries provided by Kibana packages such as this one should not depend on Kibana source code that lives in [`{KIBANA_ROOT}/src`](../../src). The goal is to start pulling test and development utilities out into packages so they can be used across Kibana and plugins. Accordingly, the Functional Test Runner itself will be pulled out into a package (or part of a package), and this package's dependence on it will no longer be an issue.

### Exposed methods

#### runTests(configPaths: Array<string>)

For each config file specified in `configPaths`, starts Elasticsearch and Kibana once, runs the tests specified in that config file, and shuts down Elasticsearch and Kibana when they complete. (Repeats this for every config file.)

`configPaths`: an array of strings, each an absolute path to a config file that looks like [this](../../test/functional/config.js) and follows the config schema specified [here](../../src/functional_test_runner/lib/config/schema.js).

Internally, the method that starts Elasticsearch comes from [kbn-es](../../packages/kbn-es).
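For example, a minimal sketch of calling `runTests` directly from a script (this mirrors what `runTestsCli` does behind `node scripts/functional_tests`; the relative config paths are illustrative):

```js
// Sketch only: assumes @kbn/test and its dependencies have been bootstrapped/built.
const { runTests } = require('@kbn/test');
const { createToolingLog } = require('@kbn/dev-utils');

// Create a logger for server and test output, as the CLIs in this package do.
const log = createToolingLog('info');
log.pipe(process.stdout);

runTests(
  [
    // Absolute paths to config files that follow the schema linked above.
    require.resolve('../../test/functional/config.js'),
    require.resolve('../../test/api_integration/config.js'),
  ],
  { bail: false, log }
).catch(err => {
  log.error(err);
  process.exit(1);
});
```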
#### startServers(configPath: string)

Starts the Elasticsearch and Kibana servers given a specified config.

`configPath`: absolute path to a config file that looks like [this](../../test/functional/config.js) and follows the config schema specified [here](../../src/functional_test_runner/lib/config/schema.js).

This lets users keep the servers running with this method while starting another process to run just the tests. Start servers _and_ run tests using the same config file ([see how](../../scripts/README.md)).
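A minimal sketch of using `startServers`, with the test run happening in a separate terminal (the config path is illustrative):

```js
// Sketch only: keeps Elasticsearch and Kibana running until this process is stopped.
const { startServers } = require('@kbn/test');
const { createToolingLog } = require('@kbn/dev-utils');

const log = createToolingLog('info');
log.pipe(process.stdout);

startServers(require.resolve('../../test/functional/config.js'), { log }).catch(err => {
  log.error(err);
  process.exit(1);
});
```

Then, in another terminal, run only the tests against the running servers using the same config:

```sh
node scripts/functional_test_runner --config test/functional/config.js
```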
## Rationale

### Single config per setup

We think it makes sense to specify the tests to run along with the particular server configuration for the Elasticsearch and Kibana servers, because the tests expect a particular configuration. For example, the SAML API integration tests expect certain XML files to exist in Elasticsearch's config directory, and certain SAML-specific options to be passed in via the command line (or alternatively via the `.yml` config file) to both Elasticsearch and Kibana. It makes sense to keep all these config options together with the list of test files.

### Multiple configs running in succession

We also think it makes sense to have a test runner intelligently (but simply) start servers, run tests, tear down servers, and repeat for each config, uninterrupted. There's nothing special about each kind of config that specifies running some set of functional tests against some kind of Elasticsearch/Kibana setup, so there doesn't need to be a separate job for each setup/test/teardown cycle. These can all be orchestrated sequentially via the current `runTests` implementation, and this is how we envision tests running on CI.

This inherently means that the grouping of test files into configs matters: a group of test files that depends on a particular server config should appear together in that config's `testFiles` list. Given how quickly and easily we can start servers using [@kbn/es](../../packages/kbn-es), it should not hurt performance to logically group tests by domain even when multiple groups of tests share the same server config. We can think about how to group test files across domains when that time comes.
packages/kbn-test/package.json (new file, 27 lines)
@@ -0,0 +1,27 @@
{
  "name": "@kbn/test",
  "main": "./target/index.js",
  "version": "1.0.0",
  "license": "Apache-2.0",
  "private": true,
  "scripts": {
    "build": "babel src --out-dir target",
    "kbn:bootstrap": "yarn build",
    "kbn:watch": "yarn build --watch"
  },
  "devDependencies": {
    "@kbn/babel-preset": "link:../kbn-babel-preset",
    "@kbn/dev-utils": "link:../kbn-dev-utils",
    "babel-cli": "^6.26.0"
  },
  "dependencies": {
    "chalk": "^2.4.1",
    "dedent": "^0.7.0",
    "getopts": "^2.0.6",
    "glob": "^7.1.2",
    "rxjs": "^5.4.3",
    "tar-fs": "^1.16.2",
    "tmp": "^0.0.33",
    "zlib": "^1.0.5"
  }
}
|
@ -1,27 +1,32 @@
|
|||
import { resolve } from 'path';
|
||||
import { get } from 'lodash';
|
||||
import { format } from 'url';
|
||||
import elasticsearch from 'elasticsearch';
|
||||
import { get } from 'lodash';
|
||||
import toPath from 'lodash/internal/toPath';
|
||||
import { Cluster } from '@kbn/es';
|
||||
import { esTestConfig } from './es_test_config';
|
||||
import { rmrfSync } from './rmrf_sync';
|
||||
import { KIBANA_ROOT } from '../';
|
||||
import elasticsearch from 'elasticsearch';
|
||||
|
||||
export function createTestCluster(options = {}) {
|
||||
export function createEsTestCluster(options = {}) {
|
||||
const {
|
||||
port = esTestConfig.getPort(),
|
||||
password = 'changeme',
|
||||
license = 'oss',
|
||||
log,
|
||||
basePath = resolve(__dirname, '../../../.es'),
|
||||
basePath = resolve(KIBANA_ROOT, '.es'),
|
||||
// Use source when running on CI
|
||||
from = esTestConfig.getBuildFrom(),
|
||||
} = options;
|
||||
|
||||
const randomHash = Math.random().toString(36).substring(2);
|
||||
const randomHash = Math.random()
|
||||
.toString(36)
|
||||
.substring(2);
|
||||
const clusterName = `test-${randomHash}`;
|
||||
const config = {
|
||||
version: esTestConfig.getVersion(),
|
||||
installPath: resolve(basePath, clusterName),
|
||||
sourcePath: resolve(__dirname, '../../../../elasticsearch'),
|
||||
sourcePath: resolve(KIBANA_ROOT, '../elasticsearch'),
|
||||
password,
|
||||
license,
|
||||
basePath,
|
||||
|
@ -29,9 +34,6 @@ export function createTestCluster(options = {}) {
|
|||
|
||||
const cluster = new Cluster(log);
|
||||
|
||||
// Use source when running on CI
|
||||
const from = options.from || esTestConfig.getBuildFrom();
|
||||
|
||||
return new class EsTestCluster {
|
||||
getStartTimeout() {
|
||||
const second = 1000;
|
||||
|
@ -51,18 +53,20 @@ export function createTestCluster(options = {}) {
|
|||
`cluster.name=${clusterName}`,
|
||||
`http.port=${port}`,
|
||||
`discovery.zen.ping.unicast.hosts=localhost:${port}`,
|
||||
...esArgs
|
||||
...esArgs,
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
async stop() {
|
||||
await cluster.stop();
|
||||
log.info('[es] stopped');
|
||||
}
|
||||
|
||||
async cleanup() {
|
||||
await this.stop();
|
||||
rmrfSync(config.installPath);
|
||||
log.info('[es] cleanup complete');
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -84,7 +88,7 @@ export function createTestCluster(options = {}) {
|
|||
|
||||
return format(parts);
|
||||
}
|
||||
};
|
||||
}();
|
||||
}
|
||||
|
||||
/**
packages/kbn-test/src/es/es_test_config.js (new file, 50 lines)
@@ -0,0 +1,50 @@
import url, { format as formatUrl } from 'url';
import pkg from '../../../../package.json';
import { adminTestUser } from '../kbn';

export const esTestConfig = new class EsTestConfig {
  getVersion() {
    return process.env.TEST_ES_BRANCH || pkg.version;
  }

  getPort() {
    return this.getUrlParts().port;
  }

  getUrl() {
    return formatUrl(this.getUrlParts());
  }

  getBuildFrom() {
    return process.env.TEST_ES_FROM || 'snapshot';
  }

  getUrlParts() {
    // Allow setting one complete TEST_ES_URL for Es like https://elastic:changeme@myCloudInstance:9200
    if (process.env.TEST_ES_URL) {
      const testEsUrl = url.parse(process.env.TEST_ES_URL);
      return {
        // have to remove the ":" off protocol
        protocol: testEsUrl.protocol.slice(0, -1),
        hostname: testEsUrl.hostname,
        port: parseInt(testEsUrl.port, 10),
        username: testEsUrl.auth.split(':')[0],
        password: testEsUrl.auth.split(':')[1],
        auth: testEsUrl.auth,
      };
    }

    const username = process.env.TEST_KIBANA_USERNAME || adminTestUser.username;
    const password = process.env.TEST_KIBANA_PASSWORD || adminTestUser.password;
    return {
      // Allow setting any individual component(s) of the URL,
      // or use default values (username and password from ../kbn/users.js)
      protocol: process.env.TEST_ES_PROTOCOL || 'http',
      hostname: process.env.TEST_ES_HOSTNAME || 'localhost',
      port: parseInt(process.env.TEST_ES_PORT, 10) || 9220,
      auth: username + ':' + password,
      username: username,
      password: password,
    };
  }
}();
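As the config above shows, the Elasticsearch endpoint used by the tests can be overridden through environment variables, either as one complete URL or as individual parts. A hedged sketch of how that might be used (values and the choice of script are illustrative):

```sh
# Point a test run at an already-running Elasticsearch via one URL
# (read by getUrlParts() above):
TEST_ES_URL=http://elastic:changeme@localhost:9200 node scripts/functional_test_runner

# Or override individual parts; unset parts fall back to the defaults above.
TEST_ES_HOSTNAME=localhost TEST_ES_PORT=9220 node scripts/functional_tests
```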
packages/kbn-test/src/es/index.js (new file, 2 lines)
@@ -0,0 +1,2 @@
export { createEsTestCluster } from './es_test_cluster.js';
export { esTestConfig } from './es_test_config';
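The `createEsTestCluster` export above is the piece most tests interact with directly. A rough sketch of how a mocha-style suite in this change wires it up (the options and timing mirror the integration tests touched here; treat it as illustrative, not canonical):

```js
// Sketch only: start a disposable ES test cluster for a suite, then clean it up.
import { createEsTestCluster } from '@kbn/test';
import { createToolingLog } from '@kbn/dev-utils';

describe('some integration suite', function () {
  let es;

  before(async function () {
    const log = createToolingLog('debug');
    log.pipe(process.stdout);

    es = createEsTestCluster({ log });

    // Starting ES can take a while, so widen the suite timeout accordingly.
    this.timeout(es.getStartTimeout());
    await es.start();
  });

  after(async () => {
    // Stops the cluster and removes its install directory.
    await es.cleanup();
  });

  it('talks to elasticsearch', async () => {
    // ... test body using the running cluster ...
  });
});
```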
@@ -18,4 +18,4 @@ export function rmrfSync(path) {
});
fs.rmdirSync(path);
}
}
}
packages/kbn-test/src/functional_tests/cli/index.js (new file, 2 lines)
@@ -0,0 +1,2 @@
export { runTestsCli } from './run_tests_cli';
export { startServersCli } from './start_servers_cli';
78
packages/kbn-test/src/functional_tests/cli/run_tests_cli.js
Normal file
78
packages/kbn-test/src/functional_tests/cli/run_tests_cli.js
Normal file
|
@ -0,0 +1,78 @@
|
|||
import dedent from 'dedent';
|
||||
import getopts from 'getopts';
|
||||
import { createToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
|
||||
import { runTests } from '../../';
|
||||
|
||||
/**
|
||||
* Run servers and tests for each config
|
||||
* Only cares about --config option. Other options
|
||||
* are passed directly to functional_test_runner, such as
|
||||
* --bail, --verbose, etc.
|
||||
* @param {string[]} defaultConfigPaths Array of paths to configs to use
|
||||
* if no config option is passed
|
||||
*/
|
||||
export async function runTestsCli(defaultConfigPaths) {
|
||||
const { configs, help, bail, log } = processArgs(defaultConfigPaths);
|
||||
|
||||
if (help) return displayHelp();
|
||||
|
||||
if (!configs || configs.length === 0) {
|
||||
log.error(
|
||||
`Run Tests requires at least one path to a config. Leave blank to use defaults.`
|
||||
);
|
||||
process.exit(9);
|
||||
}
|
||||
|
||||
try {
|
||||
await runTests(configs, { bail, log });
|
||||
} catch (err) {
|
||||
log.error('FATAL ERROR');
|
||||
log.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function processArgs(defaultConfigPaths) {
|
||||
// If no args are passed, use {}
|
||||
const options = getopts(process.argv.slice(2)) || {};
|
||||
|
||||
// If --config is passed without paths, it's "true", so use default
|
||||
const configs =
|
||||
typeof options.config === 'string' || Array.isArray(options.config)
|
||||
? [].concat(options.config)
|
||||
: defaultConfigPaths;
|
||||
|
||||
const log = createToolingLog(pickLevelFromFlags(options));
|
||||
log.pipe(process.stdout);
|
||||
|
||||
return {
|
||||
configs,
|
||||
log,
|
||||
help: options.help,
|
||||
bail: options.bail,
|
||||
rest: options._,
|
||||
};
|
||||
}
|
||||
|
||||
function displayHelp() {
|
||||
console.log(
|
||||
dedent(`
|
||||
Run Functional Tests
|
||||
|
||||
Usage: node scripts/functional_tests [options]
|
||||
|
||||
--config Option to pass in a config
|
||||
Can pass in multiple configs with
|
||||
--config file1 --config file2 --config file3
|
||||
--bail Stop the test run at the first failure
|
||||
--help Display this menu and exit
|
||||
|
||||
Log level options:
|
||||
|
||||
--verbose
|
||||
--debug
|
||||
--quiet Log errors
|
||||
--silent
|
||||
`)
|
||||
);
|
||||
}
|
|
@ -0,0 +1,76 @@
|
|||
import chalk from 'chalk';
|
||||
import dedent from 'dedent';
|
||||
import getopts from 'getopts';
|
||||
import { createToolingLog, pickLevelFromFlags } from '@kbn/dev-utils';
|
||||
import { startServers } from '../../';
|
||||
|
||||
/**
|
||||
* Start servers
|
||||
* @param {string} configPath path to config
|
||||
*/
|
||||
export async function startServersCli(defaultConfigPath) {
|
||||
const { config, log, help } = processArgv(defaultConfigPath);
|
||||
|
||||
if (help) return displayHelp();
|
||||
|
||||
if (!config) {
|
||||
log.error(
|
||||
`Start Servers requires one path to a config. Leave blank to use default.`
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
await startServers(config, { log });
|
||||
} catch (err) {
|
||||
log.error('FATAL ERROR');
|
||||
log.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function processArgv(defaultConfigPath) {
|
||||
const options = getopts(process.argv.slice(2)) || {};
|
||||
|
||||
if (Array.isArray(options.config)) {
|
||||
console.log(
|
||||
chalk.red(
|
||||
`Starting servers requires a single config path. Multiple were passed.`
|
||||
)
|
||||
);
|
||||
process.exit(9);
|
||||
}
|
||||
|
||||
const config =
|
||||
typeof options.config === 'string' ? options.config : defaultConfigPath;
|
||||
|
||||
const log = createToolingLog(pickLevelFromFlags(options));
|
||||
log.pipe(process.stdout);
|
||||
|
||||
return {
|
||||
config,
|
||||
log,
|
||||
help: options.help,
|
||||
rest: options._,
|
||||
};
|
||||
}
|
||||
|
||||
function displayHelp() {
|
||||
console.log(
|
||||
dedent(`
|
||||
Start Functional Test Servers
|
||||
|
||||
Usage: node scripts/functional_tests_server [options]
|
||||
|
||||
--config Option to pass in a config
|
||||
--help Display this menu and exit
|
||||
|
||||
Log level options:
|
||||
|
||||
--verbose
|
||||
--debug
|
||||
--quiet Log errors
|
||||
--silent
|
||||
`)
|
||||
);
|
||||
}
|
|
@ -12,20 +12,25 @@ import { delay, fromNode as fcb } from 'bluebird';
|
|||
export const DEFAULT_SUPERUSER_PASS = 'iamsuperuser';
|
||||
|
||||
async function updateCredentials(port, auth, username, password, retries = 10) {
|
||||
const result = await fcb(cb => request({
|
||||
method: 'PUT',
|
||||
uri: formatUrl({
|
||||
protocol: 'http:',
|
||||
auth,
|
||||
hostname: 'localhost',
|
||||
port,
|
||||
pathname: `/_xpack/security/user/${username}/_password`,
|
||||
}),
|
||||
json: true,
|
||||
body: { password }
|
||||
}, (err, httpResponse, body) => {
|
||||
cb(err, { httpResponse, body });
|
||||
}));
|
||||
const result = await fcb(cb =>
|
||||
request(
|
||||
{
|
||||
method: 'PUT',
|
||||
uri: formatUrl({
|
||||
protocol: 'http:',
|
||||
auth,
|
||||
hostname: 'localhost',
|
||||
port,
|
||||
pathname: `/_xpack/security/user/${username}/_password`,
|
||||
}),
|
||||
json: true,
|
||||
body: { password },
|
||||
},
|
||||
(err, httpResponse, body) => {
|
||||
cb(err, { httpResponse, body });
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
const { body, httpResponse } = result;
|
||||
const { statusCode } = httpResponse;
|
||||
|
@ -38,20 +43,22 @@ async function updateCredentials(port, auth, username, password, retries = 10) {
|
|||
return await updateCredentials(port, auth, username, password, retries - 1);
|
||||
}
|
||||
|
||||
throw new Error(`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`);
|
||||
throw new Error(
|
||||
`${statusCode} response, expected 200 -- ${JSON.stringify(body)}`
|
||||
);
|
||||
}
|
||||
|
||||
export async function setupUsers(log, ftrConfig) {
|
||||
const esPort = ftrConfig.get('servers.elasticsearch.port');
|
||||
export async function setupUsers(log, config) {
|
||||
const esPort = config.get('servers.elasticsearch.port');
|
||||
|
||||
// track the current credentials for the `elastic` user as
|
||||
// they will likely change as we apply updates
|
||||
let auth = 'elastic:iamsuperuser';
|
||||
let auth = `elastic:${DEFAULT_SUPERUSER_PASS}`;
|
||||
|
||||
// list of updates we need to apply
|
||||
const updates = [
|
||||
ftrConfig.get('servers.elasticsearch'),
|
||||
ftrConfig.get('servers.kibana'),
|
||||
config.get('servers.elasticsearch'),
|
||||
config.get('servers.kibana'),
|
||||
];
|
||||
|
||||
for (const { username, password } of updates) {
|
packages/kbn-test/src/functional_tests/lib/index.js (new file, 9 lines)
@@ -0,0 +1,9 @@
export { runKibanaServer } from './run_kibana_server';
export { runElasticsearch } from './run_elasticsearch';
export { runFtr } from './run_ftr';
export {
  KIBANA_ROOT,
  KIBANA_FTR_SCRIPT,
  FUNCTIONAL_CONFIG_PATH,
  API_CONFIG_PATH,
} from './paths';
packages/kbn-test/src/functional_tests/lib/paths.js (new file, 25 lines)
@@ -0,0 +1,25 @@
import { resolve, relative } from 'path';

// resolve() treats relative paths as relative to process.cwd(),
// so to return a relative path we use relative()
function resolveRelative(path) {
  return relative(process.cwd(), resolve(path));
}

export const KIBANA_EXEC = 'node';
export const KIBANA_EXEC_PATH = resolveRelative('scripts/kibana');
export const KIBANA_ROOT = resolve(__dirname, '../../../../../');
export const KIBANA_FTR_SCRIPT = resolve(
  KIBANA_ROOT,
  'scripts/functional_test_runner'
);
export const PROJECT_ROOT = resolve(__dirname, '../../../../../../');
export const FUNCTIONAL_CONFIG_PATH = resolve(
  KIBANA_ROOT,
  'test/functional/config'
);
export const API_CONFIG_PATH = resolve(
  KIBANA_ROOT,
  'test/api_integration/config'
);
export const OPTIMIZE_BUNDLE_DIR = resolve(KIBANA_ROOT, 'optimize/bundles');
|
@ -0,0 +1,30 @@
|
|||
import { resolve } from 'path';
|
||||
import { KIBANA_ROOT } from './paths';
|
||||
import { createEsTestCluster } from '../../es';
|
||||
|
||||
import { setupUsers, DEFAULT_SUPERUSER_PASS } from './auth';
|
||||
|
||||
export async function runElasticsearch({ config, log }) {
|
||||
const isOss = config.get('esTestCluster.license') === 'oss';
|
||||
|
||||
const cluster = createEsTestCluster({
|
||||
port: config.get('servers.elasticsearch.port'),
|
||||
password: !isOss
|
||||
? DEFAULT_SUPERUSER_PASS
|
||||
: config.get('servers.elasticsearch.password'),
|
||||
license: config.get('esTestCluster.license'),
|
||||
log,
|
||||
basePath: resolve(KIBANA_ROOT, '.es'),
|
||||
from: config.get('esTestCluster.from'),
|
||||
});
|
||||
|
||||
const esArgs = config.get('esTestCluster.serverArgs');
|
||||
|
||||
await cluster.start(esArgs);
|
||||
|
||||
if (!isOss) {
|
||||
await setupUsers(log, config);
|
||||
}
|
||||
|
||||
return cluster;
|
||||
}
|
29
packages/kbn-test/src/functional_tests/lib/run_ftr.js
Normal file
29
packages/kbn-test/src/functional_tests/lib/run_ftr.js
Normal file
|
@ -0,0 +1,29 @@
|
|||
import { KIBANA_FTR_SCRIPT, PROJECT_ROOT } from './paths';
|
||||
|
||||
export async function runFtr({
|
||||
procs,
|
||||
configPath,
|
||||
bail,
|
||||
log,
|
||||
cwd = PROJECT_ROOT,
|
||||
}) {
|
||||
const args = [KIBANA_FTR_SCRIPT];
|
||||
|
||||
if (getLogFlag(log)) args.push(`--${getLogFlag(log)}`);
|
||||
if (bail) args.push('--bail');
|
||||
if (configPath) args.push('--config', configPath);
|
||||
|
||||
await procs.run('ftr', {
|
||||
cmd: 'node',
|
||||
args,
|
||||
cwd,
|
||||
wait: true,
|
||||
});
|
||||
}
|
||||
|
||||
function getLogFlag(log) {
|
||||
const level = log.getLevel();
|
||||
|
||||
if (level === 'info') return null;
|
||||
return level === 'error' ? 'quiet' : level;
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
import { KIBANA_ROOT, KIBANA_EXEC, KIBANA_EXEC_PATH } from './paths';
|
||||
|
||||
export async function runKibanaServer({ procs, config }) {
|
||||
const cliArgs = config.get('kibanaServerArgs') || [];
|
||||
|
||||
// start the kibana server and wait for it to log "Server running" before resolving
|
||||
await procs.run('kibana', {
|
||||
cmd: KIBANA_EXEC,
|
||||
args: [KIBANA_EXEC_PATH, ...cliArgs],
|
||||
env: {
|
||||
FORCE_COLOR: 1,
|
||||
...process.env,
|
||||
},
|
||||
cwd: KIBANA_ROOT,
|
||||
wait: /Server running/,
|
||||
});
|
||||
}
|
90
packages/kbn-test/src/functional_tests/tasks.js
Normal file
90
packages/kbn-test/src/functional_tests/tasks.js
Normal file
|
@ -0,0 +1,90 @@
|
|||
import { relative, resolve } from 'path';
|
||||
import Rx from 'rxjs/Rx';
|
||||
import { withProcRunner } from '@kbn/dev-utils';
|
||||
|
||||
import {
|
||||
runElasticsearch,
|
||||
runKibanaServer,
|
||||
runFtr,
|
||||
KIBANA_FTR_SCRIPT,
|
||||
} from './lib';
|
||||
|
||||
import { readConfigFile } from '../../../../src/functional_test_runner/lib';
|
||||
|
||||
const SUCCESS_MESSAGE = `
|
||||
|
||||
Elasticsearch and Kibana are ready for functional testing. Start the functional tests
|
||||
in another terminal session by running this command from this directory:
|
||||
|
||||
node ${relative(process.cwd(), KIBANA_FTR_SCRIPT)}
|
||||
|
||||
`;
|
||||
|
||||
/**
|
||||
* Run servers and tests for each config
|
||||
* @param {string[]} configPaths Array of paths to configs
|
||||
* @param {boolean} bail Whether to exit test run at the first failure
|
||||
* @param {Log} log Optional logger
|
||||
*/
|
||||
export async function runTests(configPaths, { bail, log }) {
|
||||
for (const configPath of configPaths) {
|
||||
await runSingleConfig(resolve(process.cwd(), configPath), { bail, log });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start only servers using single config
|
||||
* @param {string} configPath Path to a config file
|
||||
* @param {Log} log Optional logger
|
||||
*/
|
||||
export async function startServers(configPath, { log }) {
|
||||
configPath = resolve(process.cwd(), configPath);
|
||||
|
||||
await withProcRunner(log, async procs => {
|
||||
const config = await readConfigFile(log, configPath);
|
||||
|
||||
const es = await runElasticsearch({ config, log });
|
||||
await runKibanaServer({ procs, config, log });
|
||||
|
||||
// wait for 5 seconds of silence before logging the
|
||||
// success message so that it doesn't get buried
|
||||
await silence(5000, { log });
|
||||
log.info(SUCCESS_MESSAGE);
|
||||
|
||||
await procs.waitForAllToStop();
|
||||
await es.cleanup();
|
||||
});
|
||||
}
|
||||
|
||||
async function silence(milliseconds, { log }) {
|
||||
await Rx.Observable.fromEvent(log, 'data')
|
||||
.startWith(null)
|
||||
.switchMap(() => Rx.Observable.timer(milliseconds))
|
||||
.take(1)
|
||||
.toPromise();
|
||||
}
|
||||
|
||||
/*
|
||||
* Start servers and run tests for single config
|
||||
*/
|
||||
async function runSingleConfig(configPath, { bail, log }) {
|
||||
await withProcRunner(log, async procs => {
|
||||
const config = await readConfigFile(log, configPath);
|
||||
|
||||
const es = await runElasticsearch({ config, log });
|
||||
await runKibanaServer({ procs, config });
|
||||
|
||||
// Note: When solving how to incorporate functional_test_runner
|
||||
// clean this up
|
||||
await runFtr({
|
||||
procs,
|
||||
configPath,
|
||||
bail,
|
||||
log,
|
||||
cwd: process.cwd(),
|
||||
});
|
||||
|
||||
await procs.stop('kibana');
|
||||
await es.cleanup();
|
||||
});
|
||||
}
|
packages/kbn-test/src/index.js (new file, 14 lines)
@@ -0,0 +1,14 @@
export { runTestsCli, startServersCli } from './functional_tests/cli';

export { runTests, startServers } from './functional_tests/tasks';

export { OPTIMIZE_BUNDLE_DIR, KIBANA_ROOT } from './functional_tests/lib/paths';

export { esTestConfig, createEsTestCluster } from './es';

export {
  kbnTestConfig,
  kibanaServerTestUser,
  kibanaTestUser,
  adminTestUser,
} from './kbn';
packages/kbn-test/src/kbn/index.js (new file, 2 lines)
@@ -0,0 +1,2 @@
export { kbnTestConfig } from './kbn_test_config';
export { kibanaTestUser, kibanaServerTestUser, adminTestUser } from './users';
36
packages/kbn-test/src/kbn/kbn_test_config.js
Normal file
36
packages/kbn-test/src/kbn/kbn_test_config.js
Normal file
|
@ -0,0 +1,36 @@
|
|||
import { kibanaTestUser } from './users';
|
||||
import url from 'url';
|
||||
|
||||
export const kbnTestConfig = new class KbnTestConfig {
|
||||
getPort() {
|
||||
return this.getUrlParts().port;
|
||||
}
|
||||
|
||||
getUrlParts() {
|
||||
// allow setting one complete TEST_KIBANA_URL for ES like https://elastic:changeme@example.com:9200
|
||||
if (process.env.TEST_KIBANA_URL) {
|
||||
const testKibanaUrl = url.parse(process.env.TEST_KIBANA_URL);
|
||||
return {
|
||||
protocol: testKibanaUrl.protocol.slice(0, -1),
|
||||
hostname: testKibanaUrl.hostname,
|
||||
port: parseInt(testKibanaUrl.port, 10),
|
||||
auth: testKibanaUrl.auth,
|
||||
username: testKibanaUrl.auth.split(':')[0],
|
||||
password: testKibanaUrl.auth.split(':')[1],
|
||||
};
|
||||
}
|
||||
|
||||
const username =
|
||||
process.env.TEST_KIBANA_USERNAME || kibanaTestUser.username;
|
||||
const password =
|
||||
process.env.TEST_KIBANA_PASSWORD || kibanaTestUser.password;
|
||||
return {
|
||||
protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
|
||||
hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',
|
||||
port: parseInt(process.env.TEST_KIBANA_PORT, 10) || 5620,
|
||||
auth: `${username}:${password}`,
|
||||
username,
|
||||
password,
|
||||
};
|
||||
}
|
||||
}();
|
packages/kbn-test/src/kbn/users.js (new file, 16 lines)
@@ -0,0 +1,16 @@
const env = process.env;

export const kibanaTestUser = {
  username: env.TEST_KIBANA_USER || 'elastic',
  password: env.TEST_KIBANA_PASS || 'changeme',
};

export const kibanaServerTestUser = {
  username: env.TEST_KIBANA_SERVER_USER || 'kibana',
  password: env.TEST_KIBANA_SERVER_PASS || 'changeme',
};

export const adminTestUser = {
  username: env.TEST_ES_USER || 'elastic',
  password: env.TEST_ES_PASS || 'changeme',
};
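These defaults can be overridden through the environment, for example when running tests against servers that use non-default credentials (the values below are illustrative):

```sh
# Use non-default credentials for the Kibana test user and the ES admin user.
TEST_KIBANA_USER=elastic TEST_KIBANA_PASS=my-password \
TEST_ES_USER=elastic TEST_ES_PASS=my-password \
node scripts/functional_test_runner
```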
packages/kbn-test/yarn.lock (new file, 1685 lines; diff suppressed because it is too large)
@@ -1,4 +1,4 @@
# kibana dev scripts
# Kibana Dev Scripts

This directory contains scripts useful for interacting with Kibana tools in development. Use the node executable and `--help` flag to learn about how they work:

@@ -6,11 +6,37 @@ This directory contains scripts useful for interacting with Kibana tools in deve
node scripts/{{script name}} --help
```

## for developers
## For Developers

This directory is excluded from the build and tools within it should help users discover their capabilities. Each script in this directory must:

- require `src/babel-register` to bootstrap babel
- call out to source code that is in the `src` directory
- call out to source code in the [`src`](../src) or [`packages`](../packages) directories
- react to the `--help` flag
- run everywhere OR check and fail fast when a required OS or toolchain is not available

## Functional Test Scripts

**`node scripts/functional_tests [--config test/functional/config.js --config test/api_integration/config.js]`**

Runs all the functional tests: selenium tests and API integration tests. List configs with multiple `--config` arguments. Uses the [@kbn/test](../packages/kbn-test) library to run Elasticsearch and Kibana servers and tests against those servers, for multiple server+test setups. In particular, calls out to [`runTests()`](../packages/kbn-test/src/functional_tests/tasks.js). Can be run on a single config.

**`node scripts/functional_tests_server [--config test/functional/config.js]`**

Starts just the Elasticsearch and Kibana servers given a single config, i.e. via `--config test/functional/config.js` or `--config test/api_integration/config`. Allows the user to start just the servers with this script and keep them running while running tests against them. The idea is that the same config file configures both the Elasticsearch and Kibana servers. Uses the [`startServers()`](../packages/kbn-test/src/functional_tests/tasks.js#L52-L80) method from the [@kbn/test](../packages/kbn-test) library.

Example: start servers _and_ run tests, separately, but using the same config:

```sh
# Just the servers
node scripts/functional_tests_server --config path/to/config
```

In another terminal:

```sh
# Just the tests--against the running servers
node scripts/functional_test_runner --config path/to/config
```

For details on how the internal methods work, [read this readme](../packages/kbn-test/README.md).
scripts/functional_tests.js (new file, 5 lines)
@@ -0,0 +1,5 @@
require('../src/babel-register');
require('../packages/kbn-test').runTestsCli([
  require.resolve('../test/functional/config.js'),
  require.resolve('../test/api_integration/config.js'),
]);
scripts/functional_tests_server.js (new file, 4 lines)
@@ -0,0 +1,4 @@
require('../src/babel-register');
require('../packages/kbn-test').startServersCli(
  require.resolve('../test/functional/config.js'),
);
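These two entry points are thin wrappers around `runTestsCli(defaultConfigPaths)` and `startServersCli(defaultConfigPath)`. As a sketch, another suite could be wired up the same way by passing its own default config (the path below is hypothetical):

```js
// Hypothetical entry point following the same pattern as scripts/functional_tests.js.
require('../src/babel-register');
require('../packages/kbn-test').runTestsCli([
  require.resolve('../test/my_suite/config.js'), // illustrative config path
]);
```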
@@ -3,7 +3,7 @@ import Promise from 'bluebird';
import sinon from 'sinon';
import expect from 'expect.js';

import { esTestConfig } from '../../../../test_utils/es';
import { esTestConfig } from '@kbn/test';
import { ensureEsVersion } from '../ensure_es_version';

describe('plugins/elasticsearch', () => {
@@ -7,7 +7,7 @@ const NoConnections = require('elasticsearch').errors.NoConnections;
import mappings from './fixtures/mappings';
import healthCheck from '../health_check';
import kibanaVersion from '../kibana_version';
import { esTestConfig } from '../../../../test_utils/es';
import { esTestConfig } from '@kbn/test';
import * as patchKibanaIndexNS from '../patch_kibana_index';

const esPort = esTestConfig.getPort();
@@ -88,6 +88,15 @@ export const schema = Joi.object().keys({
    elasticsearch: urlPartsSchema(),
  }).default(),

  esTestCluster: Joi.object().keys({
    license: Joi.string().default('oss'),
    from: Joi.string().default('snapshot'),
    serverArgs: Joi.array(),
  }).default(),

  kibanaServerArgs: Joi.array(),

  // env allows generic data, but should be removed
  env: Joi.object().default(),

  chromedriver: Joi.object().keys({
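To make the new schema keys concrete, here is a rough sketch of a functional test config using them, alongside the existing keys like `testFiles` and `services` (the values are illustrative; the real shape is the one in `test/common/config.js` in this change):

```js
// Sketch of a config consumed by the functional test runner / @kbn/test.
import { esTestConfig, kbnTestConfig } from '@kbn/test';

export default function () {
  return {
    servers: {
      kibana: kbnTestConfig.getUrlParts(),
      elasticsearch: esTestConfig.getUrlParts(),
    },

    // New in this change: how the ES test cluster should be built and started.
    esTestCluster: {
      license: 'oss',   // 'oss' or a licensed distribution
      from: 'snapshot', // or 'source', e.g. on CI
      serverArgs: [],   // additional server settings for the ES test cluster
    },

    // New in this change: CLI args used to start the Kibana server.
    kibanaServerArgs: [
      '--env=development',
      '--logging.json=false',
      `--server.port=${kbnTestConfig.getPort()}`,
    ],
  };
}
```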
|
|
@ -1,32 +0,0 @@
|
|||
import { format as formatUrl } from 'url';
|
||||
import pkg from '../../../package.json';
|
||||
import { admin } from '../../../test/shield';
|
||||
|
||||
export const esTestConfig = new class EsTestConfig {
|
||||
getVersion() {
|
||||
return process.env.TEST_ES_BRANCH || pkg.version;
|
||||
}
|
||||
|
||||
getPort() {
|
||||
return this.getUrlParts().port;
|
||||
}
|
||||
|
||||
getUrl() {
|
||||
return formatUrl(this.getUrlParts());
|
||||
}
|
||||
|
||||
getBuildFrom() {
|
||||
return process.env.TEST_ES_FROM || 'snapshot';
|
||||
}
|
||||
|
||||
getUrlParts() {
|
||||
return {
|
||||
protocol: process.env.TEST_ES_PROTOCOL || 'http',
|
||||
hostname: process.env.TEST_ES_HOSTNAME || 'localhost',
|
||||
port: parseInt(process.env.TEST_ES_PORT, 10) || 9220,
|
||||
auth: admin.username + ':' + admin.password,
|
||||
username: admin.username,
|
||||
password: admin.password,
|
||||
};
|
||||
}
|
||||
};
|
|
@ -1,2 +0,0 @@
|
|||
export { esTestConfig } from './es_test_config';
|
||||
export { createTestCluster } from './es_test_cluster';
|
|
@ -9,7 +9,7 @@ import url from 'url';
|
|||
* protocol: 'http',
|
||||
* hostname: 'localhost',
|
||||
* port: 9220,
|
||||
* auth: shield.kibanaUser.username + ':' + shield.kibanaUser.password
|
||||
* auth: kibanaTestUser.username + ':' + kibanaTestUser.password
|
||||
* }
|
||||
* @param {object} app The params to append
|
||||
* example:
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
import { resolve } from 'path';
|
||||
import { defaultsDeep, set } from 'lodash';
|
||||
import { header as basicAuthHeader } from './base_auth';
|
||||
import { kibanaUser, kibanaServer } from '../../test/shield';
|
||||
import { esTestConfig } from '../test_utils/es';
|
||||
import { esTestConfig, kibanaTestUser, kibanaServerTestUser } from '@kbn/test';
|
||||
import KbnServer from '../../src/server/kbn_server';
|
||||
|
||||
const DEFAULTS_SETTINGS = {
|
||||
|
@ -29,8 +28,8 @@ const DEFAULT_SETTINGS_WITH_CORE_PLUGINS = {
|
|||
},
|
||||
elasticsearch: {
|
||||
url: esTestConfig.getUrl(),
|
||||
username: kibanaServer.username,
|
||||
password: kibanaServer.password
|
||||
username: kibanaServerTestUser.username,
|
||||
password: kibanaServerTestUser.password
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -60,7 +59,7 @@ export function createServerWithCorePlugins(settings = {}) {
|
|||
* Creates request configuration with a basic auth header
|
||||
*/
|
||||
export function authOptions() {
|
||||
const { username, password } = kibanaUser;
|
||||
const { username, password } = kibanaTestUser;
|
||||
const authHeader = basicAuthHeader(username, password);
|
||||
return set({}, 'headers.Authorization', authHeader);
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import sinon from 'sinon';
|
||||
import expect from 'expect.js';
|
||||
|
||||
import { createTestCluster } from '../../../../test_utils/es';
|
||||
import { createEsTestCluster } from '@kbn/test';
|
||||
import { createServerWithCorePlugins } from '../../../../test_utils/kbn_server';
|
||||
import { createToolingLog } from '../../../../dev';
|
||||
import { createOrUpgradeSavedConfig } from '../create_or_upgrade_saved_config';
|
||||
|
@ -19,7 +19,7 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
log.info('starting elasticsearch');
|
||||
log.indent(4);
|
||||
|
||||
const es = createTestCluster({ log });
|
||||
const es = createEsTestCluster({ log });
|
||||
this.timeout(es.getStartTimeout());
|
||||
|
||||
log.indent(-4);
|
||||
|
@ -47,7 +47,7 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
type: 'config',
|
||||
attributes: {
|
||||
buildNum: 54090,
|
||||
'5.4.0-SNAPSHOT': true
|
||||
'5.4.0-SNAPSHOT': true,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -55,7 +55,7 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
type: 'config',
|
||||
attributes: {
|
||||
buildNum: 54010,
|
||||
'5.4.0-rc1': true
|
||||
'5.4.0-rc1': true,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -63,7 +63,7 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
type: 'config',
|
||||
attributes: {
|
||||
buildNum: 99999,
|
||||
'@@version': true
|
||||
'@@version': true,
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
@ -83,18 +83,20 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
savedObjectsClient,
|
||||
version: '5.4.0',
|
||||
buildNum: 54099,
|
||||
log: sinon.stub()
|
||||
log: sinon.stub(),
|
||||
});
|
||||
|
||||
const config540 = await savedObjectsClient.get('config', '5.4.0');
|
||||
expect(config540).to.have.property('attributes').eql({
|
||||
// should have the new build number
|
||||
buildNum: 54099,
|
||||
expect(config540)
|
||||
.to.have.property('attributes')
|
||||
.eql({
|
||||
// should have the new build number
|
||||
buildNum: 54099,
|
||||
|
||||
// 5.4.0-SNAPSHOT and @@version were ignored so we only have the
|
||||
// attributes from 5.4.0-rc1, even though the other build nums are greater
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
// 5.4.0-SNAPSHOT and @@version were ignored so we only have the
|
||||
// attributes from 5.4.0-rc1, even though the other build nums are greater
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
|
||||
// add the 5.4.0 flag to the 5.4.0 savedConfig
|
||||
await savedObjectsClient.update('config', '5.4.0', {
|
||||
|
@ -107,18 +109,20 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
savedObjectsClient,
|
||||
version: '5.4.1',
|
||||
buildNum: 54199,
|
||||
log: sinon.stub()
|
||||
log: sinon.stub(),
|
||||
});
|
||||
|
||||
const config541 = await savedObjectsClient.get('config', '5.4.1');
|
||||
expect(config541).to.have.property('attributes').eql({
|
||||
// should have the new build number
|
||||
buildNum: 54199,
|
||||
expect(config541)
|
||||
.to.have.property('attributes')
|
||||
.eql({
|
||||
// should have the new build number
|
||||
buildNum: 54199,
|
||||
|
||||
// should also include properties from 5.4.0 and 5.4.0-rc1
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
// should also include properties from 5.4.0 and 5.4.0-rc1
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
|
||||
// add the 5.4.1 flag to the 5.4.1 savedConfig
|
||||
await savedObjectsClient.update('config', '5.4.1', {
|
||||
|
@ -131,19 +135,21 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
savedObjectsClient,
|
||||
version: '7.0.0-rc1',
|
||||
buildNum: 70010,
|
||||
log: sinon.stub()
|
||||
log: sinon.stub(),
|
||||
});
|
||||
|
||||
const config700rc1 = await savedObjectsClient.get('config', '7.0.0-rc1');
|
||||
expect(config700rc1).to.have.property('attributes').eql({
|
||||
// should have the new build number
|
||||
buildNum: 70010,
|
||||
expect(config700rc1)
|
||||
.to.have.property('attributes')
|
||||
.eql({
|
||||
// should have the new build number
|
||||
buildNum: 70010,
|
||||
|
||||
// should also include properties from 5.4.1, 5.4.0 and 5.4.0-rc1
|
||||
'5.4.1': true,
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
// should also include properties from 5.4.1, 5.4.0 and 5.4.0-rc1
|
||||
'5.4.1': true,
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
|
||||
// tag the 7.0.0-rc1 doc
|
||||
await savedObjectsClient.update('config', '7.0.0-rc1', {
|
||||
|
@ -156,20 +162,22 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
savedObjectsClient,
|
||||
version: '7.0.0',
|
||||
buildNum: 70099,
|
||||
log: sinon.stub()
|
||||
log: sinon.stub(),
|
||||
});
|
||||
|
||||
const config700 = await savedObjectsClient.get('config', '7.0.0');
|
||||
expect(config700).to.have.property('attributes').eql({
|
||||
// should have the new build number
|
||||
buildNum: 70099,
|
||||
expect(config700)
|
||||
.to.have.property('attributes')
|
||||
.eql({
|
||||
// should have the new build number
|
||||
buildNum: 70099,
|
||||
|
||||
// should also include properties from ancestors, including 7.0.0-rc1
|
||||
'7.0.0-rc1': true,
|
||||
'5.4.1': true,
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
// should also include properties from ancestors, including 7.0.0-rc1
|
||||
'7.0.0-rc1': true,
|
||||
'5.4.1': true,
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
|
||||
// tag the 7.0.0 doc
|
||||
await savedObjectsClient.update('config', '7.0.0', {
|
||||
|
@ -182,18 +190,20 @@ describe('createOrUpgradeSavedConfig()', () => {
|
|||
savedObjectsClient,
|
||||
version: '6.2.3-rc1',
|
||||
buildNum: 62310,
|
||||
log: sinon.stub()
|
||||
log: sinon.stub(),
|
||||
});
|
||||
|
||||
const config623rc1 = await savedObjectsClient.get('config', '6.2.3-rc1');
|
||||
expect(config623rc1).to.have.property('attributes').eql({
|
||||
// should have the new build number
|
||||
buildNum: 62310,
|
||||
expect(config623rc1)
|
||||
.to.have.property('attributes')
|
||||
.eql({
|
||||
// should have the new build number
|
||||
buildNum: 62310,
|
||||
|
||||
// should also include properties from ancestors, but not 7.0.0-rc1 or 7.0.0
|
||||
'5.4.1': true,
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
// should also include properties from ancestors, but not 7.0.0-rc1 or 7.0.0
|
||||
'5.4.1': true,
|
||||
'5.4.0': true,
|
||||
'5.4.0-rc1': true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import { createTestCluster } from '../../../../../test_utils/es';
|
||||
import { createEsTestCluster } from '@kbn/test';
|
||||
import { createToolingLog } from '@kbn/dev-utils';
|
||||
import * as kbnTestServer from '../../../../../test_utils/kbn_server';
|
||||
|
||||
let kbnServer;
|
||||
|
@ -6,9 +7,17 @@ let services;
|
|||
let es;
|
||||
|
||||
export async function startServers() {
|
||||
es = createTestCluster();
|
||||
const log = createToolingLog('debug');
|
||||
log.pipe(process.stdout);
|
||||
log.indent(6);
|
||||
|
||||
log.info('starting elasticsearch');
|
||||
log.indent(4);
|
||||
|
||||
es = createEsTestCluster({ log });
|
||||
this.timeout(es.getStartTimeout());
|
||||
|
||||
log.indent(-4);
|
||||
await es.start();
|
||||
|
||||
kbnServer = kbnTestServer.createServerWithCorePlugins();
|
||||
|
@ -35,7 +44,7 @@ export function getServices() {
|
|||
kbnServer,
|
||||
callCluster,
|
||||
savedObjectsClient,
|
||||
uiSettings
|
||||
uiSettings,
|
||||
};
|
||||
|
||||
return services;
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import { esTestConfig } from '../../src/test_utils/es';
|
||||
import { kibanaTestServerUrlParts } from '../../test/kibana_test_server_url_parts';
|
||||
import { esTestConfig, kbnTestConfig } from '@kbn/test';
|
||||
import { resolve } from 'path';
|
||||
|
||||
const SECOND = 1000;
|
||||
|
@ -47,14 +46,14 @@ module.exports = function (grunt) {
|
|||
'--optimize.enabled=false',
|
||||
'--elasticsearch.url=' + esTestConfig.getUrl(),
|
||||
'--elasticsearch.healthCheck.delay=' + HOUR,
|
||||
'--server.port=' + kibanaTestServerUrlParts.port,
|
||||
'--server.port=' + kbnTestConfig.getPort(),
|
||||
'--server.xsrf.disableProtection=true',
|
||||
];
|
||||
|
||||
const funcTestServerFlags = [
|
||||
'--server.maxPayloadBytes=1648576', //default is 1048576
|
||||
'--elasticsearch.url=' + esTestConfig.getUrl(),
|
||||
'--server.port=' + kibanaTestServerUrlParts.port,
|
||||
'--server.port=' + kbnTestConfig.getPort(),
|
||||
];
|
||||
|
||||
const browserTestServerFlags = [
|
||||
|
|
|
@ -6,6 +6,7 @@ import {
|
|||
|
||||
export default async function ({ readConfigFile }) {
|
||||
const commonConfig = await readConfigFile(require.resolve('../common/config'));
|
||||
const functionalConfig = await readConfigFile(require.resolve('../functional/config'));
|
||||
|
||||
return {
|
||||
testFiles: [
|
||||
|
@ -22,6 +23,14 @@ export default async function ({ readConfigFile }) {
|
|||
servers: commonConfig.get('servers'),
|
||||
junit: {
|
||||
reportName: 'API Integration Tests'
|
||||
}
|
||||
},
|
||||
env: commonConfig.get('env'),
|
||||
esTestCluster: commonConfig.get('esTestCluster'),
|
||||
kibanaServerArgs: [
|
||||
...functionalConfig.get('kibanaServerArgs'),
|
||||
'--optimize.enabled=false',
|
||||
'--elasticsearch.healthCheck.delay=3600000',
|
||||
'--server.xsrf.disableProtection=true',
|
||||
],
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import { format as formatUrl } from 'url';
|
||||
import { OPTIMIZE_BUNDLE_DIR, esTestConfig, kbnTestConfig } from '@kbn/test';
|
||||
import {
|
||||
KibanaServerProvider,
|
||||
EsProvider,
|
||||
|
@ -5,15 +7,37 @@ import {
|
|||
RetryProvider,
|
||||
} from './services';
|
||||
|
||||
import { esTestConfig } from '../../src/test_utils/es';
|
||||
import { kibanaTestServerUrlParts } from '../kibana_test_server_url_parts';
|
||||
|
||||
export default function () {
|
||||
const servers = {
|
||||
kibana: kbnTestConfig.getUrlParts(),
|
||||
elasticsearch: esTestConfig.getUrlParts(),
|
||||
};
|
||||
|
||||
return {
|
||||
servers: {
|
||||
kibana: kibanaTestServerUrlParts,
|
||||
elasticsearch: esTestConfig.getUrlParts(),
|
||||
servers,
|
||||
|
||||
esTestCluster: {
|
||||
license: 'oss',
|
||||
from: 'snapshot',
|
||||
serverArgs: [
|
||||
],
|
||||
},
|
||||
|
||||
kibanaServerArgs: [
|
||||
'--env=development',
|
||||
'--logging.json=false',
|
||||
'--no-base-path',
|
||||
`--server.port=${kbnTestConfig.getPort()}`,
|
||||
`--optimize.watchPort=${kbnTestConfig.getPort()}`,
|
||||
'--optimize.watchPrebuild=true',
|
||||
'--status.allowAnonymous=true',
|
||||
'--optimize.enabled=true',
|
||||
`--optimize.bundleDir=${OPTIMIZE_BUNDLE_DIR}`,
|
||||
`--elasticsearch.url=${formatUrl(servers.elasticsearch)}`,
|
||||
`--elasticsearch.username=${servers.elasticsearch.username}`,
|
||||
`--elasticsearch.password=${servers.elasticsearch.password}`,
|
||||
],
|
||||
|
||||
services: {
|
||||
kibanaServer: KibanaServerProvider,
|
||||
retry: RetryProvider,
|
||||
|
|
|
@ -78,6 +78,16 @@ export default async function ({ readConfigFile }) {
|
|||
failureDebugging: FailureDebuggingProvider,
|
||||
},
|
||||
servers: commonConfig.get('servers'),
|
||||
|
||||
env: commonConfig.get('env'),
|
||||
|
||||
esTestCluster: commonConfig.get('esTestCluster'),
|
||||
|
||||
kibanaServerArgs: [
|
||||
...commonConfig.get('kibanaServerArgs'),
|
||||
'--oss',
|
||||
],
|
||||
|
||||
apps: {
|
||||
status_page: {
|
||||
pathname: '/status',
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
import { kibanaUser } from './shield';
|
||||
|
||||
export const kibanaTestServerUrlParts = {
|
||||
protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
|
||||
hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',
|
||||
port: parseInt(process.env.TEST_KIBANA_PORT, 10) || 5620,
|
||||
auth: kibanaUser.username + ':' + kibanaUser.password,
|
||||
username: kibanaUser.username,
|
||||
password: kibanaUser.password,
|
||||
};
|
|
@ -35,14 +35,7 @@ mkdir -p "$installDir"
|
|||
tar -xzf "$linuxBuild" -C "$installDir" --strip=1
|
||||
|
||||
|
||||
echo " -> Running api integration tests"
|
||||
cd "$XPACK_DIR"
|
||||
node scripts/functional_tests_api --kibana-install-dir "$installDir" --es-from=source
|
||||
echo ""
|
||||
echo ""
|
||||
|
||||
|
||||
echo " -> Running functional tests"
|
||||
echo " -> Running functional and api tests"
|
||||
cd "$XPACK_DIR"
|
||||
xvfb-run node scripts/functional_tests --bail --kibana-install-dir "$installDir" --es-from=source
|
||||
echo ""
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
const env = process.env;
|
||||
|
||||
export const kibanaUser = {
|
||||
username: env.TEST_KIBANA_USER || 'elastic',
|
||||
password: env.TEST_KIBANA_PASS || 'changeme'
|
||||
};
|
||||
|
||||
export const kibanaServer = {
|
||||
username: env.TEST_KIBANA_SERVER_USER || 'kibana',
|
||||
password: env.TEST_KIBANA_SERVER_PASS || 'changeme'
|
||||
};
|
||||
|
||||
export const admin = {
|
||||
username: env.TEST_ES_USER || 'elastic',
|
||||
password: env.TEST_ES_PASS || 'changeme'
|
||||
};
|
|
@@ -48,7 +48,15 @@ yarn test:server

#### Running functional tests

The functional tests are run against a live browser, Kibana, and Elasticsearch install. They build their own version of elasticsearch and x-pack-elasticsearch, run the builds automatically, startup the kibana server, and run the tests against them.
The functional UI tests, the API integration tests, and the SAML API integration tests are all run against a live browser, Kibana, and Elasticsearch install. Each set of tests is specified with a unique config that describes how to start the Elasticsearch server, the Kibana server, and which tests to run against them. The sets of tests that exist today are *functional UI tests* ([specified by this config](test/functional/config.js)), *API integration tests* ([specified by this config](test/api_integration/config.js)), and *SAML API integration tests* ([specified by this config](test/saml_api_integration/config.js)).

The script runs all sets of tests sequentially like so:
* builds Elasticsearch and X-Pack
* runs Elasticsearch with X-Pack
* starts up the Kibana server with X-Pack
* runs the functional UI tests against those servers
* tears down the servers
* repeats the same process for the API and SAML API integration test configs.

To do all of this in a single command, run:
@ -56,61 +64,60 @@ To do all of this in a single command run:
|
|||
node scripts/functional_tests
|
||||
```
|
||||
|
||||
If you are **developing functional tests** then you probably don't want to rebuild elasticsearch and wait for all that setup on every test run, so instead use this command to get started:
|
||||
#### Running UI tests
|
||||
|
||||
The functional UI tests can be run separately like so:
|
||||
|
||||
```sh
|
||||
node scripts/functional_tests_server
|
||||
node scripts/functional_tests --config test/functional/config
|
||||
```
|
||||
|
||||
After both Elasticsearch and Kibana are running, open a new terminal (without tearing down Elasticsearch, Kibana, etc.) and use the following to run the tests:
|
||||
|
||||
```sh
|
||||
# this command accepts a bunch of arguments to tweak the run, try sending --help to learn more
|
||||
node ../scripts/functional_test_runner
|
||||
```
|
||||
It does the same as the previous command, except that it only does setup/test/teardown for the UI tests.
|
||||
|
||||
#### Running API integration tests
|
||||
|
||||
API integration tests are very similar to functional tests in a sense that they are organized in the same way and run against live Kibana and Elasticsearch instances.
|
||||
The difference is that API integration tests are intended to test only programmatic API exposed by Kibana. There is no need to run browser and simulate user actions that significantly reduces execution time.
|
||||
API integration tests are run with a unique setup usually without UI assets built for the Kibana server.
|
||||
|
||||
To build, run `x-pack-kibana` with `x-pack-elasticsearch` and then run API integration tests against them use the following command:
|
||||
API integration tests are intended to test _only programmatic API exposed by Kibana_. There is no need to run browser and simulate user actions, which significantly reduces execution time. In addition, the configuration for API integration tests typically sets `optimize.enabled=false` for Kibana because UI assets are usually not needed for these tests.
|
||||
|
||||
The API integration tests can be run separately like so:
|
||||
|
||||
```sh
|
||||
node scripts/functional_tests_api
|
||||
node scripts/functional_tests --config test/api_integration/config
|
||||
```
|
||||
|
||||
If you are **developing api integration tests** then you probably don't want to rebuild `x-pack-elasticsearch` and wait for all that setup on every test run, so instead use this command to get started:
|
||||
#### Running SAML API integration tests
|
||||
|
||||
We also have SAML API integration tests which set up Elasticsearch and Kibana with SAML support. Run API integration tests separately with SAML support like so:
|
||||
|
||||
```sh
|
||||
node scripts/functional_tests --config test/saml_api_integration/config
|
||||
```
|
||||
|
||||
#### Developing functional tests
|
||||
|
||||
If you are **developing functional tests** then you probably don't want to rebuild Elasticsearch and wait for all that setup on every test run, so instead use this command to build and start just the Elasticsearch and Kibana servers:
|
||||
|
||||
```sh
|
||||
node scripts/functional_tests_server
|
||||
```
|
||||
|
||||
Once Kibana and Elasticsearch are up and running open a new terminal and run this command to just run the tests (without tearing down Elasticsearch, Kibana, etc.)
|
||||
After the servers are started, open a new terminal and run this command to run just the tests (without tearing down Elasticsearch or Kibana):
|
||||
|
||||
```sh
|
||||
# this command accepts a bunch of arguments to tweak the run, try sending --help to learn more
|
||||
node ../scripts/functional_test_runner --config test/api_integration/config.js
|
||||
```
You can also run API integration tests with SAML support. The `--saml` option configures both Kibana and Elasticsearch with the SAML security realm, as required by the SAML security API.

Start the functional test server with SAML support:

```sh
node scripts/functional_tests_server --saml
```

Then run the tests with:

```sh
# make sure you are in the x-pack directory
cd x-pack

# use a different config for SAML
node ../scripts/functional_test_runner --config test/saml_api_integration/config.js
```
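For reference, `--saml` boils down to starting Elasticsearch with a SAML realm pointed at a test IdP metadata fixture, and starting Kibana with `--server.xsrf.whitelist=["/api/security/v1/saml"]` and `--xpack.security.authProviders=["saml"]`. The Elasticsearch settings applied by the test setup in this commit look roughly like this (the port and path normally come from the FTR config; they are inlined here for readability):

```js
import { resolve } from 'path';

// values normally read from the FTR config; shown inline for illustration
const kibanaPort = 5640;
const idpPath = resolve(__dirname, 'test/saml_api_integration/fixtures/idp_metadata.xml');

const samlEsArgs = [
  'xpack.security.enabled=true',
  'xpack.security.authc.token.enabled=true',
  'xpack.security.authc.token.timeout=15s',
  'xpack.security.authc.realms.saml1.type=saml',
  'xpack.security.authc.realms.saml1.order=0',
  `xpack.security.authc.realms.saml1.idp.metadata.path=${idpPath}`,
  'xpack.security.authc.realms.saml1.idp.entity_id=http://www.elastic.co',
  `xpack.security.authc.realms.saml1.sp.entity_id=http://localhost:${kibanaPort}`,
  `xpack.security.authc.realms.saml1.sp.logout=http://localhost:${kibanaPort}/logout`,
  `xpack.security.authc.realms.saml1.sp.acs=http://localhost:${kibanaPort}/api/security/v1/saml`,
  'xpack.security.authc.realms.saml1.attributes.principal=urn:oid:0.0.7',
];
```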
For both of the above commands, it's crucial that you point the functional test runner (via `--config`) at the same config file the servers were started with. This makes sure that the right tests will run against the right servers. Typically a set of tests and a server configuration go together.

Read more about how the scripts work [here](scripts/README.md).

For a deeper dive, read more about the way functional tests and servers work [here](packages/kbn-test/README.md).
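As with the test runner command, `node scripts/functional_tests_server` is just a thin wrapper around `@kbn/test`: the x-pack entry point updated in this commit hands a single config to `startServersCli`, so a plugin can wire up an equivalent script of its own.

```js
// x-pack/scripts/functional_tests_server.js (as updated in this commit)
require('@kbn/plugin-helpers').babelRegister();
require('@kbn/test').startServersCli(
  require.resolve('../test/functional/config.js'),
);
```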
### Issues starting dev mode or creating builds

You may see an error like this when you are getting started:
@ -1,11 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export {
  runFunctionTests,
  runApiTests,
  runFunctionalTestsServer,
} from './tasks';

@ -1,17 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

const $isCliError = Symbol('isCliError');

export function createCliError(message) {
  const error = new Error(message);
  error[$isCliError] = true;
  return error;
}

export function isCliError(error) {
  return error && !!error[$isCliError];
}

@ -1,14 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { readFtrConfigFile } from '@kbn/plugin-helpers';

import { FTR_CONFIG_PATH } from './paths';
import { log } from './log';

export async function getFtrConfig() {
  return await readFtrConfigFile(log, FTR_CONFIG_PATH);
}

@ -1,14 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export { getFtrConfig } from './get_ftr_config';

export { runKibanaServer } from './run_kibana_server';
export { runEsWithXpack } from './run_es_with_xpack';
export { runFtr } from './run_ftr';
export { log } from './log';
export { KIBANA_FTR_SCRIPT } from './paths';
export { isCliError } from './errors';

@ -1,10 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { createToolingLog } from '@kbn/dev-utils';

export const log = createToolingLog('debug');
log.pipe(process.stdout);

@ -1,22 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { resolve } from 'path';

import { resolveKibanaPath } from '@kbn/plugin-helpers';

function useBat(bin) {
  return process.platform.startsWith('win') ? `${bin}.bat` : bin;
}

export const KIBANA_BIN_PATH = useBat(resolveKibanaPath('bin/kibana'));
export const KIBANA_ROOT = resolveKibanaPath('');
export const XPACK_KIBANA_ROOT = resolve(KIBANA_ROOT, 'x-pack');
export const GULP_COMMAND_PATH = resolve(XPACK_KIBANA_ROOT, 'node_modules/.bin/gulp');
export const KIBANA_FTR_SCRIPT = resolve(KIBANA_ROOT, 'scripts/functional_test_runner');
export const PROJECT_ROOT = resolve(__dirname, '../../../');
export const FTR_CONFIG_PATH = resolve(PROJECT_ROOT, 'test/functional/config');
export const OPTIMIZE_BUNDLE_DIR = resolve(KIBANA_ROOT, 'optimize/xpackTestUiServer');

@ -1,50 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { resolve } from 'path';
import { createTestCluster } from '../../../../src/test_utils/es/es_test_cluster';
import { log } from './log';

import { setupUsers, DEFAULT_SUPERUSER_PASS } from './auth';

export async function runEsWithXpack({ ftrConfig, useSAML = false, from }) {
  const cluster = createTestCluster({
    port: ftrConfig.get('servers.elasticsearch.port'),
    password: DEFAULT_SUPERUSER_PASS,
    license: 'trial',
    from,
    log,
  });

  const kibanaPort = ftrConfig.get('servers.kibana.port');
  const idpPath = resolve(
    __dirname,
    '../../../test/saml_api_integration/fixtures/idp_metadata.xml'
  );

  const esArgs = [
    'xpack.security.enabled=true',
  ];

  const samlEsArgs = [
    ...esArgs,
    'xpack.security.authc.token.enabled=true',
    'xpack.security.authc.token.timeout=15s',
    'xpack.security.authc.realms.saml1.type=saml',
    'xpack.security.authc.realms.saml1.order=0',
    `xpack.security.authc.realms.saml1.idp.metadata.path=${idpPath}`,
    'xpack.security.authc.realms.saml1.idp.entity_id=http://www.elastic.co',
    `xpack.security.authc.realms.saml1.sp.entity_id=http://localhost:${kibanaPort}`,
    `xpack.security.authc.realms.saml1.sp.logout=http://localhost:${kibanaPort}/logout`,
    `xpack.security.authc.realms.saml1.sp.acs=http://localhost:${kibanaPort}/api/security/v1/saml`,
    'xpack.security.authc.realms.saml1.attributes.principal=urn:oid:0.0.7',
  ];

  await cluster.start(useSAML ? samlEsArgs : esArgs);
  await setupUsers(log, ftrConfig);

  return cluster;
}

@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */


import {
  KIBANA_FTR_SCRIPT,
  PROJECT_ROOT
} from './paths';

export async function runFtr({ procs, configPath, bail }) {
  const args = [KIBANA_FTR_SCRIPT, '--debug'];

  if (configPath) {
    args.push('--config', configPath);
  }

  if (bail) {
    args.push('--bail');
  }

  await procs.run('ftr', {
    cmd: 'node',
    args,
    cwd: PROJECT_ROOT,
    wait: true
  });
}

@ -1,76 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { resolve, relative } from 'path';
import { format as formatUrl } from 'url';

import {
  KIBANA_ROOT,
  KIBANA_BIN_PATH,
  OPTIMIZE_BUNDLE_DIR
} from './paths';

export async function runKibanaServer(options) {
  const {
    procs,
    ftrConfig,
    devMode = false,
    enableUI = true,
    useSAML = false,
    existingInstallDir = null,
  } = options;

  if (devMode && existingInstallDir) {
    throw new Error('Kibana installations can not be run in dev mode');
  }

  const runFromSourceArgs = existingInstallDir
    ? ['--optimize.useBundleCache=true']
    : [
      '--no-base-path',
      `--optimize.bundleDir=${OPTIMIZE_BUNDLE_DIR}`,
    ];

  const samlArgs = useSAML ? [
    '--server.xsrf.whitelist=[\"/api/security/v1/saml\"]',
    '--xpack.security.authProviders=[\"saml\"]',
  ] : [];

  // start the kibana server and wait for it to log "Server running" before resolving
  await procs.run('kibana', {
    cwd: existingInstallDir || KIBANA_ROOT,

    cmd: existingInstallDir
      ? resolve(existingInstallDir, relative(KIBANA_ROOT, KIBANA_BIN_PATH))
      : KIBANA_BIN_PATH,

    args: [
      ...runFromSourceArgs,
      devMode ? '--dev' : '--env=development',
      '--logging.json=false',
      `--server.port=${ftrConfig.get('servers.kibana.port')}`,
      `--server.uuid=${ftrConfig.get('env').kibana.server.uuid}`,
      `--elasticsearch.url=${formatUrl(ftrConfig.get('servers.elasticsearch'))}`,
      `--optimize.enabled=${enableUI}`,
      `--optimize.lazyPort=${ftrConfig.get('servers.kibana.port') + 1}`,
      '--optimize.lazyPrebuild=true',
      '--status.allowAnonymous=true',
      `--elasticsearch.username=${ftrConfig.get('servers.elasticsearch.username')}`,
      `--elasticsearch.password=${ftrConfig.get('servers.elasticsearch.password')}`,
      '--xpack.security.encryptionKey="wuGNaIhoMpk5sO4UBxgr3NyW1sFcLgIf"', // server restarts should not invalidate active sessions
      '--xpack.monitoring.kibana.collection.enabled=false',
      '--xpack.xpack_main.telemetry.enabled=false',
      ...samlArgs,
    ],

    env: {
      FORCE_COLOR: 1,
      ...process.env,
    },

    wait: /Server running/,
  });
}

@ -1,186 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { relative } from 'path';
import Rx from 'rxjs/Rx';
import { Command } from 'commander';
import { withProcRunner } from '@kbn/dev-utils';

import {
  getFtrConfig,
  runKibanaServer,
  runEsWithXpack,
  runFtr,
  log,
  KIBANA_FTR_SCRIPT,
  isCliError,
} from './lib';

const SUCCESS_MESSAGE = `

Elasticsearch and Kibana are ready for functional testing. Start the functional tests
in another terminal session by running this command from this directory:

    node ${relative(process.cwd(), KIBANA_FTR_SCRIPT)}

`;

export function fatalErrorHandler(err) {
  log.error('FATAL ERROR');
  log.error(isCliError(err) ? err.message : err);
  process.exit(1);
}

export async function runFunctionTests() {
  try {
    const cmd = new Command('node scripts/functional_tests');

    cmd
      .option(
        '--bail',
        'Stop the functional_test_runner as soon as a failure occurs'
      )
      .option(
        '--kibana-install-dir <path>',
        'Run Kibana from an existing install directory'
      )
      .option(
        '--es-from <from>',
        'Run ES from either source or snapshot [default: snapshot]'
      )
      .parse(process.argv);

    await withProcRunner(log, async procs => {
      const ftrConfig = await getFtrConfig();

      const es = await runEsWithXpack({ ftrConfig, from: cmd.esFrom });
      await runKibanaServer({
        procs,
        ftrConfig,
        existingInstallDir: cmd.kibanaInstallDir,
      });
      await runFtr({
        procs,
        bail: cmd.bail,
      });

      await procs.stop('kibana');
      await es.cleanup();
    });
  } catch (err) {
    fatalErrorHandler(err);
  }
}

export async function runApiTests() {
  const cmd = new Command('node scripts/functional_tests_api');

  cmd
    .option(
      '--bail',
      'Stop the functional_test_runner as soon as a failure occurs'
    )
    .option(
      '--kibana-install-dir <path>',
      'Run Kibana from an existing install directory'
    )
    .option(
      '--es-from <from>',
      'Run ES from either source or snapshot [default: snapshot]'
    )
    .parse(process.argv);

  try {
    await withProcRunner(log, async procs => {
      const ftrConfig = await getFtrConfig();

      const es = await runEsWithXpack({ ftrConfig, from: cmd.esFrom });
      await runKibanaServer({
        procs,
        ftrConfig,
        enableUI: true,
        existingInstallDir: cmd.kibanaInstallDir,
      });
      await runFtr({
        procs,
        configPath: require.resolve('../../test/api_integration/config.js'),
        bail: cmd.bail,
      });

      await procs.stop('kibana');
      await es.cleanup();

      // Run SAML specific API integration tests.
      const samlEs = await runEsWithXpack({
        ftrConfig,
        useSAML: true,
        from: cmd.esFrom,
      });
      await runKibanaServer({
        procs,
        ftrConfig,
        enableUI: false,
        useSAML: true,
        existingInstallDir: cmd.kibanaInstallDir,
      });
      await runFtr({
        procs,
        configPath: require.resolve(
          '../../test/saml_api_integration/config.js'
        ),
      });

      await procs.stop('kibana');
      await samlEs.cleanup();
    });
  } catch (err) {
    fatalErrorHandler(err);
  }
}

export async function runFunctionalTestsServer() {
  const cmd = new Command('node scripts/functional_test_server');

  cmd
    .option(
      '--saml',
      'Run Elasticsearch and Kibana with configured SAML security realm',
      false
    )
    .option(
      '--es-from <from>',
      'Run ES from either source or snapshot [default: snapshot]'
    )
    .parse(process.argv);

  const useSAML = cmd.saml;

  try {
    await withProcRunner(log, async procs => {
      const ftrConfig = await getFtrConfig();
      await runEsWithXpack({ ftrConfig, useSAML, from: cmd.esFrom });
      await runKibanaServer({
        devMode: true,
        procs,
        ftrConfig,
        useSAML,
      });

      // wait for 5 seconds of silence before logging the
      // success message so that it doesn't get buried
      await Rx.Observable.fromEvent(log, 'data')
        .startWith(null)
        .switchMap(() => Rx.Observable.timer(5000))
        .take(1)
        .toPromise();

      log.success(SUCCESS_MESSAGE);
      await procs.waitForAllToStop();
    });
  } catch (err) {
    fatalErrorHandler(err);
  }
}

@ -24,7 +24,9 @@
  },
  "devDependencies": {
    "@kbn/dev-utils": "link:../packages/kbn-dev-utils",
    "@kbn/es": "link:../packages/kbn-es",
    "@kbn/plugin-helpers": "link:../packages/kbn-plugin-helpers",
    "@kbn/test": "link:../packages/kbn-test",
    "@types/jest": "^22.2.3",
    "abab": "^1.0.4",
    "ansicolors": "0.3.2",

@ -5,4 +5,8 @@
 */

require('@kbn/plugin-helpers').babelRegister();
require('../dev-tools/functional_tests').runFunctionTests();
require('@kbn/test').runTestsCli([
  require.resolve('../test/functional/config.js'),
  require.resolve('../test/api_integration/config.js'),
  require.resolve('../test/saml_api_integration/config.js'),
]);

@ -1,8 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

require('@kbn/plugin-helpers').babelRegister();
require('../dev-tools/functional_tests').runApiTests();

@ -5,4 +5,6 @@
 */

require('@kbn/plugin-helpers').babelRegister();
require('../dev-tools/functional_tests').runFunctionalTestsServer();
require('@kbn/test').startServersCli(
  require.resolve('../test/functional/config.js'),
);

@ -9,10 +9,9 @@ import { SupertestWithoutAuthProvider } from './services';

export default async function ({ readConfigFile }) {

  // Read the Kibana API integration tests config file so that we can utilize its services.
  const kibanaAPITestsConfig = await readConfigFile(resolveKibanaPath('test/api_integration/config.js'));
  const xPackFunctionalTestsConfig = await readConfigFile(require.resolve('../functional/config.js'));
  const kibanaFunctionalConfig = await readConfigFile(resolveKibanaPath('test/functional/config.js'));
  const kibanaCommonConfig = await readConfigFile(resolveKibanaPath('test/common/config.js'));

  return {
    testFiles: [require.resolve('./apis')],

@ -21,12 +20,15 @@ export default async function ({ readConfigFile }) {
      supertest: kibanaAPITestsConfig.get('services.supertest'),
      esSupertest: kibanaAPITestsConfig.get('services.esSupertest'),
      supertestWithoutAuth: SupertestWithoutAuthProvider,
      es: kibanaFunctionalConfig.get('services.es'),
      esArchiver: kibanaFunctionalConfig.get('services.esArchiver'),
      es: kibanaCommonConfig.get('services.es'),
      esArchiver: kibanaCommonConfig.get('services.esArchiver'),
    },
    esArchiver: xPackFunctionalTestsConfig.get('esArchiver'),
    junit: {
      reportName: 'X-Pack API Integration Tests',
    },
    env: xPackFunctionalTestsConfig.get('env'),
    kibanaServerArgs: xPackFunctionalTestsConfig.get('kibanaServerArgs'),
    esTestCluster: xPackFunctionalTestsConfig.get('esTestCluster'),
  };
}

@ -8,6 +8,7 @@

import { resolve } from 'path';
import { resolveKibanaPath } from '@kbn/plugin-helpers';
import { format as formatUrl } from 'url';

import {
  SecurityPageProvider,

@ -53,11 +54,37 @@ import {
// that returns an object with the projects config values
export default async function ({ readConfigFile }) {

  // read the Kibana config file so that we can utilize some of
  // its services and PageObjects
  const kibanaConfig = await readConfigFile(resolveKibanaPath('test/functional/config.js'));
  const kibanaCommonConfig = await readConfigFile(resolveKibanaPath('test/common/config.js'));
  const kibanaFunctionalConfig = await readConfigFile(resolveKibanaPath('test/functional/config.js'));
  const kibanaAPITestsConfig = await readConfigFile(resolveKibanaPath('test/api_integration/config.js'));

  const servers = {
    elasticsearch: {
      protocol: process.env.TEST_ES_PROTOCOL || 'http',
      hostname: process.env.TEST_ES_HOSTNAME || 'localhost',
      port: parseInt(process.env.TEST_ES_PORT, 10) || 9240,
      auth: 'elastic:changeme',
      username: 'elastic',
      password: 'changeme',
    },
    kibana: {
      protocol: process.env.TEST_KIBANA_PROTOCOL || 'http',
      hostname: process.env.TEST_KIBANA_HOSTNAME || 'localhost',
      port: parseInt(process.env.TEST_KIBANA_PORT, 10) || 5640,
      auth: 'elastic:changeme',
      username: 'elastic',
      password: 'changeme',
    },
  };

  const env = {
    kibana: {
      server: {
        uuid: '5b2de169-2785-441b-ae8c-186a1936b17d', // Kibana UUID for "primary" cluster in monitoring data
      }
    }
  };

  return {
    // list paths to the files that contain your plugins tests
    testFiles: [

@ -75,7 +102,7 @@ export default async function ({ readConfigFile }) {
    // available to your tests. If you don't specify anything here
    // only the built-in services will be avaliable
    services: {
      ...kibanaConfig.get('services'),
      ...kibanaFunctionalConfig.get('services'),
      esSupertest: kibanaAPITestsConfig.get('services.esSupertest'),
      monitoringNoData: MonitoringNoDataProvider,
      monitoringClusterList: MonitoringClusterListProvider,

@ -108,7 +135,7 @@ export default async function ({ readConfigFile }) {
    // just like services, PageObjects are defined as a map of
    // names to Providers. Merge in Kibana's or pick specific ones
    pageObjects: {
      ...kibanaConfig.get('pageObjects'),
      ...kibanaFunctionalConfig.get('pageObjects'),
      security: SecurityPageProvider,
      reporting: ReportingPageProvider,
      monitoring: MonitoringPageProvider,

@ -118,34 +145,35 @@ export default async function ({ readConfigFile }) {
      watcher: WatcherPageProvider,
    },

    servers: {
      elasticsearch: {
        port: 9240,
        auth: 'elastic:changeme',
        username: 'elastic',
        password: 'changeme',
      },
      kibana: {
        port: 5640,
        auth: 'elastic:changeme',
        username: 'elastic',
        password: 'changeme',
      },
    },
    env: {
      kibana: {
        server: {
          uuid: '5b2de169-2785-441b-ae8c-186a1936b17d', // Kibana UUID for "primary" cluster in monitoring data
        }
      }
    servers,

    env,

    esTestCluster: {
      license: 'trial',
      from: 'source',
      serverArgs: [
        'xpack.license.self_generated.type=trial',
        'xpack.security.enabled=true',
      ],
    },

    kibanaServerArgs: [
      ...kibanaCommonConfig.get('kibanaServerArgs'),
      `--server.uuid=${env.kibana.server.uuid}`,
      `--server.port=${servers.kibana.port}`,
      `--elasticsearch.url=${formatUrl(servers.elasticsearch)}`,
      '--xpack.monitoring.kibana.collection.enabled=false',
      '--xpack.xpack_main.telemetry.enabled=false',
      '--xpack.security.encryptionKey="wuGNaIhoMpk5sO4UBxgr3NyW1sFcLgIf"', // server restarts should not invalidate active sessions
    ],

    // the apps section defines the urls that
    // `PageObjects.common.navigateTo(appKey)` will use.
    // Merge urls for your plugin with the urls defined in
    // Kibana's config in order to use this helper
    apps: {
      ...kibanaConfig.get('apps'),
      ...kibanaFunctionalConfig.get('apps'),
      login: {
        pathname: '/login'
      },

@ -5,12 +5,15 @@
 */

import { resolveKibanaPath } from '@kbn/plugin-helpers/lib/index';
import { resolve } from 'path';

export default async function ({ readConfigFile }) {
  // Read the Kibana API integration tests config file so that we can utilize its services.
  const kibanaAPITestsConfig = await readConfigFile(resolveKibanaPath('test/api_integration/config.js'));
  const xPackAPITestsConfig = await readConfigFile(require.resolve('../api_integration/config.js'));

  const kibanaPort = xPackAPITestsConfig.get('servers.kibana.port');
  const idpPath = resolve(__dirname, '../../test/saml_api_integration/fixtures/idp_metadata.xml');

  return {
    testFiles: [require.resolve('./apis')],
    servers: xPackAPITestsConfig.get('servers'),

@ -21,5 +24,30 @@ export default async function ({ readConfigFile }) {
    junit: {
      reportName: 'X-Pack SAML API Integration Tests',
    },
    env: xPackAPITestsConfig.get('env'),

    esTestCluster: {
      ...xPackAPITestsConfig.get('esTestCluster'),
      serverArgs: [
        ...xPackAPITestsConfig.get('esTestCluster.serverArgs'),
        'xpack.security.authc.token.enabled=true',
        'xpack.security.authc.token.timeout=15s',
        'xpack.security.authc.realms.saml1.type=saml',
        'xpack.security.authc.realms.saml1.order=0',
        `xpack.security.authc.realms.saml1.idp.metadata.path=${idpPath}`,
        'xpack.security.authc.realms.saml1.idp.entity_id=http://www.elastic.co',
        `xpack.security.authc.realms.saml1.sp.entity_id=http://localhost:${kibanaPort}`,
        `xpack.security.authc.realms.saml1.sp.logout=http://localhost:${kibanaPort}/logout`,
        `xpack.security.authc.realms.saml1.sp.acs=http://localhost:${kibanaPort}/api/security/v1/saml`,
        'xpack.security.authc.realms.saml1.attributes.principal=urn:oid:0.0.7',
      ],
    },

    kibanaServerArgs: [
      ...xPackAPITestsConfig.get('kibanaServerArgs'),
      '--optimize.enabled=false',
      '--server.xsrf.whitelist=[\"/api/security/v1/saml\"]',
      '--xpack.security.authProviders=[\"saml\"]',
    ],
  };
}

x-pack/yarn.lock (2018 lines changed): file diff suppressed because it is too large

yarn.lock (35 lines changed):
@ -160,6 +160,10 @@
  version "0.0.0"
  uid ""

"@kbn/test@link:packages/kbn-test":
  version "0.0.0"
  uid ""

"@kbn/ui-framework@link:packages/kbn-ui-framework":
  version "0.0.0"
  uid ""

@ -2174,6 +2178,14 @@ chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3, chalk@~1.1.1:
    strip-ansi "^3.0.0"
    supports-color "^2.0.0"

chalk@^2.4.1:
  version "2.4.1"
  resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e"
  dependencies:
    ansi-styles "^3.2.1"
    escape-string-regexp "^1.0.5"
    supports-color "^5.3.0"

chalk@~0.5.1:
  version "0.5.1"
  resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174"

@ -5142,6 +5154,10 @@ getopts@^2.0.0:
  version "2.0.5"
  resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.0.5.tgz#e4d3948e87fd9fb50c8a0f2912f4de16301fb8ae"

getopts@^2.0.6:
  version "2.0.6"
  resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.0.6.tgz#4788d533a977527e79efd57b5e742ffa0dd33105"

getos@^3.1.0:
  version "3.1.0"
  resolved "https://registry.yarnpkg.com/getos/-/getos-3.1.0.tgz#db3aa4df15a3295557ce5e81aa9e3e5cdfaa6567"

@ -11026,6 +11042,12 @@ rxjs@5.4.3:
  dependencies:
    symbol-observable "^1.0.1"

rxjs@^5.4.3:
  version "5.5.10"
  resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.5.10.tgz#fde02d7a614f6c8683d0d1957827f492e09db045"
  dependencies:
    symbol-observable "1.0.1"

safe-buffer@5.1.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
  version "5.1.1"
  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"

@ -11960,6 +11982,10 @@ svgo@^0.7.0:
    sax "~1.2.1"
    whet.extend "~0.9.9"

symbol-observable@1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4"

symbol-observable@^1.0.1, symbol-observable@^1.0.3:
  version "1.2.0"
  resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"

@ -12023,6 +12049,15 @@ tar-fs@^1.16.0:
    pump "^1.0.0"
    tar-stream "^1.1.2"

tar-fs@^1.16.2:
  version "1.16.2"
  resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.2.tgz#17e5239747e399f7e77344f5f53365f04af53577"
  dependencies:
    chownr "^1.0.1"
    mkdirp "^0.5.1"
    pump "^1.0.0"
    tar-stream "^1.1.2"

tar-pack@^3.4.0:
  version "3.4.1"
  resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f"