[kbn-es] Package for managing Elasticsearch during dev and testing (#17168)

Signed-off-by: Tyler Smalley <tyler.smalley@elastic.co>
Tyler Smalley 2018-03-20 08:30:15 -07:00 committed by GitHub
parent 4a501cb4d2
commit 82e17f435f
46 changed files with 2861 additions and 1292 deletions

@@ -19,6 +19,7 @@ module.exports = {
files: [
'.eslintrc.js',
'packages/kbn-pm/**/*',
'packages/kbn-es/**/*',
'packages/kbn-datemath/**/*.js',
'packages/kbn-plugin-generator/**/*',
],

.gitignore

@@ -1,6 +1,7 @@
.aws-config.json
.signing-config.json
.ackrc
/.es
.DS_Store
.node_binaries
node_modules
@@ -18,7 +19,6 @@ target
/test/*/screenshots/session
/test/*/screenshots/visual_regression_gallery.html
/html_docs
/esvm
.htpasswd
.eslintcache
/plugins/

@@ -168,12 +168,14 @@ yarn kbn bootstrap
(You can also run `yarn kbn` to see the other available commands. For more info about this tool, see https://github.com/elastic/kibana/tree/master/packages/kbn-pm.)
Start elasticsearch.
Start elasticsearch from a nightly snapshot.
```bash
yarn elasticsearch
yarn es snapshot
```
Additional options are available, pass `--help` for more information.
> You'll need to have a `java` binary in `PATH` or set `JAVA_HOME`.
If you're just getting started with `elasticsearch`, you could use the following command to populate your instance with a few fake logs to hit the ground running.

@@ -43,6 +43,8 @@
"scripts": {
"preinstall": "node ./preinstall_check",
"kbn": "node scripts/kbn",
"es": "node scripts/es",
"elasticsearch": "echo 'use `yarn es snapshot -E path.data=../data/`'",
"test": "grunt test",
"test:dev": "grunt test:dev",
"test:quick": "grunt test:quick",
@@ -60,7 +62,6 @@
"debug": "node --nolazy --inspect --debug-brk scripts/kibana --dev",
"precommit": "node scripts/precommit_hook",
"karma": "karma start",
"elasticsearch": "grunt esvm:dev:keepalive",
"lint": "echo 'use `node scripts/eslint`' && false",
"lintroller": "echo 'use `node scripts/eslint --fix`' && false",
"makelogs": "echo 'use `node scripts/makelogs`' && false",
@@ -215,6 +216,7 @@
"@elastic/eslint-config-kibana": "link:packages/eslint-config-kibana",
"@elastic/eslint-import-resolver-kibana": "1.0.0",
"@elastic/eslint-plugin-kibana-custom": "link:packages/eslint-plugin-kibana-custom",
"@kbn/es": "link:packages/kbn-es",
"@kbn/plugin-generator": "link:packages/kbn-plugin-generator",
"angular-mocks": "1.4.7",
"babel-eslint": "8.1.2",
@@ -249,7 +251,6 @@
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "1.0.0",
"grunt-contrib-copy": "0.8.1",
"grunt-esvm": "3.2.12",
"grunt-karma": "2.0.0",
"grunt-run": "0.7.0",
"grunt-simple-mocha": "0.4.0",

@@ -0,0 +1,19 @@
{
"name": "@kbn/es",
"main": "./src/index.js",
"version": "1.0.0",
"license": "Apache-2.0",
"private": true,
"dependencies": {
"@kbn/dev-utils": "link:../kbn-dev-utils",
"chalk": "^2.3.1",
"dedent": "^0.7.0",
"elasticsearch": "^14.1.0",
"execa": "^0.10.0",
"mkdirp": "^0.5.1",
"node-fetch": "^2.0.0",
"simple-git": "^1.91.0",
"tar-fs": "^1.16.0",
"zlib": "^1.0.5"
}
}

@@ -0,0 +1,64 @@
const chalk = require('chalk');
const getopts = require('getopts');
const dedent = require('dedent');
const commands = require('./cli_commands');
function help() {
const availableCommands = Object.keys(commands).map(
name => `${name} - ${commands[name].description}`
);
console.log(dedent`
usage: es <command> [<args>]
Assists with running Elasticsearch for Kibana development
Available commands:
${availableCommands.join('\n ')}
Global options:
--help
`);
}
exports.run = async (defaults = {}) => {
const argv = process.argv.slice(2);
const options = getopts(argv, {
alias: {
h: 'help',
},
default: defaults,
});
const args = options._;
const commandName = args[0];
if (args.length === 0 || (!commandName && options.help)) {
help();
return;
}
const command = commands[commandName];
if (command === undefined) {
console.log(
chalk.red(`[${commandName}] is not a valid command, see 'es --help'`)
);
process.exit(1);
}
if (commandName && options.help) {
console.log(dedent`
usage: ${command.usage || `es ${commandName} [<args>]`}
${command.description}
${command.help(defaults).replace(/\n/g, '\n ')}
`);
return;
}
await command.run(defaults);
};
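
Each module in `cli_commands` follows the contract consumed above: an exported `description`, an optional `usage`, a `help(defaults)` string builder, and an async `run(defaults)`. A minimal sketch of that shape (the `ping` command name and its single option are hypothetical; the real commands follow below):

```js
const dedent = require('dedent');

// Hypothetical command illustrating the interface cli.js expects.
exports.description = 'Example command';
exports.usage = 'es ping [<args>]'; // optional; cli.js falls back to `es <name> [<args>]`

exports.help = (defaults = {}) => dedent`
  Options:

    --base-path   Path containing cache/installations [default: ${
      defaults['base-path']
    }]
`;

exports.run = async (defaults = {}) => {
  // commands re-parse process.argv themselves, using `defaults`
  // as the getopts defaults (see snapshot/source/archive below)
};
```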

@@ -0,0 +1,52 @@
const dedent = require('dedent');
const getopts = require('getopts');
const chalk = require('chalk');
const { Cluster } = require('../cluster');
exports.description = 'Install and run from an Elasticsearch tar';
exports.usage = 'es archive <path> [<args>]';
exports.help = (defaults = {}) => {
return dedent`
Options:
--base-path Path containing cache/installations [default: ${
defaults['base-path']
}]
--install-path Installation path, defaults to 'source' within base-path
-E Additional key=value settings to pass to Elasticsearch
Example:
es archive ../elasticsearch.tar.gz -E cluster.name=test -E path.data=/tmp/es-data
`;
};
exports.run = async (defaults = {}) => {
const argv = process.argv.slice(2);
const options = getopts(argv, {
alias: {
basePath: 'base-path',
installPath: 'install-path',
esArgs: 'E',
},
default: defaults,
});
const cluster = new Cluster();
const [, path] = options._;
if (!path || !path.endsWith('tar.gz')) {
console.warn('you must provide a path to an ES tar file');
return;
}
try {
const { installPath } = await cluster.installArchive(path, options);
await cluster.run(installPath, { esArgs: options.esArgs });
} catch (e) {
console.log(chalk.red(e.stack));
}
};

@@ -0,0 +1,3 @@
exports.snapshot = require('./snapshot');
exports.source = require('./source');
exports.archive = require('./archive');

@@ -0,0 +1,45 @@
const dedent = require('dedent');
const getopts = require('getopts');
const chalk = require('chalk');
const { Cluster } = require('../cluster');
exports.description = 'Download and run from a nightly snapshot';
exports.help = (defaults = {}) => {
return dedent`
Options:
--version Version of ES to download [default: ${defaults.version}]
--base-path Path containing cache/installations [default: ${
defaults['base-path']
}]
--install-path Installation path, defaults to 'source' within base-path
-E Additional key=value settings to pass to Elasticsearch
Example:
es snapshot --version 5.6.8 -E cluster.name=test -E path.data=/tmp/es-data
`;
};
exports.run = async (defaults = {}) => {
const argv = process.argv.slice(2);
const options = getopts(argv, {
alias: {
basePath: 'base-path',
installPath: 'install-path',
esArgs: 'E',
},
default: defaults,
});
const cluster = new Cluster();
try {
const { installPath } = await cluster.installSnapshot(options);
await cluster.run(installPath, { esArgs: options.esArgs });
} catch (e) {
console.log(chalk.red(e.stack));
}
};

@@ -0,0 +1,46 @@
const dedent = require('dedent');
const getopts = require('getopts');
const chalk = require('chalk');
const { Cluster } = require('../cluster');
exports.description = 'Build and run from source';
exports.help = (defaults = {}) => {
return dedent`
Options:
--source-path Path to ES source [default: ${defaults['source-path']}]
--base-path Path containing cache/installations [default: ${
defaults['base-path']
}]
--install-path Installation path, defaults to 'source' within base-path
-E Additional key=value settings to pass to Elasticsearch
Example:
es source --source-path=../elasticsearch -E cluster.name=test -E path.data=/tmp/es-data
`;
};
exports.run = async (defaults = {}) => {
const argv = process.argv.slice(2);
const options = getopts(argv, {
alias: {
basePath: 'base-path',
installPath: 'install-path',
sourcePath: 'source-path',
esArgs: 'E',
},
default: defaults,
});
const cluster = new Cluster();
try {
const { installPath } = await cluster.installSource(options);
await cluster.run(installPath, { esArgs: options.esArgs });
} catch (e) {
console.log(chalk.red(e.stack));
}
};

@@ -0,0 +1,133 @@
const execa = require('execa');
const chalk = require('chalk');
const { installSnapshot, installSource, installArchive } = require('./install');
const { ES_BIN } = require('./paths');
const { log: defaultLog, parseEsLog, extractConfigFiles } = require('./utils');
exports.Cluster = class Cluster {
constructor(log = defaultLog) {
this._log = log;
}
/**
* Builds and installs ES from source
*
* @param {Object} options
* @property {String} options.installPath
* @property {String} options.sourcePath
* @returns {Promise}
*/
async installSource(options = {}) {
this._log.info(chalk.bold('Installing from source'));
this._log.indent(4);
const install = await installSource({ log: this._log, ...options });
this._log.indent(-4);
return install;
}
/**
* Downloads and installs ES from a snapshot
*
* @param {Object} options
* @property {String} options.version
* @property {String} options.installPath
* @returns {Promise}
*/
async installSnapshot(options = {}) {
this._log.info(chalk.bold('Installing from snapshot'));
this._log.indent(4);
const install = await installSnapshot({ log: this._log, ...options });
this._log.indent(-4);
return install;
}
/**
* Installs ES from a local tar
*
* @param {String} path
* @param {Object} options
* @property {String} options.installPath
* @returns {Promise}
*/
async installArchive(path, options = {}) {
this._log.info(chalk.bold('Installing from an archive'));
this._log.indent(4);
const install = await installArchive(path, { log: this._log, ...options });
this._log.indent(-4);
return install;
}
/**
* Starts ES and returns resolved promise once started
*
* @param {String} installPath
* @param {Object} options
* @property {Array} options.esArgs
* @returns {Promise}
*/
async start(installPath, options = {}) {
await this.run(installPath, options);
return new Promise(resolve => {
this._process.stdout.on('data', data => {
if (/started/.test(data)) {
return resolve(this._process);
}
});
});
}
/**
* Starts Elasticsearch and immediately returns with process
*
* @param {String} installPath
* @param {Object} options
* @property {Array} options.esArgs
* @returns {Process}
*/
run(installPath, { esArgs = [] }) {
this._log.info(chalk.bold('Starting'));
this._log.indent(4);
const args = extractConfigFiles(esArgs, installPath).reduce(
(acc, cur) => acc.concat(['-E', cur]),
[]
);
this._log.debug('%s %s', ES_BIN, args.join(' '));
this._process = execa(ES_BIN, args, {
cwd: installPath,
stdio: ['ignore', 'pipe', 'pipe'],
});
this._process.stdout.on('data', data => {
const lines = parseEsLog(data.toString());
lines.forEach(line => this._log.info(line.formattedMessage));
});
this._process.stderr.on('data', data =>
this._log.error(chalk.red(data.toString()))
);
return this._process;
}
/**
* Stops ES process, if it's running
*/
async stop() {
if (this._process) {
await this._process.kill();
}
}
};
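
Beyond the CLI, the Cluster class can be driven directly, as the test utilities further down do. A sketch of that flow, assuming a snapshot build is wanted; the version and settings here are illustrative only:

```js
const { Cluster } = require('@kbn/es');

(async () => {
  const cluster = new Cluster();

  // download (or reuse a cached) snapshot and extract it
  const { installPath } = await cluster.installSnapshot({ version: '7.0.0' });

  // start() resolves once the ES log contains "started"
  await cluster.start(installPath, { esArgs: ['cluster.name=dev'] });

  // ... use the cluster ...

  await cluster.stop();
})().catch(e => {
  console.error(e);
  process.exitCode = 1;
});
```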

@@ -0,0 +1,2 @@
exports.run = require('./cli').run;
exports.Cluster = require('./cluster').Cluster;

@@ -0,0 +1,54 @@
const fs = require('fs');
const path = require('path');
const chalk = require('chalk');
const { log: defaultLog, extractTarball } = require('../utils');
const { BASE_PATH } = require('../paths');
/**
* Extracts an ES archive and optionally installs plugins
*
* @param {String} archive - path to tar
* @param {Object} options
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installArchive = async function installArchive(
archive,
{
basePath = BASE_PATH,
installPath = path.resolve(basePath, path.basename(archive, '.tar.gz')),
log = defaultLog,
}
) {
if (fs.existsSync(installPath)) {
log.info('install directory already exists, removing');
rmrfSync(installPath);
}
log.info('extracting %s', chalk.bold(archive));
await extractTarball(archive, installPath);
log.info('extracted to %s', chalk.bold(installPath));
return { installPath };
};
/**
* Recursive deletion for a directory
*
* @param {String} path
*/
function rmrfSync(path) {
if (fs.existsSync(path)) {
fs.readdirSync(path).forEach(file => {
const curPath = path + '/' + file;
if (fs.lstatSync(curPath).isDirectory()) {
rmrfSync(curPath);
} else {
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
}

@@ -0,0 +1,3 @@
exports.installArchive = require('./archive').installArchive;
exports.installSnapshot = require('./snapshot').installSnapshot;
exports.installSource = require('./source').installSource;

@@ -0,0 +1,84 @@
const fetch = require('node-fetch');
const fs = require('fs');
const mkdirp = require('mkdirp');
const chalk = require('chalk');
const path = require('path');
const { BASE_PATH, DL_PATH } = require('../paths');
const { installArchive } = require('./archive');
const { log: defaultLog, cache } = require('../utils');
/**
* @param {Object} options
* @property {String} options.version
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installSnapshot = async function installSnapshot({
version,
basePath = BASE_PATH,
installPath = path.resolve(basePath, version),
log = defaultLog,
}) {
const fileName = `elasticsearch-${version}-SNAPSHOT.tar.gz`;
const url = `https://snapshots.elastic.co/downloads/elasticsearch/${fileName}`;
const dest = path.resolve(basePath, 'cache', fileName);
log.info('version: %s', chalk.bold(version));
log.info('install path: %s', chalk.bold(installPath));
await downloadFile(url, dest, log);
return await installArchive(dest, { installPath, basePath, log });
};
/**
* Downloads to tmp and moves once complete
*
* @param {String} url
* @param {String} dest
* @param {ToolingLog} log
* @returns {Promise}
*/
function downloadFile(url, dest, log) {
const downloadPath = path.resolve(DL_PATH, path.basename(dest));
const cacheMeta = cache.readMeta(dest);
mkdirp.sync(DL_PATH);
log.info('downloading from %s', chalk.bold(url));
return fetch(url, { headers: { 'If-None-Match': cacheMeta.etag } }).then(
res =>
new Promise((resolve, reject) => {
if (res.status === 304) {
log.info(
'etags match, using cache from %s',
chalk.bold(cacheMeta.ts)
);
return resolve();
}
if (!res.ok) {
return reject(new Error(res.statusText));
}
const stream = fs.createWriteStream(downloadPath);
res.body
.pipe(stream)
.on('error', error => {
reject(error);
})
.on('finish', () => {
if (res.ok) {
const etag = res.headers.get('etag');
cache.writeMeta(dest, { etag });
fs.renameSync(downloadPath, dest);
resolve();
} else {
reject(new Error(res.statusText));
}
});
})
);
}

@@ -0,0 +1,128 @@
const execa = require('execa');
const path = require('path');
const fs = require('fs');
const readline = require('readline');
const chalk = require('chalk');
const crypto = require('crypto');
const simpleGit = require('simple-git/promise');
const { installArchive } = require('./archive');
const { findMostRecentlyChanged, log: defaultLog, cache } = require('../utils');
const { GRADLE_BIN, ES_ARCHIVE_PATTERN, BASE_PATH } = require('../paths');
/**
* Installs ES from source
*
* @param {Object} options
* @property {String} options.sourcePath
* @property {String} options.basePath
* @property {String} options.installPath
* @property {ToolingLog} options.log
*/
exports.installSource = async function installSource({
sourcePath,
basePath = BASE_PATH,
installPath = path.resolve(basePath, 'source'),
log = defaultLog,
}) {
log.info('source path: %s', chalk.bold(sourcePath));
log.info('install path: %s', chalk.bold(installPath));
const { filename, etag } = await sourceInfo(sourcePath, log);
const cacheDest = path.resolve(basePath, 'cache', filename);
const cacheMeta = cache.readMeta(cacheDest);
const isCached = cacheMeta.exists && cacheMeta.etag === etag;
const archive = isCached
? cacheDest
: await createSnapshot({ sourcePath, log });
if (isCached) {
log.info(
'source path unchanged since %s, using cache',
chalk.bold(cacheMeta.ts)
);
} else {
cache.writeMeta(cacheDest, { etag });
fs.copyFileSync(archive, cacheDest);
}
return await installArchive(cacheDest, { basePath, installPath, log });
};
/**
* Collects git info for the source checkout and derives a cache etag and archive filename
*
* @param {String} cwd
* @param {ToolingLog} log
*/
async function sourceInfo(cwd, log = defaultLog) {
if (!fs.existsSync(cwd)) {
throw new Error(`${cwd} does not exist`);
}
const git = simpleGit(cwd);
const status = await git.status();
const branch = status.current;
const sha = (await git.revparse(['HEAD'])).trim();
log.info('on %s at %s', chalk.bold(branch), chalk.bold(sha));
log.info('%s locally modified file(s)', chalk.bold(status.modified.length));
const etag = crypto.createHash('md5').update(branch);
etag.update(sha);
// for changed files, use last modified times in hash calculation
status.files.forEach(file => {
etag.update(fs.statSync(path.join(cwd, file.path)).mtime.toString());
});
const cwdHash = crypto
.createHash('md5')
.update(cwd)
.digest('hex');
return {
etag: etag.digest('hex'),
filename: `${branch}-${cwdHash.substr(0, 8)}.tar.gz`,
branch,
};
}
/**
* Creates archive from source
*
* @param {Object} options
* @property {String} options.sourcePath
* @property {ToolingLog} options.log
* @returns {Promise} resolves with the path to the created archive
*/
function createSnapshot({ sourcePath, log = defaultLog }) {
const buildArgs = [':distribution:archives:tar:assemble'];
return new Promise((resolve, reject) => {
log.info('%s %s', GRADLE_BIN, buildArgs.join(' '));
const build = execa(GRADLE_BIN, buildArgs, {
cwd: sourcePath,
stdio: ['ignore', 'pipe', 'pipe'],
});
const stdout = readline.createInterface({ input: build.stdout });
const stderr = readline.createInterface({ input: build.stderr });
stdout.on('line', line => log.debug(line));
stderr.on('line', line => log.error(line));
build.stdout.on('end', () => {
if (build.exitCode > 0) {
reject(new Error('unable to build ES'));
} else {
const esTarballPath = findMostRecentlyChanged(
path.resolve(sourcePath, ES_ARCHIVE_PATTERN)
);
resolve(esTarballPath);
}
});
});
}

@@ -0,0 +1,16 @@
const os = require('os');
const path = require('path');
function useBat(bin) {
return os.platform().startsWith('win') ? `${bin}.bat` : bin;
}
const tempDir = os.tmpdir();
exports.BASE_PATH = path.resolve(tempDir, 'kbn-es');
exports.DL_PATH = tempDir;
exports.GRADLE_BIN = useBat('./gradlew');
exports.ES_BIN = useBat('bin/elasticsearch');
exports.ES_ARCHIVE_PATTERN =
'distribution/archives/tar/build/distributions/elasticsearch-*.tar.gz';

@@ -0,0 +1,24 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`parses data containing exception 1`] = `"[o.e.n.Node] [qEfPPg8] starting ..."`;
exports[`parses data containing exception 2`] = `
"[o.e.b.ElasticsearchUncaughtExceptionHandler] [] uncaught exception in thread [main]
org.elasticsearch.bootstrap.StartupException: BindHttpException; nested: BindException[Address already in use];
at org.elasticsearch.bootstrap.Elasticsearch.init(Elasticsearch.java:125) ~[elasticsearch-7.0.0.jar:7.0.0-alpha1-SNAPSHOT]
Caused by: org.elasticsearch.http.BindHttpException: Failed to bind to [9200]
at org.elasticsearch.http.netty4.Netty4HttpServerTransport.bindAddress(Netty4HttpServerTransport.java:408) ~[?:?]
at org.elasticsearch.http.netty4.Netty4HttpServerTransport.createBoundHttpAddress(Netty4HttpServerTransport.java:309) ~[?:?]
... 13 more
Caused by: java.net.BindException: Address already in use
at sun.nio.ch.Net.bind0(Native Method) ~[?:?]
at java.lang.Thread.run(Thread.java:844) [?:?]"
`;
exports[`parses data containing exception 3`] = `"[o.e.g.GatewayService] [qEfPPg8] recovered [0] indices into cluster_state"`;
exports[`parses multiple lines 1`] = `"[o.e.p.PluginsService] [qEfPPg8] loaded plugin [x-pack-security]"`;
exports[`parses multiple lines 2`] = `"[o.e.p.PluginsService] [qEfPPg8] loaded plugin [x-pack-watcher]"`;
exports[`parses single line 1`] = `"[o.e.p.PluginsService] [qEfPPg8] loaded module [lang-expression]"`;

@@ -0,0 +1,34 @@
const fs = require('fs');
const mkdirp = require('mkdirp');
const path = require('path');
exports.readMeta = function readMeta(file) {
try {
const meta = fs.readFileSync(`${file}.meta`, {
encoding: 'utf8',
});
return {
exists: fs.existsSync(file),
...JSON.parse(meta),
};
} catch (e) {
if (e.code !== 'ENOENT') {
throw e;
}
return {
exists: false,
};
}
};
exports.writeMeta = function writeMeta(file, details = {}) {
const meta = {
ts: new Date(),
...details,
};
mkdirp.sync(path.dirname(file));
fs.writeFileSync(`${file}.meta`, JSON.stringify(meta, null, 2));
};
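
The meta is stored as a `<file>.meta` JSON sidecar next to the artifact it describes. A small usage sketch (the path and etag values are illustrative):

```js
const cache = require('./cache');

// records { ts, etag } in /tmp/kbn-es/cache/es.tar.gz.meta
cache.writeMeta('/tmp/kbn-es/cache/es.tar.gz', { etag: 'W/"abc123"' });

// `exists` reflects the artifact itself, not the sidecar, so this reports
// { exists: false, ts: ..., etag: 'W/"abc123"' } until the tarball is written
const meta = cache.readMeta('/tmp/kbn-es/cache/es.tar.gz');
```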

@@ -0,0 +1,54 @@
const path = require('path');
const fs = require('fs');
const mkdirp = require('mkdirp');
/**
* Copies any config values which reference an absolute file path
* into the provided destination. This is necessary as ES security
* requires config files to live within the installation directory
*
* @param {Array} config
* @param {String} dest
*/
exports.extractConfigFiles = function extractConfigFiles(
config,
dest,
options = {}
) {
const originalConfig = typeof config === 'string' ? [config] : config;
const localConfig = [];
originalConfig.forEach(prop => {
const [key, value] = prop.split('=');
if (isFile(value)) {
const filename = path.basename(value);
const destPath = path.resolve(dest, 'config', filename);
copyFileSync(value, destPath);
if (options.log) {
options.log.info('copied %s from config to %s', value, destPath);
}
localConfig.push(`${key}=${filename}`);
} else {
localConfig.push(prop);
}
});
return localConfig;
};
function isFile(dest = '') {
return path.isAbsolute(dest) && path.extname(dest).length > 0;
}
function copyFileSync(src, dest) {
const destPath = path.dirname(dest);
if (!fs.existsSync(destPath)) {
mkdirp.sync(destPath);
}
fs.writeFileSync(dest, fs.readFileSync(src));
}

@@ -0,0 +1,44 @@
const { extractConfigFiles } = require('./extract_config_files');
const mockFs = require('mock-fs');
const fs = require('fs');
beforeEach(() => {
mockFs({
'/data': {
'foo.yml': '',
},
'/es': {},
});
});
afterEach(() => {
mockFs.restore();
});
test('returns config with local paths', () => {
const config = extractConfigFiles(['path=/data/foo.yml'], '/es');
expect(config[0]).toBe('path=foo.yml');
});
test('copies file', () => {
extractConfigFiles(['path=/data/foo.yml'], '/es');
expect(fs.existsSync('/es/config/foo.yml')).toBe(true);
expect(fs.existsSync('/data/foo.yml')).toBe(true);
});
test('ignores non-paths', () => {
const config = extractConfigFiles(['foo=bar', 'foo.bar=baz'], '/es');
expect(config).toEqual(['foo=bar', 'foo.bar=baz']);
});
test('ignores directories', () => {
const config = extractConfigFiles(
['path=/data/foo.yml', 'foo.bar=/data/bar'],
'/es'
);
expect(config).toEqual(['path=foo.yml', 'foo.bar=/data/bar']);
});

@@ -0,0 +1,22 @@
const path = require('path');
const fs = require('fs');
const glob = require('glob');
/**
* Find the most recently changed file that matches the provided pattern
*
* @param {String} pattern absolute path with glob expressions
* @return {String} Absolute path
*/
exports.findMostRecentlyChanged = function findMostRecentlyChanged(pattern) {
if (!path.isAbsolute(pattern)) {
throw new TypeError(`Pattern must be absolute, got ${pattern}`);
}
const ctime = path => fs.statSync(path).ctime.getTime();
return glob
.sync(pattern)
.sort((a, b) => ctime(a) - ctime(b))
.pop();
};
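
A usage sketch, matching how install/source.js resolves ES_ARCHIVE_PATTERN against the source path (the absolute prefix here is illustrative):

```js
const { findMostRecentlyChanged } = require('./find_most_recently_changed');

// returns the newest matching build, or undefined if nothing matches
const tarball = findMostRecentlyChanged(
  '/tmp/elasticsearch/distribution/archives/tar/build/distributions/elasticsearch-*.tar.gz'
);
```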

@@ -0,0 +1,6 @@
exports.cache = require('./cache');
exports.log = require('./log').log;
exports.parseEsLog = require('./parse_es_log').parseEsLog;
exports.extractTarball = require('./tarball').extractTarball;
exports.findMostRecentlyChanged = require('./find_most_recently_changed').findMostRecentlyChanged;
exports.extractConfigFiles = require('./extract_config_files').extractConfigFiles;

@@ -0,0 +1,6 @@
const { createToolingLog } = require('@kbn/dev-utils');
const log = createToolingLog('verbose');
log.pipe(process.stdout);
exports.log = log;

@@ -0,0 +1,46 @@
const chalk = require('chalk');
/**
* @param {String} data
* @returns {Array} lines
*/
exports.parseEsLog = function parseEsLog(data) {
const lines = [];
const regex = /\[([0-9-T:,]+)\]\[([A-Z]+)\s?\]\[([A-Za-z0-9.]+)\s*\]\s?([\S\s]+?(?=$|\n\[))/g;
let capture = regex.exec(data);
if (!capture) {
return [
{
formattedMessage: data.trim(),
message: data.trim(),
level: 'warn',
},
];
}
do {
const [, , level, location, message] = capture;
const color = colorForLevel(level);
lines.push({
formattedMessage: `[${chalk.dim(location)}] ${color(message.trim())}`,
message: `[${location}] ${message.trim()}`,
level: level.toLowerCase(),
});
capture = regex.exec(data);
} while (capture);
return lines;
};
function colorForLevel(level) {
switch (level) {
case 'WARN':
return chalk.yellow;
case 'DEBUG':
return chalk.dim;
}
return chalk.reset;
}

@@ -0,0 +1,54 @@
const dedent = require('dedent');
const { parseEsLog } = require('./parse_es_log');
test('parses single line', () => {
const data = dedent(`
[2018-02-23T10:13:40,371][INFO ][o.e.p.PluginsService ] [qEfPPg8] loaded module [lang-expression]
`);
const lines = parseEsLog(data);
expect(lines).toHaveLength(1);
expect(lines[0].message).toMatchSnapshot();
});
test('parses multiple lines', () => {
const data = dedent(`
[2018-02-23T10:13:40,405][INFO ][o.e.p.PluginsService ] [qEfPPg8] loaded plugin [x-pack-security]
[2018-02-23T10:13:40,405][INFO ][o.e.p.PluginsService ] [qEfPPg8] loaded plugin [x-pack-watcher]
`);
const lines = parseEsLog(data);
expect(lines).toHaveLength(2);
expect(lines[0].message).toMatchSnapshot();
expect(lines[1].message).toMatchSnapshot();
});
test('parses data containing exception', () => {
const data = dedent(`
[2018-02-23T10:13:45,646][INFO ][o.e.n.Node ] [qEfPPg8] starting ...
[2018-02-23T10:13:53,992][WARN ][o.e.b.ElasticsearchUncaughtExceptionHandler] [] uncaught exception in thread [main]
org.elasticsearch.bootstrap.StartupException: BindHttpException; nested: BindException[Address already in use];
at org.elasticsearch.bootstrap.Elasticsearch.init(Elasticsearch.java:125) ~[elasticsearch-7.0.0.jar:7.0.0-alpha1-SNAPSHOT]
Caused by: org.elasticsearch.http.BindHttpException: Failed to bind to [9200]
at org.elasticsearch.http.netty4.Netty4HttpServerTransport.bindAddress(Netty4HttpServerTransport.java:408) ~[?:?]
at org.elasticsearch.http.netty4.Netty4HttpServerTransport.createBoundHttpAddress(Netty4HttpServerTransport.java:309) ~[?:?]
... 13 more
Caused by: java.net.BindException: Address already in use
at sun.nio.ch.Net.bind0(Native Method) ~[?:?]
at java.lang.Thread.run(Thread.java:844) [?:?]
[2018-02-23T10:13:54,280][INFO ][o.e.g.GatewayService ] [qEfPPg8] recovered [0] indices into cluster_state
`);
const lines = parseEsLog(data);
expect(lines).toHaveLength(3);
expect(lines[0].message).toMatchSnapshot();
expect(lines[1].message).toMatchSnapshot();
expect(lines[2].message).toMatchSnapshot();
});
test('handles parsing exception', () => {
const lines = parseEsLog('foo');
expect(lines).toHaveLength(1);
expect(lines[0].message).toBe('foo');
});

@@ -0,0 +1,29 @@
const fs = require('fs');
const zlib = require('zlib');
const path = require('path');
const tarFs = require('tar-fs');
/**
* @param {String} archive
* @param {String} dirPath
*/
exports.extractTarball = function extractTarball(archive, dirPath) {
const stripOne = header => {
header.name = header.name
.split(/\/|\\/)
.slice(1)
.join(path.sep);
return header;
};
return new Promise((resolve, reject) => {
fs
.createReadStream(archive)
.on('error', reject)
.pipe(zlib.createGunzip())
.on('error', reject)
.pipe(tarFs.extract(dirPath, { map: stripOne }))
.on('error', reject)
.on('finish', resolve);
});
};
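
The `stripOne` mapper drops the tarball's top-level directory, so the archive contents land directly in `dirPath`. A usage sketch (paths are illustrative):

```js
const { extractTarball } = require('./tarball');

// elasticsearch-7.0.0-SNAPSHOT/bin/... ends up as /tmp/kbn-es/7.0.0/bin/...
extractTarball('/tmp/elasticsearch-7.0.0-SNAPSHOT.tar.gz', '/tmp/kbn-es/7.0.0')
  .then(() => console.log('extracted'))
  .catch(err => console.error(err));
```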

packages/kbn-es/yarn.lock

@@ -0,0 +1,345 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@kbn/dev-utils@link:../kbn-dev-utils":
version "0.0.0"
uid ""
agentkeepalive@^3.4.1:
version "3.4.1"
resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-3.4.1.tgz#aa95aebc3a749bca5ed53e3880a09f5235b48f0c"
dependencies:
humanize-ms "^1.2.1"
ansi-regex@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
ansi-styles@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
ansi-styles@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
dependencies:
color-convert "^1.9.0"
bl@^1.0.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.1.tgz#cac328f7bee45730d404b692203fcb590e172d5e"
dependencies:
readable-stream "^2.0.5"
chalk@^1.0.0:
version "1.1.3"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
dependencies:
ansi-styles "^2.2.1"
escape-string-regexp "^1.0.2"
has-ansi "^2.0.0"
strip-ansi "^3.0.0"
supports-color "^2.0.0"
chalk@^2.3.0, chalk@^2.3.1:
version "2.3.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.2.tgz#250dc96b07491bfd601e648d66ddf5f60c7a5c65"
dependencies:
ansi-styles "^3.2.1"
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
chownr@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181"
color-convert@^1.9.0:
version "1.9.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed"
dependencies:
color-name "^1.1.1"
color-name@^1.1.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
core-util-is@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
cross-spawn@^6.0.0:
version "6.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
dependencies:
nice-try "^1.0.4"
path-key "^2.0.1"
semver "^5.5.0"
shebang-command "^1.2.0"
which "^1.2.9"
debug@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
dependencies:
ms "2.0.0"
dedent@^0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c"
elasticsearch@^14.1.0:
version "14.2.0"
resolved "https://registry.yarnpkg.com/elasticsearch/-/elasticsearch-14.2.0.tgz#ef7c6e505cb41525a0751b5156e8c0fbd1f02d62"
dependencies:
agentkeepalive "^3.4.1"
chalk "^1.0.0"
lodash "2.4.2"
lodash.get "^4.4.2"
lodash.isempty "^4.4.0"
lodash.trimend "^4.5.1"
end-of-stream@^1.0.0, end-of-stream@^1.1.0:
version "1.4.1"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
dependencies:
once "^1.4.0"
escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
execa@^0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-0.10.0.tgz#ff456a8f53f90f8eccc71a96d11bdfc7f082cb50"
dependencies:
cross-spawn "^6.0.0"
get-stream "^3.0.0"
is-stream "^1.1.0"
npm-run-path "^2.0.0"
p-finally "^1.0.0"
signal-exit "^3.0.0"
strip-eof "^1.0.0"
get-stream@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
has-ansi@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
dependencies:
ansi-regex "^2.0.0"
has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
humanize-ms@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed"
dependencies:
ms "^2.0.0"
inherits@~2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
lodash.get@^4.4.2:
version "4.4.2"
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
lodash.isempty@^4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e"
lodash.trimend@^4.5.1:
version "4.5.1"
resolved "https://registry.yarnpkg.com/lodash.trimend/-/lodash.trimend-4.5.1.tgz#12804437286b98cad8996b79414e11300114082f"
lodash@2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-2.4.2.tgz#fadd834b9683073da179b3eae6d9c0d15053f73e"
minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
mkdirp@^0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
dependencies:
minimist "0.0.8"
moment@^2.20.1:
version "2.21.0"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.21.0.tgz#2a114b51d2a6ec9e6d83cf803f838a878d8a023a"
ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
ms@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
nice-try@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.4.tgz#d93962f6c52f2c1558c0fbda6d512819f1efe1c4"
node-fetch@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.1.1.tgz#369ca70b82f50c86496104a6c776d274f4e4a2d4"
npm-run-path@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
dependencies:
path-key "^2.0.0"
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
dependencies:
wrappy "1"
p-finally@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
path-key@^2.0.0, path-key@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
process-nextick-args@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
pump@^1.0.0:
version "1.0.3"
resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954"
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
readable-stream@^2.0.0, readable-stream@^2.0.5:
version "2.3.4"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.4.tgz#c946c3f47fa7d8eabc0b6150f4a12f69a4574071"
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
isarray "~1.0.0"
process-nextick-args "~2.0.0"
safe-buffer "~5.1.1"
string_decoder "~1.0.3"
util-deprecate "~1.0.1"
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
semver@^5.5.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab"
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
dependencies:
shebang-regex "^1.0.0"
shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
signal-exit@^3.0.0:
version "3.0.2"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
simple-git@^1.91.0:
version "1.92.0"
resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-1.92.0.tgz#6061468eb7d19f0141078fc742e62457e910f547"
dependencies:
debug "^3.1.0"
string_decoder@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab"
dependencies:
safe-buffer "~5.1.0"
strip-ansi@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
dependencies:
ansi-regex "^2.0.0"
strip-eof@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
supports-color@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
supports-color@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.3.0.tgz#5b24ac15db80fa927cf5227a4a33fd3c4c7676c0"
dependencies:
has-flag "^3.0.0"
tar-fs@^1.16.0:
version "1.16.0"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.0.tgz#e877a25acbcc51d8c790da1c57c9cf439817b896"
dependencies:
chownr "^1.0.1"
mkdirp "^0.5.1"
pump "^1.0.0"
tar-stream "^1.1.2"
tar-stream@^1.1.2:
version "1.5.5"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.5.5.tgz#5cad84779f45c83b1f2508d96b09d88c7218af55"
dependencies:
bl "^1.0.0"
end-of-stream "^1.0.0"
readable-stream "^2.0.0"
xtend "^4.0.0"
tree-kill@1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.1.0.tgz#c963dcf03722892ec59cba569e940b71954d1729"
util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
which@^1.2.9:
version "1.3.0"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a"
dependencies:
isexe "^2.0.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
xtend@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
zlib@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/zlib/-/zlib-1.0.5.tgz#6e7c972fc371c645a6afb03ab14769def114fcc0"

@@ -10,6 +10,7 @@
"getopts": "^2.0.0",
"lodash.camelcase": "^4.3.0",
"lodash.kebabcase": "^4.1.1",
"lodash.snakecase": "^4.1.1",
"lodash.startcase": "^4.4.0",
"sao": "^0.22.12"
}

@@ -913,6 +913,10 @@ lodash.kebabcase@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36"
lodash.snakecase@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d"
lodash.startcase@^4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.startcase/-/lodash.startcase-4.4.0.tgz#9436e34ed26093ed7ffae1936144350915d9add8"

scripts/es.js

@@ -0,0 +1,14 @@
const path = require('path');
const pkg = require('../package.json');
const kbnEs = require('@kbn/es');
kbnEs
.run({
version: pkg.version,
'source-path': path.resolve(__dirname, '../../elasticsearch'),
'base-path': path.resolve(__dirname, '../.es'),
})
.catch(e => {
console.error(e);
process.exitCode = 1;
});

@@ -1,4 +1,4 @@
import { esTestConfig } from '../../test_utils/es';
import pkg from '../../../package.json';
export const createTestEntryTemplate = (defaultUiSettings) => (bundle) => `
/**
@@ -16,7 +16,7 @@ window.__KBN__ = {
vars: {
kbnIndex: '.kibana',
esShardTimeout: 1500,
esApiVersion: ${JSON.stringify(esTestConfig.getBranch())},
esApiVersion: ${JSON.stringify(pkg.branch)},
esRequestTimeout: '300000',
tilemapsConfig: {
deprecated: {

@@ -12,7 +12,7 @@ run(async ({ log }) => {
ignore: [
'**/node_modules/**/*',
'optimize/**/*',
'esvm/**/*',
'.es/**/*',
'data/**/*',
]
});

@@ -1,16 +1,18 @@
import { spawn } from 'child_process';
import { resolve } from 'path';
import { format as formatUrl } from 'url';
import expect from 'expect.js';
import { readConfigFile } from '../../lib';
import { createToolingLog } from '../../../dev';
import { createReduceStream } from '../../../utils';
import { createEsTestCluster } from '../../../test_utils/es';
import { createTestCluster } from '../../../test_utils/es';
import { startupKibana } from '../lib';
const SCRIPT = resolve(__dirname, '../../../../scripts/functional_test_runner.js');
const SCRIPT = resolve(
__dirname,
'../../../../scripts/functional_test_runner.js'
);
const CONFIG = resolve(__dirname, '../fixtures/with_es_archiver/config.js');
describe('single test that uses esArchiver', () => {
@@ -27,23 +29,20 @@ describe('single test that uses esArchiver', () => {
log.info('starting elasticsearch');
log.indent(2);
const es = createEsTestCluster({
log: msg => log.debug(msg),
name: 'ftr/withEsArchiver',
port: config.get('servers.elasticsearch.port')
});
cleanupWork.unshift(() => es.stop());
const es = createTestCluster({ port: config.get('servers.elasticsearch.port'), log });
this.timeout(es.getStartTimeout());
await es.start();
cleanupWork.unshift(async () => await es.cleanup());
log.indent(-2);
log.info('starting kibana');
log.indent(2);
const kibana = await startupKibana({
port: config.get('servers.kibana.port'),
esUrl: formatUrl(config.get('servers.elasticsearch'))
esUrl: es.getUrl(),
});
log.indent(-2);

@@ -1,21 +0,0 @@
import { get } from 'lodash';
import toPath from 'lodash/internal/toPath';
/**
* Create a callCluster function that properly executes methods on an
* elasticsearch-js client
*
* @param {elasticsearch.Client} esClient
* @return {Function}
*/
export function createCallCluster(esClient) {
return function callCluster(method, params) {
const path = toPath(method);
const contextPath = path.slice(0, -1);
const action = get(esClient, path);
const context = contextPath.length ? get(esClient, contextPath) : esClient;
return action.call(context, params);
};
}

@@ -1,122 +1,96 @@
import { resolve } from 'path';
import libesvm from 'libesvm';
import { get } from 'lodash';
import { format } from 'url';
import elasticsearch from 'elasticsearch';
import toPath from 'lodash/internal/toPath';
import { Cluster } from '@kbn/es';
import { esTestConfig } from './es_test_config';
import { createCallCluster } from './create_call_cluster';
import { rmrfSync } from './rmrf_sync';
const ESVM_DIR = resolve(__dirname, '../../../esvm/test_utils/es_test_cluster');
const BRANCHES_DOWNLOADED = [];
export function createTestCluster(options = {}) {
const { port = esTestConfig.getPort(), log } = options;
const randomHash = Math.random().toString(36).substring(2);
const clusterName = `test-${randomHash}`;
const basePath = resolve(__dirname, '../../../.es');
const config = {
version: esTestConfig.getVersion(),
installPath: resolve(basePath, clusterName),
sourcePath: resolve(__dirname, '../../../../elasticsearch'),
basePath,
};
function isDownloadNeeded(branch) {
if (process.env.ESVM_NO_FRESH || process.argv.includes('--esvm-no-fresh')) {
return false;
}
if (BRANCHES_DOWNLOADED.includes(branch)) {
return false;
}
return true;
}
export function createEsTestCluster(options = {}) {
const {
name,
log = console.log,
port = esTestConfig.getPort(),
branch = esTestConfig.getBranch(),
} = options;
if (!name) {
throw new Error('createEsTestCluster() requires { name }');
}
// assigned in use.start(), reassigned in use.stop()
let cluster;
let client;
const cluster = new Cluster(log);
const from = esTestConfig.getBuildFrom();
return new class EsTestCluster {
getStartTimeout() {
return esTestConfig.getLibesvmStartTimeout();
const second = 1000;
const minute = second * 60;
return from === 'snapshot' ? minute : minute * 3;
}
getClient() {
if (!client) {
client = new elasticsearch.Client({
host: esTestConfig.getUrl()
});
}
async start() {
const { installPath } =
from === 'source'
? await cluster.installSource(config)
: await cluster.installSnapshot(config);
return client;
await cluster.start(installPath, {
esArgs: [
`cluster.name=${clusterName}`,
`http.port=${port}`,
`discovery.zen.ping.unicast.hosts=localhost:${port}`,
],
});
}
async stop() {
await cluster.stop();
}
async cleanup() {
await this.stop();
rmrfSync(config.installPath);
}
/**
* Returns an ES Client to the configured cluster
*/
getClient() {
return new elasticsearch.Client({
host: this.getUrl(),
});
}
getCallCluster() {
return createCallCluster(this.getClient());
}
async start() {
const download = isDownloadNeeded(branch);
getUrl() {
const parts = esTestConfig.getUrlParts();
parts.port = port;
if (cluster) {
throw new Error(`
EsTestCluster[${name}] is already started, call and await es.stop()
before calling es.start() again.
`);
}
cluster = libesvm.createCluster({
fresh: download,
purge: !download,
directory: ESVM_DIR,
branch,
config: {
http: {
port,
},
cluster: {
name,
},
discovery: {
zen: {
ping: {
unicast: {
hosts: [ `localhost:${port}` ]
}
}
}
}
}
});
cluster.on('log', (event) => {
log(`EsTestCluster[${name}]: ${event.type} - ${event.message}`);
});
await cluster.install();
if (download) {
// track the branches that have successfully downloaded
// after cluster.install() resolves
BRANCHES_DOWNLOADED.push(branch);
}
await cluster.start();
}
async stop() {
if (client) {
const c = client;
client = null;
await c.close();
}
if (cluster) {
const c = cluster;
cluster = null;
await c.shutdown();
}
return format(parts);
}
};
}
/**
* Create a callCluster function that properly executes methods on an
* elasticsearch-js client
*
* @param {elasticsearch.Client} esClient
* @return {Function}
*/
function createCallCluster(esClient) {
return function callCluster(method, params) {
const path = toPath(method);
const contextPath = path.slice(0, -1);
const action = get(esClient, path);
const context = contextPath.length ? get(esClient, contextPath) : esClient;
return action.call(context, params);
};
}

@@ -1,27 +1,10 @@
import { format as formatUrl } from 'url';
import { resolve } from 'path';
import pkg from '../../../package.json';
import { admin } from '../../../test/shield';
const SECOND = 1000;
const MINUTE = 60 * SECOND;
export const esTestConfig = new class EsTestConfig {
getLibesvmStartTimeout() {
return process.env.TEST_ES_STARTUP_TIMEOUT || (5 * MINUTE);
}
getDirectoryForEsvm(uniqueSubDir) {
if (!uniqueSubDir) {
throw new Error('getDirectoryForEsvm() requires uniqueSubDir');
}
return resolve(__dirname, '../../../esvm', uniqueSubDir);
}
getBranch() {
return process.env.TEST_ES_BRANCH || pkg.branch;
getVersion() {
return process.env.TEST_ES_BRANCH || pkg.version;
}
getPort() {
@@ -32,6 +15,10 @@ export const esTestConfig = new class EsTestConfig {
return formatUrl(this.getUrlParts());
}
getBuildFrom() {
return process.env.TEST_ES_FROM || 'snapshot';
}
getUrlParts() {
return {
protocol: process.env.TEST_ES_PROTOCOL || 'http',

@@ -1,3 +1,2 @@
export { esTestConfig } from './es_test_config';
export { createEsTestCluster } from './es_test_cluster';
export { createCallCluster } from './create_call_cluster';
export { createTestCluster } from './es_test_cluster';

@@ -0,0 +1,21 @@
import * as fs from 'fs';
/**
* Recursive deletion for a directory
*
* @param {String} path
*/
export function rmrfSync(path) {
if (fs.existsSync(path)) {
fs.readdirSync(path).forEach(file => {
const curPath = path + '/' + file;
if (fs.lstatSync(curPath).isDirectory()) {
rmrfSync(curPath);
} else {
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
}

@@ -1,7 +1,7 @@
import sinon from 'sinon';
import expect from 'expect.js';
import { createEsTestCluster } from '../../../../test_utils/es';
import { createTestCluster } from '../../../../test_utils/es';
import { createServerWithCorePlugins } from '../../../../test_utils/kbn_server';
import { createToolingLog } from '../../../../dev';
import { createOrUpgradeSavedConfig } from '../create_or_upgrade_saved_config';
@@ -16,17 +16,16 @@ describe('createOrUpgradeSavedConfig()', () => {
log.pipe(process.stdout);
log.indent(6);
const es = createEsTestCluster({
log: msg => log.debug(msg),
name: 'savedObjects/healthCheck/integration',
});
this.timeout(es.getStartTimeout());
log.info('starting elasticsearch');
log.indent(2);
log.indent(4);
const es = createTestCluster({ log });
this.timeout(es.getStartTimeout());
log.indent(-4);
cleanup.push(async () => await es.cleanup());
await es.start();
log.indent(-2);
cleanup.push(() => es.stop());
kbnServer = createServerWithCorePlugins();
await kbnServer.ready();

@@ -1,14 +1,14 @@
import { createEsTestCluster } from '../../../../../test_utils/es';
import { createTestCluster } from '../../../../../test_utils/es';
import * as kbnTestServer from '../../../../../test_utils/kbn_server';
let kbnServer;
let services;
const es = createEsTestCluster({
name: 'ui_settings/routes'
});
let es;
export async function startServers() {
es = createTestCluster();
this.timeout(es.getStartTimeout());
await es.start();
kbnServer = kbnTestServer.createServerWithCorePlugins();
@@ -49,5 +49,8 @@ export async function stopServers() {
kbnServer = null;
}
await es.stop();
if (es) {
await es.cleanup();
es = null;
}
}

@@ -1,57 +0,0 @@
import { esTestConfig } from '../../src/test_utils/es';
module.exports = function (grunt) {
const branch = esTestConfig.getBranch();
const dataDir = esTestConfig.getDirectoryForEsvm('data_dir');
return {
options: {
branch,
fresh: !grunt.option('esvm-no-fresh'),
config: {
http: {
port: 9200
},
}
},
dev: {
options: {
directory: esTestConfig.getDirectoryForEsvm('dev'),
config: {
path: {
data: dataDir
},
cluster: {
name: 'esvm-dev'
}
}
}
},
ui: {
options: {
directory: esTestConfig.getDirectoryForEsvm('test'),
purge: true,
config: {
http: {
port: esTestConfig.getPort()
},
cluster: {
name: 'esvm-ui'
},
discovery: {
zen: {
ping: {
unicast: {
hosts: [ `localhost:${esTestConfig.getPort()}` ]
}
}
}
}
}
}
},
};
};

@@ -217,7 +217,22 @@ module.exports = function (grunt) {
'--plugins.initialize=false',
'--server.autoListen=false',
...kbnServerFlags,
]
}
],
},
testEsServer: {
options: {
wait: false,
ready: /started/,
quiet: false,
},
cmd: process.execPath,
args: [
'scripts/es',
grunt.option('from') || 'snapshot',
'-E',
`http.port=${esTestConfig.getPort()}`,
],
},
};
};

@@ -8,7 +8,7 @@ export default grunt => {
grunt.registerTask('rejectRejFiles', 'Reject any git-apply .rej files', () => {
const files = grunt.file.expand([
'**/*.rej',
'!esvm/**/*.rej',
'!.es/**/*.rej',
'!plugins/**/*.rej',
'!optimize/**/*.rej',
'!**/node_modules/**/*.rej',

@@ -56,38 +56,38 @@ module.exports = function (grunt) {
grunt.registerTask('test:ui', [
'checkPlugins',
'esvm:ui',
'run:testEsServer',
'run:testUIServer',
'functional_test_runner:functional',
'esvm_shutdown:ui',
'stop:testEsServer',
'stop:testUIServer'
]);
grunt.registerTask('test:uiRelease', [
'checkPlugins',
'esvm:ui',
'run:testEsServer',
'run:testUIReleaseServer',
'functional_test_runner:functional',
'esvm_shutdown:ui',
'stop:testEsServer',
'stop:testUIReleaseServer'
]);
grunt.registerTask('test:ui:server', [
'checkPlugins',
'esvm:ui',
'run:testEsServer',
'run:testUIDevServer:keepalive'
]);
grunt.registerTask('test:api', [
'esvm:ui',
'run:testEsServer',
'run:apiTestServer',
'functional_test_runner:apiIntegration',
'esvm_shutdown:ui',
'stop:testEsServer',
'stop:apiTestServer'
]);
grunt.registerTask('test:api:server', [
'esvm:ui',
'run:testEsServer',
'run:devApiTestServer:keepalive'
]);

yarn.lock

File diff suppressed because it is too large.