Mirror of https://github.com/elastic/kibana.git (synced 2025-06-27 18:51:07 -04:00)
Upgrade to NodeJS 10 (#25157)
* feat(NA): upgrade node js version on file configs.
* chore(NA): migrate configs and 3rd party dependencies to work on node js 10.x
* fix(NA): add missing async function declaration.
* chore(NA): updated elastic/good package to work with node10
* chore(NA): update lockfiles.
* fix(NA): add missing dep.
* fix(NA): types for node 10.
* test(NA): fix error return type for node10.
* fix(NA): kbn-pm webpack config to unlazy a require using lazy-cache. fix(NA): build to work with node 10.
* test(NA): jest integration test for kbn-plugin-helpers.
* test(NA): fix jest tests for kbn-es.
* fix(NA): use os.tmpdir() instead of a tmp folder inside the fixtures.
* fix(NA): change afterEach on kbn-es decompress test.
* fix(NA): change afterEach on kbn-es decompress test.
* fix(NA): re-add mock-fs for the tests that still use it on kbn-es and that work on node10.
* fix(NA): re-add mock-fs for the tests that still use it on kbn-es and that work on node10.
* refact(NA): rewrite tests using mock-fs and completely remove this dependency.
* fix(NA): failing test implementation using jest mock in order to replace mock-fs.
* fix(NA): update jest snapshots to match new ones generated on node 10.
* fix(NA): cli/cluster mock to spyOn the off method instead of spyOn removeListener, as this was changed on Node 10.
* fix(NA): tests for cluster_manager to also spyOn off and on instead of addListener and removeListener.
* test(NA): fix management advanced settings image field test flow.
* fix(NA): apply missing types for src/core/server/plugins/discovery/plugins_discovery.ts.
* test(NA): updated 2 missing snapshots for KuiCodeEditor on kbn-ui-framework.
* refact(NA): fix eslint errors.
* refact(NA): fix ts code with tslint fix. chore(NA): update jest snapshots.
* chore(NA): migrate kbn-config-schema peer dependency to the last used joi version to avoid a warning on bootstrap.
* fix(NA): tslint errors.
* chore(NA): upgrade @types/node to the latest version.
* fix(NA): missing utf8 input format encoding when reading a file.
* chore(NA): upgrade to node 10.14.1
* fix(NA): Buffer api usage to avoid deprecation warnings.
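The recurring test change in this commit replaces mock-fs, which does not work on Node 10, with Jest's own mocking of the fs module. A minimal sketch of that pattern, using a hypothetical module under test (names here are illustrative, not taken from the diff):

// Mock only the fs functions the test actually touches instead of faking a whole filesystem.
jest.mock('fs', () => ({
  readFileSync: jest.fn().mockImplementation(() => 'file contents'), // hypothetical fixture content
  existsSync: jest.fn().mockImplementation(() => true),
  writeFileSync: jest.fn(),
}));

const fs = require('fs');
const { loadConfig } = require('./load_config'); // hypothetical module under test

test('reads the expected file', () => {
  loadConfig('/data/kibana.yml');
  expect(fs.readFileSync.mock.calls[0][0]).toEqual('/data/kibana.yml');
});

afterAll(() => {
  jest.restoreAllMocks();
});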
Parent: ef7cf4b44b
Commit: 45a67701f2
77 changed files with 31242 additions and 5727 deletions
@@ -1 +1 @@
-8.14.0
+10.14.1

.nvmrc (2 changes)
@@ -1 +1 @@
-8.14.0
+10.14.1
package.json (21 changes)
@@ -72,7 +72,7 @@
 "url": "https://github.com/elastic/kibana.git"
 },
 "resolutions": {
-"**/@types/node": "8.10.38",
+"**/@types/node": "10.12.12",
 "@types/react": "16.3.14"
 },
 "workspaces": {
@@ -95,7 +95,7 @@
 "@elastic/datemath": "5.0.1",
 "@elastic/eui": "5.3.0",
 "@elastic/filesaver": "1.1.2",
-"@elastic/good": "8.1.1-kibana1",
+"@elastic/good": "8.1.1-kibana2",
 "@elastic/numeral": "2.3.2",
 "@elastic/ui-ace": "0.2.3",
 "@kbn/babel-code-parser": "1.0.0",
@@ -137,7 +137,7 @@
 "elasticsearch": "^15.2.0",
 "elasticsearch-browser": "^15.2.0",
 "encode-uri-query": "1.0.0",
-"execa": "^0.10.0",
+"execa": "^1.0.0",
 "expiry-js": "0.1.7",
 "file-loader": "2.0.0",
 "font-awesome": "4.4.0",
@@ -285,7 +285,7 @@
 "@types/minimatch": "^2.0.29",
 "@types/moment-timezone": "^0.5.8",
 "@types/mustache": "^0.8.31",
-"@types/node": "^8.10.38",
+"@types/node": "^10.12.12",
 "@types/opn": "^5.1.0",
 "@types/podium": "^1.0.0",
 "@types/prop-types": "^15.5.3",
@@ -308,7 +308,7 @@
 "angular-mocks": "1.4.7",
 "archiver": "^3.0.0",
 "babel-eslint": "^9.0.0",
-"babel-jest": "^23.4.2",
+"babel-jest": "^23.6.0",
 "backport": "4.4.1",
 "chai": "3.5.0",
 "chance": "1.0.10",
@@ -325,7 +325,7 @@
 "eslint-config-prettier": "^3.1.0",
 "eslint-plugin-babel": "^5.2.0",
 "eslint-plugin-import": "^2.14.0",
-"eslint-plugin-jest": "^21.22.1",
+"eslint-plugin-jest": "^21.26.2",
 "eslint-plugin-jsx-a11y": "^6.1.2",
 "eslint-plugin-mocha": "^5.2.0",
 "eslint-plugin-no-unsanitized": "^3.0.2",
@@ -345,13 +345,13 @@
 "grunt-peg": "^2.0.1",
 "grunt-run": "0.7.0",
 "gulp-babel": "^7.0.1",
-"gulp-sourcemaps": "1.7.3",
+"gulp-sourcemaps": "2.6.4",
 "has-ansi": "^3.0.0",
 "image-diff": "1.6.0",
 "intl-messageformat-parser": "^1.4.0",
 "istanbul-instrumenter-loader": "3.0.1",
-"jest": "^23.5.0",
-"jest-cli": "^23.5.0",
+"jest": "^23.6.0",
+"jest-cli": "^23.6.0",
 "jest-raw-loader": "^1.0.1",
 "jimp": "0.2.28",
 "json5": "^1.0.1",
@@ -369,7 +369,6 @@
 "load-grunt-config": "0.19.2",
 "makelogs": "^4.3.0",
 "mocha": "3.3.0",
-"mock-fs": "^4.4.2",
 "murmurhash3js": "3.0.1",
 "mutation-observer": "^1.0.3",
 "nock": "8.0.0",
@@ -400,7 +399,7 @@
 "zlib": "^1.0.5"
 },
 "engines": {
-"node": "8.14.0",
+"node": "10.14.1",
 "yarn": "^1.10.1"
 }
 }
@@ -10,8 +10,8 @@
 "url": "https://github.com/elastic/kibana/tree/master/packages/kbn-babel-code-parser"
 },
 "scripts": {
-"build": "babel src --out-dir target --quiet",
-"kbn:bootstrap": "yarn build",
+"build": "babel src --out-dir target",
+"kbn:bootstrap": "yarn build --quiet",
 "kbn:watch": "yarn build --watch"
 },
 "devDependencies": {
@@ -13,7 +13,7 @@
 "typescript": "^3.0.3"
 },
 "peerDependencies": {
-"joi": "10.4.1",
+"joi": "^13.5.2",
 "moment": "^2.20.1",
 "type-detect": "^4.0.8"
 }
@@ -12,7 +12,7 @@
 },
 "dependencies": {
 "chalk": "^2.4.1",
-"execa": "^0.10.0",
+"execa": "^1.0.0",
 "moment": "^2.20.1",
 "rxjs": "^6.2.1",
 "tree-kill": "^1.2.0",
@@ -9,16 +9,14 @@
 "chalk": "^2.4.1",
 "dedent": "^0.7.0",
 "del": "^3.0.0",
-"execa": "^0.10.0",
+"execa": "^1.0.0",
 "getopts": "^2.0.6",
 "glob": "^7.1.2",
 "mkdirp": "^0.5.1",
-"mock-fs": "^4.5.0",
 "node-fetch": "^2.0.0",
 "simple-git": "^1.91.0",
-"tar-fs": "^1.16.0",
+"tar-fs": "^1.16.3",
 "tree-kill": "^1.1.0",
-"yauzl": "^2.10.0",
-"zlib": "^1.0.5"
+"yauzl": "^2.10.0"
 }
 }
@@ -18,30 +18,40 @@
 */

 const { decompress } = require('./decompress');
-const mockFs = require('mock-fs');
 const fs = require('fs');
 const path = require('path');
+const mkdirp = require('mkdirp');
+const del = require('del');
+const os = require('os');
+
+const fixturesFolder = path.resolve(__dirname, '__fixtures__');
+const randomDir = Math.random().toString(36);
+const tmpFolder = path.resolve(os.tmpdir(), randomDir);
+const dataFolder = path.resolve(tmpFolder, 'data');
+const esFolder = path.resolve(tmpFolder, '.es');
+
+const zipSnapshot = path.resolve(dataFolder, 'snapshot.zip');
+const tarGzSnapshot = path.resolve(dataFolder, 'snapshot.tar.gz');

 beforeEach(() => {
-mockFs({
-'/data': {
-'snapshot.zip': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.zip')),
-'snapshot.tar.gz': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.tar.gz')),
-},
-'/.es': {},
-});
+mkdirp.sync(tmpFolder);
+mkdirp.sync(dataFolder);
+mkdirp.sync(esFolder);
+
+fs.copyFileSync(path.resolve(fixturesFolder, 'snapshot.zip'), zipSnapshot);
+fs.copyFileSync(path.resolve(fixturesFolder, 'snapshot.tar.gz'), tarGzSnapshot);
 });

 afterEach(() => {
-mockFs.restore();
+del.sync(tmpFolder, { force: true });
 });

 test('zip strips root directory', async () => {
-await decompress('/data/snapshot.zip', '/.es/foo');
-expect(fs.readdirSync('/.es/foo/bin')).toContain('elasticsearch.bat');
+await decompress(zipSnapshot, path.resolve(esFolder, 'foo'));
+expect(fs.readdirSync(path.resolve(esFolder, 'foo/bin'))).toContain('elasticsearch.bat');
 });

 test('tar strips root directory', async () => {
-await decompress('/data/snapshot.tar.gz', '/.es/foo');
-expect(fs.readdirSync('/.es/foo/bin')).toContain('elasticsearch');
+await decompress(tarGzSnapshot, path.resolve(esFolder, 'foo'));
+expect(fs.readdirSync(path.resolve(esFolder, 'foo/bin'))).toContain('elasticsearch');
 });
@@ -17,21 +17,21 @@
 * under the License.
 */

+jest.mock('fs', () => ({
+readFileSync: jest.fn(),
+existsSync: jest.fn().mockImplementation(() => true),
+writeFileSync: jest.fn(),
+}));

 const { extractConfigFiles } = require('./extract_config_files');
-const mockFs = require('mock-fs');
 const fs = require('fs');

-beforeEach(() => {
-mockFs({
-'/data': {
-'foo.yml': '',
-},
-'/es': {},
-});
+afterEach(() => {
+jest.clearAllMocks();
 });

-afterEach(() => {
-mockFs.restore();
+afterAll(() => {
+jest.restoreAllMocks();
 });

 test('returns config with local paths', () => {
@@ -43,8 +43,8 @@ test('returns config with local paths', () => {
 test('copies file', () => {
 extractConfigFiles(['path=/data/foo.yml'], '/es');

-expect(fs.existsSync('/es/config/foo.yml')).toBe(true);
-expect(fs.existsSync('/data/foo.yml')).toBe(true);
+expect(fs.readFileSync.mock.calls[0][0]).toEqual('/data/foo.yml');
+expect(fs.writeFileSync.mock.calls[0][0]).toEqual('/es/config/foo.yml');
 });

 test('ignores non-paths', () => {
@@ -17,33 +17,38 @@
 * under the License.
 */

-const mockFs = require('mock-fs');
-const { findMostRecentlyChanged } = require('./find_most_recently_changed');
-
-beforeEach(() => {
-mockFs({
-'/data': {
-'oldest.yml': mockFs.file({
-content: 'foo',
-ctime: new Date(2018, 2, 1),
-}),
-'newest.yml': mockFs.file({
-content: 'bar',
-ctime: new Date(2018, 2, 3),
-}),
-'middle.yml': mockFs.file({
-content: 'baz',
-ctime: new Date(2018, 2, 2),
-}),
-},
-});
-});
-
-afterEach(() => {
-mockFs.restore();
-});
+jest.mock('fs', () => ({
+statSync: jest.fn().mockImplementation(path => {
+if (path.includes('oldest')) {
+return {
+ctime: new Date(2018, 2, 1),
+};
+}
+
+if (path.includes('newest')) {
+return {
+ctime: new Date(2018, 2, 3),
+};
+}
+
+if (path.includes('middle')) {
+return {
+ctime: new Date(2018, 2, 2),
+};
+}
+}),
+readdirSync: jest.fn().mockImplementation(() => {
+return ['oldest.yml', 'newest.yml', 'middle.yml'];
+}),
+}));
+
+const { findMostRecentlyChanged } = require('./find_most_recently_changed');

 test('returns newest file', () => {
 const file = findMostRecentlyChanged('/data/*.yml');
 expect(file).toEqual('/data/newest.yml');
 });
+
+afterAll(() => {
+jest.restoreAllMocks();
+});
@@ -2,4 +2,4 @@

 exports[`I18n loader registerTranslationFile should throw error if path to translation file is not an absolute 1`] = `"Paths to translation files must be absolute. Got relative path: \\"./en.json\\""`;

-exports[`I18n loader registerTranslationFile should throw error if path to translation file is not specified 1`] = `"Path must be a string. Received undefined"`;
+exports[`I18n loader registerTranslationFile should throw error if path to translation file is not specified 1`] = `"The \\"path\\" argument must be of type string. Received type undefined"`;
@@ -6,7 +6,7 @@
 "dependencies": {
 "chalk": "^2.4.1",
 "dedent": "^0.7.0",
-"execa": "^0.9.0",
+"execa": "^1.0.0",
 "getopts": "^2.0.0",
 "lodash.camelcase": "^4.3.0",
 "lodash.kebabcase": "^4.1.1",
|
||||||
"argv-split": "^2.0.1",
|
"argv-split": "^2.0.1",
|
||||||
"commander": "^2.9.0",
|
"commander": "^2.9.0",
|
||||||
"del": "^2.2.2",
|
"del": "^2.2.2",
|
||||||
"execa": "^0.10.0",
|
"execa": "^1.0.0",
|
||||||
"gulp-rename": "1.2.2",
|
"gulp-rename": "1.2.2",
|
||||||
"gulp-zip": "^4.1.0",
|
"gulp-zip": "^4.1.0",
|
||||||
"inquirer": "^1.2.2",
|
"inquirer": "^1.2.2",
|
||||||
|
|
|
@ -18,7 +18,7 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
const { resolve } = require('path');
|
const { resolve } = require('path');
|
||||||
const { readdirSync, existsSync, unlink } = require('fs');
|
const { readdirSync, existsSync, unlinkSync } = require('fs');
|
||||||
const del = require('del');
|
const del = require('del');
|
||||||
const createBuild = require('../create_build');
|
const createBuild = require('../create_build');
|
||||||
|
|
||||||
|
@ -96,7 +96,7 @@ describe('creating the build', () => {
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
PLUGIN.skipInstallDependencies = false;
|
PLUGIN.skipInstallDependencies = false;
|
||||||
PLUGIN.styleSheetToCompile = undefined;
|
PLUGIN.styleSheetToCompile = undefined;
|
||||||
unlink(cssPath);
|
unlinkSync(cssPath);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('produces CSS', async () => {
|
it('produces CSS', async () => {
|
||||||
|
|
packages/kbn-pm/dist/index.js (33775 changes, vendored)
File diff suppressed because one or more lines are too long
@@ -14,7 +14,7 @@
 "@types/cpy": "^5.1.0",
 "@types/dedent": "^0.7.0",
 "@types/del": "^3.0.0",
-"@types/execa": "^0.8.1",
+"@types/execa": "^0.9.0",
 "@types/getopts": "^2.0.0",
 "@types/glob": "^5.0.35",
 "@types/globby": "^6.1.0",
@@ -25,7 +25,7 @@
 "@types/log-symbols": "^2.0.0",
 "@types/mkdirp": "^0.5.2",
 "@types/ncp": "^2.0.1",
-"@types/node": "^8.10.38",
+"@types/node": "^10.12.12",
 "@types/ora": "^1.3.2",
 "@types/read-pkg": "^3.0.0",
 "@types/strip-ansi": "^3.0.0",
@@ -39,10 +39,10 @@
 "babel-preset-stage-3": "^6.24.1",
 "chalk": "^2.4.1",
 "cmd-shim": "^2.0.2",
-"cpy": "^6.0.0",
+"cpy": "^7.0.1",
 "dedent": "^0.7.0",
 "del": "^3.0.0",
-"execa": "^0.9.0",
+"execa": "^1.0.0",
 "getopts": "^2.0.0",
 "glob": "^7.1.2",
 "globby": "^8.0.1",
@@ -63,6 +63,7 @@
 "tempy": "^0.2.1",
 "ts-loader": "^5.2.2",
 "typescript": "^3.0.3",
+"unlazy-loader": "^0.1.3",
 "webpack": "^4.23.1",
 "webpack-cli": "^3.1.2",
 "wrap-ansi": "^3.0.1",
@@ -72,6 +72,21 @@ module.exports = {
 },
 },
 },
+// In order to make it work with Node 10 we had the need to upgrade
+// the package cpy to a version >= 7.0.0. In this version cpy is
+// using the new globby that relies in the fast-glob which relies
+// in the new micromatch. The micromatch (use and class-utils) dependencies having a require
+// that uses the lazy-cache which cannot be correctly extracted by webpack.
+// According to the documentation we should use the unlazy-loader to solve
+// this situation: https://github.com/jonschlinkert/lazy-cache#heads-up
+// We can also found some issues arround this who also used unlazy-loader
+// to solve this: https://github.com/micromatch/micromatch/issues/55
+{
+test: /node_modules\/(use|class-utils)\/utils\.js$/,
+use: {
+loader: 'unlazy-loader',
+},
+},
 ],
 },
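For background on the unlazy-loader rule above: lazy-cache defers require calls behind getters, so webpack never sees a static dependency it can bundle. A rough sketch of the pattern (illustrative only, not code from this commit):

// Roughly how a lazy-cache consumer defers its requires:
const utils = require('lazy-cache')(require); // wraps `require` so the calls below are lazy
utils('glob');              // registered, but not loaded yet
utils('is-glob', 'isGlob'); // the second argument is an alias for the property name

// The real require only happens on first property access, which webpack cannot
// statically analyze — hence the unlazy-loader rule applied to use/class-utils above.
utils.glob.sync('*.js');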
@@ -70,6 +70,6 @@
 "webpack": "^4.23.1",
 "webpack-dev-server": "^3.1.10",
 "yeoman-generator": "1.1.1",
-"yo": "2.0.3"
+"yo": "2.0.5"
 }
 }
@@ -50,7 +50,7 @@ class MockClusterFork extends EventEmitter {
 });

 jest.spyOn(this, 'on');
-jest.spyOn(this, 'removeListener');
+jest.spyOn(this, 'off');
 jest.spyOn(this, 'emit');

 (async () => {
@@ -41,8 +41,8 @@ describe('CLI cluster manager', () => {
 kill: jest.fn(),
 },
 isDead: jest.fn().mockReturnValue(false),
-removeListener: jest.fn(),
-addListener: jest.fn(),
+off: jest.fn(),
+on: jest.fn(),
 send: jest.fn()
 };
 });
@@ -105,8 +105,8 @@ describe('CLI cluster manager', () => {

 clusterManager = ClusterManager.create({}, {}, basePathProxyMock);

-jest.spyOn(clusterManager.server, 'addListener');
-jest.spyOn(clusterManager.server, 'removeListener');
+jest.spyOn(clusterManager.server, 'on');
+jest.spyOn(clusterManager.server, 'off');

 [[{ blockUntil, shouldRedirectFromOldBasePath }]] = basePathProxyMock.start.mock.calls;
 });
@@ -128,58 +128,58 @@ describe('CLI cluster manager', () => {
 clusterManager.server.crashed = true;

 await expect(blockUntil()).resolves.not.toBeDefined();
-expect(clusterManager.server.addListener).not.toHaveBeenCalled();
-expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
+expect(clusterManager.server.on).not.toHaveBeenCalled();
+expect(clusterManager.server.off).not.toHaveBeenCalled();
 });

 test('`blockUntil()` resolves immediately if worker is already listening.', async () => {
 clusterManager.server.listening = true;

 await expect(blockUntil()).resolves.not.toBeDefined();
-expect(clusterManager.server.addListener).not.toHaveBeenCalled();
-expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
+expect(clusterManager.server.on).not.toHaveBeenCalled();
+expect(clusterManager.server.off).not.toHaveBeenCalled();
 });

 test('`blockUntil()` resolves when worker crashes.', async () => {
 const blockUntilPromise = blockUntil();

-expect(clusterManager.server.addListener).toHaveBeenCalledTimes(2);
-expect(clusterManager.server.addListener).toHaveBeenCalledWith(
+expect(clusterManager.server.on).toHaveBeenCalledTimes(2);
+expect(clusterManager.server.on).toHaveBeenCalledWith(
 'crashed',
 expect.any(Function)
 );

-const [, [eventName, onCrashed]] = clusterManager.server.addListener.mock.calls;
+const [, [eventName, onCrashed]] = clusterManager.server.on.mock.calls;
 // Check event name to make sure we call the right callback,
 // in Jest 23 we could use `toHaveBeenNthCalledWith` instead.
 expect(eventName).toBe('crashed');
-expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
+expect(clusterManager.server.off).not.toHaveBeenCalled();

 onCrashed();
 await expect(blockUntilPromise).resolves.not.toBeDefined();

-expect(clusterManager.server.removeListener).toHaveBeenCalledTimes(2);
+expect(clusterManager.server.off).toHaveBeenCalledTimes(2);
 });

 test('`blockUntil()` resolves when worker starts listening.', async () => {
 const blockUntilPromise = blockUntil();

-expect(clusterManager.server.addListener).toHaveBeenCalledTimes(2);
-expect(clusterManager.server.addListener).toHaveBeenCalledWith(
+expect(clusterManager.server.on).toHaveBeenCalledTimes(2);
+expect(clusterManager.server.on).toHaveBeenCalledWith(
 'listening',
 expect.any(Function)
 );

-const [[eventName, onListening]] = clusterManager.server.addListener.mock.calls;
+const [[eventName, onListening]] = clusterManager.server.on.mock.calls;
 // Check event name to make sure we call the right callback,
 // in Jest 23 we could use `toHaveBeenNthCalledWith` instead.
 expect(eventName).toBe('listening');
-expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
+expect(clusterManager.server.off).not.toHaveBeenCalled();

 onListening();
 await expect(blockUntilPromise).resolves.not.toBeDefined();

-expect(clusterManager.server.removeListener).toHaveBeenCalledTimes(2);
+expect(clusterManager.server.off).toHaveBeenCalledTimes(2);
 });
 });
 });
@@ -35,7 +35,8 @@ function assertListenerRemoved(emitter, event) {
 const [, onEventListener] = emitter.on.mock.calls.find(([eventName]) => {
 return eventName === event;
 });
-expect(emitter.removeListener).toHaveBeenCalledWith(event, onEventListener);
+
+expect(emitter.off).toHaveBeenCalledWith(event, onEventListener);
 }

 function setup(opts = {}) {
@@ -98,7 +99,7 @@ describe('CLI cluster manager', () => {
 assertListenerAdded(fork, 'message');
 assertListenerAdded(fork, 'online');
 assertListenerAdded(fork, 'disconnect');
-worker.shutdown();
+await worker.shutdown();
 expect(fork.process.kill).toHaveBeenCalledTimes(1);
 assertListenerRemoved(fork, 'message');
 assertListenerRemoved(fork, 'online');
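The removeListener/addListener renames in the two test files above track an EventEmitter change: emitter.off() was added in Node 10 as an alias for removeListener(), and the mocks were updated to spy on the method the code now calls. A small illustration (sketch, not code from this commit):

const { EventEmitter } = require('events');

const emitter = new EventEmitter();
const onCrash = () => console.log('crashed');

emitter.on('crashed', onCrash);   // same as addListener
emitter.emit('crashed');          // logs "crashed"
emitter.off('crashed', onCrash);  // alias for removeListener, available since Node 10
emitter.emit('crashed');          // nothing is listening any more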
@@ -17,8 +17,25 @@
 * under the License.
 */

+const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
++ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
++ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
+
+jest.mock('fs', () => ({
+readFileSync: jest.fn().mockImplementation((path) => {
+if (!path.includes('nonexistent')) {
+return JSON.stringify(mockKeystoreData);
+}
+
+throw { code: 'ENOENT' };
+}),
+existsSync: jest.fn().mockImplementation((path) => {
+return !path.includes('nonexistent');
+}),
+writeFileSync: jest.fn()
+}));

 import sinon from 'sinon';
-import mockFs from 'mock-fs';
 import { PassThrough } from 'stream';

 import { Keystore } from '../server/keystore';
@@ -30,17 +47,7 @@ describe('Kibana keystore', () => {
 describe('add', () => {
 const sandbox = sinon.createSandbox();

-const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
-+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
-+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
-
 beforeEach(() => {
-mockFs({
-'/data': {
-'test.keystore': JSON.stringify(keystoreData),
-}
-});
-
 sandbox.stub(prompt, 'confirm');
 sandbox.stub(prompt, 'question');

@@ -49,7 +56,6 @@ describe('Kibana keystore', () => {
 });

 afterEach(() => {
-mockFs.restore();
 sandbox.restore();
 });

@@ -149,4 +155,8 @@ describe('Kibana keystore', () => {
 expect(keystore.data.foo).toEqual('kibana');
 });
 });
+
+afterAll(() => {
+jest.restoreAllMocks();
+});
 });
@@ -17,8 +17,25 @@
 * under the License.
 */

+const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
++ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
++ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
+
+jest.mock('fs', () => ({
+readFileSync: jest.fn().mockImplementation((path) => {
+if (!path.includes('foo')) {
+return JSON.stringify(mockKeystoreData);
+}
+
+throw { code: 'ENOENT' };
+}),
+existsSync: jest.fn().mockImplementation((path) => {
+return !path.includes('foo');
+}),
+writeFileSync: jest.fn()
+}));

 import sinon from 'sinon';
-import mockFs from 'mock-fs';

 import { Keystore } from '../server/keystore';
 import { create } from './create';
@@ -29,23 +46,12 @@ describe('Kibana keystore', () => {
 describe('create', () => {
 const sandbox = sinon.createSandbox();

-const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
-+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
-+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
-
 beforeEach(() => {
-mockFs({
-'/data': {
-'test.keystore': JSON.stringify(keystoreData),
-}
-});
-
 sandbox.stub(Logger.prototype, 'log');
 sandbox.stub(Logger.prototype, 'error');
 });

 afterEach(() => {
-mockFs.restore();
 sandbox.restore();
 });

@@ -94,4 +100,8 @@ describe('Kibana keystore', () => {
 sinon.assert.notCalled(keystore.save);
 });
 });
+
+afterAll(() => {
+jest.restoreAllMocks();
+});
 });
@@ -17,9 +17,24 @@
 * under the License.
 */

-import sinon from 'sinon';
-import mockFs from 'mock-fs';
+const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
++ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
++ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
+
+jest.mock('fs', () => ({
+readFileSync: jest.fn().mockImplementation((path) => {
+if (!path.includes('nonexistent')) {
+return JSON.stringify(mockKeystoreData);
+}
+
+throw { code: 'ENOENT' };
+}),
+existsSync: jest.fn().mockImplementation((path) => {
+return !path.includes('nonexistent');
+})
+}));

+import sinon from 'sinon';
 import { Keystore } from '../server/keystore';
 import { list } from './list';
 import Logger from '../cli_plugin/lib/logger';
@@ -28,23 +43,12 @@ describe('Kibana keystore', () => {
 describe('list', () => {
 const sandbox = sinon.createSandbox();

-const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
-+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
-+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
-
 beforeEach(() => {
-mockFs({
-'/data': {
-'test.keystore': JSON.stringify(keystoreData),
-}
-});
-
 sandbox.stub(Logger.prototype, 'log');
 sandbox.stub(Logger.prototype, 'error');
 });

 afterEach(() => {
-mockFs.restore();
 sandbox.restore();
 });

@@ -64,4 +68,8 @@ describe('Kibana keystore', () => {
 sinon.assert.calledWith(Logger.prototype.error, 'ERROR: Kibana keystore not found. Use \'create\' command to create one.');
 });
 });
+
+afterAll(() => {
+jest.restoreAllMocks();
+});
 });
@@ -17,8 +17,17 @@
 * under the License.
 */

+const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
++ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
++ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
+
+jest.mock('fs', () => ({
+readFileSync: jest.fn().mockImplementation(() => JSON.stringify(mockKeystoreData)),
+existsSync: jest.fn().mockImplementation(() => true),
+writeFileSync: jest.fn()
+}));

 import sinon from 'sinon';
-import mockFs from 'mock-fs';

 import { Keystore } from '../server/keystore';
 import { remove } from './remove';
@@ -27,20 +36,7 @@ describe('Kibana keystore', () => {
 describe('remove', () => {
 const sandbox = sinon.createSandbox();

-const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
-+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
-+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
-
-beforeEach(() => {
-mockFs({
-'/data': {
-'test.keystore': JSON.stringify(keystoreData),
-}
-});
-});
-
 afterEach(() => {
-mockFs.restore();
 sandbox.restore();
 });

@@ -61,4 +57,8 @@ describe('Kibana keystore', () => {
 sinon.assert.calledOnce(keystore.save);
 });
 });
+
+afterAll(() => {
+jest.restoreAllMocks();
+});
 });
@@ -17,12 +17,18 @@
 * under the License.
 */

+jest.mock('fs', () => ({
+statSync: jest.fn().mockImplementation(() => require('fs').statSync),
+unlinkSync: jest.fn().mockImplementation(() => require('fs').unlinkSync),
+mkdirSync: jest.fn().mockImplementation(() => require('fs').mkdirSync),
+}));

 import sinon from 'sinon';
-import mockFs from 'mock-fs';
 import Logger from '../lib/logger';
 import { join } from 'path';
 import rimraf from 'rimraf';
 import mkdirp from 'mkdirp';
+import fs from 'fs';
 import { existingInstall, assertVersion } from './kibana';

 describe('kibana cli', function () {
@@ -119,20 +125,24 @@ describe('kibana cli', function () {
 });

 it('should throw an error if the plugin already exists.', function () {
-mockFs({ [`${pluginDir}/foo`]: {} });
+fs.statSync = jest.fn().mockImplementationOnce(() => true);

 existingInstall(settings, logger);
 expect(logger.error.firstCall.args[0]).toMatch(/already exists/);
 expect(process.exit.called).toBe(true);

-mockFs.restore();
 });

 it('should not throw an error if the plugin does not exist.', function () {
+fs.statSync = jest.fn().mockImplementationOnce(() => {
+throw { code: 'ENOENT' };
+});
 existingInstall(settings, logger);
 expect(logger.error.called).toBe(false);
 });
 });
 });
 });

+afterAll(() => {
+jest.restoreAllMocks();
+});
 });
@@ -24,7 +24,7 @@ interface RecursiveReadonlyArray<T> extends Array<RecursiveReadonly<T>> {}

 type RecursiveReadonly<T> = T extends any[]
 ? RecursiveReadonlyArray<T[number]>
-: T extends object ? Readonly<{ [K in keyof T]: RecursiveReadonly<T[K]> }> : T;
+: T extends object
+? Readonly<{ [K in keyof T]: RecursiveReadonly<T[K]> }>
+: T;

 export function deepFreeze<T extends Freezable>(object: T) {
 // for any properties that reference an object, makes sure that object is
@@ -79,8 +79,8 @@ export class ConfigService {
 ConfigClass: ConfigWithSchema<TSchema, TConfig>
 ) {
 return this.getDistinctConfig(path).pipe(
-map(
-config => (config === undefined ? undefined : this.createConfig(path, config, ConfigClass))
+map(config =>
+config === undefined ? undefined : this.createConfig(path, config, ConfigClass)
 )
 );
 }
@@ -85,7 +85,7 @@ export function createServer(options: ServerOptions) {
 server.listener.keepAliveTimeout = 120e3;
 server.listener.on('clientError', (err, socket) => {
 if (socket.writable) {
-socket.end(new Buffer('HTTP/1.1 400 Bad Request\r\n\r\n', 'ascii'));
+socket.end(Buffer.from('HTTP/1.1 400 Bad Request\r\n\r\n', 'ascii'));
 } else {
 socket.destroy(err);
 }
@@ -76,7 +76,7 @@ function processPluginSearchPaths$(pluginDirs: ReadonlyArray<string>, log: Logge
 log.debug(`Scanning "${dir}" for plugin sub-directories...`);

 return fsReadDir$(dir).pipe(
-mergeMap(subDirs => subDirs.map(subDir => resolve(dir, subDir))),
+mergeMap((subDirs: string[]) => subDirs.map(subDir => resolve(dir, subDir))),
 mergeMap(path =>
 fsStat$(path).pipe(
 // Filter out non-directory entries from target directories, it's expected that
@@ -53,7 +53,7 @@ export async function scanDelete(options: Options) {
 const getChildPath$ = (path: string) =>
 getReadDir$(path).pipe(
 mergeAll(),
-map(name => join(path, name))
+map((name: string) => join(path, name))
 );

 // get an observable of all paths to be deleted, by starting with the arg
@@ -10,7 +10,6 @@ exports[`Inspector Data View component should render empty state 1`] = `
 },
 "_eventsCount": 1,
 "_maxListeners": undefined,
-"domain": null,
 "tabular": [Function],
 "tabularOptions": Object {},
 },
@@ -101,7 +100,6 @@ exports[`Inspector Data View component should render loading state 1`] = `
 },
 "_eventsCount": 1,
 "_maxListeners": undefined,
-"domain": null,
 },
 }
 }
@@ -44,9 +44,9 @@ export interface EmbeddableIsInitializedActionPayload {

 export interface EmbeddableIsInitializedAction
 extends KibanaAction<
 EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED,
 EmbeddableIsInitializedActionPayload
 > {}

 export interface SetStagedFilterActionPayload {
 panelId: PanelId;
@@ -243,10 +243,14 @@ describe('Field', () => {
 ...component.instance().props.setting,
 value: userValue,
 } });

+await component.instance().cancelChangeImage();
 component.update();
 });

 it('should be able to change value from existing value and save', async () => {
+findTestSubject(component, `advancedSetting-changeImage-${setting.name}`).simulate('click');
+
 const newUserValue = `${userValue}=`;
 await component.instance().onImageChange([newUserValue]);
 component.update();
@@ -45,10 +45,9 @@ export const VisTypeIcon = ({ visType }: VisTypeIconProps) => {
 <img src={visType.image} aria-hidden="true" className="visNewVisDialog__typeImage" />
 )}
 {!visType.image && visType.legacyIcon && <span className={legacyIconClass} />}
-{!visType.image &&
-!visType.legacyIcon && (
-<EuiIcon type={visType.icon || 'empty'} size="l" color="secondary" aria-hidden="true" />
-)}
+{!visType.image && !visType.legacyIcon && (
+<EuiIcon type={visType.icon || 'empty'} size="l" color="secondary" aria-hidden="true" />
+)}
 </React.Fragment>
 );
 };
@@ -66,7 +66,7 @@ describe('plugin discovery/plugin spec', () => {
 it('throws if spec.publicDir is truthy and not a string', () => {
 function assert(publicDir) {
 expect(() => new PluginSpec(fooPack, { publicDir })).to.throwError(error => {
-expect(error.message).to.contain('Path must be a string');
+expect(error.message).to.contain(`The "path" argument must be of type string. Received type ${typeof publicDir}`);
 });
 }
@@ -51,7 +51,7 @@ export class Keystore {

 static decrypt(data, password = '') {
 try {
-const bData = new Buffer(data, 'base64');
+const bData = Buffer.from(data, 'base64');

 // convert data to buffers
 const salt = bData.slice(0, 64);
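The Buffer changes here and in the files below all follow the same Node 10 guidance: the new Buffer(...) constructor is deprecated (DEP0005) in favour of the explicit Buffer.from() and Buffer.alloc() factories, which is what the commit message means by avoiding deprecation warnings. In short:

// Deprecated on Node 10 (triggers the DEP0005 runtime warning):
//   const data = new Buffer('secret', 'base64');
//   const block = new Buffer(64);

// Preferred replacements:
const data = Buffer.from('secret', 'base64'); // copies from a string, array or buffer
const block = Buffer.alloc(64);               // allocates a zero-filled 64-byte buffer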
@@ -17,7 +17,38 @@
 * under the License.
 */

-import mockFs from 'mock-fs';
+const mockProtectedKeystoreData = '1:4BnWfydL8NwFIQJg+VQKe0jlIs7uXtty6+++yaWPbSB'
++ 'KIX3d9nPfQ20K1C6Xh26E/gMJAQ9jh7BxK0+W3lt/iDJBJn44wqX3pQ0189iGkNBL0ibDCc'
++ 'tz4mRy6+hqwiLxiukpH8ELAJsff8LNNHr+gNzX/2k/GvB7nQ==';
+
+const mockUnprotectedKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDy'
++ 'fGfJSy4mHBBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqh'
++ 'I4lzJ9MRy21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
+
+jest.mock('fs', () => ({
+readFileSync: jest.fn().mockImplementation((path) => {
+if (path.includes('data/unprotected')) {
+return JSON.stringify(mockUnprotectedKeystoreData);
+}
+
+if (path.includes('data/protected')) {
+return JSON.stringify(mockProtectedKeystoreData);
+}
+
+if (path.includes('data/test') || path.includes('data/nonexistent')) {
+throw { code: 'ENOENT' };
+}
+
+throw { code: 'EACCES' };
+}),
+existsSync: jest.fn().mockImplementation((path) => {
+return path.includes('data/unprotected')
+|| path.includes('data/protected')
+|| path.includes('inaccessible');
+}),
+writeFileSync: jest.fn()
+}));

 import sinon from 'sinon';
 import { readFileSync } from 'fs';

@@ -26,28 +57,7 @@ import { Keystore } from './keystore';
 describe('Keystore', () => {
 const sandbox = sinon.createSandbox();

-const protectedKeystoreData = '1:4BnWfydL8NwFIQJg+VQKe0jlIs7uXtty6+++yaWPbSB'
-+ 'KIX3d9nPfQ20K1C6Xh26E/gMJAQ9jh7BxK0+W3lt/iDJBJn44wqX3pQ0189iGkNBL0ibDCc'
-+ 'tz4mRy6+hqwiLxiukpH8ELAJsff8LNNHr+gNzX/2k/GvB7nQ==';
-
-const unprotectedKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDy'
-+ 'fGfJSy4mHBBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqh'
-+ 'I4lzJ9MRy21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
-
-beforeEach(() => {
-mockFs({
-'/data': {
-'protected.keystore': protectedKeystoreData,
-'unprotected.keystore': unprotectedKeystoreData,
-},
-'/inaccessible': mockFs.directory({
-mode: '0000',
-})
-});
-});
-
 afterEach(() => {
-mockFs.restore();
 sandbox.restore();
 });

@@ -71,6 +81,7 @@ describe('Keystore', () => {
 const keystore = new Keystore(path);
 keystore.save();

+readFileSync.mockReturnValueOnce(mockProtectedKeystoreData);
 const fileBuffer = readFileSync(path);
 const contents = fileBuffer.toString();
 const [version, data] = contents.split(':');
@@ -215,4 +226,8 @@ describe('Keystore', () => {
 }
 });
 });
+
+afterAll(() => {
+jest.restoreAllMocks();
+});
 });
@@ -27,6 +27,6 @@ export async function setupLogging(server, config) {
 });
 }

-export function loggingMixin(kbnServer, server, config) {
-return setupLogging(server, config);
+export async function loggingMixin(kbnServer, server, config) {
+return await setupLogging(server, config);
 }
@@ -19,7 +19,7 @@

 /*
 * This file contains logic to build the index mappings for a migration.
 */

 import _ from 'lodash';
 import { IndexMapping, MappingProperties } from './call_cluster';
@@ -36,7 +36,7 @@
 *
 * This way, we keep looping until there are no transforms left to apply, and we properly
 * handle property addition / deletion / renaming.
 *
 * A caveat is that this means we must restrict what a migration can do to the doc's
 * migrationVersion itself. We allow only these kinds of changes:
 *
@@ -54,11 +54,11 @@
 * and those documents are simply given a stamp of approval by this transformer. This is why it is
 * important for migration authors to *also* write a saved object validation that will prevent this
 * assumption from inserting out-of-date documents into the index.
 *
 * If the client(s) send us documents with migrationVersion specified, we will migrate them as
 * appropriate. This means for data import scenarios, any documetns being imported should be explicitly
 * given an empty migrationVersion property {} if no such property exists.
 */

 import Boom from 'boom';
 import _ from 'lodash';
@@ -32,7 +32,7 @@
 * just migrate data into an existing index. Such an action could result in data loss. Instead,
 * we should probably fail, and the Kibana sys-admin should clean things up before relaunching
 * Kibana.
 */

 import _ from 'lodash';
 import { Logger } from './migration_logger';
@@ -83,7 +83,7 @@ export function createRootWithSettings(...settings: Array<Record<string, any>>)
 * @param path
 */
 function getSupertest(root: Root, method: HttpMethod, path: string) {
-const testUserCredentials = new Buffer(`${kibanaTestUser.username}:${kibanaTestUser.password}`);
+const testUserCredentials = Buffer.from(`${kibanaTestUser.username}:${kibanaTestUser.password}`);
 return supertest((root as any).server.http.service.httpServer.server.listener)
 [method](path)
 .set('Authorization', `Basic ${testUserCredentials.toString('base64')}`);
@@ -79,7 +79,7 @@ export class Sha256 {

  const blockSize = 64;
  const finalSize = 56;
-  this._block = new Buffer(blockSize);
+  this._block = Buffer.alloc(blockSize);
  this._finalSize = finalSize;
  this._blockSize = blockSize;
  this._len = 0;

@@ -102,7 +102,7 @@ export class Sha256 {
  update(data, enc) {
    if (typeof data === 'string') {
      enc = enc || 'utf8';
-      data = new Buffer(data, enc);
+      data = Buffer.from(data, enc);
    }

    const l = this._len += data.length;

@@ -195,7 +195,7 @@ export class Sha256 {
  }

  _hash() {
-    const H = new Buffer(32);
+    const H = Buffer.alloc(32);

    H.writeInt32BE(this._a, 0);
    H.writeInt32BE(this._b, 4);
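For fixed-size allocations the non-deprecated counterpart is `Buffer.alloc(size)`, which returns zero-filled memory. A small standalone sketch of the write pattern the hash code above uses (the constants are arbitrary):

const digest = Buffer.alloc(8); // zero-filled, unlike the deprecated new Buffer(8)
digest.writeInt32BE(0x01020304, 0); // big-endian 32-bit writes, as in _hash() above
digest.writeInt32BE(0x05060708, 4);
console.log(digest.toString('hex')); // "0102030405060708"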
@@ -22,7 +22,7 @@
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *

src/ui/public/state_management/state.d.ts (4 changes, vendored)

@@ -19,8 +19,6 @@

export interface State {
  [key: string]: any;
-  translateHashToRison: (
-    stateHashOrRison: string | string[] | undefined
-  ) => string | string[] | undefined;
+  translateHashToRison: (stateHashOrRison: string | string[]) => string | string[];
  getQueryParamName: () => string;
}
@@ -17,23 +17,15 @@
 * under the License.
 */

-import mockFs from 'mock-fs';
-import { resolve } from 'path';

const mockTemplate = `
{{appId}}
{{regularBundlePath}}
{{i18n 'foo' '{"defaultMessage": "bar"}'}}
`;

-const templatePath = resolve(__dirname, 'template.js.hbs');
-
-beforeEach(() => {
-  mockFs({
-    [templatePath]: mockTemplate
-  });
-});
-afterEach(mockFs.restore);
+jest.mock('fs', () => ({
+  readFile: jest.fn().mockImplementation((path, encoding, cb) => cb(null, mockTemplate))
+}));

import { AppBootstrap } from './app_bootstrap';

@@ -113,6 +105,10 @@ describe('ui_render/AppBootstrap', () => {
      expect(hash2).not.toEqual(hash1);
    });
  });

+  afterAll(() => {
+    jest.restoreAllMocks();
+  });
});

function mockConfig() {
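The test above drops mock-fs in favour of a jest module mock for `fs`. A minimal, self-contained sketch of that pattern, with a made-up template string and path rather than the real Kibana fixtures:

jest.mock('fs', () => ({
  // fs.readFile(path, encoding, callback) is answered from memory instead of disk
  readFile: jest.fn((path, encoding, cb) => cb(null, '{{appId}} {{regularBundlePath}}')),
}));

const fs = require('fs');

test('readFile is served by the jest module mock', done => {
  fs.readFile('/any/template.hbs', 'utf8', (err, contents) => {
    expect(err).toBeNull();
    expect(contents).toContain('{{appId}}');
    done();
  });
});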
@@ -21,7 +21,7 @@
    }
  },
  "resolutions": {
-    "**/@types/node": "8.10.38",
+    "**/@types/node": "10.12.12",
    "@types/react": "16.3.14"
  },
  "devDependencies": {

@@ -61,7 +61,7 @@
    "ansicolors": "0.3.2",
    "aws-sdk": "2.2.33",
    "axios": "^0.18.0",
-    "babel-jest": "^23.4.2",
+    "babel-jest": "^23.6.0",
    "babel-plugin-inline-react-svg": "^0.5.4",
    "babel-plugin-mock-imports": "^0.0.5",
    "babel-plugin-transform-react-remove-prop-types": "^0.4.14",

@@ -78,23 +78,23 @@
    "expect.js": "0.3.1",
    "fancy-log": "^1.3.2",
    "fetch-mock": "^5.13.1",
-    "graphql-code-generator": "^0.10.1",
-    "graphql-codegen-introspection-template": "^0.10.5",
-    "graphql-codegen-typescript-template": "^0.10.1",
+    "graphql-code-generator": "^0.13.0",
+    "graphql-codegen-introspection-template": "^0.13.0",
+    "graphql-codegen-typescript-template": "^0.13.0",
    "gulp": "3.9.1",
    "gulp-mocha": "2.2.0",
    "gulp-multi-process": "^1.3.1",
    "hapi": "^17.5.3",
-    "jest": "^23.5.0",
-    "jest-cli": "^23.5.0",
-    "jest-styled-components": "^6.1.1",
+    "jest": "^23.6.0",
+    "jest-cli": "^23.6.0",
+    "jest-styled-components": "^6.2.2",
    "jsdom": "^12.0.0",
    "mocha": "3.3.0",
    "mustache": "^2.3.0",
    "mutation-observer": "^1.0.3",
    "node-fetch": "^2.1.2",
    "pdf-image": "2.0.0",
-    "pdfjs-dist": "^2.0.489",
+    "pdfjs-dist": "^2.0.943",
    "pixelmatch": "4.0.2",
    "proxyquire": "1.7.11",
    "react-test-renderer": "^16.2.0",

@@ -128,13 +128,13 @@
    "@kbn/ui-framework": "1.0.0",
    "@samverschueren/stream-to-observable": "^0.3.0",
    "@scant/router": "^0.1.0",
-    "@slack/client": "^4.2.2",
+    "@slack/client": "^4.8.0",
    "angular-resource": "1.4.9",
    "angular-sanitize": "1.4.9",
    "angular-ui-ace": "0.2.3",
    "apollo-cache-inmemory": "^1.2.7",
    "apollo-client": "^2.3.8",
-    "apollo-link": "^1.2.2",
+    "apollo-link": "^1.2.3",
    "apollo-link-http": "^1.5.4",
    "apollo-link-schema": "^1.1.0",
    "apollo-link-state": "^0.4.1",

@@ -231,7 +231,7 @@
    "react-router-dom": "^4.3.1",
    "react-select": "^1.2.1",
    "react-shortcuts": "^2.0.0",
-    "react-sticky": "^6.0.1",
+    "react-sticky": "^6.0.3",
    "react-syntax-highlighter": "^5.7.0",
    "react-vis": "^1.8.1",
    "recompose": "^0.26.0",

@@ -265,6 +265,6 @@
    "xregexp": "3.2.0"
  },
  "engines": {
-    "yarn": "^1.6.0"
+    "yarn": "^1.10.1"
  }
}
@@ -90,9 +90,8 @@ describe('waterfall_helpers', () => {
      }
    ];

-    const childrenByParentId = groupBy(
-      items,
-      hit => (hit.parentId ? hit.parentId : 'root')
+    const childrenByParentId = groupBy(items, hit =>
+      hit.parentId ? hit.parentId : 'root'
    );
    const entryTransactionItem = childrenByParentId.root[0];
    expect(

@@ -287,9 +287,8 @@ export function getWaterfall(
    }
  });

-  const childrenByParentId = groupBy(
-    filteredHits,
-    hit => (hit.parentId ? hit.parentId : 'root')
+  const childrenByParentId = groupBy(filteredHits, hit =>
+    hit.parentId ? hit.parentId : 'root'
  );
  const entryTransactionItem = getTransactionItem(entryTransaction);
  const itemsById: IWaterfallIndex = indexBy(filteredHits, 'id');
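The grouping above relies on lodash's `groupBy`, keyed on each hit's `parentId` with a `'root'` fallback. A tiny sketch with made-up items, not APM data:

import { groupBy } from 'lodash';

const items = [
  { id: 'a', parentId: undefined }, // entry transaction, grouped under 'root'
  { id: 'b', parentId: 'a' },
  { id: 'c', parentId: 'a' },
];

const childrenByParentId = groupBy(items, hit => (hit.parentId ? hit.parentId : 'root'));

console.log(Object.keys(childrenByParentId)); // [ 'root', 'a' ]
console.log(childrenByParentId.root[0].id); // 'a'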
@@ -90,10 +90,9 @@ class TransactionOverview extends Component {
      <div>
        <HeaderContainer>
          <h1>{serviceName}</h1>
-          {get(license.data, 'features.ml.isAvailable') &&
-            mlEnabled && (
-              <DynamicBaselineButton onOpenFlyout={this.onOpenFlyout} />
-            )}
+          {get(license.data, 'features.ml.isAvailable') && mlEnabled && (
+            <DynamicBaselineButton onOpenFlyout={this.onOpenFlyout} />
+          )}
        </HeaderContainer>

        <KueryBar />

@@ -44,18 +44,17 @@ export const LibraryFrames: React.SFC<Props> = ({

    <div>
      {visible &&
        stackframes.map((stackframe, i) =>
          hasSourceLines(stackframe) ? (
            <CodePreview
              key={i}
              stackframe={stackframe}
              isLibraryFrame
              codeLanguage={codeLanguage}
            />
          ) : (
            <FrameHeading key={i} stackframe={stackframe} isLibraryFrame />
          )
        )}
      </div>
    </div>

@@ -74,10 +74,9 @@ class InteractivePlot extends PureComponent {
        {hoverX && <MarkSeries data={markPoints} colorType="literal" />}
        {hoverX && <VerticalGridLines tickValues={[hoverX]} />}

-        {isDrawing &&
-          selectionEnd !== null && (
-            <SelectionMarker start={x(selectionStart)} end={x(selectionEnd)} />
-          )}
+        {isDrawing && selectionEnd !== null && (
+          <SelectionMarker start={x(selectionStart)} end={x(selectionEnd)} />
+        )}
      </SharedPlot>
    );
  }
@@ -225,8 +225,8 @@ const withPreviousSuggestionSelected = (
  props.suggestions.length === 0
    ? null
    : state.selectedIndex !== null
    ? (state.selectedIndex + props.suggestions.length - 1) % props.suggestions.length
    : Math.max(props.suggestions.length - 1, 0),
});

const withNextSuggestionSelected = (

@@ -238,8 +238,8 @@ const withNextSuggestionSelected = (
  props.suggestions.length === 0
    ? null
    : state.selectedIndex !== null
    ? (state.selectedIndex + 1) % props.suggestions.length
    : 0,
});

const withSuggestionAtIndexSelected = (suggestionIndex: number) => (

@@ -251,8 +251,8 @@ const withSuggestionAtIndexSelected = (suggestionIndex: number) => (
  props.suggestions.length === 0
    ? null
    : suggestionIndex >= 0 && suggestionIndex < props.suggestions.length
    ? suggestionIndex
    : 0,
});

const withSuggestionsVisible = (state: AutocompleteFieldState) => ({

@@ -102,15 +102,14 @@ export class Table extends React.Component<TableProps, TableState> {

    return (
      <TableContainer>
        {!hideTableControls && assignmentOptions && (
          <ControlBar
            itemType={type.itemType}
            assignmentOptions={assignmentOptions}
            kueryBarProps={kueryBarProps}
            selectionCount={this.state.selection.length}
          />
        )}

        <EuiSpacer size="m" />
        <EuiInMemoryTable
          columns={type.columnDefinitions}

@@ -73,17 +73,16 @@ export class WithKueryAutocompletion extends React.Component<
      suggestions = [];
    }

    this.setState(state =>
      state.currentRequest &&
      state.currentRequest.expression !== expression &&
      state.currentRequest.cursorPosition !== cursorPosition
        ? state // ignore this result, since a newer request is in flight
        : {
            ...state,
            currentRequest: null,
            suggestions: maxSuggestions ? suggestions.slice(0, maxSuggestions) : suggestions,
          }
    );
  };
}
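The component above passes an updater function to `setState` so a stale autocompletion response cannot overwrite newer state. A reduced sketch of that guard with a hypothetical state shape (the interface below is illustrative, not the component's real props):

interface RequestState {
  currentRequest: { expression: string } | null;
  suggestions: string[];
}

// returns the unchanged state when the response belongs to an outdated request
function applyResponse(state: RequestState, expression: string, suggestions: string[]): RequestState {
  return state.currentRequest && state.currentRequest.expression !== expression
    ? state // ignore this result, since a newer request is in flight
    : { ...state, currentRequest: null, suggestions };
}

// inside a React component this would be:
// this.setState(state => applyResponse(state, expression, suggestions));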
@@ -49,23 +49,25 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
  public async removeTagsFromBeats(removals: BeatsTagAssignment[]): Promise<BeatsRemovalReturn[]> {
    const beatIds = removals.map(r => r.beatId);

    const response = this.beatsDB
      .filter(beat => beatIds.includes(beat.id))
      .map(beat => {
        const tagData = removals.find(r => r.beatId === beat.id);
        if (tagData) {
          if (beat.tags) {
            beat.tags = beat.tags.filter(tag => tag !== tagData.tag);
          }
        }
        const removalsForBeat = removals.filter(r => r.beatId === beat.id);
        if (removalsForBeat.length) {
          removalsForBeat.forEach((assignment: BeatsTagAssignment) => {
            if (beat.tags) {
              beat.tags = beat.tags.filter(tag => tag !== assignment.tag);
            }
          });
        }
        return beat;
      });

    return response.map<any>((item: CMBeat, resultIdx: number) => ({
      idxInRequest: removals[resultIdx].idxInRequest,

@@ -77,24 +79,26 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
  public async assignTagsToBeats(assignments: BeatsTagAssignment[]): Promise<CMAssignmentReturn[]> {
    const beatIds = assignments.map(r => r.beatId);

    this.beatsDB
      .filter(beat => beatIds.includes(beat.id))
      .map(beat => {
        // get tags that need to be assigned to this beat
        const tags = assignments
          .filter(a => a.beatId === beat.id)
          .map((t: BeatsTagAssignment) => t.tag);

        if (tags.length > 0) {
          if (!beat.tags) {
            beat.tags = [];
          }
          const nonExistingTags = tags.filter((t: string) => beat.tags && !beat.tags.includes(t));

          if (nonExistingTags.length > 0) {
            beat.tags = beat.tags.concat(nonExistingTags);
          }
        }
        return beat;
      });

    return assignments.map<any>((item: BeatsTagAssignment, resultIdx: number) => ({
      idxInRequest: assignments[resultIdx].idxInRequest,
@@ -72,26 +72,25 @@ class BeatDetailsPageComponent extends React.PureComponent<PageProps, PageState>
            />
          </EuiText>
        </EuiFlexItem>
        {beat.full_tags && beat.full_tags.length > 0 && (
          <EuiFlexItem grow={false}>
            <EuiText size="xs">
              <FormattedMessage
                id="xpack.beatsManagement.beat.lastConfigUpdateMessage"
                defaultMessage="Last Config Update: {lastUpdateTime}."
                values={{
                  lastUpdateTime: (
                    <strong>
                      {moment(
                        first(sortByOrder(beat.full_tags, 'last_updated')).last_updated
                      ).fromNow()}
                    </strong>
                  ),
                }}
              />
            </EuiText>
          </EuiFlexItem>
        )}
      </EuiFlexGroup>
    ) : (
      <FormattedMessage
@@ -59,15 +59,17 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
  ): Promise<BeatsTagAssignment[]> {
    const beatIds = removals.map(r => r.beatId);

    const response = this.beatsDB
      .filter(beat => beatIds.includes(beat.id))
      .map(beat => {
        const tagData = removals.find(r => r.beatId === beat.id);
        if (tagData) {
          if (beat.tags) {
            beat.tags = beat.tags.filter(tag => tag !== tagData.tag);
          }
        }
        return beat;
      });

    return response.map<any>((item: CMBeat, resultIdx: number) => ({
      idxInRequest: removals[resultIdx].idxInRequest,

@@ -82,24 +84,26 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
  ): Promise<BeatsTagAssignment[]> {
    const beatIds = assignments.map(r => r.beatId);

    this.beatsDB
      .filter(beat => beatIds.includes(beat.id))
      .map(beat => {
        // get tags that need to be assigned to this beat
        const tags = assignments
          .filter(a => a.beatId === beat.id)
          .map((t: BeatsTagAssignment) => t.tag);

        if (tags.length > 0) {
          if (!beat.tags) {
            beat.tags = [];
          }
          const nonExistingTags = tags.filter((t: string) => beat.tags && !beat.tags.includes(t));

          if (nonExistingTags.length > 0) {
            beat.tags = beat.tags.concat(nonExistingTags);
          }
        }
        return beat;
      });

    return assignments.map<any>((item: BeatsTagAssignment, resultIdx: number) => ({
      idxInRequest: assignments[resultIdx].idxInRequest,
@@ -11,17 +11,16 @@ import 'react-datetime/css/react-datetime.css';

export const DatetimeQuickList = ({ from, to, ranges, onSelect, children }) => (
  <div style={{ display: 'grid', alignItems: 'center' }}>
    {ranges.map((range, i) =>
      from === range.from && to === range.to ? (
        <EuiButton size="s" fill key={i} onClick={() => onSelect(range.from, range.to)}>
          {range.display}
        </EuiButton>
      ) : (
        <EuiButtonEmpty size="s" key={i} onClick={() => onSelect(range.from, range.to)}>
          {range.display}
        </EuiButtonEmpty>
      )
    )}
    {children}
  </div>

@@ -51,12 +51,11 @@ export const AdvancedFailureComponent = props => {
        <EuiButton disabled={!valid} onClick={e => valueChange(e)} size="s" type="submit">
          Apply
        </EuiButton>
-        {defaultValue &&
-          defaultValue.length && (
-            <EuiButtonEmpty size="s" color="danger" onClick={confirmReset}>
-              Reset
-            </EuiButtonEmpty>
-          )}
+        {defaultValue && defaultValue.length && (
+          <EuiButtonEmpty size="s" color="danger" onClick={confirmReset}>
+            Reset
+          </EuiButtonEmpty>
+        )}
      </div>
    </EuiForm>
  );

@@ -65,16 +65,15 @@ export const SimpleTemplate = props => {
        </EuiFlexItem>
      </Fragment>
    )}
    {name !== 'defaultStyle' && (!labels || labels.length === 0) && (
      <EuiFlexItem grow={false}>
        <TooltipIcon
          position="left"
          icon="warning"
          content="Data has no series to style, add a color dimension"
        />
      </EuiFlexItem>
    )}
  </EuiFlexGroup>
  );
};
@@ -31,17 +31,16 @@ const primaryUpdate = state => state.primaryUpdate;
 */

// dispatch the various types of actions
-const rawCursorPosition = select(
-  action => (action.type === 'cursorPosition' ? action.payload : null)
+const rawCursorPosition = select(action =>
+  action.type === 'cursorPosition' ? action.payload : null
)(primaryUpdate);

const mouseButtonEvent = select(action => (action.type === 'mouseEvent' ? action.payload : null))(
  primaryUpdate
);

-const keyFromMouse = select(
-  ({ type, payload: { altKey, metaKey, shiftKey, ctrlKey } }) =>
-    type === 'cursorPosition' || type === 'mouseEvent' ? { altKey, metaKey, shiftKey, ctrlKey } : {}
+const keyFromMouse = select(({ type, payload: { altKey, metaKey, shiftKey, ctrlKey } }) =>
+  type === 'cursorPosition' || type === 'mouseEvent' ? { altKey, metaKey, shiftKey, ctrlKey } : {}
)(primaryUpdate);

const metaHeld = select(appleKeyboard ? e => e.metaKey : e => e.altKey)(keyFromMouse);

@@ -77,8 +77,8 @@ const hoveredShapes = select((shapes, cursorPosition) =>
)(shapes, cursorPosition);

const depthIndex = 0;
-const hoveredShape = select(
-  hoveredShapes => (hoveredShapes.length ? hoveredShapes[depthIndex] : null)
+const hoveredShape = select(hoveredShapes =>
+  hoveredShapes.length ? hoveredShapes[depthIndex] : null
)(hoveredShapes);

const draggedShape = select(draggingShape)(scene, hoveredShape, mouseIsDown, mouseDowned);
@@ -94,57 +94,56 @@ const focusedShapes = select((shapes, focusedShape) =>
  shapes.filter(shape => focusedShape && shape.id === focusedShape.id)
)(shapes, focusedShape);

const keyTransformGesture = select(keys =>
  config.shortcuts
    ? Object.keys(keys)
        .map(keypress => {
          switch (keypress) {
            case 'KeyW':
              return { transform: matrix.translate(0, -5, 0) };
            case 'KeyA':
              return { transform: matrix.translate(-5, 0, 0) };
            case 'KeyS':
              return { transform: matrix.translate(0, 5, 0) };
            case 'KeyD':
              return { transform: matrix.translate(5, 0, 0) };
            case 'KeyF':
              return { transform: matrix.translate(0, 0, -20) };
            case 'KeyC':
              return { transform: matrix.translate(0, 0, 20) };
            case 'KeyX':
              return { transform: matrix.rotateX(Math.PI / 45) };
            case 'KeyY':
              return { transform: matrix.rotateY(Math.PI / 45 / 1.3) };
            case 'KeyZ':
              return { transform: matrix.rotateZ(Math.PI / 45 / 1.6) };
            case 'KeyI':
              return { transform: matrix.scale(1, 1.05, 1) };
            case 'KeyJ':
              return { transform: matrix.scale(1 / 1.05, 1, 1) };
            case 'KeyK':
              return { transform: matrix.scale(1, 1 / 1.05, 1) };
            case 'KeyL':
              return { transform: matrix.scale(1.05, 1, 1) };
            case 'KeyP':
              return { transform: matrix.perspective(2000) };
            case 'KeyR':
              return { transform: matrix.shear(0.1, 0) };
            case 'KeyT':
              return { transform: matrix.shear(-0.1, 0) };
            case 'KeyU':
              return { transform: matrix.shear(0, 0.1) };
            case 'KeyH':
              return { transform: matrix.shear(0, -0.1) };
            case 'KeyM':
              return { transform: matrix.UNITMATRIX, sizes: [1.0, 0, 0, 0, 1.0, 0, 10, 0, 1] };
            case 'Backspace':
            case 'Delete':
              return { transform: matrix.UNITMATRIX, delete: true };
          }
        })
        .filter(identity)
    : []
)(pressedKeys);

const alterSnapGesture = select(metaHeld => (metaHeld ? ['relax'] : []))(metaHeld);

@@ -189,14 +188,14 @@ const transformGestures = select((keyTransformGesture, mouseTransformGesture) =>
  keyTransformGesture.concat(mouseTransformGesture)
)(keyTransformGesture, mouseTransformGesture);

-const restateShapesEvent = select(
-  action => (action && action.type === 'restateShapesEvent' ? action.payload : null)
+const restateShapesEvent = select(action =>
+  action && action.type === 'restateShapesEvent' ? action.payload : null
)(primaryUpdate);

// directSelect is an API entry point (via the `shapeSelect` action) that lets the client directly specify what thing
// is selected, as otherwise selection is driven by gestures and knowledge of element positions
-const directSelect = select(
-  action => (action && action.type === 'shapeSelect' ? action.payload : null)
+const directSelect = select(action =>
+  action && action.type === 'shapeSelect' ? action.payload : null
)(primaryUpdate);

const selectedShapeObjects = select(scene => scene.selectedShapeObjects || [])(scene);

@@ -480,8 +479,8 @@ const resizeAnnotationManipulation = (transformGestures, directShapes, allShapes

const symmetricManipulation = optionHeld; // as in comparable software applications, todo: make configurable

-const resizeManipulator = select(
-  toggle => (toggle ? centeredResizeManipulation : asymmetricResizeManipulation)
+const resizeManipulator = select(toggle =>
+  toggle ? centeredResizeManipulation : asymmetricResizeManipulation
)(symmetricManipulation);

const transformIntents = select(
@@ -1132,22 +1131,24 @@ const projectAABB = ([[xMin, yMin], [xMax, yMax]]) => {

const dissolveGroups = (preexistingAdHocGroups, shapes, selectedShapes) => {
  return {
    shapes: shapes
      .filter(shape => !isAdHocGroup(shape))
      .map(shape => {
        const preexistingAdHocGroupParent = preexistingAdHocGroups.find(
          groupShape => groupShape.id === shape.parent
        );
        // if linked, dissociate from ad hoc group parent
        return preexistingAdHocGroupParent
          ? {
              ...shape,
              parent: null,
              localTransformMatrix: matrix.multiply(
                preexistingAdHocGroupParent.localTransformMatrix, // reinstate the group offset onto the child
                shape.localTransformMatrix
              ),
            }
          : shape;
      }),
    selectedShapes,
  };
};

@@ -1229,8 +1230,8 @@ const getLeafs = (descendCondition, allShapes, shapes) =>
  removeDuplicates(
    s => s.id,
    flatten(
-      shapes.map(
-        shape => (descendCondition(shape) ? allShapes.filter(s => s.parent === shape.id) : shape)
+      shapes.map(shape =>
+        descendCondition(shape) ? allShapes.filter(s => s.parent === shape.id) : shape
      )
    )
  );
@@ -38,8 +38,8 @@ export type SubsetResolverWithFields<R, IncludedFields extends string> = R exten
    ArgsInArray
  >
  : R extends BasicResolver<infer Result, infer Args>
  ? BasicResolver<Pick<Result, Extract<keyof Result, IncludedFields>>, Args>
  : never;

export type SubsetResolverWithoutFields<R, ExcludedFields extends string> = R extends BasicResolver<
  Array<infer ResultInArray>,

@@ -50,8 +50,8 @@ export type SubsetResolverWithoutFields<R, ExcludedFields extends string> = R ex
    ArgsInArray
  >
  : R extends BasicResolver<infer Result, infer Args>
  ? BasicResolver<Pick<Result, Exclude<keyof Result, ExcludedFields>>, Args>
  : never;

export type InfraResolver<Result, Parent, Args, Context> = (
  parent: Parent,

@@ -231,8 +231,8 @@ const withPreviousSuggestionSelected = (
  props.suggestions.length === 0
    ? null
    : state.selectedIndex !== null
    ? (state.selectedIndex + props.suggestions.length - 1) % props.suggestions.length
    : Math.max(props.suggestions.length - 1, 0),
});

const withNextSuggestionSelected = (

@@ -244,8 +244,8 @@ const withNextSuggestionSelected = (
  props.suggestions.length === 0
    ? null
    : state.selectedIndex !== null
    ? (state.selectedIndex + 1) % props.suggestions.length
    : 0,
});

const withSuggestionAtIndexSelected = (suggestionIndex: number) => (

@@ -257,8 +257,8 @@ const withSuggestionAtIndexSelected = (suggestionIndex: number) => (
  props.suggestions.length === 0
    ? null
    : suggestionIndex >= 0 && suggestionIndex < props.suggestions.length
    ? suggestionIndex
    : 0,
});

const withSuggestionsVisible = (state: AutocompleteFieldState) => ({

@@ -64,5 +64,5 @@ const formatMessageSegment = (messageSegment: LogEntryMessageSegment): string =>
  messageSegment.__typename === 'InfraLogMessageFieldSegment'
    ? messageSegment.value
    : messageSegment.__typename === 'InfraLogMessageConstantSegment'
    ? messageSegment.constant
    : 'failed to format message';
@@ -84,17 +84,16 @@ export class WithKueryAutocompletion extends React.Component<
      selectionEnd: cursorPosition,
    });

    this.setState(state =>
      state.currentRequest &&
      state.currentRequest.expression !== expression &&
      state.currentRequest.cursorPosition !== cursorPosition
        ? state // ignore this result, since a newer request is in flight
        : {
            ...state,
            currentRequest: null,
            suggestions: maxSuggestions ? suggestions.slice(0, maxSuggestions) : suggestions,
          }
    );
  };
}

@@ -16,8 +16,11 @@ export const selectIsAutoReloading = (state: WaffleTimeState) =>
export const selectTimeUpdatePolicyInterval = (state: WaffleTimeState) =>
  state.updatePolicy.policy === 'interval' ? state.updatePolicy.interval : null;

-export const selectCurrentTimeRange = createSelector(selectCurrentTime, currentTime => ({
-  from: currentTime - 1000 * 60 * 5,
-  interval: '1m',
-  to: currentTime,
-}));
+export const selectCurrentTimeRange = createSelector(
+  selectCurrentTime,
+  currentTime => ({
+    from: currentTime - 1000 * 60 * 5,
+    interval: '1m',
+    to: currentTime,
+  })
+);
@@ -160,17 +160,32 @@ export const createGraphqlQueryEpic = <Data, Variables, Error = ApolloError>(
export const createGraphqlStateSelectors = <State>(
  selectState: (parentState: any) => GraphqlState<State> = parentState => parentState
) => {
-  const selectData = createSelector(selectState, state => state.data);
+  const selectData = createSelector(
+    selectState,
+    state => state.data
+  );

-  const selectLoadingProgress = createSelector(selectState, state => state.current);
+  const selectLoadingProgress = createSelector(
+    selectState,
+    state => state.current
+  );
  const selectLoadingProgressOperationInfo = createSelector(
    selectLoadingProgress,
    progress => (isRunningLoadingProgress(progress) ? progress.parameters : null)
  );
-  const selectIsLoading = createSelector(selectLoadingProgress, isRunningLoadingProgress);
-  const selectIsIdle = createSelector(selectLoadingProgress, isIdleLoadingProgress);
+  const selectIsLoading = createSelector(
+    selectLoadingProgress,
+    isRunningLoadingProgress
+  );
+  const selectIsIdle = createSelector(
+    selectLoadingProgress,
+    isIdleLoadingProgress
+  );

-  const selectLoadingResult = createSelector(selectState, state => state.last);
+  const selectLoadingResult = createSelector(
+    selectState,
+    state => state.last
+  );
  const selectLoadingResultOperationInfo = createSelector(
    selectLoadingResult,
    result => (!isUninitializedLoadingResult(result) ? result.parameters : null)

@@ -179,9 +194,18 @@ export const createGraphqlStateSelectors = <State>(
    selectLoadingResult,
    result => (!isUninitializedLoadingResult(result) ? result.time : null)
  );
-  const selectIsUninitialized = createSelector(selectLoadingResult, isUninitializedLoadingResult);
-  const selectIsSuccess = createSelector(selectLoadingResult, isSuccessLoadingResult);
-  const selectIsFailure = createSelector(selectLoadingResult, isFailureLoadingResult);
+  const selectIsUninitialized = createSelector(
+    selectLoadingResult,
+    isUninitializedLoadingResult
+  );
+  const selectIsSuccess = createSelector(
+    selectLoadingResult,
+    isSuccessLoadingResult
+  );
+  const selectIsFailure = createSelector(
+    selectLoadingResult,
+    isFailureLoadingResult
+  );

  const selectLoadingState = createSelector(
    selectLoadingProgress,
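The selectors above come from reselect's `createSelector`; the reformatting just places each input selector on its own line. A minimal sketch of the same composition against a hypothetical state shape (the names below are invented for illustration):

import { createSelector } from 'reselect';

interface RemoteState {
  data: { entries: string[] } | undefined;
  current: { isRunning: boolean };
}

const selectRemote = (parentState: { remote: RemoteState }) => parentState.remote;

const selectData = createSelector(
  selectRemote,
  state => state.data
);

const selectIsLoading = createSelector(
  selectRemote,
  state => state.current.isRunning
);

// memoized: calling again with the same state object reuses the previous result
const state = { remote: { data: { entries: ['a'] }, current: { isRunning: false } } };
console.log(selectData(state), selectIsLoading(state));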
@@ -58,7 +58,9 @@ interface ActionCreators {

type PlainActionCreator<WrappedActionCreator> = WrappedActionCreator extends () => infer R
  ? () => R
-  : WrappedActionCreator extends (payload: infer A) => infer R ? (payload: A) => R : never;
+  : WrappedActionCreator extends (payload: infer A) => infer R
+  ? (payload: A) => R
+  : never;

export const bindPlainActionCreators = <WrappedActionCreators extends ActionCreators>(
  actionCreators: WrappedActionCreators
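`PlainActionCreator` above is a conditional type; the newer prettier simply puts each branch of a nested conditional type on its own line. A compact sketch of the same `infer` pattern with invented names:

type WrappedCreator = (payload: { id: string }) => { type: 'set'; payload: { id: string } };

type PlainCreator<W> = W extends () => infer R
  ? () => R
  : W extends (payload: infer A) => infer R
  ? (payload: A) => R
  : never;

// resolves to (payload: { id: string }) => { type: 'set'; payload: { id: string } }
type Resolved = PlainCreator<WrappedCreator>;

const creator: Resolved = payload => ({ type: 'set', payload });
console.log(creator({ id: '42' }));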
@@ -12,7 +12,7 @@ function getDocumentPayloadFn(server) {
  function encodeContent(content, exportType) {
    switch (exportType.jobContentEncoding) {
      case 'base64':
-        return new Buffer(content, 'base64');
+        return Buffer.from(content, 'base64');
      default:
        return content;
    }

@@ -200,7 +200,7 @@ describe(`when job is completed`, () => {

  test(`base64 encodes output content for configured jobTypes`, async () => {
    const { payload } = await getCompletedResponse({ jobType: 'base64EncodedJobType', outputContent: 'test' });
-    expect(payload).toBe(new Buffer('test', 'base64').toString());
+    expect(payload).toBe(Buffer.from('test', 'base64').toString());
  });

  test(`specifies text/csv; charset=utf-8 contentType header from the job output`, async () => {

@@ -181,7 +181,7 @@ export class BasicCredentials {
      throw new Error('Password should be a valid non-empty string.');
    }

-    const basicCredentials = new Buffer(`${username}:${password}`).toString('base64');
+    const basicCredentials = Buffer.from(`${username}:${password}`).toString('base64');
    request.headers.authorization = `Basic ${basicCredentials}`;
    return request;
  }

@@ -72,17 +72,17 @@ export default function ({ getService }) {

    await supertest.get('/api/security/v1/me')
      .set('kbn-xsrf', 'xxx')
-      .set('Authorization', `Basic ${new Buffer(`${wrongUsername}:${wrongPassword}`).toString('base64')}`)
+      .set('Authorization', `Basic ${Buffer.from(`${wrongUsername}:${wrongPassword}`).toString('base64')}`)
      .expect(401);

    await supertest.get('/api/security/v1/me')
      .set('kbn-xsrf', 'xxx')
-      .set('Authorization', `Basic ${new Buffer(`${validUsername}:${wrongPassword}`).toString('base64')}`)
+      .set('Authorization', `Basic ${Buffer.from(`${validUsername}:${wrongPassword}`).toString('base64')}`)
      .expect(401);

    await supertest.get('/api/security/v1/me')
      .set('kbn-xsrf', 'xxx')
-      .set('Authorization', `Basic ${new Buffer(`${wrongUsername}:${validPassword}`).toString('base64')}`)
+      .set('Authorization', `Basic ${Buffer.from(`${wrongUsername}:${validPassword}`).toString('base64')}`)
      .expect(401);
  });

@@ -90,7 +90,7 @@ export default function ({ getService }) {
    const apiResponse = await supertest
      .get('/api/security/v1/me')
      .set('kbn-xsrf', 'xxx')
-      .set('Authorization', `Basic ${new Buffer(`${validUsername}:${validPassword}`).toString('base64')}`)
+      .set('Authorization', `Basic ${Buffer.from(`${validUsername}:${validPassword}`).toString('base64')}`)
      .expect(200);

    expect(apiResponse.body).to.only.have.keys([
@@ -29,7 +29,7 @@ const signatureAlgorithm = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha256';

export async function getSAMLRequestId(urlWithSAMLRequestId) {
  const inflatedSAMLRequest = await inflateRawAsync(
-    new Buffer(url.parse(urlWithSAMLRequestId, true /* parseQueryString */).query.SAMLRequest, 'base64')
+    Buffer.from(url.parse(urlWithSAMLRequestId, true /* parseQueryString */).query.SAMLRequest, 'base64')
  );

  const parsedSAMLRequest = await parseStringAsync(inflatedSAMLRequest.toString());

@@ -82,7 +82,7 @@ export async function getSAMLResponse({ destination, inResponseTo, sessionIndex
    { location: { reference: `//*[local-name(.)='Issuer']`, action: 'after' } }
  );

-  return new Buffer(`
+  return Buffer.from(`
    <samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" ID="_bdf1d51245ed0f71aa23"
      ${inResponseTo ? `InResponseTo="${inResponseTo}"` : ''} Version="2.0"
      IssueInstant="${issueInstant}"

@@ -111,7 +111,7 @@ export async function getLogoutRequest({ destination, sessionIndex }) {

  // HTTP-Redirect with deflate encoding:
  // http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf - section 3.4.4.1
-  const deflatedLogoutRequest = await deflateRawAsync(new Buffer(logoutRequestTemplateXML));
+  const deflatedLogoutRequest = await deflateRawAsync(Buffer.from(logoutRequestTemplateXML));

  const queryStringParameters = {
    SAMLRequest: deflatedLogoutRequest.toString('base64'),
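The SAML test helpers above pair Node's raw deflate with base64-encoded Buffers for the HTTP-Redirect binding. A minimal round-trip sketch with a placeholder XML string (not the repo's real templates):

import { promisify } from 'util';
import { deflateRaw, inflateRaw } from 'zlib';

const deflateRawAsync = promisify(deflateRaw);
const inflateRawAsync = promisify(inflateRaw);

async function roundTrip() {
  const requestXML = '<samlp:LogoutRequest ID="_example" Version="2.0"/>'; // placeholder

  // outbound: deflate the XML and base64-encode it for the SAMLRequest query parameter
  const deflated = await deflateRawAsync(Buffer.from(requestXML));
  const samlRequestParam = deflated.toString('base64');

  // inbound: decode and inflate to recover the original XML
  const inflated = await inflateRawAsync(Buffer.from(samlRequestParam, 'base64'));
  console.log(inflated.toString() === requestXML); // true
}

roundTrip().catch(console.error);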