[6.x] Upgrade to NodeJS 10 (#25157) (#26909)

* chore(NA): merge and solve conflicts backporting to 6.x

* chore(NA): merge and solve conflicts backporting to 6.x
This commit is contained in:
Tiago Costa 2018-12-10 18:50:30 +00:00 committed by GitHub
parent cb8ad058f2
commit 1d3fcaac0a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
77 changed files with 31277 additions and 5735 deletions

View file

@ -1 +1 @@
8.14.0
10.14.1

2
.nvmrc
View file

@ -1 +1 @@
8.14.0
10.14.1

View file

@ -88,7 +88,7 @@
"url": "https://github.com/elastic/kibana.git"
},
"resolutions": {
"**/@types/node": "8.10.38",
"**/@types/node": "10.12.12",
"@types/react": "16.3.14"
},
"workspaces": {
@ -111,7 +111,7 @@
"@elastic/datemath": "5.0.0",
"@elastic/eui": "5.3.0",
"@elastic/filesaver": "1.1.2",
"@elastic/good": "8.1.1-kibana1",
"@elastic/good": "8.1.1-kibana2",
"@elastic/numeral": "2.3.2",
"@elastic/ui-ace": "0.2.3",
"@kbn/babel-code-parser": "1.0.0",
@ -153,7 +153,7 @@
"elasticsearch": "^15.2.0",
"elasticsearch-browser": "^15.2.0",
"encode-uri-query": "1.0.0",
"execa": "^0.10.0",
"execa": "^1.0.0",
"expiry-js": "0.1.7",
"file-loader": "2.0.0",
"font-awesome": "4.4.0",
@ -302,7 +302,7 @@
"@types/minimatch": "^2.0.29",
"@types/moment-timezone": "^0.5.8",
"@types/mustache": "^0.8.31",
"@types/node": "^8.10.38",
"@types/node": "^10.12.12",
"@types/opn": "^5.1.0",
"@types/podium": "^1.0.0",
"@types/prop-types": "^15.5.3",
@ -325,8 +325,8 @@
"angular-mocks": "1.4.7",
"archiver": "^3.0.0",
"babel-eslint": "^9.0.0",
"babel-jest": "^23.4.2",
"backport": "4.2.0",
"babel-jest": "^23.6.0",
"backport": "4.4.1",
"chai": "3.5.0",
"chance": "1.0.10",
"cheerio": "0.22.0",
@ -342,7 +342,7 @@
"eslint-config-prettier": "^3.1.0",
"eslint-plugin-babel": "^5.2.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jest": "^21.22.1",
"eslint-plugin-jest": "^21.26.2",
"eslint-plugin-jsx-a11y": "^6.1.2",
"eslint-plugin-mocha": "^5.2.0",
"eslint-plugin-no-unsanitized": "^3.0.2",
@ -362,13 +362,13 @@
"grunt-peg": "^2.0.1",
"grunt-run": "0.7.0",
"gulp-babel": "^7.0.1",
"gulp-sourcemaps": "1.7.3",
"gulp-sourcemaps": "2.6.4",
"has-ansi": "^3.0.0",
"image-diff": "1.6.0",
"intl-messageformat-parser": "^1.4.0",
"istanbul-instrumenter-loader": "3.0.1",
"jest": "^23.5.0",
"jest-cli": "^23.5.0",
"jest": "^23.6.0",
"jest-cli": "^23.6.0",
"jest-raw-loader": "^1.0.1",
"jimp": "0.2.28",
"json5": "^1.0.1",
@ -386,7 +386,6 @@
"load-grunt-config": "0.19.2",
"makelogs": "^4.3.0",
"mocha": "3.3.0",
"mock-fs": "^4.4.2",
"murmurhash3js": "3.0.1",
"mutation-observer": "^1.0.3",
"nock": "8.0.0",
@ -417,7 +416,7 @@
"zlib": "^1.0.5"
},
"engines": {
"node": "8.14.0",
"node": "10.14.1",
"yarn": "^1.10.1"
}
}

View file

@ -10,8 +10,8 @@
"url": "https://github.com/elastic/kibana/tree/master/packages/kbn-babel-code-parser"
},
"scripts": {
"build": "babel src --out-dir target --quiet",
"kbn:bootstrap": "yarn build",
"build": "babel src --out-dir target",
"kbn:bootstrap": "yarn build --quiet",
"kbn:watch": "yarn build --watch"
},
"devDependencies": {

View file

@ -13,7 +13,7 @@
"typescript": "^3.0.3"
},
"peerDependencies": {
"joi": "10.4.1",
"joi": "^13.5.2",
"moment": "^2.20.1",
"type-detect": "^4.0.8"
}

View file

@ -12,7 +12,7 @@
},
"dependencies": {
"chalk": "^2.4.1",
"execa": "^0.10.0",
"execa": "^1.0.0",
"moment": "^2.20.1",
"rxjs": "^6.2.1",
"tree-kill": "^1.2.0",

View file

@ -9,16 +9,14 @@
"chalk": "^2.4.1",
"dedent": "^0.7.0",
"del": "^3.0.0",
"execa": "^0.10.0",
"execa": "^1.0.0",
"getopts": "^2.0.6",
"glob": "^7.1.2",
"mkdirp": "^0.5.1",
"mock-fs": "^4.5.0",
"node-fetch": "^2.0.0",
"simple-git": "^1.91.0",
"tar-fs": "^1.16.0",
"tar-fs": "^1.16.3",
"tree-kill": "^1.1.0",
"yauzl": "^2.10.0",
"zlib": "^1.0.5"
"yauzl": "^2.10.0"
}
}

View file

@ -18,30 +18,40 @@
*/
const { decompress } = require('./decompress');
const mockFs = require('mock-fs');
const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');
const del = require('del');
const os = require('os');
const fixturesFolder = path.resolve(__dirname, '__fixtures__');
const randomDir = Math.random().toString(36);
const tmpFolder = path.resolve(os.tmpdir(), randomDir);
const dataFolder = path.resolve(tmpFolder, 'data');
const esFolder = path.resolve(tmpFolder, '.es');
const zipSnapshot = path.resolve(dataFolder, 'snapshot.zip');
const tarGzSnapshot = path.resolve(dataFolder, 'snapshot.tar.gz');
beforeEach(() => {
mockFs({
'/data': {
'snapshot.zip': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.zip')),
'snapshot.tar.gz': fs.readFileSync(path.resolve(__dirname, '__fixtures__/snapshot.tar.gz')),
},
'/.es': {},
});
mkdirp.sync(tmpFolder);
mkdirp.sync(dataFolder);
mkdirp.sync(esFolder);
fs.copyFileSync(path.resolve(fixturesFolder, 'snapshot.zip'), zipSnapshot);
fs.copyFileSync(path.resolve(fixturesFolder, 'snapshot.tar.gz'), tarGzSnapshot);
});
afterEach(() => {
mockFs.restore();
del.sync(tmpFolder, { force: true });
});
test('zip strips root directory', async () => {
await decompress('/data/snapshot.zip', '/.es/foo');
expect(fs.readdirSync('/.es/foo/bin')).toContain('elasticsearch.bat');
await decompress(zipSnapshot, path.resolve(esFolder, 'foo'));
expect(fs.readdirSync(path.resolve(esFolder, 'foo/bin'))).toContain('elasticsearch.bat');
});
test('tar strips root directory', async () => {
await decompress('/data/snapshot.tar.gz', '/.es/foo');
expect(fs.readdirSync('/.es/foo/bin')).toContain('elasticsearch');
await decompress(tarGzSnapshot, path.resolve(esFolder, 'foo'));
expect(fs.readdirSync(path.resolve(esFolder, 'foo/bin'))).toContain('elasticsearch');
});

View file

@ -17,21 +17,21 @@
* under the License.
*/
jest.mock('fs', () => ({
readFileSync: jest.fn(),
existsSync: jest.fn().mockImplementation(() => true),
writeFileSync: jest.fn(),
}));
const { extractConfigFiles } = require('./extract_config_files');
const mockFs = require('mock-fs');
const fs = require('fs');
beforeEach(() => {
mockFs({
'/data': {
'foo.yml': '',
},
'/es': {},
});
afterEach(() => {
jest.clearAllMocks();
});
afterEach(() => {
mockFs.restore();
afterAll(() => {
jest.restoreAllMocks();
});
test('returns config with local paths', () => {
@ -43,8 +43,8 @@ test('returns config with local paths', () => {
test('copies file', () => {
extractConfigFiles(['path=/data/foo.yml'], '/es');
expect(fs.existsSync('/es/config/foo.yml')).toBe(true);
expect(fs.existsSync('/data/foo.yml')).toBe(true);
expect(fs.readFileSync.mock.calls[0][0]).toEqual('/data/foo.yml');
expect(fs.writeFileSync.mock.calls[0][0]).toEqual('/es/config/foo.yml');
});
test('ignores non-paths', () => {

View file

@ -17,33 +17,38 @@
* under the License.
*/
const mockFs = require('mock-fs');
const { findMostRecentlyChanged } = require('./find_most_recently_changed');
beforeEach(() => {
mockFs({
'/data': {
'oldest.yml': mockFs.file({
content: 'foo',
jest.mock('fs', () => ({
statSync: jest.fn().mockImplementation(path => {
if (path.includes('oldest')) {
return {
ctime: new Date(2018, 2, 1),
}),
'newest.yml': mockFs.file({
content: 'bar',
ctime: new Date(2018, 2, 3),
}),
'middle.yml': mockFs.file({
content: 'baz',
ctime: new Date(2018, 2, 2),
}),
},
});
});
};
}
afterEach(() => {
mockFs.restore();
});
if (path.includes('newest')) {
return {
ctime: new Date(2018, 2, 3),
};
}
if (path.includes('middle')) {
return {
ctime: new Date(2018, 2, 2),
};
}
}),
readdirSync: jest.fn().mockImplementation(() => {
return ['oldest.yml', 'newest.yml', 'middle.yml'];
}),
}));
const { findMostRecentlyChanged } = require('./find_most_recently_changed');
test('returns newest file', () => {
const file = findMostRecentlyChanged('/data/*.yml');
expect(file).toEqual('/data/newest.yml');
});
afterAll(() => {
jest.restoreAllMocks();
});

View file

@ -2,4 +2,4 @@
exports[`I18n loader registerTranslationFile should throw error if path to translation file is not an absolute 1`] = `"Paths to translation files must be absolute. Got relative path: \\"./en.json\\""`;
exports[`I18n loader registerTranslationFile should throw error if path to translation file is not specified 1`] = `"Path must be a string. Received undefined"`;
exports[`I18n loader registerTranslationFile should throw error if path to translation file is not specified 1`] = `"The \\"path\\" argument must be of type string. Received type undefined"`;

View file

@ -6,7 +6,7 @@
"dependencies": {
"chalk": "^2.4.1",
"dedent": "^0.7.0",
"execa": "^0.9.0",
"execa": "^1.0.0",
"getopts": "^2.0.0",
"lodash.camelcase": "^4.3.0",
"lodash.kebabcase": "^4.1.1",

View file

@ -12,7 +12,7 @@
"dependencies": {
"commander": "^2.9.0",
"del": "^2.2.2",
"execa": "^0.10.0",
"execa": "^1.0.0",
"gulp-rename": "1.2.2",
"gulp-zip": "^4.1.0",
"inquirer": "^1.2.2",

View file

@ -18,7 +18,7 @@
*/
const { resolve } = require('path');
const { readdirSync, existsSync, unlink } = require('fs');
const { readdirSync, existsSync, unlinkSync } = require('fs');
const del = require('del');
const createBuild = require('../create_build');
@ -96,7 +96,7 @@ describe('creating the build', () => {
afterEach(() => {
PLUGIN.skipInstallDependencies = false;
PLUGIN.styleSheetToCompile = undefined;
unlink(cssPath);
unlinkSync(cssPath);
});
it('produces CSS', async () => {

File diff suppressed because one or more lines are too long

View file

@ -14,7 +14,7 @@
"@types/cpy": "^5.1.0",
"@types/dedent": "^0.7.0",
"@types/del": "^3.0.0",
"@types/execa": "^0.8.1",
"@types/execa": "^0.9.0",
"@types/getopts": "^2.0.0",
"@types/glob": "^5.0.35",
"@types/globby": "^6.1.0",
@ -25,7 +25,7 @@
"@types/log-symbols": "^2.0.0",
"@types/mkdirp": "^0.5.2",
"@types/ncp": "^2.0.1",
"@types/node": "^8.10.38",
"@types/node": "^10.12.12",
"@types/ora": "^1.3.2",
"@types/read-pkg": "^3.0.0",
"@types/strip-ansi": "^3.0.0",
@ -39,10 +39,10 @@
"babel-preset-stage-3": "^6.24.1",
"chalk": "^2.4.1",
"cmd-shim": "^2.0.2",
"cpy": "^6.0.0",
"cpy": "^7.0.1",
"dedent": "^0.7.0",
"del": "^3.0.0",
"execa": "^0.9.0",
"execa": "^1.0.0",
"getopts": "^2.0.0",
"glob": "^7.1.2",
"globby": "^8.0.1",
@ -63,6 +63,7 @@
"tempy": "^0.2.1",
"ts-loader": "^5.2.2",
"typescript": "^3.0.3",
"unlazy-loader": "^0.1.3",
"webpack": "^4.23.1",
"webpack-cli": "^3.1.2",
"wrap-ansi": "^3.0.1",

View file

@ -72,6 +72,21 @@ module.exports = {
},
},
},
// In order to make it work with Node 10 we needed to upgrade
// the package cpy to a version >= 7.0.0. In this version cpy is
// using the new globby, which relies on fast-glob, which relies
// on the new micromatch. The micromatch dependencies (use and class-utils) have a require
// that uses lazy-cache, which cannot be correctly extracted by webpack.
// According to the documentation we should use the unlazy-loader to solve
// this situation: https://github.com/jonschlinkert/lazy-cache#heads-up
// We can also find some related issues where unlazy-loader was used
// to solve this: https://github.com/micromatch/micromatch/issues/55
{
test: /node_modules\/(use|class-utils)\/utils\.js$/,
use: {
loader: 'unlazy-loader',
},
},
],
},

View file

@ -70,6 +70,6 @@
"webpack": "^4.23.1",
"webpack-dev-server": "^3.1.10",
"yeoman-generator": "1.1.1",
"yo": "2.0.3"
"yo": "2.0.5"
}
}

View file

@ -50,7 +50,7 @@ class MockClusterFork extends EventEmitter {
});
jest.spyOn(this, 'on');
jest.spyOn(this, 'removeListener');
jest.spyOn(this, 'off');
jest.spyOn(this, 'emit');
(async () => {

View file

@ -41,8 +41,8 @@ describe('CLI cluster manager', () => {
kill: jest.fn(),
},
isDead: jest.fn().mockReturnValue(false),
removeListener: jest.fn(),
addListener: jest.fn(),
off: jest.fn(),
on: jest.fn(),
send: jest.fn()
};
});
@ -105,8 +105,8 @@ describe('CLI cluster manager', () => {
clusterManager = ClusterManager.create({}, {}, basePathProxyMock);
jest.spyOn(clusterManager.server, 'addListener');
jest.spyOn(clusterManager.server, 'removeListener');
jest.spyOn(clusterManager.server, 'on');
jest.spyOn(clusterManager.server, 'off');
[[{ blockUntil, shouldRedirectFromOldBasePath }]] = basePathProxyMock.start.mock.calls;
});
@ -128,58 +128,58 @@ describe('CLI cluster manager', () => {
clusterManager.server.crashed = true;
await expect(blockUntil()).resolves.not.toBeDefined();
expect(clusterManager.server.addListener).not.toHaveBeenCalled();
expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
expect(clusterManager.server.on).not.toHaveBeenCalled();
expect(clusterManager.server.off).not.toHaveBeenCalled();
});
test('`blockUntil()` resolves immediately if worker is already listening.', async () => {
clusterManager.server.listening = true;
await expect(blockUntil()).resolves.not.toBeDefined();
expect(clusterManager.server.addListener).not.toHaveBeenCalled();
expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
expect(clusterManager.server.on).not.toHaveBeenCalled();
expect(clusterManager.server.off).not.toHaveBeenCalled();
});
test('`blockUntil()` resolves when worker crashes.', async () => {
const blockUntilPromise = blockUntil();
expect(clusterManager.server.addListener).toHaveBeenCalledTimes(2);
expect(clusterManager.server.addListener).toHaveBeenCalledWith(
expect(clusterManager.server.on).toHaveBeenCalledTimes(2);
expect(clusterManager.server.on).toHaveBeenCalledWith(
'crashed',
expect.any(Function)
);
const [, [eventName, onCrashed]] = clusterManager.server.addListener.mock.calls;
const [, [eventName, onCrashed]] = clusterManager.server.on.mock.calls;
// Check event name to make sure we call the right callback,
// in Jest 23 we could use `toHaveBeenNthCalledWith` instead.
expect(eventName).toBe('crashed');
expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
expect(clusterManager.server.off).not.toHaveBeenCalled();
onCrashed();
await expect(blockUntilPromise).resolves.not.toBeDefined();
expect(clusterManager.server.removeListener).toHaveBeenCalledTimes(2);
expect(clusterManager.server.off).toHaveBeenCalledTimes(2);
});
test('`blockUntil()` resolves when worker starts listening.', async () => {
const blockUntilPromise = blockUntil();
expect(clusterManager.server.addListener).toHaveBeenCalledTimes(2);
expect(clusterManager.server.addListener).toHaveBeenCalledWith(
expect(clusterManager.server.on).toHaveBeenCalledTimes(2);
expect(clusterManager.server.on).toHaveBeenCalledWith(
'listening',
expect.any(Function)
);
const [[eventName, onListening]] = clusterManager.server.addListener.mock.calls;
const [[eventName, onListening]] = clusterManager.server.on.mock.calls;
// Check event name to make sure we call the right callback,
// in Jest 23 we could use `toHaveBeenNthCalledWith` instead.
expect(eventName).toBe('listening');
expect(clusterManager.server.removeListener).not.toHaveBeenCalled();
expect(clusterManager.server.off).not.toHaveBeenCalled();
onListening();
await expect(blockUntilPromise).resolves.not.toBeDefined();
expect(clusterManager.server.removeListener).toHaveBeenCalledTimes(2);
expect(clusterManager.server.off).toHaveBeenCalledTimes(2);
});
});
});

View file

@ -35,7 +35,8 @@ function assertListenerRemoved(emitter, event) {
const [, onEventListener] = emitter.on.mock.calls.find(([eventName]) => {
return eventName === event;
});
expect(emitter.removeListener).toHaveBeenCalledWith(event, onEventListener);
expect(emitter.off).toHaveBeenCalledWith(event, onEventListener);
}
function setup(opts = {}) {
@ -98,7 +99,7 @@ describe('CLI cluster manager', () => {
assertListenerAdded(fork, 'message');
assertListenerAdded(fork, 'online');
assertListenerAdded(fork, 'disconnect');
worker.shutdown();
await worker.shutdown();
expect(fork.process.kill).toHaveBeenCalledTimes(1);
assertListenerRemoved(fork, 'message');
assertListenerRemoved(fork, 'online');

View file

@ -17,8 +17,25 @@
* under the License.
*/
const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
jest.mock('fs', () => ({
readFileSync: jest.fn().mockImplementation((path) => {
if (!path.includes('nonexistent')) {
return JSON.stringify(mockKeystoreData);
}
throw { code: 'ENOENT' };
}),
existsSync: jest.fn().mockImplementation((path) => {
return !path.includes('nonexistent');
}),
writeFileSync: jest.fn()
}));
import sinon from 'sinon';
import mockFs from 'mock-fs';
import { PassThrough } from 'stream';
import { Keystore } from '../server/keystore';
@ -30,17 +47,7 @@ describe('Kibana keystore', () => {
describe('add', () => {
const sandbox = sinon.createSandbox();
const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
beforeEach(() => {
mockFs({
'/data': {
'test.keystore': JSON.stringify(keystoreData),
}
});
sandbox.stub(prompt, 'confirm');
sandbox.stub(prompt, 'question');
@ -49,7 +56,6 @@ describe('Kibana keystore', () => {
});
afterEach(() => {
mockFs.restore();
sandbox.restore();
});
@ -149,4 +155,8 @@ describe('Kibana keystore', () => {
expect(keystore.data.foo).toEqual('kibana');
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});

View file

@ -17,8 +17,25 @@
* under the License.
*/
const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
jest.mock('fs', () => ({
readFileSync: jest.fn().mockImplementation((path) => {
if (!path.includes('foo')) {
return JSON.stringify(mockKeystoreData);
}
throw { code: 'ENOENT' };
}),
existsSync: jest.fn().mockImplementation((path) => {
return !path.includes('foo');
}),
writeFileSync: jest.fn()
}));
import sinon from 'sinon';
import mockFs from 'mock-fs';
import { Keystore } from '../server/keystore';
import { create } from './create';
@ -29,23 +46,12 @@ describe('Kibana keystore', () => {
describe('create', () => {
const sandbox = sinon.createSandbox();
const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
beforeEach(() => {
mockFs({
'/data': {
'test.keystore': JSON.stringify(keystoreData),
}
});
sandbox.stub(Logger.prototype, 'log');
sandbox.stub(Logger.prototype, 'error');
});
afterEach(() => {
mockFs.restore();
sandbox.restore();
});
@ -94,4 +100,8 @@ describe('Kibana keystore', () => {
sinon.assert.notCalled(keystore.save);
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});

View file

@ -17,9 +17,24 @@
* under the License.
*/
import sinon from 'sinon';
import mockFs from 'mock-fs';
const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
jest.mock('fs', () => ({
readFileSync: jest.fn().mockImplementation((path) => {
if (!path.includes('nonexistent')) {
return JSON.stringify(mockKeystoreData);
}
throw { code: 'ENOENT' };
}),
existsSync: jest.fn().mockImplementation((path) => {
return !path.includes('nonexistent');
})
}));
import sinon from 'sinon';
import { Keystore } from '../server/keystore';
import { list } from './list';
import Logger from '../cli_plugin/lib/logger';
@ -28,23 +43,12 @@ describe('Kibana keystore', () => {
describe('list', () => {
const sandbox = sinon.createSandbox();
const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
beforeEach(() => {
mockFs({
'/data': {
'test.keystore': JSON.stringify(keystoreData),
}
});
sandbox.stub(Logger.prototype, 'log');
sandbox.stub(Logger.prototype, 'error');
});
afterEach(() => {
mockFs.restore();
sandbox.restore();
});
@ -64,4 +68,8 @@ describe('Kibana keystore', () => {
sinon.assert.calledWith(Logger.prototype.error, 'ERROR: Kibana keystore not found. Use \'create\' command to create one.');
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});

View file

@ -17,8 +17,17 @@
* under the License.
*/
const mockKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
jest.mock('fs', () => ({
readFileSync: jest.fn().mockImplementation(() => JSON.stringify(mockKeystoreData)),
existsSync: jest.fn().mockImplementation(() => true),
writeFileSync: jest.fn()
}));
import sinon from 'sinon';
import mockFs from 'mock-fs';
import { Keystore } from '../server/keystore';
import { remove } from './remove';
@ -27,20 +36,7 @@ describe('Kibana keystore', () => {
describe('remove', () => {
const sandbox = sinon.createSandbox();
const keystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDyfGfJSy4mH'
+ 'BBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqhI4lzJ9M'
+ 'Ry21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
beforeEach(() => {
mockFs({
'/data': {
'test.keystore': JSON.stringify(keystoreData),
}
});
});
afterEach(() => {
mockFs.restore();
sandbox.restore();
});
@ -61,4 +57,8 @@ describe('Kibana keystore', () => {
sinon.assert.calledOnce(keystore.save);
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});

View file

@ -17,12 +17,18 @@
* under the License.
*/
jest.mock('fs', () => ({
statSync: jest.fn().mockImplementation(() => require('fs').statSync),
unlinkSync: jest.fn().mockImplementation(() => require('fs').unlinkSync),
mkdirSync: jest.fn().mockImplementation(() => require('fs').mkdirSync),
}));
import sinon from 'sinon';
import mockFs from 'mock-fs';
import Logger from '../lib/logger';
import { join } from 'path';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import fs from 'fs';
import { existingInstall, assertVersion } from './kibana';
describe('kibana cli', function () {
@ -119,20 +125,24 @@ describe('kibana cli', function () {
});
it('should throw an error if the plugin already exists.', function () {
mockFs({ [`${pluginDir}/foo`]: {} });
fs.statSync = jest.fn().mockImplementationOnce(() => true);
existingInstall(settings, logger);
expect(logger.error.firstCall.args[0]).toMatch(/already exists/);
expect(process.exit.called).toBe(true);
mockFs.restore();
});
it('should not throw an error if the plugin does not exist.', function () {
fs.statSync = jest.fn().mockImplementationOnce(() => {
throw { code: 'ENOENT' };
});
existingInstall(settings, logger);
expect(logger.error.called).toBe(false);
});
});
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});

View file

@ -24,7 +24,9 @@ interface RecursiveReadonlyArray<T> extends Array<RecursiveReadonly<T>> {}
type RecursiveReadonly<T> = T extends any[]
? RecursiveReadonlyArray<T[number]>
: T extends object ? Readonly<{ [K in keyof T]: RecursiveReadonly<T[K]> }> : T;
: T extends object
? Readonly<{ [K in keyof T]: RecursiveReadonly<T[K]> }>
: T;
export function deepFreeze<T extends Freezable>(object: T) {
// for any properties that reference an object, makes sure that object is

View file

@ -79,8 +79,8 @@ export class ConfigService {
ConfigClass: ConfigWithSchema<TSchema, TConfig>
) {
return this.getDistinctConfig(path).pipe(
map(
config => (config === undefined ? undefined : this.createConfig(path, config, ConfigClass))
map(config =>
config === undefined ? undefined : this.createConfig(path, config, ConfigClass)
)
);
}

View file

@ -85,7 +85,7 @@ export function createServer(options: ServerOptions) {
server.listener.keepAliveTimeout = 120e3;
server.listener.on('clientError', (err, socket) => {
if (socket.writable) {
socket.end(new Buffer('HTTP/1.1 400 Bad Request\r\n\r\n', 'ascii'));
socket.end(Buffer.from('HTTP/1.1 400 Bad Request\r\n\r\n', 'ascii'));
} else {
socket.destroy(err);
}

View file

@ -76,7 +76,7 @@ function processPluginSearchPaths$(pluginDirs: ReadonlyArray<string>, log: Logge
log.debug(`Scanning "${dir}" for plugin sub-directories...`);
return fsReadDir$(dir).pipe(
mergeMap(subDirs => subDirs.map(subDir => resolve(dir, subDir))),
mergeMap((subDirs: string[]) => subDirs.map(subDir => resolve(dir, subDir))),
mergeMap(path =>
fsStat$(path).pipe(
// Filter out non-directory entries from target directories, it's expected that

View file

@ -53,7 +53,7 @@ export async function scanDelete(options: Options) {
const getChildPath$ = (path: string) =>
getReadDir$(path).pipe(
mergeAll(),
map(name => join(path, name))
map((name: string) => join(path, name))
);
// get an observable of all paths to be deleted, by starting with the arg

View file

@ -10,7 +10,6 @@ exports[`Inspector Data View component should render empty state 1`] = `
},
"_eventsCount": 1,
"_maxListeners": undefined,
"domain": null,
"tabular": [Function],
"tabularOptions": Object {},
},
@ -101,7 +100,6 @@ exports[`Inspector Data View component should render loading state 1`] = `
},
"_eventsCount": 1,
"_maxListeners": undefined,
"domain": null,
},
}
}

View file

@ -44,9 +44,9 @@ export interface EmbeddableIsInitializedActionPayload {
export interface EmbeddableIsInitializedAction
extends KibanaAction<
EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED,
EmbeddableIsInitializedActionPayload
> {}
EmbeddableActionTypeKeys.EMBEDDABLE_IS_INITIALIZED,
EmbeddableIsInitializedActionPayload
> {}
export interface SetStagedFilterActionPayload {
panelId: PanelId;

View file

@ -243,10 +243,14 @@ describe('Field', () => {
...component.instance().props.setting,
value: userValue,
} });
await component.instance().cancelChangeImage();
component.update();
});
it('should be able to change value from existing value and save', async () => {
findTestSubject(component, `advancedSetting-changeImage-${setting.name}`).simulate('click');
const newUserValue = `${userValue}=`;
await component.instance().onImageChange([newUserValue]);
component.update();

View file

@ -45,10 +45,9 @@ export const VisTypeIcon = ({ visType }: VisTypeIconProps) => {
<img src={visType.image} aria-hidden="true" className="visNewVisDialog__typeImage" />
)}
{!visType.image && visType.legacyIcon && <span className={legacyIconClass} />}
{!visType.image &&
!visType.legacyIcon && (
<EuiIcon type={visType.icon || 'empty'} size="l" color="secondary" aria-hidden="true" />
)}
{!visType.image && !visType.legacyIcon && (
<EuiIcon type={visType.icon || 'empty'} size="l" color="secondary" aria-hidden="true" />
)}
</React.Fragment>
);
};

View file

@ -66,7 +66,7 @@ describe('plugin discovery/plugin spec', () => {
it('throws if spec.publicDir is truthy and not a string', () => {
function assert(publicDir) {
expect(() => new PluginSpec(fooPack, { publicDir })).to.throwError(error => {
expect(error.message).to.contain('Path must be a string');
expect(error.message).to.contain(`The "path" argument must be of type string. Received type ${typeof publicDir}`);
});
}

View file

@ -51,7 +51,7 @@ export class Keystore {
static decrypt(data, password = '') {
try {
const bData = new Buffer(data, 'base64');
const bData = Buffer.from(data, 'base64');
// convert data to buffers
const salt = bData.slice(0, 64);

View file

@ -17,7 +17,38 @@
* under the License.
*/
import mockFs from 'mock-fs';
const mockProtectedKeystoreData = '1:4BnWfydL8NwFIQJg+VQKe0jlIs7uXtty6+++yaWPbSB'
+ 'KIX3d9nPfQ20K1C6Xh26E/gMJAQ9jh7BxK0+W3lt/iDJBJn44wqX3pQ0189iGkNBL0ibDCc'
+ 'tz4mRy6+hqwiLxiukpH8ELAJsff8LNNHr+gNzX/2k/GvB7nQ==';
const mockUnprotectedKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDy'
+ 'fGfJSy4mHBBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqh'
+ 'I4lzJ9MRy21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
jest.mock('fs', () => ({
readFileSync: jest.fn().mockImplementation((path) => {
if (path.includes('data/unprotected')) {
return JSON.stringify(mockUnprotectedKeystoreData);
}
if (path.includes('data/protected')) {
return JSON.stringify(mockProtectedKeystoreData);
}
if (path.includes('data/test') || path.includes('data/nonexistent')) {
throw { code: 'ENOENT' };
}
throw { code: 'EACCES' };
}),
existsSync: jest.fn().mockImplementation((path) => {
return path.includes('data/unprotected')
|| path.includes('data/protected')
|| path.includes('inaccessible');
}),
writeFileSync: jest.fn()
}));
import sinon from 'sinon';
import { readFileSync } from 'fs';
@ -26,28 +57,7 @@ import { Keystore } from './keystore';
describe('Keystore', () => {
const sandbox = sinon.createSandbox();
const protectedKeystoreData = '1:4BnWfydL8NwFIQJg+VQKe0jlIs7uXtty6+++yaWPbSB'
+ 'KIX3d9nPfQ20K1C6Xh26E/gMJAQ9jh7BxK0+W3lt/iDJBJn44wqX3pQ0189iGkNBL0ibDCc'
+ 'tz4mRy6+hqwiLxiukpH8ELAJsff8LNNHr+gNzX/2k/GvB7nQ==';
const unprotectedKeystoreData = '1:IxR0geiUTMJp8ueHDkqeUJ0I9eEw4NJPXIJi22UDy'
+ 'fGfJSy4mHBBuGPkkAix/x/YFfIxo4tiKGdJ2oVTtU8LgKDkVoGdL+z7ylY4n3myatt6osqh'
+ 'I4lzJ9MRy21UcAJki2qFUTj4TYuvhta3LId+RM5UX/dJ2468hQ==';
beforeEach(() => {
mockFs({
'/data': {
'protected.keystore': protectedKeystoreData,
'unprotected.keystore': unprotectedKeystoreData,
},
'/inaccessible': mockFs.directory({
mode: '0000',
})
});
});
afterEach(() => {
mockFs.restore();
sandbox.restore();
});
@ -71,6 +81,7 @@ describe('Keystore', () => {
const keystore = new Keystore(path);
keystore.save();
readFileSync.mockReturnValueOnce(mockProtectedKeystoreData);
const fileBuffer = readFileSync(path);
const contents = fileBuffer.toString();
const [version, data] = contents.split(':');
@ -215,4 +226,8 @@ describe('Keystore', () => {
}
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});

View file

@ -27,6 +27,6 @@ export async function setupLogging(server, config) {
});
}
export function loggingMixin(kbnServer, server, config) {
return setupLogging(server, config);
export async function loggingMixin(kbnServer, server, config) {
return await setupLogging(server, config);
}

View file

@ -19,7 +19,7 @@
/*
* This file contains logic to build the index mappings for a migration.
*/
*/
import _ from 'lodash';
import { IndexMapping, MappingProperties } from './call_cluster';

View file

@ -36,7 +36,7 @@
*
* This way, we keep looping until there are no transforms left to apply, and we properly
* handle property addition / deletion / renaming.
*
*
* A caveat is that this means we must restrict what a migration can do to the doc's
* migrationVersion itself. We allow only these kinds of changes:
*
@ -54,11 +54,11 @@
* and those documents are simply given a stamp of approval by this transformer. This is why it is
* important for migration authors to *also* write a saved object validation that will prevent this
* assumption from inserting out-of-date documents into the index.
*
*
* If the client(s) send us documents with migrationVersion specified, we will migrate them as
appropriate. This means for data import scenarios, any documents being imported should be explicitly
* given an empty migrationVersion property {} if no such property exists.
*/
*/
import Boom from 'boom';
import _ from 'lodash';

View file

@ -32,7 +32,7 @@
* just migrate data into an existing index. Such an action could result in data loss. Instead,
* we should probably fail, and the Kibana sys-admin should clean things up before relaunching
* Kibana.
*/
*/
import _ from 'lodash';
import { Logger } from './migration_logger';

View file

@ -82,7 +82,7 @@ export function createRootWithSettings(...settings: Array<Record<string, any>>)
* @param path
*/
function getSupertest(root: Root, method: HttpMethod, path: string) {
const testUserCredentials = new Buffer(`${kibanaTestUser.username}:${kibanaTestUser.password}`);
const testUserCredentials = Buffer.from(`${kibanaTestUser.username}:${kibanaTestUser.password}`);
return supertest((root as any).server.http.service.httpServer.server.listener)
[method](path)
.set('Authorization', `Basic ${testUserCredentials.toString('base64')}`);

View file

@ -79,7 +79,7 @@ export class Sha256 {
const blockSize = 64;
const finalSize = 56;
this._block = new Buffer(blockSize);
this._block = Buffer.alloc(blockSize);
this._finalSize = finalSize;
this._blockSize = blockSize;
this._len = 0;
@ -102,7 +102,7 @@ export class Sha256 {
update(data, enc) {
if (typeof data === 'string') {
enc = enc || 'utf8';
data = new Buffer(data, enc);
data = Buffer.from(data, enc);
}
const l = this._len += data.length;
@ -195,7 +195,7 @@ export class Sha256 {
}
_hash() {
const H = new Buffer(32);
const H = Buffer.alloc(32);
H.writeInt32BE(this._a, 0);
H.writeInt32BE(this._b, 4);

View file

@ -22,7 +22,7 @@
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you mayexport
* the Apache License, Version 2.0 (the "License"); you mayexport
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*

View file

@ -19,8 +19,6 @@
export interface State {
[key: string]: any;
translateHashToRison: (
stateHashOrRison: string | string[] | undefined
) => string | string[] | undefined;
translateHashToRison: (stateHashOrRison: string | string[]) => string | string[];
getQueryParamName: () => string;
}

View file

@ -17,23 +17,15 @@
* under the License.
*/
import mockFs from 'mock-fs';
import { resolve } from 'path';
const mockTemplate = `
{{appId}}
{{regularBundlePath}}
{{i18n 'foo' '{"defaultMessage": "bar"}'}}
`;
const templatePath = resolve(__dirname, 'template.js.hbs');
beforeEach(() => {
mockFs({
[templatePath]: mockTemplate
});
});
afterEach(mockFs.restore);
jest.mock('fs', () => ({
readFile: jest.fn().mockImplementation((path, encoding, cb) => cb(null, mockTemplate))
}));
import { AppBootstrap } from './app_bootstrap';
@ -113,6 +105,10 @@ describe('ui_render/AppBootstrap', () => {
expect(hash2).not.toEqual(hash1);
});
});
afterAll(() => {
jest.restoreAllMocks();
});
});
function mockConfig() {

View file

@ -21,7 +21,7 @@
}
},
"resolutions": {
"**/@types/node": "8.10.38",
"**/@types/node": "10.12.12",
"@types/react": "16.3.14"
},
"devDependencies": {
@ -61,7 +61,7 @@
"ansicolors": "0.3.2",
"aws-sdk": "2.2.33",
"axios": "^0.18.0",
"babel-jest": "^23.4.2",
"babel-jest": "^23.6.0",
"babel-plugin-inline-react-svg": "^0.5.4",
"babel-plugin-mock-imports": "^0.0.5",
"babel-plugin-transform-react-remove-prop-types": "^0.4.14",
@ -78,23 +78,23 @@
"expect.js": "0.3.1",
"fancy-log": "^1.3.2",
"fetch-mock": "^5.13.1",
"graphql-code-generator": "^0.10.1",
"graphql-codegen-introspection-template": "^0.10.5",
"graphql-codegen-typescript-template": "^0.10.1",
"graphql-code-generator": "^0.13.0",
"graphql-codegen-introspection-template": "^0.13.0",
"graphql-codegen-typescript-template": "^0.13.0",
"gulp": "3.9.1",
"gulp-mocha": "2.2.0",
"gulp-multi-process": "^1.3.1",
"hapi": "^17.5.3",
"jest": "^23.5.0",
"jest-cli": "^23.5.0",
"jest-styled-components": "^6.1.1",
"jest": "^23.6.0",
"jest-cli": "^23.6.0",
"jest-styled-components": "^6.2.2",
"jsdom": "^12.0.0",
"mocha": "3.3.0",
"mustache": "^2.3.0",
"mutation-observer": "^1.0.3",
"node-fetch": "^2.1.2",
"pdf-image": "2.0.0",
"pdfjs-dist": "^2.0.489",
"pdfjs-dist": "^2.0.943",
"pixelmatch": "4.0.2",
"proxyquire": "1.7.11",
"react-test-renderer": "^16.2.0",
@ -127,13 +127,13 @@
"@kbn/interpreter": "1.0.0",
"@samverschueren/stream-to-observable": "^0.3.0",
"@scant/router": "^0.1.0",
"@slack/client": "^4.2.2",
"@slack/client": "^4.8.0",
"angular-resource": "1.4.9",
"angular-sanitize": "1.4.9",
"angular-ui-ace": "0.2.3",
"apollo-cache-inmemory": "^1.2.7",
"apollo-client": "^2.3.8",
"apollo-link": "^1.2.2",
"apollo-link": "^1.2.3",
"apollo-link-http": "^1.5.4",
"apollo-link-schema": "^1.1.0",
"apollo-link-state": "^0.4.1",
@ -229,7 +229,7 @@
"react-router-dom": "^4.3.1",
"react-select": "^1.2.1",
"react-shortcuts": "^2.0.0",
"react-sticky": "^6.0.1",
"react-sticky": "^6.0.3",
"react-syntax-highlighter": "^5.7.0",
"react-vis": "^1.8.1",
"recompose": "^0.26.0",
@ -262,6 +262,6 @@
"xregexp": "3.2.0"
},
"engines": {
"yarn": "^1.6.0"
"yarn": "^1.10.1"
}
}

View file

@ -90,9 +90,8 @@ describe('waterfall_helpers', () => {
}
];
const childrenByParentId = groupBy(
items,
hit => (hit.parentId ? hit.parentId : 'root')
const childrenByParentId = groupBy(items, hit =>
hit.parentId ? hit.parentId : 'root'
);
const entryTransactionItem = childrenByParentId.root[0];
expect(

View file

@ -287,9 +287,8 @@ export function getWaterfall(
}
});
const childrenByParentId = groupBy(
filteredHits,
hit => (hit.parentId ? hit.parentId : 'root')
const childrenByParentId = groupBy(filteredHits, hit =>
hit.parentId ? hit.parentId : 'root'
);
const entryTransactionItem = getTransactionItem(entryTransaction);
const itemsById: IWaterfallIndex = indexBy(filteredHits, 'id');

View file

@ -90,10 +90,9 @@ class TransactionOverview extends Component {
<div>
<HeaderContainer>
<h1>{serviceName}</h1>
{get(license.data, 'features.ml.isAvailable') &&
mlEnabled && (
<DynamicBaselineButton onOpenFlyout={this.onOpenFlyout} />
)}
{get(license.data, 'features.ml.isAvailable') && mlEnabled && (
<DynamicBaselineButton onOpenFlyout={this.onOpenFlyout} />
)}
</HeaderContainer>
<KueryBar />

View file

@ -44,18 +44,17 @@ export const LibraryFrames: React.SFC<Props> = ({
<div>
{visible &&
stackframes.map(
(stackframe, i) =>
hasSourceLines(stackframe) ? (
<CodePreview
key={i}
stackframe={stackframe}
isLibraryFrame
codeLanguage={codeLanguage}
/>
) : (
<FrameHeading key={i} stackframe={stackframe} isLibraryFrame />
)
stackframes.map((stackframe, i) =>
hasSourceLines(stackframe) ? (
<CodePreview
key={i}
stackframe={stackframe}
isLibraryFrame
codeLanguage={codeLanguage}
/>
) : (
<FrameHeading key={i} stackframe={stackframe} isLibraryFrame />
)
)}
</div>
</div>

View file

@ -74,10 +74,9 @@ class InteractivePlot extends PureComponent {
{hoverX && <MarkSeries data={markPoints} colorType="literal" />}
{hoverX && <VerticalGridLines tickValues={[hoverX]} />}
{isDrawing &&
selectionEnd !== null && (
<SelectionMarker start={x(selectionStart)} end={x(selectionEnd)} />
)}
{isDrawing && selectionEnd !== null && (
<SelectionMarker start={x(selectionStart)} end={x(selectionEnd)} />
)}
</SharedPlot>
);
}

View file

@ -225,8 +225,8 @@ const withPreviousSuggestionSelected = (
props.suggestions.length === 0
? null
: state.selectedIndex !== null
? (state.selectedIndex + props.suggestions.length - 1) % props.suggestions.length
: Math.max(props.suggestions.length - 1, 0),
? (state.selectedIndex + props.suggestions.length - 1) % props.suggestions.length
: Math.max(props.suggestions.length - 1, 0),
});
const withNextSuggestionSelected = (
@ -238,8 +238,8 @@ const withNextSuggestionSelected = (
props.suggestions.length === 0
? null
: state.selectedIndex !== null
? (state.selectedIndex + 1) % props.suggestions.length
: 0,
? (state.selectedIndex + 1) % props.suggestions.length
: 0,
});
const withSuggestionAtIndexSelected = (suggestionIndex: number) => (
@ -251,8 +251,8 @@ const withSuggestionAtIndexSelected = (suggestionIndex: number) => (
props.suggestions.length === 0
? null
: suggestionIndex >= 0 && suggestionIndex < props.suggestions.length
? suggestionIndex
: 0,
? suggestionIndex
: 0,
});
const withSuggestionsVisible = (state: AutocompleteFieldState) => ({

View file

@ -102,15 +102,14 @@ export class Table extends React.Component<TableProps, TableState> {
return (
<TableContainer>
{!hideTableControls &&
assignmentOptions && (
<ControlBar
itemType={type.itemType}
assignmentOptions={assignmentOptions}
kueryBarProps={kueryBarProps}
selectionCount={this.state.selection.length}
/>
)}
{!hideTableControls && assignmentOptions && (
<ControlBar
itemType={type.itemType}
assignmentOptions={assignmentOptions}
kueryBarProps={kueryBarProps}
selectionCount={this.state.selection.length}
/>
)}
<EuiSpacer size="m" />
<EuiInMemoryTable
columns={type.columnDefinitions}

View file

@ -73,17 +73,16 @@ export class WithKueryAutocompletion extends React.Component<
suggestions = [];
}
this.setState(
state =>
state.currentRequest &&
state.currentRequest.expression !== expression &&
state.currentRequest.cursorPosition !== cursorPosition
? state // ignore this result, since a newer request is in flight
: {
...state,
currentRequest: null,
suggestions: maxSuggestions ? suggestions.slice(0, maxSuggestions) : suggestions,
}
this.setState(state =>
state.currentRequest &&
state.currentRequest.expression !== expression &&
state.currentRequest.cursorPosition !== cursorPosition
? state // ignore this result, since a newer request is in flight
: {
...state,
currentRequest: null,
suggestions: maxSuggestions ? suggestions.slice(0, maxSuggestions) : suggestions,
}
);
};
}

View file

@ -49,23 +49,25 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
public async removeTagsFromBeats(removals: BeatsTagAssignment[]): Promise<BeatsRemovalReturn[]> {
const beatIds = removals.map(r => r.beatId);
const response = this.beatsDB.filter(beat => beatIds.includes(beat.id)).map(beat => {
const tagData = removals.find(r => r.beatId === beat.id);
if (tagData) {
if (beat.tags) {
beat.tags = beat.tags.filter(tag => tag !== tagData.tag);
}
}
const removalsForBeat = removals.filter(r => r.beatId === beat.id);
if (removalsForBeat.length) {
removalsForBeat.forEach((assignment: BeatsTagAssignment) => {
const response = this.beatsDB
.filter(beat => beatIds.includes(beat.id))
.map(beat => {
const tagData = removals.find(r => r.beatId === beat.id);
if (tagData) {
if (beat.tags) {
beat.tags = beat.tags.filter(tag => tag !== assignment.tag);
beat.tags = beat.tags.filter(tag => tag !== tagData.tag);
}
});
}
return beat;
});
}
const removalsForBeat = removals.filter(r => r.beatId === beat.id);
if (removalsForBeat.length) {
removalsForBeat.forEach((assignment: BeatsTagAssignment) => {
if (beat.tags) {
beat.tags = beat.tags.filter(tag => tag !== assignment.tag);
}
});
}
return beat;
});
return response.map<any>((item: CMBeat, resultIdx: number) => ({
idxInRequest: removals[resultIdx].idxInRequest,
@ -77,24 +79,26 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
public async assignTagsToBeats(assignments: BeatsTagAssignment[]): Promise<CMAssignmentReturn[]> {
const beatIds = assignments.map(r => r.beatId);
this.beatsDB.filter(beat => beatIds.includes(beat.id)).map(beat => {
// get tags that need to be assigned to this beat
const tags = assignments
.filter(a => a.beatId === beat.id)
.map((t: BeatsTagAssignment) => t.tag);
this.beatsDB
.filter(beat => beatIds.includes(beat.id))
.map(beat => {
// get tags that need to be assigned to this beat
const tags = assignments
.filter(a => a.beatId === beat.id)
.map((t: BeatsTagAssignment) => t.tag);
if (tags.length > 0) {
if (!beat.tags) {
beat.tags = [];
}
const nonExistingTags = tags.filter((t: string) => beat.tags && !beat.tags.includes(t));
if (tags.length > 0) {
if (!beat.tags) {
beat.tags = [];
}
const nonExistingTags = tags.filter((t: string) => beat.tags && !beat.tags.includes(t));
if (nonExistingTags.length > 0) {
beat.tags = beat.tags.concat(nonExistingTags);
if (nonExistingTags.length > 0) {
beat.tags = beat.tags.concat(nonExistingTags);
}
}
}
return beat;
});
return beat;
});
return assignments.map<any>((item: BeatsTagAssignment, resultIdx: number) => ({
idxInRequest: assignments[resultIdx].idxInRequest,

View file

@ -43,26 +43,25 @@ export const BeatDetailsActionSection = ({ beat }: BeatDetailsActionSectionProps
Uptime: <strong>12min.</strong>
</EuiText>
</EuiFlexItem> */}
{beat.full_tags &&
beat.full_tags.length > 0 && (
<EuiFlexItem grow={false}>
<EuiText size="xs">
<FormattedMessage
id="xpack.beatsManagement.beat.lastConfigUpdateMessage"
defaultMessage="Last Config Update: {lastUpdateTime}."
values={{
lastUpdateTime: (
<strong>
{moment(
first(sortByOrder(beat.full_tags, 'last_updated')).last_updated
).fromNow()}
</strong>
),
}}
/>
</EuiText>
</EuiFlexItem>
)}
{beat.full_tags && beat.full_tags.length > 0 && (
<EuiFlexItem grow={false}>
<EuiText size="xs">
<FormattedMessage
id="xpack.beatsManagement.beat.lastConfigUpdateMessage"
defaultMessage="Last Config Update: {lastUpdateTime}."
values={{
lastUpdateTime: (
<strong>
{moment(
first(sortByOrder(beat.full_tags, 'last_updated')).last_updated
).fromNow()}
</strong>
),
}}
/>
</EuiText>
</EuiFlexItem>
)}
</EuiFlexGroup>
) : (
<div>

View file

@ -59,15 +59,17 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
): Promise<BeatsTagAssignment[]> {
const beatIds = removals.map(r => r.beatId);
const response = this.beatsDB.filter(beat => beatIds.includes(beat.id)).map(beat => {
const tagData = removals.find(r => r.beatId === beat.id);
if (tagData) {
if (beat.tags) {
beat.tags = beat.tags.filter(tag => tag !== tagData.tag);
const response = this.beatsDB
.filter(beat => beatIds.includes(beat.id))
.map(beat => {
const tagData = removals.find(r => r.beatId === beat.id);
if (tagData) {
if (beat.tags) {
beat.tags = beat.tags.filter(tag => tag !== tagData.tag);
}
}
}
return beat;
});
return beat;
});
return response.map<any>((item: CMBeat, resultIdx: number) => ({
idxInRequest: removals[resultIdx].idxInRequest,
@ -82,24 +84,26 @@ export class MemoryBeatsAdapter implements CMBeatsAdapter {
): Promise<BeatsTagAssignment[]> {
const beatIds = assignments.map(r => r.beatId);
this.beatsDB.filter(beat => beatIds.includes(beat.id)).map(beat => {
// get tags that need to be assigned to this beat
const tags = assignments
.filter(a => a.beatId === beat.id)
.map((t: BeatsTagAssignment) => t.tag);
this.beatsDB
.filter(beat => beatIds.includes(beat.id))
.map(beat => {
// get tags that need to be assigned to this beat
const tags = assignments
.filter(a => a.beatId === beat.id)
.map((t: BeatsTagAssignment) => t.tag);
if (tags.length > 0) {
if (!beat.tags) {
beat.tags = [];
}
const nonExistingTags = tags.filter((t: string) => beat.tags && !beat.tags.includes(t));
if (tags.length > 0) {
if (!beat.tags) {
beat.tags = [];
}
const nonExistingTags = tags.filter((t: string) => beat.tags && !beat.tags.includes(t));
if (nonExistingTags.length > 0) {
beat.tags = beat.tags.concat(nonExistingTags);
if (nonExistingTags.length > 0) {
beat.tags = beat.tags.concat(nonExistingTags);
}
}
}
return beat;
});
return beat;
});
return assignments.map<any>((item: BeatsTagAssignment, resultIdx: number) => ({
idxInRequest: assignments[resultIdx].idxInRequest,

View file

@ -11,17 +11,16 @@ import 'react-datetime/css/react-datetime.css';
export const DatetimeQuickList = ({ from, to, ranges, onSelect, children }) => (
<div style={{ display: 'grid', alignItems: 'center' }}>
{ranges.map(
(range, i) =>
from === range.from && to === range.to ? (
<EuiButton size="s" fill key={i} onClick={() => onSelect(range.from, range.to)}>
{range.display}
</EuiButton>
) : (
<EuiButtonEmpty size="s" key={i} onClick={() => onSelect(range.from, range.to)}>
{range.display}
</EuiButtonEmpty>
)
{ranges.map((range, i) =>
from === range.from && to === range.to ? (
<EuiButton size="s" fill key={i} onClick={() => onSelect(range.from, range.to)}>
{range.display}
</EuiButton>
) : (
<EuiButtonEmpty size="s" key={i} onClick={() => onSelect(range.from, range.to)}>
{range.display}
</EuiButtonEmpty>
)
)}
{children}
</div>

View file

@ -51,12 +51,11 @@ export const AdvancedFailureComponent = props => {
<EuiButton disabled={!valid} onClick={e => valueChange(e)} size="s" type="submit">
Apply
</EuiButton>
{defaultValue &&
defaultValue.length && (
<EuiButtonEmpty size="s" color="danger" onClick={confirmReset}>
Reset
</EuiButtonEmpty>
)}
{defaultValue && defaultValue.length && (
<EuiButtonEmpty size="s" color="danger" onClick={confirmReset}>
Reset
</EuiButtonEmpty>
)}
</div>
</EuiForm>
);

View file

@ -65,16 +65,15 @@ export const SimpleTemplate = props => {
</EuiFlexItem>
</Fragment>
)}
{name !== 'defaultStyle' &&
(!labels || labels.length === 0) && (
<EuiFlexItem grow={false}>
<TooltipIcon
position="left"
icon="warning"
content="Data has no series to style, add a color dimension"
/>
</EuiFlexItem>
)}
{name !== 'defaultStyle' && (!labels || labels.length === 0) && (
<EuiFlexItem grow={false}>
<TooltipIcon
position="left"
icon="warning"
content="Data has no series to style, add a color dimension"
/>
</EuiFlexItem>
)}
</EuiFlexGroup>
);
};

View file

@ -31,17 +31,16 @@ const primaryUpdate = state => state.primaryUpdate;
*/
// dispatch the various types of actions
const rawCursorPosition = select(
action => (action.type === 'cursorPosition' ? action.payload : null)
const rawCursorPosition = select(action =>
action.type === 'cursorPosition' ? action.payload : null
)(primaryUpdate);
const mouseButtonEvent = select(action => (action.type === 'mouseEvent' ? action.payload : null))(
primaryUpdate
);
const keyFromMouse = select(
({ type, payload: { altKey, metaKey, shiftKey, ctrlKey } }) =>
type === 'cursorPosition' || type === 'mouseEvent' ? { altKey, metaKey, shiftKey, ctrlKey } : {}
const keyFromMouse = select(({ type, payload: { altKey, metaKey, shiftKey, ctrlKey } }) =>
type === 'cursorPosition' || type === 'mouseEvent' ? { altKey, metaKey, shiftKey, ctrlKey } : {}
)(primaryUpdate);
const metaHeld = select(appleKeyboard ? e => e.metaKey : e => e.altKey)(keyFromMouse);

View file

@ -77,8 +77,8 @@ const hoveredShapes = select((shapes, cursorPosition) =>
)(shapes, cursorPosition);
const depthIndex = 0;
const hoveredShape = select(
hoveredShapes => (hoveredShapes.length ? hoveredShapes[depthIndex] : null)
const hoveredShape = select(hoveredShapes =>
hoveredShapes.length ? hoveredShapes[depthIndex] : null
)(hoveredShapes);
const draggedShape = select(draggingShape)(scene, hoveredShape, mouseIsDown, mouseDowned);
@ -94,57 +94,56 @@ const focusedShapes = select((shapes, focusedShape) =>
shapes.filter(shape => focusedShape && shape.id === focusedShape.id)
)(shapes, focusedShape);
const keyTransformGesture = select(
keys =>
config.shortcuts
? Object.keys(keys)
.map(keypress => {
switch (keypress) {
case 'KeyW':
return { transform: matrix.translate(0, -5, 0) };
case 'KeyA':
return { transform: matrix.translate(-5, 0, 0) };
case 'KeyS':
return { transform: matrix.translate(0, 5, 0) };
case 'KeyD':
return { transform: matrix.translate(5, 0, 0) };
case 'KeyF':
return { transform: matrix.translate(0, 0, -20) };
case 'KeyC':
return { transform: matrix.translate(0, 0, 20) };
case 'KeyX':
return { transform: matrix.rotateX(Math.PI / 45) };
case 'KeyY':
return { transform: matrix.rotateY(Math.PI / 45 / 1.3) };
case 'KeyZ':
return { transform: matrix.rotateZ(Math.PI / 45 / 1.6) };
case 'KeyI':
return { transform: matrix.scale(1, 1.05, 1) };
case 'KeyJ':
return { transform: matrix.scale(1 / 1.05, 1, 1) };
case 'KeyK':
return { transform: matrix.scale(1, 1 / 1.05, 1) };
case 'KeyL':
return { transform: matrix.scale(1.05, 1, 1) };
case 'KeyP':
return { transform: matrix.perspective(2000) };
case 'KeyR':
return { transform: matrix.shear(0.1, 0) };
case 'KeyT':
return { transform: matrix.shear(-0.1, 0) };
case 'KeyU':
return { transform: matrix.shear(0, 0.1) };
case 'KeyH':
return { transform: matrix.shear(0, -0.1) };
case 'KeyM':
return { transform: matrix.UNITMATRIX, sizes: [1.0, 0, 0, 0, 1.0, 0, 10, 0, 1] };
case 'Backspace':
case 'Delete':
return { transform: matrix.UNITMATRIX, delete: true };
}
})
.filter(identity)
: []
const keyTransformGesture = select(keys =>
config.shortcuts
? Object.keys(keys)
.map(keypress => {
switch (keypress) {
case 'KeyW':
return { transform: matrix.translate(0, -5, 0) };
case 'KeyA':
return { transform: matrix.translate(-5, 0, 0) };
case 'KeyS':
return { transform: matrix.translate(0, 5, 0) };
case 'KeyD':
return { transform: matrix.translate(5, 0, 0) };
case 'KeyF':
return { transform: matrix.translate(0, 0, -20) };
case 'KeyC':
return { transform: matrix.translate(0, 0, 20) };
case 'KeyX':
return { transform: matrix.rotateX(Math.PI / 45) };
case 'KeyY':
return { transform: matrix.rotateY(Math.PI / 45 / 1.3) };
case 'KeyZ':
return { transform: matrix.rotateZ(Math.PI / 45 / 1.6) };
case 'KeyI':
return { transform: matrix.scale(1, 1.05, 1) };
case 'KeyJ':
return { transform: matrix.scale(1 / 1.05, 1, 1) };
case 'KeyK':
return { transform: matrix.scale(1, 1 / 1.05, 1) };
case 'KeyL':
return { transform: matrix.scale(1.05, 1, 1) };
case 'KeyP':
return { transform: matrix.perspective(2000) };
case 'KeyR':
return { transform: matrix.shear(0.1, 0) };
case 'KeyT':
return { transform: matrix.shear(-0.1, 0) };
case 'KeyU':
return { transform: matrix.shear(0, 0.1) };
case 'KeyH':
return { transform: matrix.shear(0, -0.1) };
case 'KeyM':
return { transform: matrix.UNITMATRIX, sizes: [1.0, 0, 0, 0, 1.0, 0, 10, 0, 1] };
case 'Backspace':
case 'Delete':
return { transform: matrix.UNITMATRIX, delete: true };
}
})
.filter(identity)
: []
)(pressedKeys);
const alterSnapGesture = select(metaHeld => (metaHeld ? ['relax'] : []))(metaHeld);
@ -189,14 +188,14 @@ const transformGestures = select((keyTransformGesture, mouseTransformGesture) =>
keyTransformGesture.concat(mouseTransformGesture)
)(keyTransformGesture, mouseTransformGesture);
const restateShapesEvent = select(
action => (action && action.type === 'restateShapesEvent' ? action.payload : null)
const restateShapesEvent = select(action =>
action && action.type === 'restateShapesEvent' ? action.payload : null
)(primaryUpdate);
// directSelect is an API entry point (via the `shapeSelect` action) that lets the client directly specify what thing
// is selected, as otherwise selection is driven by gestures and knowledge of element positions
const directSelect = select(
action => (action && action.type === 'shapeSelect' ? action.payload : null)
const directSelect = select(action =>
action && action.type === 'shapeSelect' ? action.payload : null
)(primaryUpdate);
const selectedShapeObjects = select(scene => scene.selectedShapeObjects || [])(scene);
@ -480,8 +479,8 @@ const resizeAnnotationManipulation = (transformGestures, directShapes, allShapes
const symmetricManipulation = optionHeld; // as in comparable software applications, todo: make configurable
const resizeManipulator = select(
toggle => (toggle ? centeredResizeManipulation : asymmetricResizeManipulation)
const resizeManipulator = select(toggle =>
toggle ? centeredResizeManipulation : asymmetricResizeManipulation
)(symmetricManipulation);
const transformIntents = select(
@ -1132,22 +1131,24 @@ const projectAABB = ([[xMin, yMin], [xMax, yMax]]) => {
const dissolveGroups = (preexistingAdHocGroups, shapes, selectedShapes) => {
return {
shapes: shapes.filter(shape => !isAdHocGroup(shape)).map(shape => {
const preexistingAdHocGroupParent = preexistingAdHocGroups.find(
groupShape => groupShape.id === shape.parent
);
// if linked, dissociate from ad hoc group parent
return preexistingAdHocGroupParent
? {
...shape,
parent: null,
localTransformMatrix: matrix.multiply(
preexistingAdHocGroupParent.localTransformMatrix, // reinstate the group offset onto the child
shape.localTransformMatrix
),
}
: shape;
}),
shapes: shapes
.filter(shape => !isAdHocGroup(shape))
.map(shape => {
const preexistingAdHocGroupParent = preexistingAdHocGroups.find(
groupShape => groupShape.id === shape.parent
);
// if linked, dissociate from ad hoc group parent
return preexistingAdHocGroupParent
? {
...shape,
parent: null,
localTransformMatrix: matrix.multiply(
preexistingAdHocGroupParent.localTransformMatrix, // reinstate the group offset onto the child
shape.localTransformMatrix
),
}
: shape;
}),
selectedShapes,
};
};
@ -1229,8 +1230,8 @@ const getLeafs = (descendCondition, allShapes, shapes) =>
removeDuplicates(
s => s.id,
flatten(
shapes.map(
shape => (descendCondition(shape) ? allShapes.filter(s => s.parent === shape.id) : shape)
shapes.map(shape =>
descendCondition(shape) ? allShapes.filter(s => s.parent === shape.id) : shape
)
)
);

View file

@ -38,8 +38,8 @@ export type SubsetResolverWithFields<R, IncludedFields extends string> = R exten
ArgsInArray
>
: R extends BasicResolver<infer Result, infer Args>
? BasicResolver<Pick<Result, Extract<keyof Result, IncludedFields>>, Args>
: never;
? BasicResolver<Pick<Result, Extract<keyof Result, IncludedFields>>, Args>
: never;
export type SubsetResolverWithoutFields<R, ExcludedFields extends string> = R extends BasicResolver<
Array<infer ResultInArray>,
@ -50,8 +50,8 @@ export type SubsetResolverWithoutFields<R, ExcludedFields extends string> = R ex
ArgsInArray
>
: R extends BasicResolver<infer Result, infer Args>
? BasicResolver<Pick<Result, Exclude<keyof Result, ExcludedFields>>, Args>
: never;
? BasicResolver<Pick<Result, Exclude<keyof Result, ExcludedFields>>, Args>
: never;
export type InfraResolver<Result, Parent, Args, Context> = (
parent: Parent,

View file

@ -231,8 +231,8 @@ const withPreviousSuggestionSelected = (
props.suggestions.length === 0
? null
: state.selectedIndex !== null
? (state.selectedIndex + props.suggestions.length - 1) % props.suggestions.length
: Math.max(props.suggestions.length - 1, 0),
? (state.selectedIndex + props.suggestions.length - 1) % props.suggestions.length
: Math.max(props.suggestions.length - 1, 0),
});
const withNextSuggestionSelected = (
@ -244,8 +244,8 @@ const withNextSuggestionSelected = (
props.suggestions.length === 0
? null
: state.selectedIndex !== null
? (state.selectedIndex + 1) % props.suggestions.length
: 0,
? (state.selectedIndex + 1) % props.suggestions.length
: 0,
});
const withSuggestionAtIndexSelected = (suggestionIndex: number) => (
@ -257,8 +257,8 @@ const withSuggestionAtIndexSelected = (suggestionIndex: number) => (
props.suggestions.length === 0
? null
: suggestionIndex >= 0 && suggestionIndex < props.suggestions.length
? suggestionIndex
: 0,
? suggestionIndex
: 0,
});
const withSuggestionsVisible = (state: AutocompleteFieldState) => ({

View file

@ -64,5 +64,5 @@ const formatMessageSegment = (messageSegment: LogEntryMessageSegment): string =>
messageSegment.__typename === 'InfraLogMessageFieldSegment'
? messageSegment.value
: messageSegment.__typename === 'InfraLogMessageConstantSegment'
? messageSegment.constant
: 'failed to format message';
? messageSegment.constant
: 'failed to format message';

View file

@ -91,17 +91,16 @@ export const WithKueryAutocompletion = withIndexPattern(
selectionEnd: cursorPosition,
});
this.setState(
state =>
state.currentRequest &&
state.currentRequest.expression !== expression &&
state.currentRequest.cursorPosition !== cursorPosition
? state // ignore this result, since a newer request is in flight
: {
...state,
currentRequest: null,
suggestions: maxSuggestions ? suggestions.slice(0, maxSuggestions) : suggestions,
}
this.setState(state =>
state.currentRequest &&
state.currentRequest.expression !== expression &&
state.currentRequest.cursorPosition !== cursorPosition
? state // ignore this result, since a newer request is in flight
: {
...state,
currentRequest: null,
suggestions: maxSuggestions ? suggestions.slice(0, maxSuggestions) : suggestions,
}
);
};
}

View file

@ -16,8 +16,11 @@ export const selectIsAutoReloading = (state: WaffleTimeState) =>
export const selectTimeUpdatePolicyInterval = (state: WaffleTimeState) =>
state.updatePolicy.policy === 'interval' ? state.updatePolicy.interval : null;
export const selectCurrentTimeRange = createSelector(selectCurrentTime, currentTime => ({
from: currentTime - 1000 * 60 * 5,
interval: '1m',
to: currentTime,
}));
export const selectCurrentTimeRange = createSelector(
selectCurrentTime,
currentTime => ({
from: currentTime - 1000 * 60 * 5,
interval: '1m',
to: currentTime,
})
);

View file

@ -160,17 +160,32 @@ export const createGraphqlQueryEpic = <Data, Variables, Error = ApolloError>(
export const createGraphqlStateSelectors = <State>(
selectState: (parentState: any) => GraphqlState<State> = parentState => parentState
) => {
const selectData = createSelector(selectState, state => state.data);
const selectData = createSelector(
selectState,
state => state.data
);
const selectLoadingProgress = createSelector(selectState, state => state.current);
const selectLoadingProgress = createSelector(
selectState,
state => state.current
);
const selectLoadingProgressOperationInfo = createSelector(
selectLoadingProgress,
progress => (isRunningLoadingProgress(progress) ? progress.parameters : null)
);
const selectIsLoading = createSelector(selectLoadingProgress, isRunningLoadingProgress);
const selectIsIdle = createSelector(selectLoadingProgress, isIdleLoadingProgress);
const selectIsLoading = createSelector(
selectLoadingProgress,
isRunningLoadingProgress
);
const selectIsIdle = createSelector(
selectLoadingProgress,
isIdleLoadingProgress
);
const selectLoadingResult = createSelector(selectState, state => state.last);
const selectLoadingResult = createSelector(
selectState,
state => state.last
);
const selectLoadingResultOperationInfo = createSelector(
selectLoadingResult,
result => (!isUninitializedLoadingResult(result) ? result.parameters : null)
@ -179,9 +194,18 @@ export const createGraphqlStateSelectors = <State>(
selectLoadingResult,
result => (!isUninitializedLoadingResult(result) ? result.time : null)
);
const selectIsUninitialized = createSelector(selectLoadingResult, isUninitializedLoadingResult);
const selectIsSuccess = createSelector(selectLoadingResult, isSuccessLoadingResult);
const selectIsFailure = createSelector(selectLoadingResult, isFailureLoadingResult);
const selectIsUninitialized = createSelector(
selectLoadingResult,
isUninitializedLoadingResult
);
const selectIsSuccess = createSelector(
selectLoadingResult,
isSuccessLoadingResult
);
const selectIsFailure = createSelector(
selectLoadingResult,
isFailureLoadingResult
);
const selectLoadingState = createSelector(
selectLoadingProgress,

View file

@ -58,7 +58,9 @@ interface ActionCreators {
type PlainActionCreator<WrappedActionCreator> = WrappedActionCreator extends () => infer R
? () => R
: WrappedActionCreator extends (payload: infer A) => infer R ? (payload: A) => R : never;
: WrappedActionCreator extends (payload: infer A) => infer R
? (payload: A) => R
: never;
export const bindPlainActionCreators = <WrappedActionCreators extends ActionCreators>(
actionCreators: WrappedActionCreators

View file

@ -12,7 +12,7 @@ function getDocumentPayloadFn(server) {
function encodeContent(content, exportType) {
switch (exportType.jobContentEncoding) {
case 'base64':
return new Buffer(content, 'base64');
return Buffer.from(content, 'base64');
default:
return content;
}

View file

@ -200,7 +200,7 @@ describe(`when job is completed`, () => {
test(`base64 encodes output content for configured jobTypes`, async () => {
const { payload } = await getCompletedResponse({ jobType: 'base64EncodedJobType', outputContent: 'test' });
expect(payload).toBe(new Buffer('test', 'base64').toString());
expect(payload).toBe(Buffer.from('test', 'base64').toString());
});
test(`specifies text/csv; charset=utf-8 contentType header from the job output`, async () => {

View file

@ -181,7 +181,7 @@ export class BasicCredentials {
throw new Error('Password should be a valid non-empty string.');
}
const basicCredentials = new Buffer(`${username}:${password}`).toString('base64');
const basicCredentials = Buffer.from(`${username}:${password}`).toString('base64');
request.headers.authorization = `Basic ${basicCredentials}`;
return request;
}

View file

@ -72,17 +72,17 @@ export default function ({ getService }) {
await supertest.get('/api/security/v1/me')
.set('kbn-xsrf', 'xxx')
.set('Authorization', `Basic ${new Buffer(`${wrongUsername}:${wrongPassword}`).toString('base64')}`)
.set('Authorization', `Basic ${Buffer.from(`${wrongUsername}:${wrongPassword}`).toString('base64')}`)
.expect(401);
await supertest.get('/api/security/v1/me')
.set('kbn-xsrf', 'xxx')
.set('Authorization', `Basic ${new Buffer(`${validUsername}:${wrongPassword}`).toString('base64')}`)
.set('Authorization', `Basic ${Buffer.from(`${validUsername}:${wrongPassword}`).toString('base64')}`)
.expect(401);
await supertest.get('/api/security/v1/me')
.set('kbn-xsrf', 'xxx')
.set('Authorization', `Basic ${new Buffer(`${wrongUsername}:${validPassword}`).toString('base64')}`)
.set('Authorization', `Basic ${Buffer.from(`${wrongUsername}:${validPassword}`).toString('base64')}`)
.expect(401);
});
@ -90,7 +90,7 @@ export default function ({ getService }) {
const apiResponse = await supertest
.get('/api/security/v1/me')
.set('kbn-xsrf', 'xxx')
.set('Authorization', `Basic ${new Buffer(`${validUsername}:${validPassword}`).toString('base64')}`)
.set('Authorization', `Basic ${Buffer.from(`${validUsername}:${validPassword}`).toString('base64')}`)
.expect(200);
expect(apiResponse.body).to.only.have.keys([

View file

@ -29,7 +29,7 @@ const signatureAlgorithm = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha256';
export async function getSAMLRequestId(urlWithSAMLRequestId) {
const inflatedSAMLRequest = await inflateRawAsync(
new Buffer(url.parse(urlWithSAMLRequestId, true /* parseQueryString */).query.SAMLRequest, 'base64')
Buffer.from(url.parse(urlWithSAMLRequestId, true /* parseQueryString */).query.SAMLRequest, 'base64')
);
const parsedSAMLRequest = await parseStringAsync(inflatedSAMLRequest.toString());
@ -82,7 +82,7 @@ export async function getSAMLResponse({ destination, inResponseTo, sessionIndex
{ location: { reference: `//*[local-name(.)='Issuer']`, action: 'after' } }
);
return new Buffer(`
return Buffer.from(`
<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" ID="_bdf1d51245ed0f71aa23"
${inResponseTo ? `InResponseTo="${inResponseTo}"` : ''} Version="2.0"
IssueInstant="${issueInstant}"
@ -111,7 +111,7 @@ export async function getLogoutRequest({ destination, sessionIndex }) {
// HTTP-Redirect with deflate encoding:
// http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf - section 3.4.4.1
const deflatedLogoutRequest = await deflateRawAsync(new Buffer(logoutRequestTemplateXML));
const deflatedLogoutRequest = await deflateRawAsync(Buffer.from(logoutRequestTemplateXML));
const queryStringParameters = {
SAMLRequest: deflatedLogoutRequest.toString('base64'),

2099
yarn.lock

File diff suppressed because it is too large Load diff