Upgrade Node.js to version 12 (#61587)

Thomas Watson 2020-11-12 22:00:57 +01:00 committed by GitHub
parent 3412843958
commit 4c49d5d1be
80 changed files with 520 additions and 296 deletions

View file

@ -1,7 +1,7 @@
# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable.
# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts
ARG NODE_VERSION=10.22.1
ARG NODE_VERSION=12.19.0
FROM node:${NODE_VERSION} AS base

View file

@ -1 +1 @@
10.22.1
12.19.0

.nvmrc
View file

@ -1 +1 @@
10.22.1
12.19.0

View file

@ -512,8 +512,8 @@ In addition to this setting, trusted certificates may be specified via <<server-
| `server.ssl.cipherSuites:`
| Details on the format, and the valid options, are available via the
https://www.openssl.org/docs/man1.0.2/apps/ciphers.html#CIPHER-LIST-FORMAT[OpenSSL cipher list format documentation].
*Default: `ECDHE-RSA-AES128-GCM-SHA256, ECDHE-ECDSA-AES128-GCM-SHA256, ECDHE-RSA-AES256-GCM-SHA384, ECDHE-ECDSA-AES256-GCM-SHA384, DHE-RSA-AES128-GCM-SHA256, ECDHE-RSA-AES128-SHA256, DHE-RSA-AES128-SHA256, ECDHE-RSA-AES256-SHA384, DHE-RSA-AES256-SHA384, ECDHE-RSA-AES256-SHA256, DHE-RSA-AES256-SHA256, HIGH,!aNULL, !eNULL, !EXPORT, !DES, !RC4, !MD5, !PSK, !SRP, !CAMELLIA`*.
https://www.openssl.org/docs/man1.1.1/man1/ciphers.html#CIPHER-LIST-FORMAT[OpenSSL cipher list format documentation].
*Default: `TLS_AES_256_GCM_SHA384 TLS_CHACHA20_POLY1305_SHA256 TLS_AES_128_GCM_SHA256 ECDHE-RSA-AES128-GCM-SHA256, ECDHE-ECDSA-AES128-GCM-SHA256, ECDHE-RSA-AES256-GCM-SHA384, ECDHE-ECDSA-AES256-GCM-SHA384, DHE-RSA-AES128-GCM-SHA256, ECDHE-RSA-AES128-SHA256, DHE-RSA-AES128-SHA256, ECDHE-RSA-AES256-SHA384, DHE-RSA-AES256-SHA384, ECDHE-RSA-AES256-SHA256, DHE-RSA-AES256-SHA256, HIGH,!aNULL, !eNULL, !EXPORT, !DES, !RC4, !MD5, !PSK, !SRP, !CAMELLIA`*.
| `server.ssl.clientAuthentication:`
| Controls the behavior in {kib} for requesting a certificate from client
@ -571,7 +571,7 @@ all http requests to https over the port configured as <<server-port, `server.po
| `server.ssl.supportedProtocols:`
| An array of supported protocols with versions.
Valid protocols: `TLSv1`, `TLSv1.1`, `TLSv1.2`. *Default: TLSv1.1, TLSv1.2*
Valid protocols: `TLSv1`, `TLSv1.1`, `TLSv1.2`, `TLSv1.3`. *Default: TLSv1.1, TLSv1.2, TLSv1.3*
| [[settings-xsrf-whitelist]] `server.xsrf.whitelist:`
| It is not recommended to disable protections for

View file

@ -81,7 +81,7 @@
"**/@types/hapi__boom": "^7.4.1",
"**/@types/hapi__hapi": "^18.2.6",
"**/@types/hapi__mimos": "4.1.0",
"**/@types/node": ">=10.17.17 <10.20.0",
"**/@types/node": "12.19.4",
"**/cross-fetch/node-fetch": "^2.6.1",
"**/deepmerge": "^4.2.2",
"**/fast-deep-equal": "^3.1.1",
@ -97,7 +97,7 @@
"**/typescript": "4.0.2"
},
"engines": {
"node": "10.22.1",
"node": "12.19.0",
"yarn": "^1.21.1"
},
"dependencies": {
@ -497,7 +497,7 @@
"@types/mustache": "^0.8.31",
"@types/ncp": "^2.0.1",
"@types/nock": "^10.0.3",
"@types/node": ">=10.17.17 <10.20.0",
"@types/node": "12.19.4",
"@types/node-fetch": "^2.5.7",
"@types/node-forge": "^0.9.5",
"@types/nodemailer": "^6.2.1",
@ -560,7 +560,7 @@
"@types/vinyl-fs": "^2.4.11",
"@types/watchpack": "^1.1.5",
"@types/webpack": "^4.41.3",
"@types/webpack-env": "^1.15.2",
"@types/webpack-env": "^1.15.3",
"@types/webpack-merge": "^4.1.5",
"@types/write-pkg": "^3.1.0",
"@types/xml-crypto": "^1.4.1",

View file

@ -57,7 +57,13 @@ test('includes namespace in failure', () => {
describe('#defaultValue', () => {
test('returns default when undefined', () => {
const value = new Stream();
expect(schema.stream({ defaultValue: value }).validate(undefined)).toStrictEqual(value);
expect(schema.stream({ defaultValue: value }).validate(undefined)).toMatchInlineSnapshot(`
Stream {
"_events": Object {},
"_eventsCount": 0,
"_maxListeners": undefined,
}
`);
});
test('returns value when specified', () => {

View file

@ -91,9 +91,9 @@ export function startProc(name: string, options: ProcOptions, log: ToolingLog) {
});
if (stdin) {
childProcess.stdin.end(stdin, 'utf8');
childProcess.stdin!.end(stdin, 'utf8'); // TypeScript note: As long as the proc stdio[0] is 'pipe', then stdin will not be null
} else {
childProcess.stdin.end();
childProcess.stdin!.end(); // TypeScript note: As long as the proc stdio[0] is 'pipe', then stdin will not be null
}
let stopCalled = false;
@ -123,8 +123,8 @@ export function startProc(name: string, options: ProcOptions, log: ToolingLog) {
).pipe(share());
const lines$ = Rx.merge(
observeLines(childProcess.stdout),
observeLines(childProcess.stderr)
observeLines(childProcess.stdout!), // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
observeLines(childProcess.stderr!) // TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
).pipe(
tap((line) => log.write(` ${chalk.gray('proc')} [${chalk.gray(name)}] ${line}`)),
share()
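The non-null assertions here, and in the similar hunks throughout this commit, exist because @types/node 12 types the child process streams as nullable. A minimal sketch (not Kibana code) of the pattern, assuming a plain `child_process.spawn` call:

```ts
import { spawn, SpawnOptions } from 'child_process';

// With @types/node 12, ChildProcess.stdin/stdout/stderr are typed as nullable
// because they only exist when the matching stdio slot is 'pipe'. When the
// spawn options are known to pipe all three, the non-null assertion is safe.
const options: SpawnOptions = { stdio: ['pipe', 'pipe', 'pipe'] };
const child = spawn(process.execPath, ['--version'], options);

child.stdout!.on('data', (chunk) => process.stdout.write(chunk)); // stdio[1] is 'pipe'
child.stderr!.on('data', (chunk) => process.stderr.write(chunk)); // stdio[2] is 'pipe'
```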

View file

@ -2,8 +2,7 @@
exports[`formats %s patterns and indents multi-line messages correctly 1`] = `
" │ succ foo bar
│ { foo: { bar: { '1': [Array] } },
│ bar: { bar: { '1': [Array] } } }
│ { foo: { bar: { '1': [Array] } }, bar: { bar: { '1': [Array] } } }
│ Infinity
"

View file

@ -43,7 +43,7 @@ import {
// are not listened for
const pipeline = (...streams: Readable[]) =>
streams.reduce((source, dest) =>
source.once('error', (error) => dest.emit('error', error)).pipe(dest as any)
source.once('error', (error) => dest.destroy(error)).pipe(dest as any)
);
export async function loadAction({

View file

@ -44,7 +44,7 @@ describe('concatStreamProviders() helper', () => {
() =>
new Readable({
read() {
this.emit('error', new Error('foo'));
this.destroy(new Error('foo'));
},
}),
]);

View file

@ -50,7 +50,7 @@ export function concatStreamProviders(
source
// proxy errors from the source to the destination
.once('error', (error) => destination.emit('error', error))
.once('error', (error) => destination.destroy(error))
// pipe the source to the destination but only proxy the
// end event if this is the last source
.pipe(destination, { end: isLast });
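Both `concatStreamProviders` hunks in this commit switch from manually emitting `'error'` on the destination to calling `destroy(error)`. A minimal sketch (not Kibana code) of the difference, using plain Node.js streams:

```ts
import { Readable, Writable } from 'stream';

// destroy(error) is the supported way to put a stream into an errored state;
// it tears the stream down and emits 'error' once, while manually emitting
// 'error' bypasses the internal error bookkeeping newer Node.js relies on.
const source = new Readable({
  read() {
    this.destroy(new Error('boom'));
  },
});

const destination = new Writable({
  write(chunk, _encoding, callback) {
    callback();
  },
});

destination.on('error', (error) => console.error('destination failed:', error.message));
source.once('error', (error) => destination.destroy(error));
source.pipe(destination);
```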

View file

@ -2,4 +2,4 @@
exports[`I18n loader registerTranslationFile should throw error if path to translation file is not an absolute 1`] = `"Paths to translation files must be absolute. Got relative path: \\"./en.json\\""`;
exports[`I18n loader registerTranslationFile should throw error if path to translation file is not specified 1`] = `"The \\"path\\" argument must be of type string. Received type undefined"`;
exports[`I18n loader registerTranslationFile should throw error if path to translation file is not specified 1`] = `"The \\"path\\" argument must be of type string. Received undefined"`;

View file

@ -261,7 +261,6 @@ const expectFileMatchesSnapshotWithCompression = (filePath: string, snapshotLabe
// Verify the brotli variant matches
expect(
// @ts-expect-error @types/node is missing the brotli functions
Zlib.brotliDecompressSync(
Fs.readFileSync(Path.resolve(MOCK_REPO_DIR, `${filePath}.br`))
).toString()

View file

@ -23,7 +23,7 @@ import * as Rx from 'rxjs';
import { mergeMap, map, catchError } from 'rxjs/operators';
import { allValuesFrom } from '../common';
const stat$ = Rx.bindNodeCallback(Fs.stat);
const stat$ = Rx.bindNodeCallback<Fs.PathLike, Fs.Stats>(Fs.stat);
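A minimal usage sketch of the explicitly-typed `bindNodeCallback` above; the generic parameters spell out the callback's argument and result types, which the `fs.stat` overloads in the newer @types/node apparently no longer let RxJS infer cleanly on its own:

```ts
import * as Fs from 'fs';
import * as Rx from 'rxjs';

// Explicit generics: first the argument type, then the callback result type.
const stat$ = Rx.bindNodeCallback<Fs.PathLike, Fs.Stats>(Fs.stat);

stat$('package.json').subscribe((stats) => {
  console.log('size in bytes:', stats.size);
});
```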
/**
* get mtimes of referenced paths concurrently, limit concurrency to 100

View file

@ -164,7 +164,8 @@ export function observeWorker(
type: 'worker started',
bundles,
}),
observeStdio$(proc.stdout).pipe(
// TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
observeStdio$(proc.stdout!).pipe(
map(
(line): WorkerStdio => ({
type: 'worker stdio',
@ -173,7 +174,8 @@ export function observeWorker(
})
)
),
observeStdio$(proc.stderr).pipe(
// TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
observeStdio$(proc.stderr!).pipe(
map(
(line): WorkerStdio => ({
type: 'worker stdio',

View file

@ -32273,8 +32273,10 @@ function spawnStreaming(command, args, opts, {
mergeMultiline: true,
tag: color.bold(prefix)
});
spawned.stdout.pipe(prefixedStdout).pipe(streamToLog(debug));
spawned.stderr.pipe(prefixedStderr).pipe(streamToLog(debug));
spawned.stdout.pipe(prefixedStdout).pipe(streamToLog(debug)); // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
spawned.stderr.pipe(prefixedStderr).pipe(streamToLog(debug)); // TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
return spawned;
}
@ -51377,7 +51379,8 @@ const WatchCommand = {
await Object(_utils_parallelize__WEBPACK_IMPORTED_MODULE_2__["parallelizeBatches"])(batchedProjects, async pkg => {
const completionHint = await Object(_utils_watch__WEBPACK_IMPORTED_MODULE_4__["waitUntilWatchIsReady"])(pkg.runScriptStreaming(watchScriptName, {
debug: false
}).stdout);
}).stdout // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
);
_utils_log__WEBPACK_IMPORTED_MODULE_1__["log"].success(`[${pkg.name}] Initial build completed (${completionHint}).`);
});
}

View file

@ -80,7 +80,7 @@ export const WatchCommand: ICommand = {
const completionHint = await waitUntilWatchIsReady(
pkg.runScriptStreaming(watchScriptName, {
debug: false,
}).stdout
}).stdout! // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
);
log.success(`[${pkg.name}] Initial build completed (${completionHint}).`);

View file

@ -71,8 +71,8 @@ export function spawnStreaming(
const prefixedStdout = logTransformer({ tag: color.bold(prefix) });
const prefixedStderr = logTransformer({ mergeMultiline: true, tag: color.bold(prefix) });
spawned.stdout.pipe(prefixedStdout).pipe(streamToLog(debug));
spawned.stderr.pipe(prefixedStderr).pipe(streamToLog(debug));
spawned.stdout!.pipe(prefixedStdout).pipe(streamToLog(debug)); // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
spawned.stderr!.pipe(prefixedStderr).pipe(streamToLog(debug)); // TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
return spawned;
}

View file

@ -35,8 +35,8 @@ export function observeContainerLogs(name: string, containerId: string, log: Too
const logLine$ = new Rx.Subject<string>();
Rx.merge(
observeLines(logsProc.stdout).pipe(tap((line) => log.info(`[docker:${name}] ${line}`))),
observeLines(logsProc.stderr).pipe(tap((line) => log.error(`[docker:${name}] ${line}`)))
observeLines(logsProc.stdout!).pipe(tap((line) => log.info(`[docker:${name}] ${line}`))), // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
observeLines(logsProc.stderr!).pipe(tap((line) => log.error(`[docker:${name}] ${line}`))) // TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
).subscribe(logLine$);
return logLine$.asObservable();

View file

@ -59,11 +59,17 @@ describe('with randomness', () => {
);
await phase.trigger();
// `phase.trigger()` uses `Math.random` to sort the internal array of
// handlers. But since the sorting algorithm used internally in
// `Array.prototype.sort` is not spec'ed, it can change between Node.js
// versions, and as a result the expected output below might not match if
// you up/downgrade Node.js.
expect(order).toMatchInlineSnapshot(`
Array [
"one",
"three",
"two",
"one",
]
`);
});

View file

@ -39,9 +39,9 @@ export const kbnTestConfig = new (class KbnTestConfig {
const testKibanaUrl = url.parse(process.env.TEST_KIBANA_URL);
return {
protocol: testKibanaUrl.protocol?.slice(0, -1),
hostname: testKibanaUrl.hostname,
hostname: testKibanaUrl.hostname === null ? undefined : testKibanaUrl.hostname,
port: testKibanaUrl.port ? parseInt(testKibanaUrl.port, 10) : undefined,
auth: testKibanaUrl.auth,
auth: testKibanaUrl.auth === null ? undefined : testKibanaUrl.auth,
username: testKibanaUrl.auth?.split(':')[0],
password: testKibanaUrl.auth?.split(':')[1],
};
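A minimal sketch (hypothetical URL) of the null handling above: the legacy `url.parse()` API, as typed in @types/node 12, reports missing URL parts as `null`, while the consuming config expects `string | undefined`:

```ts
import url from 'url';

const parsed = url.parse('http://elastic:changeme@localhost:5601');

// Map null to undefined so the value matches a `string | undefined` field.
const hostname = parsed.hostname === null ? undefined : parsed.hostname; // 'localhost'
const auth = parsed.auth === null ? undefined : parsed.auth; // 'elastic:changeme'
console.log(hostname, auth);
```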

View file

@ -5,8 +5,14 @@ exports[`repl it allows print depth to be specified 1`] = `"{ '0': { 
exports[`repl it colorizes raw values 1`] = `"{ meaning: 42 }"`;
exports[`repl it handles deep and recursive objects 1`] = `
"{ '0': { '1': { '2': { '3': { '4': { '5': [Object] } } } } },
whoops: [Circular] }"
"{
'0': {
'1': {
'2': { '3': { '4': { '5': [Object] } } }
}
},
whoops: [Circular]
}"
`;
exports[`repl it handles undefined 1`] = `"undefined"`;
@ -45,8 +51,14 @@ Array [
Array [
"Promise Rejected:
",
"{ '0': { '1': { '2': { '3': { '4': { '5': [Object] } } } } },
whoops: [Circular] }",
"{
'0': {
'1': {
'2': { '3': { '4': { '5': [Object] } } }
}
},
whoops: [Circular]
}",
],
]
`;
@ -59,8 +71,14 @@ Array [
Array [
"Promise Resolved:
",
"{ '0': { '1': { '2': { '3': { '4': { '5': [Object] } } } } },
whoops: [Circular] }",
"{
'0': {
'1': {
'2': { '3': { '4': { '5': [Object] } } }
}
},
whoops: [Circular]
}",
],
]
`;

View file

@ -121,14 +121,15 @@ describe('Server logging configuration', function () {
'--verbose',
]);
const message$ = Rx.fromEvent(child.stdout, 'data').pipe(
// TypeScript note: As long as the child stdio[1] is 'pipe', then stdout will not be null
const message$ = Rx.fromEvent(child.stdout!, 'data').pipe(
map((messages) => String(messages).split('\n').filter(Boolean))
);
await message$
.pipe(
// We know the sighup handler will be registered before this message logged
filter((messages) => messages.some((m) => m.includes('setting up root'))),
filter((messages: string[]) => messages.some((m) => m.includes('setting up root'))),
take(1)
)
.toPromise();
@ -189,14 +190,15 @@ describe('Server logging configuration', function () {
child = Child.spawn(process.execPath, [kibanaPath, '--oss', '--config', configFilePath]);
const message$ = Rx.fromEvent(child.stdout, 'data').pipe(
// TypeScript note: As long as the child stdio[1] is 'pipe', then stdout will not be null
const message$ = Rx.fromEvent(child.stdout!, 'data').pipe(
map((messages) => String(messages).split('\n').filter(Boolean))
);
await message$
.pipe(
// We know the sighup handler will be registered before this message logged
filter((messages) => messages.some((m) => m.includes('setting up root'))),
filter((messages: string[]) => messages.some((m) => m.includes('setting up root'))),
take(1)
)
.toPromise();

View file

@ -206,12 +206,19 @@ export interface HttpRequestInit {
/** @public */
export interface HttpFetchQuery {
[key: string]:
| string
| number
| boolean
| undefined
| Array<string | number | boolean | undefined>;
/**
* TypeScript note: Technically we should use this interface instead, but @types/node uses the below stricter
* definition, so to avoid TypeScript errors, we'll restrict our version.
*
* [key: string]:
* | string
* | number
* | boolean
* | Array<string | number | boolean>
* | undefined
* | null;
*/
[key: string]: string | number | boolean | string[] | number[] | boolean[] | undefined | null;
}
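The comment above explains that the index signature was narrowed to what the @types/node `querystring` typings accept. A minimal sketch (hypothetical query object) of the consequence:

```ts
import { stringify, ParsedUrlQueryInput } from 'querystring';

interface Query {
  [key: string]: string | number | boolean | string[] | number[] | boolean[] | undefined | null;
}

// Homogeneous arrays satisfy the narrowed signature and can be handed to
// querystring.stringify; a mixed array like [1, 'a', undefined] no longer
// type-checks.
const query: Query = { page: 1, tags: ['a', 'b'], draft: false };
console.log(stringify(query as ParsedUrlQueryInput)); // page=1&tags=a&tags=b&draft=false
```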
/**

View file

@ -625,8 +625,7 @@ export interface HttpFetchOptionsWithPath extends HttpFetchOptions {
// @public (undocumented)
export interface HttpFetchQuery {
// (undocumented)
[key: string]: string | number | boolean | undefined | Array<string | number | boolean | undefined>;
[key: string]: string | number | boolean | string[] | number[] | boolean[] | undefined | null;
}
// @public

View file

@ -118,6 +118,18 @@ const K = [
const W = new Array(64);
type BufferEncoding =
| 'ascii'
| 'utf8'
| 'utf-8'
| 'utf16le'
| 'ucs2'
| 'ucs-2'
| 'base64'
| 'latin1'
| 'binary'
| 'hex';
/* eslint-disable no-bitwise, no-shadow */
export class Sha256 {
private _a: number;
@ -157,7 +169,7 @@ export class Sha256 {
this._s = 0;
}
update(data: string | Buffer, encoding?: string): Sha256 {
update(data: string | Buffer, encoding?: BufferEncoding): Sha256 {
if (typeof data === 'string') {
encoding = encoding || 'utf8';
data = Buffer.from(data, encoding);
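A minimal sketch of what the narrowed signature buys: restricting `encoding` to the encodings Buffer actually understands lets the compiler reject typos that a plain `string` parameter would accept (the names below are hypothetical):

```ts
type Encoding =
  | 'ascii'
  | 'utf8'
  | 'utf-8'
  | 'utf16le'
  | 'ucs2'
  | 'ucs-2'
  | 'base64'
  | 'latin1'
  | 'binary'
  | 'hex';

function toBuffer(data: string, encoding: Encoding = 'utf8'): Buffer {
  return Buffer.from(data, encoding);
}

toBuffer('6b6962616e61', 'hex'); // "kibana" encoded as hex
// toBuffer('kibana', 'utf32'); // rejected at compile time
```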

View file

@ -145,6 +145,9 @@ describe('CoreUsageDataService', () => {
"certificateAuthoritiesConfigured": false,
"certificateConfigured": false,
"cipherSuites": Array [
"TLS_AES_256_GCM_SHA384",
"TLS_CHACHA20_POLY1305_SHA256",
"TLS_AES_128_GCM_SHA256",
"ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-ECDSA-AES128-GCM-SHA256",
"ECDHE-RSA-AES256-GCM-SHA384",
@ -174,6 +177,7 @@ describe('CoreUsageDataService', () => {
"supportedProtocols": Array [
"TLSv1.1",
"TLSv1.2",
"TLSv1.3",
],
"truststoreConfigured": false,
},

View file

@ -322,7 +322,6 @@ describe('configureClient', () => {
);
const response = createResponseWithBody(
// @ts-expect-error definition doesn't know about from
Readable.from(
JSON.stringify({
seq_no_primary_term: true,

View file

@ -47,6 +47,9 @@ Object {
"socketTimeout": 120000,
"ssl": Object {
"cipherSuites": Array [
"TLS_AES_256_GCM_SHA384",
"TLS_CHACHA20_POLY1305_SHA256",
"TLS_AES_128_GCM_SHA256",
"ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-ECDSA-AES128-GCM-SHA256",
"ECDHE-RSA-AES256-GCM-SHA384",
@ -75,6 +78,7 @@ Object {
"supportedProtocols": Array [
"TLSv1.1",
"TLSv1.2",
"TLSv1.3",
],
"truststore": Object {},
},

View file

@ -79,7 +79,7 @@ beforeEach(() => {
ssl: {
enabled: true,
certificate,
cipherSuites: ['cipherSuite'],
cipherSuites: ['TLS_AES_256_GCM_SHA384'],
getSecureOptions: () => 0,
key,
redirectHttpFromPort: config.port + 1,

View file

@ -143,7 +143,7 @@ describe('getServerOptions', () => {
Object {
"ca": undefined,
"cert": "content-some-certificate-path",
"ciphers": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
"ciphers": "TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
"honorCipherOrder": true,
"key": "content-some-key-path",
"passphrase": undefined,
@ -175,7 +175,7 @@ describe('getServerOptions', () => {
"content-ca-2",
],
"cert": "content-some-certificate-path",
"ciphers": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
"ciphers": "TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
"honorCipherOrder": true,
"key": "content-some-key-path",
"passphrase": undefined,

View file

@ -36,7 +36,7 @@ describe('Router validator', () => {
expect(() => validator.getParams({})).toThrowError('[foo]: Not a string');
expect(() => validator.getParams(undefined)).toThrowError(
"Cannot destructure property `foo` of 'undefined' or 'null'."
"Cannot destructure property 'foo' of 'undefined' as it is undefined."
);
expect(() => validator.getParams({}, 'myField')).toThrowError('[myField.foo]: Not a string');

View file

@ -266,14 +266,19 @@ describe('#sslSchema', () => {
certificate: '/path/to/certificate',
enabled: true,
key: '/path/to/key',
supportedProtocols: ['TLSv1', 'TLSv1.1', 'TLSv1.2'],
supportedProtocols: ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3'],
};
const singleKnownProtocolConfig = sslSchema.validate(singleKnownProtocol);
expect(singleKnownProtocolConfig.supportedProtocols).toEqual(['TLSv1']);
const allKnownProtocolsConfig = sslSchema.validate(allKnownProtocols);
expect(allKnownProtocolsConfig.supportedProtocols).toEqual(['TLSv1', 'TLSv1.1', 'TLSv1.2']);
expect(allKnownProtocolsConfig.supportedProtocols).toEqual([
'TLSv1',
'TLSv1.1',
'TLSv1.2',
'TLSv1.3',
]);
});
test('rejects unknown protocols`', () => {
@ -288,21 +293,23 @@ describe('#sslSchema', () => {
certificate: '/path/to/certificate',
enabled: true,
key: '/path/to/key',
supportedProtocols: ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'SOMEv100500'],
supportedProtocols: ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3', 'SOMEv100500'],
};
expect(() => sslSchema.validate(singleUnknownProtocol)).toThrowErrorMatchingInlineSnapshot(`
"[supportedProtocols.0]: types that failed validation:
- [supportedProtocols.0.0]: expected value to equal [TLSv1]
- [supportedProtocols.0.1]: expected value to equal [TLSv1.1]
- [supportedProtocols.0.2]: expected value to equal [TLSv1.2]"
- [supportedProtocols.0.2]: expected value to equal [TLSv1.2]
- [supportedProtocols.0.3]: expected value to equal [TLSv1.3]"
`);
expect(() => sslSchema.validate(allKnownWithOneUnknownProtocols))
.toThrowErrorMatchingInlineSnapshot(`
"[supportedProtocols.3]: types that failed validation:
- [supportedProtocols.3.0]: expected value to equal [TLSv1]
- [supportedProtocols.3.1]: expected value to equal [TLSv1.1]
- [supportedProtocols.3.2]: expected value to equal [TLSv1.2]"
"[supportedProtocols.4]: types that failed validation:
- [supportedProtocols.4.0]: expected value to equal [TLSv1]
- [supportedProtocols.4.1]: expected value to equal [TLSv1.1]
- [supportedProtocols.4.2]: expected value to equal [TLSv1.2]
- [supportedProtocols.4.3]: expected value to equal [TLSv1.3]"
`);
});
});

View file

@ -18,18 +18,16 @@
*/
import { schema, TypeOf } from '@kbn/config-schema';
import crypto from 'crypto';
import { constants as cryptoConstants } from 'crypto';
import { readFileSync } from 'fs';
import { readPkcs12Keystore, readPkcs12Truststore } from '../utils';
// `crypto` type definitions doesn't currently include `crypto.constants`, see
// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/fa5baf1733f49cf26228a4e509914572c1b74adf/types/node/v6/index.d.ts#L3412
const cryptoConstants = (crypto as any).constants;
const protocolMap = new Map<string, number>([
['TLSv1', cryptoConstants.SSL_OP_NO_TLSv1],
['TLSv1.1', cryptoConstants.SSL_OP_NO_TLSv1_1],
['TLSv1.2', cryptoConstants.SSL_OP_NO_TLSv1_2],
// @ts-expect-error According to the docs SSL_OP_NO_TLSv1_3 should exist (https://nodejs.org/docs/latest-v12.x/api/crypto.html)
['TLSv1.3', cryptoConstants.SSL_OP_NO_TLSv1_3],
]);
export const sslSchema = schema.object(
@ -56,8 +54,13 @@ export const sslSchema = schema.object(
}),
redirectHttpFromPort: schema.maybe(schema.number()),
supportedProtocols: schema.arrayOf(
schema.oneOf([schema.literal('TLSv1'), schema.literal('TLSv1.1'), schema.literal('TLSv1.2')]),
{ defaultValue: ['TLSv1.1', 'TLSv1.2'], minSize: 1 }
schema.oneOf([
schema.literal('TLSv1'),
schema.literal('TLSv1.1'),
schema.literal('TLSv1.2'),
schema.literal('TLSv1.3'),
]),
{ defaultValue: ['TLSv1.1', 'TLSv1.2', 'TLSv1.3'], minSize: 1 }
),
clientAuthentication: schema.oneOf(
[schema.literal('none'), schema.literal('optional'), schema.literal('required')],
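TLSv1.3 is wired into both the protocol map and the schema defaults above. A minimal sketch (assumed helper, not necessarily Kibana's exact implementation) of how such a map becomes TLS `secureOptions`:

```ts
import { constants as cryptoConstants } from 'crypto';

// SSL_OP_NO_TLSv1_3 exists at runtime on Node 12 even though the @types/node
// definitions of this era do not declare it, hence the loose cast here.
const flags = (cryptoConstants as unknown) as Record<string, number>;

const protocolFlags = new Map<string, number>([
  ['TLSv1', flags.SSL_OP_NO_TLSv1],
  ['TLSv1.1', flags.SSL_OP_NO_TLSv1_1],
  ['TLSv1.2', flags.SSL_OP_NO_TLSv1_2],
  ['TLSv1.3', flags.SSL_OP_NO_TLSv1_3],
]);

function secureOptionsFor(supportedProtocols: string[]): number {
  let options = 0;
  for (const [protocol, flag] of protocolFlags) {
    if (!supportedProtocols.includes(protocol)) {
      options |= flag; // disable every protocol the config does not allow
    }
  }
  return options;
}

// With the new default, only TLSv1 is disabled:
secureOptionsFor(['TLSv1.1', 'TLSv1.2', 'TLSv1.3']);
```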

View file

@ -144,7 +144,7 @@ test('`dispose()` succeeds even if stream is not created.', async () => {
test('`dispose()` closes stream.', async () => {
const mockStreamEndFinished = jest.fn();
const mockStreamEnd = jest.fn(async (chunk, encoding, callback) => {
const mockStreamEnd = jest.fn(async (callback) => {
// It's required to make sure `dispose` waits for `end` to complete.
await tickMs(100);
mockStreamEndFinished();
@ -170,7 +170,7 @@ test('`dispose()` closes stream.', async () => {
await appender.dispose();
expect(mockStreamEnd).toHaveBeenCalledTimes(1);
expect(mockStreamEnd).toHaveBeenCalledWith(undefined, undefined, expect.any(Function));
expect(mockStreamEnd).toHaveBeenCalledWith(expect.any(Function));
expect(mockStreamEndFinished).toHaveBeenCalled();
// Consequent `dispose` calls should not fail even if stream has been disposed.

View file

@ -76,7 +76,7 @@ export class FileAppender implements DisposableAppender {
return resolve();
}
this.outputStream.end(undefined, undefined, () => {
this.outputStream.end(() => {
this.outputStream = undefined;
resolve();
});

View file

@ -82,21 +82,23 @@ describe('MetricsService', () => {
// `advanceTimersByTime` only ensure the interval handler is executed
// however the `reset` call is executed after the async call to `collect`
// meaning that we are going to miss the call if we don't wait for the
// actual observable emission that is performed after
const waitForNextEmission = () => getOpsMetrics$().pipe(take(1)).toPromise();
// actual observable emission that is performed after. The extra
// `nextTick` is to ensure we've done a complete roundtrip of the event
// loop.
const nextEmission = async () => {
jest.advanceTimersByTime(testInterval);
await getOpsMetrics$().pipe(take(1)).toPromise();
await new Promise((resolve) => process.nextTick(resolve));
};
expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
expect(mockOpsCollector.reset).toHaveBeenCalledTimes(1);
let nextEmission = waitForNextEmission();
jest.advanceTimersByTime(testInterval);
await nextEmission;
await nextEmission();
expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
expect(mockOpsCollector.reset).toHaveBeenCalledTimes(2);
nextEmission = waitForNextEmission();
jest.advanceTimersByTime(testInterval);
await nextEmission;
await nextEmission();
expect(mockOpsCollector.collect).toHaveBeenCalledTimes(3);
expect(mockOpsCollector.reset).toHaveBeenCalledTimes(3);
});
@ -117,13 +119,15 @@ describe('MetricsService', () => {
await metricsService.setup({ http: httpMock });
const { getOpsMetrics$ } = await metricsService.start();
const firstEmission = getOpsMetrics$().pipe(take(1)).toPromise();
jest.advanceTimersByTime(testInterval);
expect(await firstEmission).toEqual({ metric: 'first' });
const nextEmission = async () => {
jest.advanceTimersByTime(testInterval);
const emission = await getOpsMetrics$().pipe(take(1)).toPromise();
await new Promise((resolve) => process.nextTick(resolve));
return emission;
};
const secondEmission = getOpsMetrics$().pipe(take(1)).toPromise();
jest.advanceTimersByTime(testInterval);
expect(await secondEmission).toEqual({ metric: 'second' });
expect(await nextEmission()).toEqual({ metric: 'first' });
expect(await nextEmission()).toEqual({ metric: 'second' });
});
});

View file

@ -44,7 +44,7 @@ describe('concatStreamProviders() helper', () => {
() =>
new Readable({
read() {
this.emit('error', new Error('foo'));
this.destroy(new Error('foo'));
},
}),
]);

View file

@ -54,7 +54,7 @@ export function concatStreamProviders(
source
// proxy errors from the source to the destination
.once('error', (error) => destination.emit('error', error))
.once('error', (error) => destination.destroy(error))
// pipe the source to the destination but only proxy the
// end event if this is the last source
.pipe(destination, { end: isLast });

View file

@ -27,7 +27,7 @@ import { count, map, mergeAll, mergeMap } from 'rxjs/operators';
// @ts-ignore
import { assertAbsolute } from './fs';
const getStat$ = Rx.bindNodeCallback(Fs.stat);
const getStat$ = Rx.bindNodeCallback<Fs.PathLike, Fs.Stats>(Fs.stat);
const getReadDir$ = Rx.bindNodeCallback<string, string[]>(Fs.readdir);
interface Options {

View file

@ -69,13 +69,13 @@ export async function watchStdioForLine(
}
}),
createPromiseFromStreams([
proc.stdout,
proc.stdout!, // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
createSplitStream('\n'),
skipLastEmptyLineStream(),
createMapStream(onLogLine),
]),
createPromiseFromStreams([
proc.stderr,
proc.stderr!, // TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
createSplitStream('\n'),
skipLastEmptyLineStream(),
createMapStream(onLogLine),

View file

@ -26,4 +26,4 @@ if [ -f "${CONFIG_DIR}/node.options" ]; then
KBN_NODE_OPTS="$(grep -v ^# < ${CONFIG_DIR}/node.options | xargs)"
fi
NODE_OPTIONS="--no-warnings --max-http-header-size=65536 $KBN_NODE_OPTS $NODE_OPTIONS" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli/dist" ${@}
NODE_OPTIONS="--no-warnings --max-http-header-size=65536 --tls-min-v1.0 $KBN_NODE_OPTS $NODE_OPTIONS" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli/dist" ${@}
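A minimal sketch of the likely motivation for the new flag: Node.js 12 raised the default minimum TLS version to TLSv1.2, and `--tls-min-v1.0` lowers it again so the `server.ssl.supportedProtocols` values `TLSv1` and `TLSv1.1` remain usable in the distributable:

```ts
import tls from 'tls';

// 'TLSv1.2' by default on Node.js 12; 'TLSv1' when started with --tls-min-v1.0
console.log(tls.DEFAULT_MIN_VERSION);
```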

View file

@ -47,16 +47,16 @@ const packages: Package[] = [
extractMethod: 'gunzip',
archives: {
darwin: {
url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/darwin-x64-64.gz',
sha256: '595c6653d796493ddb288fc0732a0d1df8560099796f55a1dd242357d96bb8d6',
url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/darwin-x64-72.gz',
sha256: '983106049bb86e21b7f823144b2b83e3f1408217401879b3cde0312c803512c9',
},
linux: {
url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/linux-x64-64.gz',
sha256: 'e743587bc96314edf10c3e659c03168bc374a5cd9a6623ee99d989251e331f28',
url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/linux-x64-72.gz',
sha256: '8b6692037f7b0df24dabc9c9b039038d1c3a3110f62121616b406c482169710a',
},
win32: {
url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/win32-x64-64.gz',
sha256: 'b33de62cda24fb02dc80a19fb79977d686468ac746e97cd211059d2d4c75d529',
url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/win32-x64-72.gz',
sha256: '0a6991e693577160c3e9a3f196bd2518368c52d920af331a1a183313e0175604',
},
},
},
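The `-64.gz` to `-72.gz` rename in the re2 URLs tracks the Node.js ABI version (NODE_MODULE_VERSION): Node 10 reports 64 and Node 12 reports 72, so the prebuilt native binaries have to be swapped alongside the runtime. A minimal check:

```ts
// '72' when running on Node.js 12, '64' on Node.js 10
console.log(process.versions.modules);
```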

View file

@ -72,7 +72,10 @@ run(
await Promise.all([
proc.then(() => log.debug(` - ${cmd} exited with 0`)),
Rx.merge(getLine$(proc.stdout), getLine$(proc.stderr))
Rx.merge(
getLine$(proc.stdout!), // TypeScript note: As long as the proc stdio[1] is 'pipe', then stdout will not be null
getLine$(proc.stderr!) // TypeScript note: As long as the proc stdio[2] is 'pipe', then stderr will not be null
)
.pipe(tap((line) => log.debug(line)))
.toPromise(),
]);

View file

@ -661,7 +661,7 @@ describe('Integration', () => {
{
name: 'Any of - mixed - both',
cursor: { lineNumber: 14, column: 3 },
autoCompleteSet: [tt('{'), tt(3)],
autoCompleteSet: [tt(3), tt('{')],
},
]
);

View file

@ -111,6 +111,7 @@ test('Clone adds a new embeddable', async () => {
expect(newPanel.type).toEqual('placeholder');
// let the placeholder load
await dashboard.untilEmbeddableLoaded(newPanelId!);
await new Promise((r) => process.nextTick(r)); // Allow the current turn of the event loop to run to completion
// now wait for the full embeddable to replace it
const loadedPanel = await dashboard.untilEmbeddableLoaded(newPanelId!);
expect(loadedPanel.type).toEqual(embeddable.type);

View file

@ -12,6 +12,7 @@ exports[`Inspector Data View component should render empty state 1`] = `
"_maxListeners": undefined,
"tabular": [Function],
"tabularOptions": Object {},
Symbol(kCapture): false,
},
}
}
@ -130,6 +131,7 @@ exports[`Inspector Data View component should render empty state 1`] = `
"_maxListeners": undefined,
"tabular": [Function],
"tabularOptions": Object {},
Symbol(kCapture): false,
},
}
}

View file

@ -49,6 +49,7 @@ describe('createStartServicesGetter', () => {
await new Promise((r) => setTimeout(r, 1));
future.resolve([core, plugins, self]);
await future.promise;
await new Promise((r) => process.nextTick(r)); // Allow the current turn of the event loop to run to completion
expect(start()).toEqual({
core,
@ -68,6 +69,7 @@ describe('createStartServicesGetter', () => {
await new Promise((r) => setTimeout(r, 1));
future.resolve([core, plugins, self]);
await future.promise;
await new Promise((r) => process.nextTick(r)); // Allow the current turn of the event loop to run to completion
expect(start()).toEqual({
core,

View file

@ -238,7 +238,8 @@ export const createKbnUrlControls = (
* 4. Hash history with base path
*/
export function getRelativeToHistoryPath(absoluteUrl: string, history: History): History.Path {
function stripBasename(path: string = '') {
function stripBasename(path: string | null) {
if (path === null) path = '';
const stripLeadingHash = (_: string) => (_.charAt(0) === '#' ? _.substr(1) : _);
const stripTrailingSlash = (_: string) =>
_.charAt(_.length - 1) === '/' ? _.substr(0, _.length - 1) : _;
@ -250,7 +251,7 @@ export function getRelativeToHistoryPath(absoluteUrl: string, history: History):
const parsedHash = isHashHistory ? null : parseUrlHash(absoluteUrl);
return formatUrl({
pathname: stripBasename(parsedUrl.pathname),
pathname: stripBasename(parsedUrl.pathname ?? null),
search: stringify(urlUtils.encodeQuery(parsedUrl.query), { sort: false, encode: false }),
hash: parsedHash
? formatUrl({

View file

@ -32,7 +32,7 @@ export function shortUrlAssertValid(url: string) {
throw Boom.notAcceptable(`Short url targets cannot have a hostname, found "${hostname}"`);
}
const pathnameParts = trim(pathname, '/').split('/');
const pathnameParts = trim(pathname === null ? undefined : pathname, '/').split('/');
if (pathnameParts.length !== 2) {
throw Boom.notAcceptable(
`Short url target path must be in the format "/app/{{appId}}", found "${pathname}"`

View file

@ -17,11 +17,34 @@
* under the License.
*/
if (process.noProcessWarnings !== true) {
var ignore = ['MaxListenersExceededWarning'];
var EOL = require('os').EOL;
// Be very careful of what you add to this list. Idealy this array should be
// empty, but in certain circumstances, we can allow a warning to be ignored
// temporarily.
//
// Each element in the array is a "rule-object". All rules defined in a
// "rule-object" has to match for a warning to be ignored. Possible rules are:
// `name`, `code`, `message`, `file`, `line`, and `col`.
//
// The `file`, `line`, and `col` rules will be checked against the top stack
// frame only. Also, `file` doesn't have to match the full path, only the end of
// it.
var IGNORE_WARNINGS = [
{
name: 'MaxListenersExceededWarning',
},
{
name: 'DeprecationWarning',
code: 'DEP0066',
file: '/node_modules/supertest/node_modules/superagent/lib/node/index.js',
line: 418,
},
];
if (process.noProcessWarnings !== true) {
process.on('warning', function (warn) {
if (ignore.includes(warn.name)) return;
if (shouldIgnore(warn)) return;
if (process.traceProcessWarnings === true) {
console.error('Node.js process-warning detected - Terminating process...');
@ -48,3 +71,47 @@ if (process.noProcessWarnings !== true) {
process.exit(1);
});
}
function shouldIgnore(warn) {
warn = parseWarn(warn);
return IGNORE_WARNINGS.some(function ({ name, code, message, file, line, col }) {
if (name && name !== warn.name) return false;
if (code && code !== warn.code) return false;
if (message && message !== warn.message) return false;
if (file && !warn.frames[0].file.endsWith(file)) return false;
if (line && line !== warn.frames[0].line) return false;
if (col && col !== warn.frames[0].col) return false;
return true;
});
}
function parseWarn(warn) {
var lines = warn.stack.split(EOL);
return {
name: warn.name,
code: warn.code,
message: lines[0].split(': ')[1],
frames: parseStack(lines.slice(1)),
};
}
function parseStack(stack) {
return stack.map(parseFrame).filter(function (frame) {
return frame;
});
}
function parseFrame(frame) {
// supports the following frame types:
// - " at function-name (file-path:1:2)"
// - " at function-name (file-path)"
// - " at file-path:1:2"
var match = frame.match(/^ at (?:([^(]+) )?\(?([^:)]+)(?::(\d+):(\d+))?\)?$/);
if (match === null) return; // in case the stack trace is modified by another module, e.g. jest
return {
func: match[1],
file: match[2],
line: Number(match[3]),
col: Number(match[4]),
};
}

View file

@ -76,7 +76,7 @@ export async function createStdoutSocket() {
throw new Error('server must listen to a random port, not a unix socket');
}
const input = Net.createConnection(addressInfo.port, addressInfo.address);
const input = Net.createConnection(addressInfo!.port, addressInfo!.address); // TypeScript note: addressInfo will not be null after 'listening' has been emitted
await Rx.fromEvent<void>(input, 'connect').pipe(take(1)).toPromise();
return {

View file

@ -126,6 +126,7 @@ async function slackExecutor(
// https://slack.dev/node-slack-sdk/webhook
// node-slack-sdk use Axios inside :)
const webhook = new IncomingWebhook(webhookUrl, {
// @ts-expect-error The types exposed by 'HttpsProxyAgent' isn't up to date with 'Agent'
agent: proxyAgent,
});
result = await webhook.send(message);

View file

@ -15,7 +15,6 @@ jest.mock('../../supported_renderers');
describe('Canvas Shareable Workpad API', () => {
// Mock the AJAX load of the workpad.
beforeEach(function () {
// @ts-expect-error Applying a global in Jest is alright.
global.fetch = jest.fn().mockImplementation(() => {
const p = new Promise((resolve, _reject) => {
resolve({

View file

@ -93,6 +93,7 @@ export function getFetchOptions(targetUrl: string): RequestInit | undefined {
logger.debug(`Using ${proxyUrl} as proxy for ${targetUrl}`);
return {
// @ts-expect-error The types exposed by 'HttpsProxyAgent' isn't up to date with 'Agent'
agent: getProxyAgent({ proxyUrl, targetUrl }),
};
}

View file

@ -249,12 +249,12 @@ describe('getOperationTypesForField', () => {
},
Object {
"field": "bytes",
"operationType": "max",
"operationType": "min",
"type": "field",
},
Object {
"field": "bytes",
"operationType": "min",
"operationType": "max",
"type": "field",
},
Object {

View file

@ -186,15 +186,15 @@ describe('xy_suggestions', () => {
expect(suggestions).toHaveLength(visualizationTypes.length);
expect(suggestions.map(({ state }) => xyVisualization.getVisualizationTypeId(state))).toEqual([
'bar_stacked',
'line',
'area_percentage_stacked',
'area_stacked',
'area',
'bar_horizontal_percentage_stacked',
'bar_horizontal_stacked',
'bar_percentage_stacked',
'bar_horizontal',
'bar',
'bar_horizontal',
'bar_percentage_stacked',
'bar_horizontal_stacked',
'bar_horizontal_percentage_stacked',
'area',
'area_stacked',
'area_percentage_stacked',
'line',
]);
});
@ -226,15 +226,15 @@ describe('xy_suggestions', () => {
]);
expect(suggestions.map(({ state }) => xyVisualization.getVisualizationTypeId(state))).toEqual([
'bar_stacked',
'line',
'area_percentage_stacked',
'area_stacked',
'area',
'bar_horizontal_percentage_stacked',
'bar_horizontal_stacked',
'bar_percentage_stacked',
'bar_horizontal',
'bar',
'bar_horizontal',
'bar_percentage_stacked',
'bar_horizontal_stacked',
'bar_horizontal_percentage_stacked',
'area',
'area_stacked',
'area_percentage_stacked',
'line',
]);
});
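One plausible reason snapshot orderings like the ones above shift across the upgrade: `Array.prototype.sort` is only guaranteed stable since V8 7.0 (Node.js 11+), so elements that compare as equal kept no particular order on Node 10 but keep their insertion order on Node 12. A minimal sketch (hypothetical data, not Kibana code):

```ts
const suggestions = [
  { id: 'bar', score: 0.5 },
  { id: 'bar_horizontal', score: 0.5 },
  { id: 'line', score: 0.5 },
];

// All scores are equal, so a stable sort preserves the input order.
const sorted = [...suggestions].sort((a, b) => b.score - a.score);
console.log(sorted.map((s) => s.id)); // ['bar', 'bar_horizontal', 'line'] on Node 12
```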

View file

@ -48,7 +48,7 @@ export function formatTimestampToDuration(timestamp, calculationFlag, initialTim
}
return duration
.replace(/ 0 mins$/, '')
.replace(/ 0 hrs$/, '')
.replace(/ 0 days$/, ''); // See https://github.com/jsmreese/moment-duration-format/issues/64
.replace(/ -?0 mins$/, '')
.replace(/ -?0 hrs$/, '')
.replace(/ -?0 days$/, ''); // See https://github.com/jsmreese/moment-duration-format/issues/64
}

View file

@ -333,12 +333,8 @@ export class HeadlessChromiumDriver {
private _shouldUseCustomHeaders(conditions: ConditionalHeadersConditions, url: string) {
const { hostname, protocol, port, pathname } = parseUrl(url);
if (pathname === undefined) {
// There's a discrepancy between the NodeJS docs and the typescript types. NodeJS docs
// just say 'string' and the typescript types say 'string | undefined'. We haven't hit a
// situation where it's undefined but here's an explicit Error if we do.
throw new Error(`pathname is undefined, don't know how to proceed`);
}
if (port === null) throw new Error(`URL missing port: ${url}`);
if (pathname === null) throw new Error(`URL missing pathname: ${url}`);
return (
hostname === conditions.hostname &&

View file

@ -50,9 +50,9 @@ export function getFullUrls(config: ReportingConfig, job: TaskPayloadPDF | TaskP
const urls = relativeUrls.map((relativeUrl) => {
const parsedRelative: UrlWithStringQuery = urlParse(relativeUrl);
const jobUrl = getAbsoluteUrl({
path: parsedRelative.pathname,
hash: parsedRelative.hash,
search: parsedRelative.search,
path: parsedRelative.pathname === null ? undefined : parsedRelative.pathname,
hash: parsedRelative.hash === null ? undefined : parsedRelative.hash,
search: parsedRelative.search === null ? undefined : parsedRelative.search,
});
// capture the route to the visualization

View file

@ -14,7 +14,7 @@ describe('cloneHttpFetchQuery', () => {
a: 'a',
'1': 1,
undefined,
array: [1, 2, undefined],
array: [1, 2],
};
expect(cloneHttpFetchQuery(query)).toMatchInlineSnapshot(`
Object {
@ -23,7 +23,6 @@ describe('cloneHttpFetchQuery', () => {
"array": Array [
1,
2,
undefined,
],
"undefined": undefined,
}

View file

@ -32,9 +32,9 @@ type Exact<T, Shape> = T extends Shape ? ExactKeys<T, Shape> : never;
* Ensures that when creating a URL query param string, that the given input strictly
* matches the expected interface (guards against possibly leaking internal state)
*/
const querystringStringify: <ExpectedType extends object, ArgType>(
const querystringStringify = <ExpectedType, ArgType>(
params: Exact<ExpectedType, ArgType>
) => string = querystring.stringify;
): string => querystring.stringify((params as unknown) as querystring.ParsedUrlQueryInput);
/** Make `selected_endpoint` required */
type EndpointDetailsUrlProps = Omit<EndpointIndexUIQueryParams, 'selected_endpoint'> &

View file

@ -11,7 +11,7 @@ import { EndpointIndexUIQueryParams } from '../types';
import { AppLocation } from '../../../../../common/endpoint/types';
export function urlFromQueryParams(queryParams: EndpointIndexUIQueryParams): Partial<AppLocation> {
const search = querystring.stringify(queryParams);
const search = querystring.stringify(queryParams as Record<string, string>);
return {
search,
};

View file

@ -26,8 +26,6 @@ import { NetworkDetails } from './index';
type Action = 'PUSH' | 'POP' | 'REPLACE';
const pop: Action = 'POP';
type GlobalWithFetch = NodeJS.Global & { fetch: jest.Mock };
jest.mock('react-router-dom', () => {
const original = jest.requireActual('react-router-dom');
@ -85,7 +83,7 @@ describe('Network Details', () => {
indicesExist: false,
indexPattern: {},
});
(global as GlobalWithFetch).fetch = jest.fn().mockImplementationOnce(() =>
global.fetch = jest.fn().mockImplementationOnce(() =>
Promise.resolve({
ok: true,
json: () => {

View file

@ -8,9 +8,11 @@ import { DEFAULT_SPACE_ID } from '../constants';
const spaceContextRegex = /^\/s\/([a-z0-9_\-]+)/;
export function getSpaceIdFromPath(
requestBasePath: string = '/',
serverBasePath: string = '/'
requestBasePath?: string | null,
serverBasePath?: string | null
): { spaceId: string; pathHasExplicitSpaceIdentifier: boolean } {
if (requestBasePath == null) requestBasePath = '/';
if (serverBasePath == null) serverBasePath = '/';
const pathToCheck: string = stripServerBasePath(requestBasePath, serverBasePath);
// Look for `/s/space-url-context` in the base path

View file

@ -7,6 +7,7 @@
import { EuiButton, EuiCheckboxProps } from '@elastic/eui';
import { ReactWrapper } from 'enzyme';
import React from 'react';
import { wait } from '@testing-library/react';
import { mountWithIntl } from 'test_utils/enzyme_helpers';
import { ConfirmAlterActiveSpaceModal } from './confirm_alter_active_space_modal';
@ -69,7 +70,10 @@ describe('ManageSpacePage', () => {
/>
);
await waitForDataLoad(wrapper);
await wait(() => {
wrapper.update();
expect(wrapper.find('input[name="name"]')).toHaveLength(1);
});
const nameInput = wrapper.find('input[name="name"]');
const descriptionInput = wrapper.find('textarea[name="description"]');
@ -128,9 +132,11 @@ describe('ManageSpacePage', () => {
/>
);
await waitForDataLoad(wrapper);
await wait(() => {
wrapper.update();
expect(spacesManager.getSpace).toHaveBeenCalledWith('existing-space');
});
expect(spacesManager.getSpace).toHaveBeenCalledWith('existing-space');
expect(onLoadSpace).toHaveBeenCalledWith({
...spaceToUpdate,
});
@ -179,10 +185,11 @@ describe('ManageSpacePage', () => {
/>
);
await waitForDataLoad(wrapper);
expect(notifications.toasts.addError).toHaveBeenCalledWith(error, {
title: 'Error loading available features',
await wait(() => {
wrapper.update();
expect(notifications.toasts.addError).toHaveBeenCalledWith(error, {
title: 'Error loading available features',
});
});
});
@ -216,9 +223,10 @@ describe('ManageSpacePage', () => {
/>
);
await waitForDataLoad(wrapper);
expect(spacesManager.getSpace).toHaveBeenCalledWith('my-space');
await wait(() => {
wrapper.update();
expect(spacesManager.getSpace).toHaveBeenCalledWith('my-space');
});
await Promise.resolve();
@ -277,9 +285,10 @@ describe('ManageSpacePage', () => {
/>
);
await waitForDataLoad(wrapper);
expect(spacesManager.getSpace).toHaveBeenCalledWith('my-space');
await wait(() => {
wrapper.update();
expect(spacesManager.getSpace).toHaveBeenCalledWith('my-space');
});
await Promise.resolve();
@ -327,9 +336,3 @@ async function clickSaveButton(wrapper: ReactWrapper<any, any>) {
wrapper.update();
}
async function waitForDataLoad(wrapper: ReactWrapper<any, any>) {
await Promise.resolve();
await Promise.resolve();
wrapper.update();
}

View file

@ -13,6 +13,7 @@ import { SpacesManager } from '../spaces_manager';
import { NavControlPopover } from './nav_control_popover';
import { EuiHeaderSectionItemButton } from '@elastic/eui';
import { mountWithIntl } from 'test_utils/enzyme_helpers';
import { wait } from '@testing-library/react';
describe('NavControlPopover', () => {
it('renders without crashing', () => {
@ -64,10 +65,9 @@ describe('NavControlPopover', () => {
wrapper.find(EuiHeaderSectionItemButton).simulate('click');
// Wait for `getSpaces` promise to resolve
await Promise.resolve();
await Promise.resolve();
wrapper.update();
expect(wrapper.find(SpaceAvatar)).toHaveLength(3);
await wait(() => {
wrapper.update();
expect(wrapper.find(SpaceAvatar)).toHaveLength(3);
});
});
});

View file

@ -283,7 +283,7 @@ describe('copySavedObjectsToSpaces', () => {
new Readable({
objectMode: true,
read() {
this.emit('error', new Error('Something went wrong while reading this stream'));
this.destroy(new Error('Something went wrong while reading this stream'));
},
})
);

View file

@ -290,7 +290,7 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => {
new Readable({
objectMode: true,
read() {
this.emit('error', new Error('Something went wrong while reading this stream'));
this.destroy(new Error('Something went wrong while reading this stream'));
},
})
);

View file

@ -19,7 +19,6 @@ describe('SyntheticsCallout', () => {
setItem: setItemMock,
};
// @ts-expect-error replacing a call to localStorage we use for monitor list size
global.localStorage = localStorageMock;
});

View file

@ -135,7 +135,6 @@ describe('MonitorList component', () => {
setItem: jest.fn(),
};
// @ts-expect-error replacing a call to localStorage we use for monitor list size
global.localStorage = localStorageMock;
});

View file

@ -311,12 +311,36 @@ export default function jiraTest({ getService }: FtrProviderContext) {
params: {},
})
.then((resp: any) => {
expect(resp.body).to.eql({
actionId: simulatedActionId,
status: 'error',
retry: false,
message: `error validating action params: Cannot read property 'Symbol(Symbol.iterator)' of undefined`,
});
expect(Object.keys(resp.body)).to.eql(['status', 'actionId', 'message', 'retry']);
expect(resp.body.actionId).to.eql(simulatedActionId);
expect(resp.body.status).to.eql('error');
expect(resp.body.retry).to.eql(false);
// Node.js 12 oddity:
//
// The first time after the server is booted, the error message will be:
//
// undefined is not iterable (cannot read property Symbol(Symbol.iterator))
//
// After this, the error will be:
//
// Cannot destructure property 'value' of 'undefined' as it is undefined.
//
// The error seems to come from the exact same place in the code based on the
// exact same circumstances:
//
// https://github.com/elastic/kibana/blob/b0a223ebcbac7e404e8ae6da23b2cc6a4b509ff1/packages/kbn-config-schema/src/types/literal_type.ts#L28
//
// What triggers the error is that the `handleError` function expects its 2nd
// argument to be an object containing a `valids` property of type array.
//
// In this test the object does not contain a `valids` property, hence the
// error.
//
// Why the error message isn't the same in all scenarios is unknown to me and
// could be a bug in V8.
expect(resp.body.message).to.match(
/^error validating action params: (undefined is not iterable \(cannot read property Symbol\(Symbol.iterator\)\)|Cannot destructure property 'value' of 'undefined' as it is undefined\.)$/
);
});
});

View file

@ -312,12 +312,36 @@ export default function resilientTest({ getService }: FtrProviderContext) {
params: {},
})
.then((resp: any) => {
expect(resp.body).to.eql({
actionId: simulatedActionId,
status: 'error',
retry: false,
message: `error validating action params: Cannot read property 'Symbol(Symbol.iterator)' of undefined`,
});
expect(Object.keys(resp.body)).to.eql(['status', 'actionId', 'message', 'retry']);
expect(resp.body.actionId).to.eql(simulatedActionId);
expect(resp.body.status).to.eql('error');
expect(resp.body.retry).to.eql(false);
// Node.js 12 oddity:
//
// The first time after the server is booted, the error message will be:
//
// undefined is not iterable (cannot read property Symbol(Symbol.iterator))
//
// After this, the error will be:
//
// Cannot destructure property 'value' of 'undefined' as it is undefined.
//
// The error seems to come from the exact same place in the code based on the
// exact same circumstances:
//
// https://github.com/elastic/kibana/blob/b0a223ebcbac7e404e8ae6da23b2cc6a4b509ff1/packages/kbn-config-schema/src/types/literal_type.ts#L28
//
// What triggers the error is that the `handleError` function expects its 2nd
// argument to be an object containing a `valids` property of type array.
//
// In this test the object does not contain a `valids` property, hence the
// error.
//
// Why the error message isn't the same in all scenarios is unknown to me and
// could be a bug in V8.
expect(resp.body.message).to.match(
/^error validating action params: (undefined is not iterable \(cannot read property Symbol\(Symbol.iterator\)\)|Cannot destructure property 'value' of 'undefined' as it is undefined\.)$/
);
});
});

View file

@ -306,12 +306,36 @@ export default function servicenowTest({ getService }: FtrProviderContext) {
params: {},
})
.then((resp: any) => {
expect(resp.body).to.eql({
actionId: simulatedActionId,
status: 'error',
retry: false,
message: `error validating action params: Cannot read property 'Symbol(Symbol.iterator)' of undefined`,
});
expect(Object.keys(resp.body)).to.eql(['status', 'actionId', 'message', 'retry']);
expect(resp.body.actionId).to.eql(simulatedActionId);
expect(resp.body.status).to.eql('error');
expect(resp.body.retry).to.eql(false);
// Node.js 12 oddity:
//
// The first time after the server is booted, the error message will be:
//
// undefined is not iterable (cannot read property Symbol(Symbol.iterator))
//
// After this, the error will be:
//
// Cannot destructure property 'value' of 'undefined' as it is undefined.
//
// The error seems to come from the exact same place in the code based on the
// exact same circumstances:
//
// https://github.com/elastic/kibana/blob/b0a223ebcbac7e404e8ae6da23b2cc6a4b509ff1/packages/kbn-config-schema/src/types/literal_type.ts#L28
//
// What triggers the error is that the `handleError` function expects its 2nd
// argument to be an object containing a `valids` property of type array.
//
// In this test the object does not contain a `valids` property, hence the
// error.
//
// Why the error message isn't the same in all scenarios is unknown to me and
// could be a bug in V8.
expect(resp.body.message).to.match(
/^error validating action params: (undefined is not iterable \(cannot read property Symbol\(Symbol.iterator\)\)|Cannot destructure property 'value' of 'undefined' as it is undefined\.)$/
);
});
});

View file

@ -20,7 +20,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
const url = `/api/apm/services/opbeans-java/transaction_groups/distribution?${qs.stringify({
start: metadata.start,
end: metadata.end,
uiFilters: {},
uiFilters: encodeURIComponent('{}'),
transactionName: 'APIRestController#stats',
transactionType: 'request',
})}`;

View file

@ -111,7 +111,7 @@ export default function serviceMapsApiTests({ getService }: FtrProviderContext)
const q = querystring.stringify({
start: metadata.start,
end: metadata.end,
uiFilters: {},
uiFilters: encodeURIComponent('{}'),
});
const response = await supertest.get(`/api/apm/service-map/service/opbeans-node?${q}`);

View file

@ -136,101 +136,106 @@ export const CSV_RESULT_TIMELESS = `name,power
`;
export const CSV_RESULT_SCRIPTED = `date,name,percent,value,year,"years_ago",gender
"Jan 1, 1980 @ 00:00:00.000",Fecki,0,92,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fecki,0,78,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fecky,"0.001","2,071","1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fekki,0,6,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felen,0,40,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felia,0,21,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felina,0,6,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felinda,"0.001","1,620","1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felinda,"0.001","1,886","1,981","38.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felisa,0,5,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felita,0,8,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felkys,0,7,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felkys,0,8,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fell,0,6,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felle,0,22,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felma,0,8,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felynda,0,31,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fenita,0,219,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fenjamin,0,22,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fenjamin,0,27,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fenji,0,5,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fennie,0,16,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fenny,0,5,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Ferenice,0,9,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Frijida,0,5,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Frita,0,14,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fritney,0,10,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fecki,0,92,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fecki,0,78,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fecky,"0.001","2,071","1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fekki,0,6,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felen,0,40,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felia,0,21,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felina,0,6,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felinda,"0.001","1,620","1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felinda,"0.001","1,886","1,981","38.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felisa,0,5,"1,981","38.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felita,0,8,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felkys,0,7,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felkys,0,8,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fell,0,6,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felle,0,22,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felma,0,8,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felynda,0,31,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fenita,0,219,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fenjamin,0,22,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fenjamin,0,27,"1,981","38.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fenji,0,5,"1,981","38.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Fennie,0,16,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fenny,0,5,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Ferenice,0,9,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Frijida,0,5,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Frita,0,14,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fritney,0,10,"1,980","39.00000000000000000000",F
`;
export const CSV_RESULT_SCRIPTED_REQUERY = `date,name,percent,value,year,"years_ago",gender
"Jan 1, 1980 @ 00:00:00.000",Felen,0,40,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felia,0,21,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felina,0,6,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felinda,"0.001","1,620","1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felinda,"0.001","1,886","1,981","38.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felisa,0,5,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felita,0,8,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felkys,0,7,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felkys,0,8,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fell,0,6,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felle,0,22,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felma,0,8,"1,981","38.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felynda,0,31,"1,980","39.000000000000000000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felen,0,40,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felia,0,21,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felina,0,6,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felinda,"0.001","1,620","1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felinda,"0.001","1,886","1,981","38.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felisa,0,5,"1,981","38.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felita,0,8,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felkys,0,7,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felkys,0,8,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Fell,0,6,"1,980","39.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felle,0,22,"1,980","39.00000000000000000000",F
"Jan 1, 1981 @ 00:00:00.000",Felma,0,8,"1,981","38.00000000000000000000",F
"Jan 1, 1980 @ 00:00:00.000",Felynda,0,31,"1,980","39.00000000000000000000",F
`;
export const CSV_RESULT_SCRIPTED_RESORTED = `date,year,name,value,"years_ago"
"Jan 1, 1981 @ 00:00:00.000","1,981",Farbara,"6,456","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Farbara,"8,026","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fecky,"1,930","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fecky,"2,071","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Felinda,"1,886","38.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Feth,"3,685","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Feth,"4,246","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fetty,"1,763","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fetty,"1,967","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Feverly,"1,987","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Feverly,"2,249","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fonnie,"2,330","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fonnie,"2,748","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Frenda,"7,162","38.000000000000000000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Frenda,"8,335","39.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Farbara,"6,456","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Farbara,"8,026","39.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fecky,"1,930","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fecky,"2,071","39.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Felinda,"1,886","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Feth,"3,685","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Feth,"4,246","39.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fetty,"1,763","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fetty,"1,967","39.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Feverly,"1,987","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Feverly,"2,249","39.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fonnie,"2,330","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fonnie,"2,748","39.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Frenda,"7,162","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Frenda,"8,335","39.00000000000000000000"
`;
export const CSV_RESULT_HUGE = `date,year,name,value,"years_ago"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fobby,"2,791","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frent,"3,416","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frett,"2,679","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Filly,"3,366","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frian,"34,468","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fenjamin,"7,191","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frandon,"5,863","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fruce,"1,855","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fryan,"7,236","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frad,"2,482","35.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fradley,"5,175","35.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fryan,"7,114","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fradley,"4,752","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Frian,"35,717","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Farbara,"4,434","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fenjamin,"5,235","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fruce,"1,914","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fobby,"2,888","36.000000000000000000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Frett,"3,031","36.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Fonnie,"1,853","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Frandy,"2,082","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Fecky,"1,786","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Frandi,"2,056","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Fridget,"1,864","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Farbara,"5,081","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Feth,"2,818","37.000000000000000000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Frenda,"6,270","37.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fetty,"1,763","38.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fonnie,"2,330","38.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Farbara,"6,456","38.000000000000000000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Felinda,"1,886","38.000000000000000000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fobby,"2,791","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frent,"3,416","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frett,"2,679","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Filly,"3,366","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frian,"34,468","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fenjamin,"7,191","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frandon,"5,863","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fruce,"1,855","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fryan,"7,236","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Frad,"2,482","35.00000000000000000000"
"Jan 1, 1984 @ 00:00:00.000","1,984",Fradley,"5,175","35.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fryan,"7,114","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fradley,"4,752","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Frian,"35,717","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Farbara,"4,434","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fenjamin,"5,235","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fruce,"1,914","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Fobby,"2,888","36.00000000000000000000"
"Jan 1, 1983 @ 00:00:00.000","1,983",Frett,"3,031","36.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Fonnie,"1,853","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Frandy,"2,082","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Fecky,"1,786","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Frandi,"2,056","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Fridget,"1,864","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Farbara,"5,081","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Feth,"2,818","37.00000000000000000000"
"Jan 1, 1982 @ 00:00:00.000","1,982",Frenda,"6,270","37.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fetty,"1,763","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fonnie,"2,330","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Farbara,"6,456","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Felinda,"1,886","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Frenda,"7,162","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Feth,"3,685","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Feverly,"1,987","38.00000000000000000000"
"Jan 1, 1981 @ 00:00:00.000","1,981",Fecky,"1,930","38.00000000000000000000"
"Jan 1, 1980 @ 00:00:00.000","1,980",Fonnie,"2,748","39.00000000000000000000"
`;
// 'UTC'

View file

@ -5272,10 +5272,10 @@
  dependencies:
    "@types/node" "*"

"@types/node@*", "@types/node@8.10.54", "@types/node@>= 8", "@types/node@>=10.17.17 <10.20.0", "@types/node@>=8.9.0", "@types/node@^12.0.2":
  version "10.17.26"
  resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.26.tgz#a8a119960bff16b823be4c617da028570779bcfd"
  integrity sha512-myMwkO2Cr82kirHY8uknNRHEVtn0wV3DTQfkrjx17jmkstDRZ24gNUdl8AHXVyVclTYI/bNjgTPTAWvWLqXqkw==

"@types/node@*", "@types/node@12.19.4", "@types/node@8.10.54", "@types/node@>= 8", "@types/node@>=8.9.0", "@types/node@^12.0.2":
  version "12.19.4"
  resolved "https://registry.yarnpkg.com/@types/node/-/node-12.19.4.tgz#cdfbb62e26c7435ed9aab9c941393cc3598e9b46"
  integrity sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==

"@types/nodemailer@^6.2.1":
  version "6.2.1"
@ -5881,10 +5881,10 @@
"@types/node" "*"
chokidar "^2.1.2"
"@types/webpack-env@^1.15.2":
version "1.15.2"
resolved "https://registry.yarnpkg.com/@types/webpack-env/-/webpack-env-1.15.2.tgz#927997342bb9f4a5185a86e6579a0a18afc33b0a"
integrity sha512-67ZgZpAlhIICIdfQrB5fnDvaKFcDxpKibxznfYRVAT4mQE41Dido/3Ty+E3xGBmTogc5+0Qb8tWhna+5B8z1iQ==
"@types/webpack-env@^1.15.2", "@types/webpack-env@^1.15.3":
version "1.15.3"
resolved "https://registry.yarnpkg.com/@types/webpack-env/-/webpack-env-1.15.3.tgz#fb602cd4c2f0b7c0fb857e922075fdf677d25d84"
integrity sha512-5oiXqR7kwDGZ6+gmzIO2lTC+QsriNuQXZDWNYRV3l2XRN/zmPgnC21DLSx2D05zvD8vnXW6qUg7JnXZ4I6qLVQ==
"@types/webpack-merge@^4.1.5":
version "4.1.5"