[7.x] chore(NA): upgrade to lodash@4 (#69868) (#70683)

* chore(NA): upgrade to lodash@4 (#69868)

* chore(NA): upgrade oss to lodash4

chore(NA): migrate cli, cli_plugin, cli_keystore, dev, test_utils and apm src script to lodash4

chore(NA): missing file for cli plugin

chore(NA): add src core

chore(NA): es archiver and fixtures

chore(NA): try to fix functional test failure

chore(NA): migrate src/legacy entirely to lodash4 except src/legacy/core_plugins

chore(NA): move legacy core plugins to lodash4

chore(NA): upgrade optimize to lodash4

chore(NA): upgrade to lodash4 on advanced_settings, charts, console and dashboard

chore(NA): migrate to lodash4 on dev_tools, discover, embeddable, es_ui_shared, expressions, home plugins

chore(NA): upgrade data plugin to lodash4

chore(NA): upgrade usage_collection, ui_actions, tile_map, telemetry, share, saved_objects, saved_objects_management, region_map and navigation to lodash4

chore(NA): missing data upgrades to lodash4

Revert "chore(NA): upgrade usage_collection, ui_actions, tile_map, telemtry, share, saved_objects, saved_objects_management, region_map and navigation to lodash4"

This reverts commit 137055c5fed2fc52bb26547e0bc1ad2e3d4fe309.

Revert "Revert "chore(NA): upgrade usage_collection, ui_actions, tile_map, telemtry, share, saved_objects, saved_objects_management, region_map and navigation to lodash4""

This reverts commit f7e73688782998513d9fb6d7e8f0765e9beb28d1.

Revert "chore(NA): missing data upgrades to lodash4"

This reverts commit 92b85bf947a89bfc70cc4052738a6b2128ffb076.

Revert "chore(NA): upgrade data plugin to lodash4"

This reverts commit 88fdb075ee1e26c4ac979b6681d8a2b002df74c6.

chore(NA): upgrade idx_pattern_mgt, input_control_vis, inspector, kbn_legacy, kbn_react, kbn_usage_collections, kbn_utils, management and maps_legacy to lodash4

chore(NA): map src plugin data to lodash3

chore(NA): missing lodash.clonedeep dep

chore(NA): change packages kbn-config-schema deps

chore(NA): update renovate config

chore(NA): upgrade vis_type plugins to lodash4

chore(NA): move vis_type_vislib to lodash3

chore(NA): update visualizations and visualize to lodash4

chore(NA): remove lodash 3 types from src and move test to lodash4

chore(NA): move home, usage_collection and management to lodash 3

Revert "chore(NA): move home, usage_collection and management to lodash 3"

This reverts commit f86e8585f02d21550746569af54215b076a79a3d.

chore(NA): move kibana_legacy, saved_objects, saved_objects_management into lodash3

chore(NA): update x-pack test to mock lodash4

Revert "chore(NA): move kibana_legacy, saved_objects saved_objects_management into lodash3"

This reverts commit 2d10fe450533e1b36db21d99cfae3ce996a244e0.

* chore(NA): move x-pack and packages to lodash 4

* chore(NA): remove mention of lodash from main package.json

* chore(NA): remove helper alias for lodash4 and make it the default lodash

* chore(NA): fix last failing types in the repo

* chore(NA): fix public api

* chore(NA): fix types for agg_row.tsx

* chore(NA): fix increment of optimizer modules in the rollup plugin

* chore(NA): migrate `src/core/public/http/fetch.ts` (#5)

* omit undefined query props

* just remove merge usage

* fix types
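
For context on the fetch.ts bullets above: the nested lodash `merge` of the fetch options was replaced by plain object spreads plus a small helper that strips `undefined` values, so an explicitly `undefined` header or query prop is dropped rather than serialized. A minimal sketch of that pattern, mirroring the `removedUndefined` helper shown in the diff further down; the version string here is made up:

```js
const { omitBy } = require('lodash');

// Drop keys whose value is `undefined` so they never reach `fetch`
// as a literal "undefined" header value or query parameter.
const removedUndefined = (obj) => omitBy(obj, (value) => value === undefined);

// Passing an explicit `undefined` Content-Type erases the default
// instead of sending "Content-Type: undefined".
const headers = removedUndefined({
  'Content-Type': undefined,
  'kbn-version': '7.9.0', // made-up version string
});
// headers => { 'kbn-version': '7.9.0' }
```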

* chore(NA): fixes for feedback from apm team

* chore(NA): recover old behaviour on apm LoadingIndicatorContext.tsx

* chore(NA): fixes for feedback from watson

* Platform lodash4 tweaks (#6)

* chore(NA): fix types and behaviour on src/core/server/elasticsearch/errors.ts

* Canvas fixes for lodash upgrade

* [APM] Adds unit test for APM service maps transform (#7)

* Adds a snapshot unit test for getConnections and rearranges some code to make testing easier

* reverts `ArrayList` back to `String[]` in the painless script within `fetch_service_paths_from_trace_ids.ts`

* chore(NA): update yarn.lock

* chore(NA): remove any and use a real type for alerts task runner

Co-authored-by: Gidi Meir Morris <github@gidi.io>

* chore(NA): used named import for triggers_actions_ui file

* chore(NA): fix eslint

* chore(NA): fix types

* Delete most uptime lodash references.

* Simplify. Clean up types.

* [Uptime] Delete most uptime lodash references (#8)

* Delete most uptime lodash references.

* Simplify. Clean up types.

* chore(NA): add eslint rule to avoid using lodash3
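
The lodash3 lint guard above can be expressed with ESLint's built-in `no-restricted-imports` rule. The snippet below is a hypothetical sketch of such a configuration, not the exact rule added in this PR:

```js
// .eslintrc.js (hypothetical excerpt; the actual rule and config in the repo may differ)
module.exports = {
  rules: {
    'no-restricted-imports': [
      'error',
      {
        patterns: [
          // per-method lodash packages removed in this PR (lodash.clonedeep, lodash.snakecase, ...)
          'lodash.*',
          // lodash 3 internals such as lodash/internal/toPath no longer exist in v4
          'lodash/internal/*',
        ],
      },
    ],
  },
};
```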

* chore(NA): apply changes on feedback from es-ui team

* fix some types (#9)

* Clean up some expressions types.

* chore(NA): missing ts-expect-error statements

* Upgrade lodash 4 vislib (#11)

* replace lodash 3 with lodash 4 on vislib plugin

* Further changes

* further replacement of lodash3 to 4

* further work on upgrading to lodash 4

* final changes to update lodash

* chore(NA): upgrade data plugin to lodash4

chore(NA): upgrade data plugin public to lodash4

chore(NA): fix typecheck task

chore(NA): fix agg_config with hasIn

chore(NA): assign to assignIn and has to hasIn

chore(NA): upgrade data plugin server to lodash4

chore(NA): new signature for core api

fix(NA): match behaviour between lodash3 and lodash4 for set in search_source

* chore(NA): remove lodash3 completely from the repo
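
Most of the mechanical churn in the diff below comes from lodash's 3 to 4 renames and signature changes. A quick reference for the ones that recur throughout this PR; the sample data is made up and the list is not exhaustive:

```js
const _ = require('lodash');

const users = [
  { id: 1, name: 'a', active: true },
  { id: 2, name: 'b', active: false },
];

// lodash 4 call                                  // lodash 3 equivalent seen in this diff
_.includes([1, 2, 3], 2);                         // _.contains
_.map(users, 'name');                             // _.pluck
_.keyBy(users, 'id');                             // _.indexBy
_.filter(users, { active: true });                // _.where
_.some(users, (u) => u.active);                   // _.any
_.uniqBy(users, 'id');                            // _.uniq(arr, iteratee)
_.sampleSize([1, 2, 3], 2);                       // _.sample(arr, n)
_.trimStart('/path/', '/');                       // _.trimLeft
_.trimEnd('/path/', '/');                         // _.trimRight
_.padEnd('cmd', 10);                              // _.padRight
_.assignIn({}, users[0]);                         // _.extend (also copies inherited props)
_.omitBy(users[0], _.isBoolean);                  // _.omit(obj, predicate)
_.truncate('long field value', { length: 10 });   // _.trunc
_.cloneDeepWith(users, (v) => (typeof v === 'function' ? v : undefined)); // _.cloneDeep(v, customizer)
```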

* chore(NA): fix x-pack/test/api_integration/apis/metrics_ui/snapshot.ts missing content

* chore(NA): fix lodash usage on apm

* chore(NA): fix typecheck for maps

* Patch lodash template (#12)

* Applying changes from https://github.com/elastic/kibana/pull/64985

* Using isIterateeCall, because it seems less brittle

* Also patching `lodash/template` and `lodash/fp/template`

* Reorganizing some files...

* Revising comment

* Ends up `_` is a function also... I hate JavaScript
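
On the "`_` is a function also" note above: lodash's export is itself callable (for `_(value)` chaining) while also carrying every method as a property, so a module that re-exports a patched lodash has to stay callable and keep those properties. The sketch below is purely illustrative; the real patch files and their validation logic differ:

```js
// hypothetical patched-lodash.js (illustrative only)
const lodash = require('lodash');

// Keep `_(value)` chaining working by delegating the call...
function patched(...args) {
  return lodash(...args);
}
// ...and keep every method (`_.map`, `_.template`, ...) reachable.
Object.assign(patched, lodash);

// Wrap a single method, forwarding to the original after whatever
// argument checks a real patch would perform.
patched.template = (...templateArgs) => {
  // real validation of the template options would go here
  return lodash.template(...templateArgs);
};

module.exports = patched;
```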

Co-authored-by: Pierre Gayvallet <pierre.gayvallet@gmail.com>
Co-authored-by: Josh Dover <me@joshdover.com>
Co-authored-by: Clint Andrew Hall <clint.hall@elastic.co>
Co-authored-by: Oliver Gupte <ogupte@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: Gidi Meir Morris <github@gidi.io>
Co-authored-by: Justin Kambic <justin.kambic@elastic.co>
Co-authored-by: Stratoula Kalafateli <stratoula1@gmail.com>
Co-authored-by: Luke Elmers <luke.elmers@elastic.co>
Co-authored-by: Brandon Kobel <brandon.kobel@gmail.com>
Co-authored-by: kobelb <brandon.kobel@elastic.co>
# Conflicts:
#	renovate.json5
#	x-pack/plugins/apm/public/services/rest/observability_dashboard.ts
#	x-pack/plugins/canvas/common/lib/pivot_object_array.ts
#	x-pack/plugins/canvas/public/components/workpad_templates/workpad_templates.js
#	x-pack/plugins/canvas/server/collectors/custom_element_collector.ts
#	x-pack/plugins/canvas/server/collectors/workpad_collector.ts

* chore(NA): updated kbn-pm dist file
Author: Tiago Costa
Date: 2020-07-03 04:40:17 +01:00 (committed by GitHub)
Commit: 7759b924fc (parent 104d4ea90a)
414 changed files with 4300 additions and 1357 deletions


@ -12,14 +12,14 @@ Serialize this format to a simple POJO, with only the params that are not defaul
```typescript
toJSON(): {
id: unknown;
params: _.Dictionary<unknown> | undefined;
id: any;
params: any;
};
```
<b>Returns:</b>
`{
id: unknown;
params: _.Dictionary<unknown> | undefined;
id: any;
params: any;
}`


@ -83,8 +83,9 @@
"**/@types/angular": "^1.6.56",
"**/@types/hoist-non-react-statics": "^3.3.1",
"**/@types/chai": "^4.2.11",
"**/cypress/@types/lodash": "^4.14.155",
"**/typescript": "3.9.5",
"**/graphql-toolkit/lodash": "^4.17.13",
"**/graphql-toolkit/lodash": "^4.17.15",
"**/hoist-non-react-statics": "^3.3.2",
"**/isomorphic-git/**/base64-js": "^1.2.1",
"**/image-diff/gm/debug": "^2.6.9",
@ -210,8 +211,7 @@
"leaflet.heat": "0.2.0",
"less": "npm:@elastic/less@2.7.3-kibana",
"less-loader": "5.0.0",
"lodash": "npm:@elastic/lodash@3.10.1-kibana4",
"lodash.clonedeep": "^4.5.0",
"lodash": "^4.17.15",
"lru-cache": "4.1.5",
"markdown-it": "^10.0.0",
"mini-css-extract-plugin": "0.8.0",
@ -350,8 +350,7 @@
"@types/json5": "^0.0.30",
"@types/license-checker": "15.0.0",
"@types/listr": "^0.14.0",
"@types/lodash": "^3.10.1",
"@types/lodash.clonedeep": "^4.5.4",
"@types/lodash": "^4.14.155",
"@types/lru-cache": "^5.1.0",
"@types/markdown-it": "^0.0.7",
"@types/minimatch": "^2.0.29",


@ -14,6 +14,7 @@
"tsd": "^0.7.4"
},
"peerDependencies": {
"lodash": "^4.17.15",
"joi": "^13.5.2",
"moment": "^2.24.0",
"type-detect": "^4.0.8"


@ -11,8 +11,7 @@
"dependencies": {
"@babel/runtime": "^7.10.2",
"@kbn/i18n": "1.0.0",
"lodash": "npm:@elastic/lodash@3.10.1-kibana4",
"lodash.clone": "^4.5.0",
"lodash": "^4.17.15",
"uuid": "3.3.2"
},
"devDependencies": {


@ -17,7 +17,7 @@
* under the License.
*/
import clone from 'lodash.clone';
import { clone } from 'lodash';
export class Registry {
constructor(prop = 'name') {


@ -23,7 +23,7 @@ const dedent = require('dedent');
const sao = require('sao');
const chalk = require('chalk');
const getopts = require('getopts');
const snakeCase = require('lodash.snakecase');
const { snakeCase } = require('lodash');
exports.run = function run(argv) {
const options = getopts(argv, {
@ -41,7 +41,7 @@ exports.run = function run(argv) {
if (options.help) {
console.log(
dedent(chalk`
# {dim Usage:}
# {dim Usage:}
node scripts/generate-plugin {bold [name]}
Generate a fresh Kibana plugin in the plugins/ directory
`) + '\n'


@ -8,10 +8,7 @@
"dedent": "^0.7.0",
"execa": "^4.0.2",
"getopts": "^2.2.4",
"lodash.camelcase": "^4.3.0",
"lodash.kebabcase": "^4.1.1",
"lodash.snakecase": "^4.1.1",
"lodash.startcase": "^4.4.0",
"lodash": "^4.17.15",
"sao": "^0.22.12"
}
}


@ -20,9 +20,7 @@
const { relative, resolve } = require('path');
const fs = require('fs');
const startCase = require('lodash.startcase');
const camelCase = require('lodash.camelcase');
const snakeCase = require('lodash.snakecase');
const { camelCase, startCase, snakeCase } = require('lodash');
const chalk = require('chalk');
const execa = require('execa');


@ -22,7 +22,7 @@
"@types/glob": "^5.0.35",
"@types/globby": "^6.1.0",
"@types/has-ansi": "^3.0.0",
"@types/lodash.clonedeepwith": "^4.5.3",
"@types/lodash": "^4.14.155",
"@types/log-symbols": "^2.0.0",
"@types/ncp": "^2.0.1",
"@types/node": ">=10.17.17 <10.20.0",
@ -46,7 +46,7 @@
"globby": "^8.0.1",
"has-ansi": "^3.0.0",
"is-path-inside": "^3.0.2",
"lodash.clonedeepwith": "^4.5.0",
"lodash": "^4.17.15",
"log-symbols": "^2.2.0",
"multimatch": "^4.0.0",
"ncp": "^2.0.0",


@ -17,7 +17,7 @@
* under the License.
*/
import cloneDeepWith from 'lodash.clonedeepwith';
import { cloneDeepWith } from 'lodash';
import { resolve, sep as pathSep } from 'path';
const repoRoot = resolve(__dirname, '../../../../');


@ -54,7 +54,6 @@ module.exports = {
'highlight.js',
'html-entities',
'jquery',
'lodash.clone',
'lodash',
'markdown-it',
'mocha',


@ -14,6 +14,7 @@
"@kbn/babel-preset": "1.0.0",
"@kbn/dev-utils": "1.0.0",
"@types/joi": "^13.4.2",
"@types/lodash": "^4.14.155",
"@types/parse-link-header": "^1.0.0",
"@types/puppeteer": "^3.0.0",
"@types/strip-ansi": "^5.2.1",
@ -28,6 +29,7 @@
"getopts": "^2.2.4",
"glob": "^7.1.2",
"joi": "^13.5.2",
"lodash": "^4.17.15",
"parse-link-header": "^1.0.1",
"puppeteer": "^3.3.0",
"rxjs": "^6.5.5",


@ -18,10 +18,7 @@
*/
import { Schema } from 'joi';
import { cloneDeep, get, has } from 'lodash';
// @ts-ignore internal lodash module is not typed
import toPath from 'lodash/internal/toPath';
import { cloneDeepWith, get, has, toPath } from 'lodash';
import { schema } from './schema';
@ -114,7 +111,7 @@ export class Config {
throw new Error(`Unknown config key "${key}"`);
}
return cloneDeep(get(this[$values], key, defaultValue), (v) => {
return cloneDeepWith(get(this[$values], key, defaultValue), (v) => {
if (typeof v === 'function') {
return v;
}
@ -122,7 +119,7 @@ export class Config {
}
public getAll() {
return cloneDeep(this[$values], (v) => {
return cloneDeepWith(this[$values], (v) => {
if (typeof v === 'function') {
return v;
}


@ -19,8 +19,7 @@
import { resolve } from 'path';
import { format } from 'url';
import { get } from 'lodash';
import toPath from 'lodash/internal/toPath';
import { get, toPath } from 'lodash';
import { Cluster } from '@kbn/es';
import { CI_PARALLEL_PROCESS_PREFIX } from '../ci_parallel_process_prefix';
import { esTestConfig } from './es_test_config';


@ -19,7 +19,6 @@
import Fs from 'fs';
import Url from 'url';
import _ from 'lodash';
import puppeteer from 'puppeteer';
import { resolve } from 'path';
import { ToolingLog } from '@kbn/dev-utils';


@ -21,7 +21,7 @@ const sass = require('node-sass');
const postcss = require('postcss');
const postcssConfig = require('../../src/optimize/postcss.config');
const chokidar = require('chokidar');
const debounce = require('lodash/function/debounce');
const { debounce } = require('lodash');
const platform = require('os').platform();
const isPlatformWindows = /^win/.test(platform);


@ -17,7 +17,7 @@
"dependencies": {
"classnames": "2.2.6",
"focus-trap-react": "^3.1.1",
"lodash": "npm:@elastic/lodash@3.10.1-kibana4",
"lodash": "^4.17.15",
"prop-types": "15.6.0",
"react": "^16.12.0",
"react-ace": "^5.9.0",

renovate.json5 (new file, 1109 lines; diff suppressed because it is too large)


@ -20,7 +20,7 @@
const { join } = require('path');
const { readFileSync } = require('fs');
const { execSync } = require('child_process');
const merge = require('lodash.merge');
const { merge } = require('lodash');
const { name, version, build } = require('../package.json');
const ROOT_DIR = join(__dirname, '..');


@ -93,7 +93,7 @@ describe('CLI cluster manager', () => {
}
const football = {};
const messenger = sample(manager.workers);
const messenger = sample(manager.workers) as any;
messenger.emit('broadcast', football);
for (const worker of manager.workers) {


@ -177,7 +177,7 @@ export class Worker extends EventEmitter {
}
flushChangeBuffer() {
const files = _.unique(this.changes.splice(0));
const files = _.uniq(this.changes.splice(0));
const prefix = files.length > 1 ? '\n - ' : '';
return files.reduce(function (list, file) {
return `${list || ''}${prefix}"${file}"`;


@ -72,7 +72,7 @@ function commandsSummary(program) {
}, 0);
return cmds.reduce(function (help, cmd) {
return `${help || ''}${_.padRight(cmd[0], cmdLColWidth)} ${cmd[1] || ''}\n`;
return `${help || ''}${_.padEnd(cmd[0], cmdLColWidth)} ${cmd[1] || ''}\n`;
}, '');
}


@ -17,7 +17,7 @@
* under the License.
*/
import { merge } from 'lodash';
import { omitBy } from 'lodash';
import { format } from 'url';
import { BehaviorSubject } from 'rxjs';
@ -42,6 +42,10 @@ interface Params {
const JSON_CONTENT = /^(application\/(json|x-javascript)|text\/(x-)?javascript|x-json)(;.*)?$/;
const NDJSON_CONTENT = /^(application\/ndjson)(;.*)?$/;
const removedUndefined = (obj: Record<string, any> | undefined) => {
return omitBy(obj, (v) => v === undefined);
};
export class Fetch {
private readonly interceptors = new Set<HttpInterceptor>();
private readonly requestCount$ = new BehaviorSubject(0);
@ -119,24 +123,23 @@ export class Fetch {
asResponse,
asSystemRequest,
...fetchOptions
} = merge(
{
method: 'GET',
credentials: 'same-origin',
prependBasePath: true,
},
options,
{
headers: {
'Content-Type': 'application/json',
...options.headers,
'kbn-version': this.params.kibanaVersion,
},
}
);
} = {
method: 'GET',
credentials: 'same-origin',
prependBasePath: true,
...options,
// options can pass an `undefined` Content-Type to erase the default value.
// however we can't pass it to `fetch` as it will send an `Content-Type: Undefined` header
headers: removedUndefined({
'Content-Type': 'application/json',
...options.headers,
'kbn-version': this.params.kibanaVersion,
}),
};
const url = format({
pathname: shouldPrependBasePath ? this.params.basePath.prepend(options.path) : options.path,
query,
query: removedUndefined(query),
});
// Make sure the system request header is only present if `asSystemRequest` is true.
@ -144,7 +147,7 @@ export class Fetch {
fetchOptions.headers['kbn-system-request'] = 'true';
}
return new Request(url, fetchOptions);
return new Request(url, fetchOptions as RequestInit);
}
private async fetchResponse(fetchOptions: HttpFetchOptionsWithPath): Promise<HttpResponse<any>> {


@ -91,7 +91,7 @@ describe('PluginsService', () => {
context: contextServiceMock.createSetupContract(),
fatalErrors: fatalErrorsServiceMock.createSetupContract(),
http: httpServiceMock.createSetupContract(),
injectedMetadata: pick(injectedMetadataServiceMock.createStartContract(), 'getInjectedVar'),
injectedMetadata: injectedMetadataServiceMock.createStartContract(),
notifications: notificationServiceMock.createSetupContract(),
uiSettings: uiSettingsServiceMock.createSetupContract(),
};
@ -99,6 +99,7 @@ describe('PluginsService', () => {
...mockSetupDeps,
application: expect.any(Object),
getStartServices: expect.any(Function),
injectedMetadata: pick(mockSetupDeps.injectedMetadata, 'getInjectedVar'),
};
mockStartDeps = {
application: applicationServiceMock.createInternalStartContract(),
@ -106,7 +107,7 @@ describe('PluginsService', () => {
http: httpServiceMock.createStartContract(),
chrome: chromeServiceMock.createStartContract(),
i18n: i18nServiceMock.createStartContract(),
injectedMetadata: pick(injectedMetadataServiceMock.createStartContract(), 'getInjectedVar'),
injectedMetadata: injectedMetadataServiceMock.createStartContract(),
notifications: notificationServiceMock.createStartContract(),
overlays: overlayServiceMock.createStartContract(),
uiSettings: uiSettingsServiceMock.createStartContract(),
@ -117,6 +118,7 @@ describe('PluginsService', () => {
...mockStartDeps,
application: expect.any(Object),
chrome: omit(mockStartDeps.chrome, 'getComponent'),
injectedMetadata: pick(mockStartDeps.injectedMetadata, 'getInjectedVar'),
};
// Reset these for each test.


@ -162,7 +162,9 @@ export class SavedObjectsClient {
});
if (!foundObject) {
return queueItem.resolve(this.createSavedObject(pick(queueItem, ['id', 'type'])));
return queueItem.resolve(
this.createSavedObject(pick(queueItem, ['id', 'type']) as SavedObject)
);
}
queueItem.resolve(foundObject);


@ -60,7 +60,7 @@ export class SimpleSavedObject<T = unknown> {
}
public set(key: string, value: any): T {
return set(this.attributes, key, value);
return set(this.attributes as any, key, value);
}
public has(key: string): boolean {


@ -17,11 +17,11 @@
* under the License.
*/
import { merge } from 'lodash';
import { mergeWith } from 'lodash';
import { Capabilities } from './types';
export const mergeCapabilities = (...sources: Array<Partial<Capabilities>>): Capabilities =>
merge({}, ...sources, (a: any, b: any) => {
mergeWith({}, ...sources, (a: any, b: any) => {
if (
(typeof a === 'boolean' && typeof b === 'object') ||
(typeof a === 'object' && typeof b === 'boolean')


@ -39,10 +39,7 @@ const dataPathDeprecation: ConfigDeprecation = (settings, fromPath, log) => {
};
const xsrfDeprecation: ConfigDeprecation = (settings, fromPath, log) => {
if (
has(settings, 'server.xsrf.whitelist') &&
get<unknown[]>(settings, 'server.xsrf.whitelist').length > 0
) {
if ((settings.server?.xsrf?.whitelist ?? []).length > 0) {
log(
'It is not recommended to disable xsrf protections for API endpoints via [server.xsrf.whitelist]. ' +
'It will be removed in 8.0 release. Instead, supply the "kbn-xsrf" header.'


@ -81,7 +81,7 @@ export class LegacyElasticsearchErrorHelpers {
public static decorateNotAuthorizedError(error: Error, reason?: string) {
const decoratedError = decorate(error, ErrorCode.NOT_AUTHORIZED, 401, reason);
const wwwAuthHeader = get<string>(error, 'body.error.header[WWW-Authenticate]');
const wwwAuthHeader = get(error, 'body.error.header[WWW-Authenticate]') as string;
decoratedError.output.headers['WWW-Authenticate'] =
wwwAuthHeader || 'Basic realm="Authorization Required"';


@ -24,7 +24,7 @@ import apm from 'elastic-apm-node';
import { ByteSizeValue } from '@kbn/config-schema';
import { Server, Request, ResponseToolkit } from 'hapi';
import HapiProxy from 'h2o2';
import { sample } from 'lodash';
import { sampleSize } from 'lodash';
import BrowserslistUserAgent from 'browserslist-useragent';
import * as Rx from 'rxjs';
import { take } from 'rxjs/operators';
@ -90,7 +90,7 @@ export class BasePathProxyServer {
httpConfig.maxPayload = new ByteSizeValue(ONE_GIGABYTE);
if (!httpConfig.basePath) {
httpConfig.basePath = `/${sample(alphabet, 3).join('')}`;
httpConfig.basePath = `/${sampleSize(alphabet, 3).join('')}`;
}
}


@ -119,7 +119,10 @@ Object {
exports[`#set correctly sets values for paths that do not exist. 1`] = `
Object {
"unknown": "value",
"unknown": Object {
"sub1": "sub-value-1",
"sub2": "sub-value-2",
},
}
`;


@ -17,7 +17,7 @@
* under the License.
*/
import toPath from 'lodash/internal/toPath';
import { toPath } from 'lodash';
import { SavedObjectsCoreFieldMapping, SavedObjectsFieldMapping, IndexMapping } from '../types';
function getPropertyMappingFromObjectMapping(


@ -62,7 +62,6 @@
import Boom from 'boom';
import _ from 'lodash';
import cloneDeep from 'lodash.clonedeep';
import Semver from 'semver';
import { Logger } from '../../../logging';
import { SavedObjectUnsanitizedDoc } from '../../serialization';
@ -151,7 +150,7 @@ export class DocumentMigrator implements VersionedTransformer {
// Clone the document to prevent accidental mutations on the original data
// Ex: Importing sample data that is cached at import level, migrations would
// execute on mutated data the second time.
const clonedDoc = cloneDeep(doc);
const clonedDoc = _.cloneDeep(doc);
return this.transformDoc(clonedDoc);
};
}
@ -220,7 +219,7 @@ function buildActiveMigrations(
return {
...migrations,
[type.name]: {
latestVersion: _.last(transforms).version,
latestVersion: _.last(transforms)!.version,
transforms,
},
};

View file

@ -17,7 +17,6 @@
* under the License.
*/
import _ from 'lodash';
import { coordinateMigration } from './migration_coordinator';
import { createSavedObjectsMigrationLoggerMock } from '../mocks';


@ -1346,7 +1346,7 @@ export class SavedObjectsRepository {
// method transparently to the specified namespace.
private _rawToSavedObject<T = unknown>(raw: SavedObjectsRawDoc): SavedObject<T> {
const savedObject = this._serializer.rawToSavedObject(raw);
return omit(savedObject, 'namespace');
return omit(savedObject, 'namespace') as SavedObject<T>;
}
/**


@ -32,7 +32,8 @@ it('returns the first argument with all original references', () => {
it('prevents adding properties to argument', () => {
const frozen = deepFreeze({});
expect(() => {
// @ts-expect-error ts knows this shouldn't be possible, but just making sure
// ts knows this shouldn't be possible, but just making sure
// @ts-expect-error
frozen.foo = true;
}).toThrowError(`object is not extensible`);
});
@ -40,7 +41,8 @@ it('prevents adding properties to argument', () => {
it('prevents changing properties on argument', () => {
const frozen = deepFreeze({ foo: false });
expect(() => {
// @ts-expect-error ts knows this shouldn't be possible, but just making sure
// ts knows this shouldn't be possible, but just making sure
// @ts-expect-error
frozen.foo = true;
}).toThrowError(`read only property 'foo'`);
});
@ -48,7 +50,8 @@ it('prevents changing properties on argument', () => {
it('prevents changing properties on nested children of argument', () => {
const frozen = deepFreeze({ foo: { bar: { baz: { box: 1 } } } });
expect(() => {
// @ts-expect-error ts knows this shouldn't be possible, but just making sure
// ts knows this shouldn't be possible, but just making sure
// @ts-expect-error
frozen.foo.bar.baz.box = 2;
}).toThrowError(`read only property 'box'`);
});
@ -56,7 +59,8 @@ it('prevents changing properties on nested children of argument', () => {
it('prevents adding items to a frozen array', () => {
const frozen = deepFreeze({ foo: [1] });
expect(() => {
// @ts-expect-error ts knows this shouldn't be possible, but just making sure
// ts knows this shouldn't be possible, but just making sure
// @ts-expect-error
frozen.foo.push(2);
}).toThrowError(`object is not extensible`);
});
@ -64,7 +68,8 @@ it('prevents adding items to a frozen array', () => {
it('prevents reassigning items in a frozen array', () => {
const frozen = deepFreeze({ foo: [1] });
expect(() => {
// @ts-expect-error ts knows this shouldn't be possible, but just making sure
// ts knows this shouldn't be possible, but just making sure
// @ts-expect-error
frozen.foo[0] = 2;
}).toThrowError(`read only property '0'`);
});


@ -97,7 +97,6 @@ export const IGNORE_DIRECTORY_GLOBS = [
'packages/*',
'packages/kbn-ui-framework/generator-kui',
'src/legacy/ui/public/flot-charts',
'src/legacy/ui/public/utils/lodash-mixins',
'test/functional/fixtures/es_archiver/visualize_source-filters',
'packages/kbn-pm/src/utils/__fixtures__/*',
'x-pack/dev-tools',


@ -23,11 +23,11 @@ import * as Rx from 'rxjs';
import { toArray } from 'rxjs/operators';
import { createFailError } from '@kbn/dev-utils';
import { debounce } from 'lodash';
import { findPluginSpecs } from '../../legacy/plugin_discovery';
import { collectUiExports } from '../../legacy/ui';
import { buildAll } from '../../legacy/server/sass/build_all';
import chokidar from 'chokidar';
import debounce from 'lodash/function/debounce';
// TODO: clintandrewhall - Extract and use FSWatcher from legacy/server/sass
const build = async ({ log, kibanaDir, styleSheetPaths, watch }) => {


@ -44,7 +44,7 @@ export default function GeoHashGridAggResponseFixture() {
// random number of tags
let docCount = 0;
const buckets = _.times(_.random(40, 200), function () {
return _.sample(geoHashCharts, 3).join('');
return _.sampleSize(geoHashCharts, 3).join('');
})
.sort()
.map(function (geoHash) {


@ -41,7 +41,7 @@ export default function (kibana: any) {
uiExports: {
injectDefaultVars: () => ({
elasticsearchUrl: url.format(
Object.assign(url.parse(head(_legacyEsConfig.hosts)), { auth: false })
Object.assign(url.parse(head(_legacyEsConfig.hosts) as any), { auth: false })
),
}),
},


@ -35,7 +35,7 @@ export function handleESError(error) {
return Boom.serverUnavailable(error);
} else if (
error instanceof esErrors.Conflict ||
_.contains(error.message, 'index_template_already_exists')
_.includes(error.message, 'index_template_already_exists')
) {
return Boom.conflict(error);
} else if (error instanceof esErrors[403]) {


@ -51,7 +51,7 @@ describe('Vislib Dispatch Class Test Suite', function () {
});
it('implements on, off, emit methods', function () {
const events = _.pluck(vis.handler.charts, 'events');
const events = _.map(vis.handler.charts, 'events');
expect(events.length).to.be.above(0);
events.forEach(function (dispatch) {
expect(dispatch).to.have.property('on');


@ -267,7 +267,7 @@ describe('stackData method - data set with zeros in percentage mode', function (
expect(chart.chartData.series).to.have.length(1);
const series = chart.chartData.series[0].values;
// with the interval set in seriesMonthlyInterval data, the point at x=1454309600000 does not exist
const point = _.find(series, 'x', 1454309600000);
const point = _.find(series, ['x', 1454309600000]);
expect(point).to.not.be(undefined);
expect(point.y).to.be(0);
});
@ -279,7 +279,7 @@ describe('stackData method - data set with zeros in percentage mode', function (
const chart = vis.handler.charts[0];
expect(chart.chartData.series).to.have.length(5);
const series = chart.chartData.series[0].values;
const point = _.find(series, 'x', 1415826240000);
const point = _.find(series, ['x', 1415826240000]);
expect(point).to.not.be(undefined);
expect(point.y).to.be(0);
});


@ -428,7 +428,7 @@ app.controller('timelion', function (
const httpResult = $http
.post('../api/timelion/run', {
sheet: $scope.state.sheet,
time: _.extend(
time: _.assignIn(
{
from: timeRangeBounds.min,
to: timeRangeBounds.max,


@ -165,7 +165,7 @@ module
};
self.getLabel = function () {
return _.words(self.properties.nouns).map(_.capitalize).join(' ');
return _.words(self.properties.nouns).map(_.upperFirst).join(' ');
};
//key handler for the filter text box


@ -78,7 +78,7 @@ export function TimelionExpInput($http, $timeout) {
function init() {
$http.get('../api/timelion/functions').then(function (resp) {
Object.assign(functionReference, {
byName: _.indexBy(resp.data, 'name'),
byName: _.keyBy(resp.data, 'name'),
list: resp.data,
});
});


@ -47,7 +47,7 @@ export function TimelionInterval($timeout) {
// Only run this on initialization
if (newVal !== oldVal || oldVal == null) return;
if (_.contains($scope.intervalOptions, newVal)) {
if (_.includes($scope.intervalOptions, newVal)) {
$scope.interval = newVal;
} else {
$scope.interval = 'other';


@ -346,7 +346,7 @@ export function timechartFn(dependencies: TimelionVisualizationDependencies) {
}
if (serie._global) {
_.merge(options, serie._global, function (objVal, srcVal) {
_.mergeWith(options, serie._global, function (objVal, srcVal) {
// This is kind of gross, it means that you can't replace a global value with a null
// best you can do is an empty string. Deal with it.
if (objVal == null) return srcVal;


@ -19,8 +19,7 @@
import { resolve, basename, isAbsolute as isAbsolutePath } from 'path';
import toPath from 'lodash/internal/toPath';
import { get } from 'lodash';
import { get, toPath } from 'lodash';
import { createInvalidPluginError } from '../errors';
import { isVersionCompatible } from './is_version_compatible';


@ -33,7 +33,7 @@ export interface Integrities {
export async function getIntegrityHashes(filepaths: string[]): Promise<Integrities> {
const hashes = await Promise.all(filepaths.map(getIntegrityHash));
return zipObject(filepaths, hashes);
return zipObject(filepaths, hashes) as Integrities;
}
export async function getIntegrityHash(filepath: string): Promise<Hash | null> {


@ -144,7 +144,7 @@ export default class TransformObjStream extends Stream.Transform {
data.message = message || 'Unknown error (no message)';
} else if (event.error instanceof Error) {
data.type = 'error';
data.level = _.contains(event.tags, 'fatal') ? 'fatal' : 'error';
data.level = _.includes(event.tags, 'fatal') ? 'fatal' : 'error';
data.error = serializeError(event.error);
const message = get(event, 'error.message');
data.message = message || 'Unknown error object (no message)';


@ -81,7 +81,7 @@ export default class ServerStatus {
// reduce to the state with the highest severity, defaulting to green
.reduce((a, b) => (a.severity > b.severity ? a : b), states.get('green'));
const statuses = _.where(this._created, { state: state.id });
const statuses = _.filter(this._created, { state: state.id });
const since = _.get(_.sortBy(statuses, 'since'), [0, 'since']);
return {


@ -73,7 +73,7 @@ export const getAll = () => [
},
];
export const getAllById = () => _.indexBy(exports.getAll(), 'id');
export const getAllById = () => _.keyBy(exports.getAll(), 'id');
export const defaults = {
icon: 'question',


@ -107,7 +107,7 @@ export function EventsProvider(Promise) {
*/
Events.prototype.emit = function (name) {
const self = this;
const args = _.rest(arguments);
const args = _.tail(arguments);
if (!self._listeners[name]) {
return self._emitChain;
@ -131,7 +131,7 @@ export function EventsProvider(Promise) {
* @return {array[function]}
*/
Events.prototype.listeners = function (name) {
return _.pluck(this._listeners[name], 'handler');
return _.map(this._listeners[name], 'handler');
};
return Events;


@ -30,8 +30,8 @@ const users = [
];
// this is how we used to accomplish this, before IndexedArray
users.byName = _.indexBy(users, 'name');
users.byUsername = _.indexBy(users, 'username');
users.byName = _.keyBy(users, 'name');
users.byUsername = _.keyBy(users, 'username');
users.byGroup = _.groupBy(users, 'group');
users.inIdOrder = _.sortBy(users, 'id');
@ -54,7 +54,7 @@ describe('IndexedArray', function () {
});
it('clones to an object', function () {
expect(_.isPlainObject(_.clone(reg))).to.be(true);
expect(_.isObject(_.clone(reg))).to.be(true);
expect(Array.isArray(_.clone(reg))).to.be(false);
});
});
@ -140,7 +140,7 @@ describe('IndexedArray', function () {
reg.remove({ name: 'John' });
expect(_.eq(reg.raw, reg.slice(0))).to.be(true);
expect(_.isEqual(reg.raw, reg.slice(0))).to.be(true);
expect(reg.length).to.be(3);
expect(reg[0].name).to.be('Anon');
});


@ -52,7 +52,7 @@ export class IndexedArray {
this._indexNames = _.union(
this._setupIndex(config.group, inflectIndex, organizeByIndexedArray(config)),
this._setupIndex(config.index, inflectIndex, _.indexBy),
this._setupIndex(config.index, inflectIndex, _.keyBy),
this._setupIndex(config.order, inflectOrder, (raw, pluckValue) => {
return [...raw].sort((itemA, itemB) => {
const a = pluckValue(itemA);


@ -46,7 +46,7 @@ describe('routes/route_manager', function () {
})
);
it('should have chainable methods: ' + _.pluck(chainableMethods, 'name').join(', '), function () {
it('should have chainable methods: ' + _.map(chainableMethods, 'name').join(', '), function () {
chainableMethods.forEach(function (meth) {
expect(routes[meth.name].apply(routes, _.clone(meth.args))).to.be(routes);
});


@ -341,7 +341,7 @@ export function StateProvider(
* @return {object}
*/
State.prototype.toObject = function () {
return _.omit(this, (value, key) => {
return _.omitBy(this, (value, key) => {
return key.charAt(0) === '$' || key.charAt(0) === '_' || _.isFunction(value);
});
};


@ -50,7 +50,7 @@ export function move(
}
below = !!below;
qualifier = qualifier && _.callback(qualifier);
qualifier = qualifier && _.iteratee(qualifier);
const above = !below;
const finder = below ? _.findIndex : _.findLastIndex;


@ -17,7 +17,7 @@
* under the License.
*/
import { cloneDeep } from 'lodash';
import { cloneDeepWith } from 'lodash';
// We should add `any` return type to overcome bug in lodash types, customizer
// in lodash 3.* can return `undefined` if cloning is handled by the lodash, but
@ -29,5 +29,5 @@ function cloneBuffersCustomizer(val: unknown): any {
}
export function deepCloneWithBuffers<T>(val: T): T {
return cloneDeep(val, cloneBuffersCustomizer);
return cloneDeepWith(val, cloneBuffersCustomizer);
}


@ -18,11 +18,10 @@
*/
import _ from 'lodash';
import toPath from 'lodash/internal/toPath';
export function unset(object, rawPath) {
if (!object) return;
const path = toPath(rawPath);
const path = _.toPath(rawPath);
switch (path.length) {
case 0:


@ -61,7 +61,7 @@ describe('Mapped Colors', () => {
mappedColors.mapKeys(arr);
const colorValues = _(mappedColors.mapping).values();
expect(colorValues.contains(seedColors[0])).toBe(false);
expect(colorValues.includes(seedColors[0])).toBe(false);
expect(colorValues.uniq().size()).toBe(arr.length);
});


@ -54,7 +54,7 @@ export class MappedColors {
}
get(key: string | number) {
return this.getConfigColorMapping()[key] || this._mapping[key];
return this.getConfigColorMapping()[key as any] || this._mapping[key];
}
flush() {
@ -75,10 +75,10 @@ export class MappedColors {
const keysToMap: Array<string | number> = [];
_.each(keys, (key) => {
// If this key is mapped in the config, it's unnecessary to have it mapped here
if (configMapping[key]) delete this._mapping[key];
if (configMapping[key as any]) delete this._mapping[key];
// If this key is mapped to a color used by the config color mapping, we need to remap it
if (_.contains(configColors, this._mapping[key])) keysToMap.push(key);
if (_.includes(configColors, this._mapping[key])) keysToMap.push(key);
// if key exist in oldMap, move it to mapping
if (this._oldMap[key]) this._mapping[key] = this._oldMap[key];
@ -93,7 +93,7 @@ export class MappedColors {
let newColors = _.difference(colorPalette, allColors);
while (keysToMap.length > newColors.length) {
newColors = newColors.concat(_.sample(allColors, keysToMap.length - newColors.length));
newColors = newColors.concat(_.sampleSize(allColors, keysToMap.length - newColors.length));
}
_.merge(this._mapping, _.zipObject(keysToMap, newColors));


@ -17,6 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import React, { Fragment, useState } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';


@ -117,7 +117,7 @@ describe('Integration', () => {
return t;
});
if (terms.length !== expectedTerms.length) {
expect(_.pluck(terms, 'name')).toEqual(_.pluck(expectedTerms, 'name'));
expect(_.map(terms, 'name')).toEqual(_.map(expectedTerms, 'name'));
} else {
const filteredActualTerms = _.map(terms, function (actualTerm, i) {
const expectedTerm = expectedTerms[i];


@ -51,7 +51,7 @@ function resolvePathToComponents(tokenPath, context, editor, components) {
context,
editor
);
const result = [].concat.apply([], _.pluck(walkStates, 'components'));
const result = [].concat.apply([], _.map(walkStates, 'components'));
return result;
}


@ -62,7 +62,7 @@ export class ListComponent extends SharedComponent {
// verify we have all tokens
const list = this.listGenerator();
const notFound = _.any(tokens, function (token) {
const notFound = _.some(tokens, function (token) {
return list.indexOf(token) === -1;
});


@ -61,73 +61,64 @@ export class UrlPatternMatcher {
}
const endpointComponents = endpoint.url_components || {};
const partList = pattern.split('/');
_.each(
partList,
function (part, partIndex) {
if (part.search(/^{.+}$/) >= 0) {
part = part.substr(1, part.length - 2);
if (activeComponent.getComponent(part)) {
// we already have something for this, reuse
activeComponent = activeComponent.getComponent(part);
return;
}
// a new path, resolve.
if ((c = endpointComponents[part])) {
// endpoint specific. Support list
if (Array.isArray(c)) {
c = new ListComponent(part, c, activeComponent);
} else if (_.isObject(c) && c.type === 'list') {
c = new ListComponent(
part,
c.list,
activeComponent,
c.multiValued,
c.allow_non_valid
);
} else {
console.warn(
'incorrectly configured url component ',
part,
' in endpoint',
endpoint
);
c = new SharedComponent(part);
}
} else if ((c = this[method].parametrizedComponentFactories.getComponent(part))) {
// c is a f
c = c(part, activeComponent);
} else {
// just accept whatever with not suggestions
c = new SimpleParamComponent(part, activeComponent);
}
activeComponent = c;
} else {
// not pattern
let lookAhead = part;
let s;
for (partIndex++; partIndex < partList.length; partIndex++) {
s = partList[partIndex];
if (s.indexOf('{') >= 0) {
break;
}
lookAhead += '/' + s;
}
if (activeComponent.getComponent(part)) {
// we already have something for this, reuse
activeComponent = activeComponent.getComponent(part);
activeComponent.addOption(lookAhead);
} else {
c = new ConstantComponent(part, activeComponent, lookAhead);
activeComponent = c;
}
_.each(partList, (part, partIndex) => {
if (part.search(/^{.+}$/) >= 0) {
part = part.substr(1, part.length - 2);
if (activeComponent.getComponent(part)) {
// we already have something for this, reuse
activeComponent = activeComponent.getComponent(part);
return;
}
},
this
);
// a new path, resolve.
if ((c = endpointComponents[part])) {
// endpoint specific. Support list
if (Array.isArray(c)) {
c = new ListComponent(part, c, activeComponent);
} else if (_.isObject(c) && c.type === 'list') {
c = new ListComponent(
part,
c.list,
activeComponent,
c.multiValued,
c.allow_non_valid
);
} else {
console.warn('incorrectly configured url component ', part, ' in endpoint', endpoint);
c = new SharedComponent(part);
}
} else if ((c = this[method].parametrizedComponentFactories.getComponent(part))) {
// c is a f
c = c(part, activeComponent);
} else {
// just accept whatever with not suggestions
c = new SimpleParamComponent(part, activeComponent);
}
activeComponent = c;
} else {
// not pattern
let lookAhead = part;
let s;
for (partIndex++; partIndex < partList.length; partIndex++) {
s = partList[partIndex];
if (s.indexOf('{') >= 0) {
break;
}
lookAhead += '/' + s;
}
if (activeComponent.getComponent(part)) {
// we already have something for this, reuse
activeComponent = activeComponent.getComponent(part);
activeComponent.addOption(lookAhead);
} else {
c = new ConstantComponent(part, activeComponent, lookAhead);
activeComponent = c;
}
}
});
// mark end of endpoint path
new AcceptEndpointComponent(endpoint, activeComponent);
});


@ -26,16 +26,12 @@ export function wrapComponentWithDefaults(component, defaults) {
if (!result) {
return result;
}
result = _.map(
result,
function (term) {
if (!_.isObject(term)) {
term = { name: term };
}
return _.defaults(term, defaults);
},
this
);
result = _.map(result, (term) => {
if (!_.isObject(term)) {
term = { name: term };
}
return _.defaults(term, defaults);
});
return result;
};
return component;
@ -145,7 +141,7 @@ export function populateContext(tokenPath, context, editor, includeAutoComplete,
});
});
});
autoCompleteSet = _.uniq(autoCompleteSet, false);
autoCompleteSet = _.uniq(autoCompleteSet);
context.autoCompleteSet = autoCompleteSet;
}


@ -50,18 +50,14 @@ export class UrlParams {
}
description = _.clone(description || {});
_.defaults(description, defaults);
_.each(
description,
function (pDescription, param) {
const component = new ParamComponent(param, this.rootComponent, pDescription);
if (Array.isArray(pDescription)) {
new ListComponent(param, pDescription, component);
} else if (pDescription === '__flag__') {
new ListComponent(param, ['true', 'false'], component);
}
},
this
);
_.each(description, (pDescription, param) => {
const component = new ParamComponent(param, this.rootComponent, pDescription);
if (Array.isArray(pDescription)) {
new ListComponent(param, pDescription, component);
} else if (pDescription === '__flag__') {
new ListComponent(param, ['true', 'false'], component);
}
});
}
getTopLevelComponents() {
return this.rootComponent.next;


@ -60,19 +60,15 @@ function Api(urlParametrizedComponentFactories, bodyParametrizedComponentFactori
cls.addEndpointDescription = function (endpoint, description) {
const copiedDescription = {};
_.extend(copiedDescription, description || {});
_.assign(copiedDescription, description || {});
_.defaults(copiedDescription, {
id: endpoint,
patterns: [endpoint],
methods: ['GET'],
});
_.each(
copiedDescription.patterns,
function (p) {
this.urlPatternMatcher.addEndpoint(p, copiedDescription);
},
this
);
_.each(copiedDescription.patterns, (p) => {
this.urlPatternMatcher.addEndpoint(p, copiedDescription);
});
copiedDescription.paramsAutocomplete = new UrlParams(copiedDescription.url_params);
copiedDescription.bodyAutocompleteRootComponents = compileBodyDescription(


@ -98,7 +98,7 @@ export function getFields(indices, types) {
ret = [].concat.apply([], ret);
}
return _.uniq(ret, function (f) {
return _.uniqBy(ret, function (f) {
return f.name + ':' + f.type;
});
}
@ -191,7 +191,7 @@ function getFieldNamesFromProperties(properties = {}) {
});
// deduping
return _.uniq(fieldList, function (f) {
return _.uniqBy(fieldList, function (f) {
return f.name + ':' + f.type;
});
}


@ -25,7 +25,7 @@ import url from 'url';
import { ESConfigForProxy } from '../types';
const createAgent = (legacyConfig: ESConfigForProxy) => {
const target = url.parse(_.head(legacyConfig.hosts));
const target = url.parse(_.head(legacyConfig.hosts) as any);
if (!/^https/.test(target.protocol || '')) return new http.Agent();
const agentOptions: https.AgentOptions = {};


@ -19,7 +19,7 @@
import { Agent, IncomingMessage } from 'http';
import * as url from 'url';
import { pick, trimLeft, trimRight } from 'lodash';
import { pick, trimStart, trimEnd } from 'lodash';
import { KibanaRequest, Logger, RequestHandler } from 'kibana/server';
@ -46,7 +46,7 @@ export interface CreateHandlerDependencies {
}
function toURL(base: string, path: string) {
const urlResult = new url.URL(`${trimRight(base, '/')}/${trimLeft(path, '/')}`);
const urlResult = new url.URL(`${trimEnd(base, '/')}/${trimStart(path, '/')}`);
// Appending pretty here to have Elasticsearch do the JSON formatting, as doing
// in JS can lead to data loss (7.0 will get munged into 7, thus losing indication of
// measurement precision)


@ -55,11 +55,11 @@ export class SpecDefinitionsService {
});
if (urlParamsDef) {
description.url_params = _.extend(description.url_params || {}, copiedDescription.url_params);
description.url_params = _.assign(description.url_params || {}, copiedDescription.url_params);
_.defaults(description.url_params, urlParamsDef);
}
_.extend(copiedDescription, description);
_.assign(copiedDescription, description);
_.defaults(copiedDescription, {
id: endpoint,
patterns: [endpoint],


@ -20,6 +20,7 @@
import { i18n } from '@kbn/i18n';
import { CoreStart } from 'src/core/public';
import uuid from 'uuid';
import _ from 'lodash';
import { ActionByType, IncompatibleActionError } from '../../ui_actions_plugin';
import { ViewMode, PanelState, IEmbeddable } from '../../embeddable_plugin';
import { SavedObject } from '../../../../saved_objects/public';


@ -19,6 +19,7 @@
import { i18n } from '@kbn/i18n';
import React from 'react';
import _ from 'lodash';
import { EuiFlyout, EuiFlyoutBody, EuiFlyoutHeader, EuiTitle } from '@elastic/eui';
import { NotificationsStart, Toast } from 'src/core/public';
import { DashboardPanelState } from '../embeddable';


@ -17,7 +17,7 @@
* under the License.
*/
import _, { uniq } from 'lodash';
import _, { uniqBy } from 'lodash';
import { i18n } from '@kbn/i18n';
import { EUI_MODAL_CANCEL_BUTTON, EuiCheckboxGroup } from '@elastic/eui';
import { EuiCheckboxGroupIdToSelectedMap } from '@elastic/eui/src/components/form/checkbox/checkbox_group';
@ -265,7 +265,7 @@ export class DashboardAppController {
if (!embeddableIndexPatterns) return;
panelIndexPatterns.push(...embeddableIndexPatterns);
});
panelIndexPatterns = uniq(panelIndexPatterns, 'id');
panelIndexPatterns = uniqBy(panelIndexPatterns, 'id');
if (panelIndexPatterns && panelIndexPatterns.length > 0) {
$scope.$evalAsync(() => {
@ -520,7 +520,7 @@ export class DashboardAppController {
differences.filters = appStateDashboardInput.filters;
}
Object.keys(_.omit(containerInput, 'filters')).forEach((key) => {
Object.keys(_.omit(containerInput, ['filters'])).forEach((key) => {
const containerValue = (containerInput as { [key: string]: unknown })[key];
const appStateValue = ((appStateDashboardInput as unknown) as { [key: string]: unknown })[
key

View file

@ -17,7 +17,6 @@
* under the License.
*/
import _ from 'lodash';
import { PanelState, EmbeddableInput } from '../../../embeddable_plugin';
import { DEFAULT_PANEL_HEIGHT, DEFAULT_PANEL_WIDTH } from '../dashboard_constants';
import { DashboardPanelState } from '../types';


@ -17,6 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import { PanelNotFoundError } from '../../../embeddable_plugin';
import { GridData } from '../../../../common';
import { DashboardPanelState, DASHBOARD_GRID_COLUMN_COUNT } from '..';


@ -47,7 +47,7 @@ export function updateSavedDashboard(
'pause',
'section',
'value',
]);
]) as RefreshInterval;
savedDashboard.refreshInterval = savedDashboard.timeRestore ? timeRestoreObj : undefined;
// save only unpinned filters


@ -111,7 +111,7 @@ export const dashboardSavedObjectTypeMigrations = {
* in that version. So we apply this twice, once with 6.7.2 and once with 7.0.1 while the backport to 6.7
* only contained the 6.7.2 migration and not the 7.0.1 migration.
*/
'6.7.2': flow<SavedObjectMigrationFn<any, any>>(migrateMatchAllQuery),
'7.0.0': flow<SavedObjectMigrationFn<any, DashboardDoc700To720['attributes']>>(migrations700),
'7.3.0': flow<SavedObjectMigrationFn<any, any>>(migrations730),
'6.7.2': flow(migrateMatchAllQuery),
'7.0.0': flow(migrations700),
'7.3.0': flow(migrations730),
};


@ -22,7 +22,7 @@ import { get } from 'lodash';
import { DEFAULT_QUERY_LANGUAGE } from '../../../data/common';
export const migrateMatchAllQuery: SavedObjectMigrationFn<any, any> = (doc) => {
const searchSourceJSON = get<string>(doc, 'attributes.kibanaSavedObjectMeta.searchSourceJSON');
const searchSourceJSON = get(doc, 'attributes.kibanaSavedObjectMeta.searchSourceJSON');
if (searchSourceJSON) {
let searchSource: any;


@ -17,7 +17,7 @@
* under the License.
*/
import { isEqual, clone } from 'lodash';
import { isEqual, cloneDeep } from 'lodash';
import { migrateFilter, DeprecatedMatchPhraseFilter } from './migrate_filter';
import { PhraseFilter, MatchAllFilter } from '../filters';
@ -52,7 +52,7 @@ describe('migrateFilter', function () {
});
it('should not modify the original filter', function () {
const oldMatchPhraseFilterCopy = clone(oldMatchPhraseFilter, true);
const oldMatchPhraseFilterCopy = cloneDeep(oldMatchPhraseFilter);
migrateFilter(oldMatchPhraseFilter, undefined);


@ -44,6 +44,6 @@ export * from './types';
* @param {object} filter The filter to clean
* @returns {object}
*/
export const cleanFilter = (filter: Filter): Filter => omit(filter, ['meta', '$state']);
export const cleanFilter = (filter: Filter): Filter => omit(filter, ['meta', '$state']) as Filter;
export const isFilterDisabled = (filter: Filter): boolean => get(filter, 'meta.disabled', false);


@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { map, reduce, mapValues, get, keys, pick } from 'lodash';
import { map, reduce, mapValues, get, keys, pickBy } from 'lodash';
import { Filter, FilterMeta } from './meta_filter';
import { IIndexPattern, IFieldType } from '../../index_patterns';
@ -112,7 +112,7 @@ export const buildRangeFilter = (
filter.meta.formattedValue = formattedValue;
}
params = mapValues(params, (value) => (field.type === 'number' ? parseFloat(value) : value));
params = mapValues(params, (value: any) => (field.type === 'number' ? parseFloat(value) : value));
if ('gte' in params && 'gt' in params) throw new Error('gte and gt are mutually exclusive');
if ('lte' in params && 'lt' in params) throw new Error('lte and lt are mutually exclusive');
@ -148,7 +148,7 @@ export const buildRangeFilter = (
};
export const getRangeScript = (field: IFieldType, params: RangeFilterParams) => {
const knownParams = pick(params, (val, key: any) => key in operators);
const knownParams = pickBy(params, (val, key: any) => key in operators);
let script = map(
knownParams,
(val: any, key: string) => '(' + field.script + ')' + get(operators, key) + key

View file

@ -97,7 +97,7 @@ export function toElasticsearchQuery(
});
}
const isExistsQuery = valueArg.type === 'wildcard' && value === '*';
const isExistsQuery = valueArg.type === 'wildcard' && (value as any) === '*';
const isAllFieldsQuery =
(fullFieldNameArg.type === 'wildcard' && ((fieldName as unknown) as string) === '*') ||
(fields && indexPattern && fields.length === indexPattern.fields.length);
@ -135,7 +135,7 @@ export function toElasticsearchQuery(
...accumulator,
{
script: {
...getPhraseScript(field, value),
...getPhraseScript(field, value as any),
},
},
];


@ -18,7 +18,7 @@
*/
import { i18n } from '@kbn/i18n';
import { trunc } from 'lodash';
import { truncate } from 'lodash';
import { KBN_FIELD_TYPES } from '../../kbn_field_types/types';
import { FieldFormat } from '../field_format';
import { TextContextTypeConvert, FIELD_FORMAT_IDS } from '../types';
@ -35,7 +35,7 @@ export class TruncateFormat extends FieldFormat {
textConvert: TextContextTypeConvert = (val) => {
const length = this.param('fieldLength');
if (length > 0) {
return trunc(val, {
return truncate(val, {
length: length + omission.length,
omission,
});


@ -17,7 +17,7 @@
* under the License.
*/
import { constant, trimRight, trimLeft, get } from 'lodash';
import { constant, trimEnd, trimStart, get } from 'lodash';
import { FieldFormat } from './field_format';
import { asPrettyString } from './utils';
@ -120,8 +120,8 @@ describe('FieldFormat class', () => {
test('does escape the output of the text converter if used in an html context', () => {
const f = getTestFormat(undefined, constant('<script>alert("xxs");</script>'));
const expected = trimRight(
trimLeft(f.convert('', 'html'), '<span ng-non-bindable>'),
const expected = trimEnd(
trimStart(f.convert('', 'html'), '<span ng-non-bindable>'),
'</span>'
);


@ -185,7 +185,7 @@ export abstract class FieldFormat {
const params = transform(
this._params,
(uniqParams, val, param) => {
(uniqParams: any, val, param) => {
if (param && val !== get(defaultsParams, param)) {
uniqParams[param] = val;
}


@ -233,7 +233,7 @@ export class FieldFormatsRegistry {
parseDefaultTypeMap(value: any) {
this.defaultMap = value;
forOwn(this, (fn) => {
if (isFunction(fn) && fn.cache) {
if (isFunction(fn) && (fn as any).cache) {
// clear all memoize caches
// @ts-ignore
fn.cache = new memoize.Cache();


@ -28,7 +28,7 @@ type ShorthandFieldMapObject = FieldMappingSpec | ES_FIELD_TYPES | 'json';
/** @public */
export const expandShorthand = (sh: Record<string, ShorthandFieldMapObject>): MappingObject => {
return mapValues<Record<string, ShorthandFieldMapObject>>(sh, (val: ShorthandFieldMapObject) => {
return mapValues(sh, (val: ShorthandFieldMapObject) => {
const fieldMap = isString(val) ? { type: val } : val;
const json: FieldMappingSpec = {
type: ES_FIELD_TYPES.TEXT,


@ -17,7 +17,7 @@
* under the License.
*/
import { contains } from 'lodash';
import { includes } from 'lodash';
import { IndexPatternsContract } from './index_patterns';
import { UiSettingsCommon } from '../types';
@ -35,7 +35,7 @@ export const createEnsureDefaultIndexPattern = (
const patterns = await this.getIds();
let defaultId = await uiSettings.get('defaultIndex');
let defined = !!defaultId;
const exists = contains(patterns, defaultId);
const exists = includes(patterns, defaultId);
if (defined && !exists) {
await uiSettings.remove('defaultIndex');


@ -77,7 +77,7 @@ function decorateFlattenedWrapper(hit: Record<string, any>, metaFields: Record<s
// unwrap computed fields
_.forOwn(hit.fields, function (val, key: any) {
if (key[0] === '_' && !_.contains(metaFields, key)) return;
if (key[0] === '_' && !_.includes(metaFields, key)) return;
flattened[key] = Array.isArray(val) && val.length === 1 ? val[0] : val;
});


@ -17,7 +17,7 @@
* under the License.
*/
import { defaults, pluck, last, get } from 'lodash';
import { defaults, map, last, get } from 'lodash';
import { IndexPattern } from './index_pattern';
@ -173,7 +173,7 @@ describe('IndexPattern', () => {
const scriptedNames = mockLogStashFields()
.filter((item: Field) => item.scripted === true)
.map((item: Field) => item.name);
const respNames = pluck(indexPattern.getScriptedFields(), 'name');
const respNames = map(indexPattern.getScriptedFields(), 'name');
expect(respNames).toEqual(scriptedNames);
});
@ -217,7 +217,7 @@ describe('IndexPattern', () => {
const notScriptedNames = mockLogStashFields()
.filter((item: Field) => item.scripted === false)
.map((item: Field) => item.name);
const respNames = pluck(indexPattern.getNonScriptedFields(), 'name');
const respNames = map(indexPattern.getNonScriptedFields(), 'name');
expect(respNames).toEqual(notScriptedNames);
});
@ -288,7 +288,7 @@ describe('IndexPattern', () => {
// const saveSpy = sinon.spy(indexPattern, 'save');
const scriptedFields = indexPattern.getScriptedFields();
const oldCount = scriptedFields.length;
const scriptedField = last(scriptedFields);
const scriptedField = last(scriptedFields) as any;
await indexPattern.removeScriptedField(scriptedField);
@ -299,7 +299,7 @@ describe('IndexPattern', () => {
test('should not allow duplicate names', async () => {
const scriptedFields = indexPattern.getScriptedFields();
const scriptedField = last(scriptedFields);
const scriptedField = last(scriptedFields) as any;
expect.assertions(1);
try {
await indexPattern.addScriptedField(scriptedField.name, "'new script'", 'string', 'lang');


@ -388,9 +388,9 @@ export class IndexPattern implements IIndexPattern {
async addScriptedField(name: string, script: string, fieldType: string = 'string', lang: string) {
const scriptedFields = this.getScriptedFields();
const names = _.pluck(scriptedFields, 'name');
const names = _.map(scriptedFields, 'name');
if (_.contains(names, name)) {
if (_.includes(names, name)) {
throw new DuplicateField(name);
}
@ -452,11 +452,11 @@ export class IndexPattern implements IIndexPattern {
}
getNonScriptedFields() {
return _.where(this.fields, { scripted: false });
return _.filter(this.fields, { scripted: false });
}
getScriptedFields() {
return _.where(this.fields, { scripted: true });
return _.filter(this.fields, { scripted: true });
}
getIndex() {


@ -44,7 +44,7 @@ const mapFilter = (
comparators: FilterCompareOptions,
excludedAttributes: string[]
) => {
const cleaned: FilterMeta = omit(filter, excludedAttributes);
const cleaned: FilterMeta = omit(filter, excludedAttributes) as FilterMeta;
if (comparators.index) cleaned.index = filter.meta?.index;
if (comparators.negate) cleaned.negate = filter.meta && Boolean(filter.meta.negate);


@ -27,12 +27,11 @@ import {
type SOClient = Pick<SavedObjectsClient, 'find' | 'get' | 'update' | 'create' | 'delete'>;
const simpleSavedObjectToSavedObject = <T>(
simpleSavedObject: SimpleSavedObject
): SavedObject<T> => ({
version: simpleSavedObject._version,
...omit(simpleSavedObject, '_version'),
});
const simpleSavedObjectToSavedObject = <T>(simpleSavedObject: SimpleSavedObject): SavedObject<T> =>
({
version: simpleSavedObject._version,
...omit(simpleSavedObject, '_version'),
} as any);
export class SavedObjectsClientPublicToCommon implements SavedObjectsClientCommon {
private savedObjectClient: SOClient;


@ -584,8 +584,8 @@ export abstract class FieldFormat {
textConvert: TextContextTypeConvert | undefined;
static title: string;
toJSON(): {
id: unknown;
params: _.Dictionary<unknown> | undefined;
id: any;
params: any;
};
type: any;
}


@ -65,7 +65,7 @@ export class FilterManager {
}
// matching filter in globalState, update global and don't add from appState
_.assign(match.meta, filter.meta);
_.assignIn(match.meta, filter.meta);
});
return FilterManager.mergeFilters(cleanedAppFilters, globalFilters);

Some files were not shown because too many files have changed in this diff.