Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
[ML] Removing full lodash library imports

* more has
* fixing missing filter
* removing _
* removing unused file
* removing first use
* removing comment
This commit is contained in:

parent a867c0f85d
commit 454b353dfa

34 changed files with 365 additions and 371 deletions
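The pattern applied across all of the changed files is the same: replace the full "import _ from 'lodash'" with per-method imports (for example 'lodash/get'), and replace "_.has(obj, 'key')" checks with plain "!== undefined" property checks. The sketch below illustrates the before/after style only; the AnomalyRecord interface and exampleUsage function are hypothetical and do not appear in the changed files.

// Before: the whole lodash build ends up in the bundle.
// import _ from 'lodash';
// const query = _.get(config, 'datafeedConfig.query', null);
// if (_.has(record, 'partition_field_value')) { ... }

// After: import only the functions that are actually used.
import get from 'lodash/get';
import cloneDeep from 'lodash/cloneDeep';

interface AnomalyRecord {
  partition_field_name?: string;
  partition_field_value?: string;
}

function exampleUsage(config: object, record: AnomalyRecord) {
  // Deep path lookups keep using lodash, but via the per-method module.
  const query = get(config, 'datafeedConfig.query', null);
  // Deep copies still use cloneDeep, imported the same way.
  const copy = cloneDeep(record);
  // _.has(record, 'partition_field_value') becomes a plain property check.
  if (record.partition_field_value !== undefined) {
    copy.partition_field_name = record.partition_field_name;
  }
  return { query, copy };
}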
@@ -4,7 +4,11 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import isEmpty from 'lodash/isEmpty';
+import isEqual from 'lodash/isEqual';
+import each from 'lodash/each';
+import pick from 'lodash/pick';

 import semver from 'semver';
 import moment, { Duration } from 'moment';
 // @ts-ignore
@@ -307,7 +311,7 @@ export function getSafeAggregationName(fieldName: string, index: number): string
 export function uniqWithIsEqual<T extends any[]>(arr: T): T {
 return arr.reduce((dedupedArray, value) => {
-if (dedupedArray.filter((compareValue: any) => _.isEqual(compareValue, value)).length === 0) {
+if (dedupedArray.filter((compareValue: any) => isEqual(compareValue, value)).length === 0) {
 dedupedArray.push(value);
 }
 return dedupedArray;
@@ -328,7 +332,7 @@ export function basicJobValidation(
 if (job) {
 // Job details
-if (_.isEmpty(job.job_id)) {
+if (isEmpty(job.job_id)) {
 messages.push({ id: 'job_id_empty' });
 valid = false;
 } else if (isJobIdValid(job.job_id) === false) {
@@ -350,7 +354,7 @@ export function basicJobValidation(
 // Analysis Configuration
 if (job.analysis_config.categorization_filters) {
 let v = true;
-_.each(job.analysis_config.categorization_filters, (d) => {
+each(job.analysis_config.categorization_filters, (d) => {
 try {
 new RegExp(d);
 } catch (e) {
@@ -382,8 +386,8 @@ export function basicJobValidation(
 valid = false;
 } else {
 let v = true;
-_.each(job.analysis_config.detectors, (d) => {
-if (_.isEmpty(d.function)) {
+each(job.analysis_config.detectors, (d) => {
+if (isEmpty(d.function)) {
 v = false;
 }
 });
@@ -400,7 +404,7 @@ export function basicJobValidation(
 // create an array of objects with a subset of the attributes
 // where we want to make sure they are not be the same across detectors
 const compareSubSet = job.analysis_config.detectors.map((d) =>
-_.pick(d, [
+pick(d, [
 'function',
 'field_name',
 'by_field_name',

@@ -9,7 +9,9 @@
 * This version supports both fetching the annotations by itself (used in the jobs list) and
 * getting the annotations via props (used in Anomaly Explorer and Single Series Viewer).
 */
-import _ from 'lodash';
+import uniq from 'lodash/uniq';

 import PropTypes from 'prop-types';
 import rison from 'rison-node';
 import React, { Component, Fragment } from 'react';
@@ -255,18 +257,18 @@ export class AnnotationsTable extends Component {
 // if the annotation is at the series level
 // then pass the partitioning field(s) and detector index to the Single Metric Viewer
-if (_.has(annotation, 'detector_index')) {
+if (annotation.detector_index !== undefined) {
 mlTimeSeriesExplorer.detectorIndex = annotation.detector_index;
 }
-if (_.has(annotation, 'partition_field_value')) {
+if (annotation.partition_field_value !== undefined) {
 entityCondition[annotation.partition_field_name] = annotation.partition_field_value;
 }

-if (_.has(annotation, 'over_field_value')) {
+if (annotation.over_field_value !== undefined) {
 entityCondition[annotation.over_field_name] = annotation.over_field_value;
 }

-if (_.has(annotation, 'by_field_value')) {
+if (annotation.by_field_value !== undefined) {
 // Note that analyses with by and over fields, will have a top-level by_field_name,
 // but the by_field_value(s) will be in the nested causes array.
 entityCondition[annotation.by_field_name] = annotation.by_field_value;
@@ -421,7 +423,7 @@ export class AnnotationsTable extends Component {
 },
 ];

-const jobIds = _.uniq(annotations.map((a) => a.job_id));
+const jobIds = uniq(annotations.map((a) => a.job_id));
 if (jobIds.length > 1) {
 columns.unshift({
 field: 'job_id',

@@ -9,7 +9,7 @@
 */

 import PropTypes from 'prop-types';
-import _ from 'lodash';
+import get from 'lodash/get';

 import React, { Component } from 'react';
@@ -70,7 +70,7 @@ class AnomaliesTable extends Component {
 } else {
 const examples =
 item.entityName === 'mlcategory'
-? _.get(this.props.tableData, ['examplesByJobId', item.jobId, item.entityValue])
+? get(this.props.tableData, ['examplesByJobId', item.jobId, item.entityValue])
 : undefined;
 let definition = undefined;

@@ -7,7 +7,7 @@
 import { EuiButtonIcon, EuiLink, EuiScreenReaderOnly } from '@elastic/eui';

 import React from 'react';
-import _ from 'lodash';
+import get from 'lodash/get';

 import { i18n } from '@kbn/i18n';
 import { FormattedMessage } from '@kbn/i18n/react';
@@ -251,7 +251,7 @@ export function getColumns(
 sortable: false,
 truncateText: true,
 render: (item) => {
-const examples = _.get(examplesByJobId, [item.jobId, item.entityValue], []);
+const examples = get(examplesByJobId, [item.jobId, item.entityValue], []);
 return (
 <EuiLink
 className="mlAnomalyCategoryExamples__link"

@@ -11,7 +11,8 @@
 import PropTypes from 'prop-types';
 import React, { Component, Fragment } from 'react';
-import _ from 'lodash';
+import get from 'lodash/get';
+import pick from 'lodash/pick';
 import { i18n } from '@kbn/i18n';
 import { FormattedMessage } from '@kbn/i18n/react';
@@ -63,16 +64,12 @@ function getDetailsItems(anomaly, examples, filter) {
 }
 } else {
 causes = sourceCauses.map((cause) => {
-const simplified = _.pick(cause, 'typical', 'actual', 'probability');
+const simplified = pick(cause, 'typical', 'actual', 'probability');
 // Get the 'entity field name/value' to display in the cause -
 // For by and over, use by_field_name/value (over_field_name/value are in the top level fields)
 // For just an 'over' field - the over_field_name/value appear in both top level and cause.
-simplified.entityName = _.has(cause, 'by_field_name')
-? cause.by_field_name
-: cause.over_field_name;
-simplified.entityValue = _.has(cause, 'by_field_value')
-? cause.by_field_value
-: cause.over_field_value;
+simplified.entityName = cause.by_field_name ? cause.by_field_name : cause.over_field_name;
+simplified.entityValue = cause.by_field_value ? cause.by_field_value : cause.over_field_value;
 return simplified;
 });
 }
@@ -471,7 +468,7 @@ export class AnomalyDetails extends Component {
 renderDetails() {
 const detailItems = getDetailsItems(this.props.anomaly, this.props.examples, this.props.filter);
-const isInterimResult = _.get(this.props.anomaly, 'source.is_interim', false);
+const isInterimResult = get(this.props.anomaly, 'source.is_interim', false);
 return (
 <React.Fragment>
 <EuiText size="xs">

@@ -4,7 +4,7 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import each from 'lodash/each';
 import PropTypes from 'prop-types';
 import React, { Component } from 'react';
@@ -148,7 +148,7 @@ export class InfluencersCell extends Component {
 const influencers = [];
 recordInfluencers.forEach((influencer) => {
-_.each(influencer, (influencerFieldValue, influencerFieldName) => {
+each(influencer, (influencerFieldValue, influencerFieldName) => {
 influencers.push({
 influencerFieldName,
 influencerFieldValue,

@@ -4,7 +4,7 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import cloneDeep from 'lodash/cloneDeep';
 import moment from 'moment';
 import rison from 'rison-node';
 import PropTypes from 'prop-types';
@@ -58,7 +58,7 @@ class LinksMenuUI extends Component {
 // If url_value contains $earliest$ and $latest$ tokens, add in times to the source record.
 // Create a copy of the record as we are adding properties into it.
-const record = _.cloneDeep(anomaly.source);
+const record = cloneDeep(anomaly.source);
 const timestamp = record.timestamp;
 const configuredUrlValue = customUrl.url_value;
 const timeRangeInterval = parseInterval(customUrl.time_range);
@@ -99,7 +99,7 @@ class LinksMenuUI extends Component {
 if (
 (configuredUrlValue.includes('$mlcategoryterms$') ||
 configuredUrlValue.includes('$mlcategoryregex$')) &&
-_.has(record, 'mlcategory')
+record.mlcategory !== undefined
 ) {
 const jobId = record.job_id;
@@ -156,15 +156,15 @@ class LinksMenuUI extends Component {
 // Extract the by, over and partition fields for the record.
 const entityCondition = {};

-if (_.has(record, 'partition_field_value')) {
+if (record.partition_field_value !== undefined) {
 entityCondition[record.partition_field_name] = record.partition_field_value;
 }

-if (_.has(record, 'over_field_value')) {
+if (record.over_field_value !== undefined) {
 entityCondition[record.over_field_name] = record.over_field_value;
 }

-if (_.has(record, 'by_field_value')) {
+if (record.by_field_value !== undefined) {
 // Note that analyses with by and over fields, will have a top-level by_field_name,
 // but the by_field_value(s) will be in the nested causes array.
 // TODO - drilldown from cause in expanded row only?

@@ -9,8 +9,6 @@
 * the raw data in the Explorer dashboard.
 */

-import _ from 'lodash';
-
 import { parseInterval } from '../../../../common/util/parse_interval';
 import { getEntityFieldList } from '../../../../common/util/anomaly_utils';
 import { buildConfigFromDetector } from '../../util/chart_config_builder';
@@ -30,7 +28,7 @@ export function buildConfig(record) {
 config.detectorLabel = record.function;
 if (
-_.has(mlJobService.detectorsByJob, record.job_id) &&
+mlJobService.detectorsByJob[record.job_id] !== undefined &&
 detectorIndex < mlJobService.detectorsByJob[record.job_id].length
 ) {
 config.detectorLabel =

@@ -11,8 +11,8 @@
 import PropTypes from 'prop-types';
 import React from 'react';
+import { i18n } from '@kbn/i18n';

-import _ from 'lodash';
 import d3 from 'd3';
 import $ from 'jquery';
 import moment from 'moment';
@@ -33,8 +33,6 @@ import { mlFieldFormatService } from '../../services/field_format_service';
 import { CHART_TYPE } from '../explorer_constants';

-import { i18n } from '@kbn/i18n';
-
 const CONTENT_WRAPPER_HEIGHT = 215;

 // If a rare/event-distribution chart has a cardinality of 10 or less,
@@ -403,7 +401,7 @@ export class ExplorerChartDistribution extends React.Component {
 .attr('cy', (d) => lineChartYScale(d[CHART_Y_ATTRIBUTE]))
 .attr('class', (d) => {
 let markerClass = 'metric-value';
-if (_.has(d, 'anomalyScore') && Number(d.anomalyScore) >= severity) {
+if (d.anomalyScore !== undefined && Number(d.anomalyScore) >= severity) {
 markerClass += ' anomaly-marker ';
 markerClass += getSeverityWithLow(d.anomalyScore).id;
 }
@@ -444,7 +442,7 @@ export class ExplorerChartDistribution extends React.Component {
 const tooltipData = [{ label: formattedDate }];
 const seriesKey = config.detectorLabel;

-if (_.has(marker, 'entity')) {
+if (marker.entity !== undefined) {
 tooltipData.push({
 label: i18n.translate('xpack.ml.explorer.distributionChart.entityLabel', {
 defaultMessage: 'entity',
@@ -457,7 +455,7 @@ export class ExplorerChartDistribution extends React.Component {
 });
 }

-if (_.has(marker, 'anomalyScore')) {
+if (marker.anomalyScore !== undefined) {
 const score = parseInt(marker.anomalyScore);
 const displayScore = score > 0 ? score : '< 1';
 tooltipData.push({
@@ -494,7 +492,7 @@ export class ExplorerChartDistribution extends React.Component {
 valueAccessor: 'typical',
 });
 }
-if (typeof marker.byFieldName !== 'undefined' && _.has(marker, 'numberOfCauses')) {
+if (typeof marker.byFieldName !== 'undefined' && marker.numberOfCauses !== undefined) {
 tooltipData.push({
 label: i18n.translate(
 'xpack.ml.explorer.distributionChart.unusualByFieldValuesLabel',
@@ -532,7 +530,7 @@ export class ExplorerChartDistribution extends React.Component {
 });
 }

-if (_.has(marker, 'scheduledEvents')) {
+if (marker.scheduledEvents !== undefined) {
 marker.scheduledEvents.forEach((scheduledEvent, i) => {
 tooltipData.push({
 label: i18n.translate(

@@ -12,10 +12,10 @@
 import PropTypes from 'prop-types';
 import React from 'react';

-import _ from 'lodash';
 import d3 from 'd3';
 import $ from 'jquery';
 import moment from 'moment';
+import { i18n } from '@kbn/i18n';

 import { formatHumanReadableDateTime } from '../../util/date_utils';
 import { formatValue } from '../../formatters/format_value';
@@ -40,8 +40,6 @@ import { getTimeBucketsFromCache } from '../../util/time_buckets';
 import { mlEscape } from '../../util/string_utils';
 import { mlFieldFormatService } from '../../services/field_format_service';

-import { i18n } from '@kbn/i18n';
-
 const CONTENT_WRAPPER_HEIGHT = 215;
 const CONTENT_WRAPPER_CLASS = 'ml-explorer-chart-content-wrapper';
@@ -307,7 +305,7 @@ export class ExplorerChartSingleMetric extends React.Component {
 .on('mouseout', () => tooltipService.hide());

 const isAnomalyVisible = (d) =>
-_.has(d, 'anomalyScore') && Number(d.anomalyScore) >= severity;
+d.anomalyScore !== undefined && Number(d.anomalyScore) >= severity;

 // Update all dots to new positions.
 dots
@@ -380,7 +378,7 @@ export class ExplorerChartSingleMetric extends React.Component {
 const tooltipData = [{ label: formattedDate }];
 const seriesKey = config.detectorLabel;

-if (_.has(marker, 'anomalyScore')) {
+if (marker.anomalyScore !== undefined) {
 const score = parseInt(marker.anomalyScore);
 const displayScore = score > 0 ? score : '< 1';
 tooltipData.push({
@@ -411,7 +409,7 @@ export class ExplorerChartSingleMetric extends React.Component {
 // Show actual/typical when available except for rare detectors.
 // Rare detectors always have 1 as actual and the probability as typical.
 // Exposing those values in the tooltip with actual/typical labels might irritate users.
-if (_.has(marker, 'actual') && config.functionDescription !== 'rare') {
+if (marker.actual !== undefined && config.functionDescription !== 'rare') {
 // Display the record actual in preference to the chart value, which may be
 // different depending on the aggregation interval of the chart.
 tooltipData.push({
@@ -445,7 +443,7 @@ export class ExplorerChartSingleMetric extends React.Component {
 },
 valueAccessor: 'value',
 });
-if (_.has(marker, 'byFieldName') && _.has(marker, 'numberOfCauses')) {
+if (marker.byFieldName !== undefined && marker.numberOfCauses !== undefined) {
 tooltipData.push({
 label: i18n.translate(
 'xpack.ml.explorer.distributionChart.unusualByFieldValuesLabel',
@@ -483,7 +481,7 @@ export class ExplorerChartSingleMetric extends React.Component {
 });
 }

-if (_.has(marker, 'scheduledEvents')) {
+if (marker.scheduledEvents !== undefined) {
 tooltipData.push({
 label: i18n.translate('xpack.ml.explorer.singleMetricChart.scheduledEventsLabel', {
 defaultMessage: 'Scheduled events',

@@ -11,7 +11,12 @@
 * and manages the layout of the charts in the containing div.
 */

-import _ from 'lodash';
+import get from 'lodash/get';
+import each from 'lodash/each';
+import find from 'lodash/find';
+import sortBy from 'lodash/sortBy';
+import map from 'lodash/map';
+import reduce from 'lodash/reduce';

 import { buildConfig } from './explorer_chart_config_builder';
 import { chartLimits, getChartType } from '../../util/chart_utils';
@@ -113,7 +118,7 @@ export const anomalyDataChange = function (
 // If source data can be plotted, use that, otherwise model plot will be available.
 const useSourceData = isSourceDataChartableForDetector(job, detectorIndex);
 if (useSourceData === true) {
-const datafeedQuery = _.get(config, 'datafeedConfig.query', null);
+const datafeedQuery = get(config, 'datafeedConfig.query', null);
 return mlResultsService
 .getMetricData(
 config.datafeedConfig.indices,
@@ -131,8 +136,8 @@ export const anomalyDataChange = function (
 // Extract the partition, by, over fields on which to filter.
 const criteriaFields = [];
 const detector = job.analysis_config.detectors[detectorIndex];
-if (_.has(detector, 'partition_field_name')) {
-const partitionEntity = _.find(entityFields, {
+if (detector.partition_field_name !== undefined) {
+const partitionEntity = find(entityFields, {
 fieldName: detector.partition_field_name,
 });
 if (partitionEntity !== undefined) {
@@ -143,8 +148,8 @@ export const anomalyDataChange = function (
 }
 }

-if (_.has(detector, 'over_field_name')) {
-const overEntity = _.find(entityFields, { fieldName: detector.over_field_name });
+if (detector.over_field_name !== undefined) {
+const overEntity = find(entityFields, { fieldName: detector.over_field_name });
 if (overEntity !== undefined) {
 criteriaFields.push(
 { fieldName: 'over_field_name', fieldValue: overEntity.fieldName },
@@ -153,8 +158,8 @@ export const anomalyDataChange = function (
 }
 }

-if (_.has(detector, 'by_field_name')) {
-const byEntity = _.find(entityFields, { fieldName: detector.by_field_name });
+if (detector.by_field_name !== undefined) {
+const byEntity = find(entityFields, { fieldName: detector.by_field_name });
 if (byEntity !== undefined) {
 criteriaFields.push(
 { fieldName: 'by_field_name', fieldValue: byEntity.fieldName },
@@ -236,7 +241,7 @@ export const anomalyDataChange = function (
 filterField = config.entityFields.find((f) => f.fieldType === 'partition');
 }

-const datafeedQuery = _.get(config, 'datafeedConfig.query', null);
+const datafeedQuery = get(config, 'datafeedConfig.query', null);
 return mlResultsService.getEventDistributionData(
 config.datafeedConfig.indices,
 splitField,
@@ -285,7 +290,7 @@ export const anomalyDataChange = function (
 if (eventDistribution.length > 0 && records.length > 0) {
 const filterField = records[0].by_field_value || records[0].over_field_value;
 chartData = eventDistribution.filter((d) => d.entity !== filterField);
-_.map(metricData, (value, time) => {
+map(metricData, (value, time) => {
 // The filtering for rare/event_distribution charts needs to be handled
 // differently because of how the source data is structured.
 // For rare chart values we are only interested wether a value is either `0` or not,
@@ -304,7 +309,7 @@ export const anomalyDataChange = function (
 }
 });
 } else {
-chartData = _.map(metricData, (value, time) => ({
+chartData = map(metricData, (value, time) => ({
 date: +time,
 value: value,
 }));
@@ -314,7 +319,7 @@ export const anomalyDataChange = function (
 // Iterate through the anomaly records, adding anomalyScore properties
 // to the chartData entries for anomalous buckets.
 const chartDataForPointSearch = getChartDataForPointSearch(chartData, records[0], chartType);
-_.each(records, (record) => {
+each(records, (record) => {
 // Look for a chart point with the same time as the record.
 // If none found, insert a point for anomalies due to a gap in the data.
 const recordTime = record[ML_TIME_FIELD_NAME];
@@ -330,13 +335,13 @@ export const anomalyDataChange = function (
 chartPoint.actual = record.actual;
 chartPoint.typical = record.typical;
 } else {
-const causes = _.get(record, 'causes', []);
+const causes = get(record, 'causes', []);
 if (causes.length > 0) {
 chartPoint.byFieldName = record.by_field_name;
 chartPoint.numberOfCauses = causes.length;
 if (causes.length === 1) {
 // If only a single cause, copy actual and typical values to the top level.
-const cause = _.first(record.causes);
+const cause = record.causes[0];
 chartPoint.actual = cause.actual;
 chartPoint.typical = cause.typical;
 }
@@ -351,7 +356,7 @@ export const anomalyDataChange = function (
 // Add a scheduledEvents property to any points in the chart data set
 // which correspond to times of scheduled events for the job.
 if (scheduledEvents !== undefined) {
-_.each(scheduledEvents, (events, time) => {
+each(scheduledEvents, (events, time) => {
 const chartPoint = findChartPointForTime(chartDataForPointSearch, Number(time));
 if (chartPoint !== undefined) {
 // Note if the scheduled event coincides with an absence of the underlying metric data,
@@ -385,10 +390,10 @@ export const anomalyDataChange = function (
 .then((response) => {
 // calculate an overall min/max for all series
 const processedData = response.map(processChartData);
-const allDataPoints = _.reduce(
+const allDataPoints = reduce(
 processedData,
 (datapoints, series) => {
-_.each(series, (d) => datapoints.push(d));
+each(series, (d) => datapoints.push(d));
 return datapoints;
 },
 []
@@ -420,7 +425,7 @@ function processRecordsForDisplay(anomalyRecords) {
 // Aggregate by job, detector, and analysis fields (partition, by, over).
 const aggregatedData = {};
-_.each(anomalyRecords, (record) => {
+each(anomalyRecords, (record) => {
 // Check if we can plot a chart for this record, depending on whether the source data
 // is chartable, and if model plot is enabled for the job.
 const job = mlJobService.getJob(record.job_id);
@@ -524,20 +529,20 @@ function processRecordsForDisplay(anomalyRecords) {
 let recordsForSeries = [];
 // Convert to an array of the records with the highest record_score per unique series.
-_.each(aggregatedData, (detectorsForJob) => {
-_.each(detectorsForJob, (groupsForDetector) => {
+each(aggregatedData, (detectorsForJob) => {
+each(detectorsForJob, (groupsForDetector) => {
 if (groupsForDetector.maxScoreRecord !== undefined) {
 // Detector with no partition / by field.
 recordsForSeries.push(groupsForDetector.maxScoreRecord);
 } else {
-_.each(groupsForDetector, (valuesForGroup) => {
-_.each(valuesForGroup, (dataForGroupValue) => {
+each(groupsForDetector, (valuesForGroup) => {
+each(valuesForGroup, (dataForGroupValue) => {
 if (dataForGroupValue.maxScoreRecord !== undefined) {
 recordsForSeries.push(dataForGroupValue.maxScoreRecord);
 } else {
 // Second level of aggregation for partition and by/over.
-_.each(dataForGroupValue, (splitsForGroup) => {
-_.each(splitsForGroup, (dataForSplitValue) => {
+each(dataForGroupValue, (splitsForGroup) => {
+each(splitsForGroup, (dataForSplitValue) => {
 recordsForSeries.push(dataForSplitValue.maxScoreRecord);
 });
 });
@@ -547,7 +552,7 @@ function processRecordsForDisplay(anomalyRecords) {
 }
 });
 });
-recordsForSeries = _.sortBy(recordsForSeries, 'record_score').reverse();
+recordsForSeries = sortBy(recordsForSeries, 'record_score').reverse();

 return recordsForSeries;
 }
@@ -564,7 +569,7 @@ function calculateChartRange(
 // Calculate the time range for the charts.
 // Fit in as many points in the available container width plotted at the job bucket span.
 const midpointMs = Math.ceil((earliestMs + latestMs) / 2);
-const maxBucketSpanMs = Math.max.apply(null, _.map(seriesConfigs, 'bucketSpanSeconds')) * 1000;
+const maxBucketSpanMs = Math.max.apply(null, map(seriesConfigs, 'bucketSpanSeconds')) * 1000;

 const pointsToPlotFullSelection = Math.ceil((latestMs - earliestMs) / maxBucketSpanMs);
@@ -588,7 +593,7 @@ function calculateChartRange(
 let minMs = recordsToPlot[0][timeFieldName];
 let maxMs = recordsToPlot[0][timeFieldName];

-_.each(recordsToPlot, (record) => {
+each(recordsToPlot, (record) => {
 const diffMs = maxMs - minMs;
 if (diffMs < maxTimeSpan) {
 const recordTime = record[timeFieldName];

@@ -4,7 +4,7 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import cloneDeep from 'lodash/cloneDeep';

 import mockAnomalyChartRecords from './__mocks__/mock_anomaly_chart_records.json';
 import mockDetectorsByJob from './__mocks__/mock_detectors_by_job.json';
@@ -24,10 +24,10 @@ import mockSeriesPromisesResponse from './__mocks__/mock_series_promises_respons
 // suitable responses from the mocked services. The mocked services check against the
 // provided alternative values and return specific modified mock responses for the test case.

-const mockJobConfigClone = _.cloneDeep(mockJobConfig);
+const mockJobConfigClone = cloneDeep(mockJobConfig);

 // adjust mock data to tests against null/0 values
-const mockMetricClone = _.cloneDeep(mockSeriesPromisesResponse[0][0]);
+const mockMetricClone = cloneDeep(mockSeriesPromisesResponse[0][0]);
 mockMetricClone.results['1486712700000'] = null;
 mockMetricClone.results['1486713600000'] = 0;
@@ -127,7 +127,7 @@ describe('explorerChartsContainerService', () => {
 });

 test('filtering should skip values of null', (done) => {
-const mockAnomalyChartRecordsClone = _.cloneDeep(mockAnomalyChartRecords).map((d) => {
+const mockAnomalyChartRecordsClone = cloneDeep(mockAnomalyChartRecords).map((d) => {
 d.job_id = 'mock-job-id-distribution';
 return d;
 });
@@ -151,7 +151,7 @@ describe('explorerChartsContainerService', () => {
 });

 test('field value with trailing dot should not throw an error', (done) => {
-const mockAnomalyChartRecordsClone = _.cloneDeep(mockAnomalyChartRecords);
+const mockAnomalyChartRecordsClone = cloneDeep(mockAnomalyChartRecords);
 mockAnomalyChartRecordsClone[1].partition_field_value = 'AAL.';

 expect(() => {

@@ -10,7 +10,9 @@
 import React from 'react';
 import './_explorer.scss';
-import _, { isEqual } from 'lodash';
+import isEqual from 'lodash/isEqual';
+import uniq from 'lodash/uniq';
+import get from 'lodash/get';
 import d3 from 'd3';
 import moment from 'moment';
 import DragSelect from 'dragselect';
@@ -176,9 +178,9 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
 }
 );

-selectedData.laneLabels = _.uniq(selectedData.laneLabels);
-selectedData.times = _.uniq(selectedData.times);
-if (_.isEqual(selectedData, previousSelectedData) === false) {
+selectedData.laneLabels = uniq(selectedData.laneLabels);
+selectedData.times = uniq(selectedData.times);
+if (isEqual(selectedData, previousSelectedData) === false) {
 // If no cells containing anomalies have been selected,
 // immediately clear the selection, otherwise trigger
 // a reload with the updated selected cells.
@@ -246,7 +248,7 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
 selectedTimes: d3.extent(times),
 };

-if (_.isEqual(oldSelection, newSelection)) {
+if (isEqual(oldSelection, newSelection)) {
 triggerNewSelection = false;
 }
@@ -277,8 +279,8 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
 // Check for selection and reselect the corresponding swimlane cell
 // if the time range and lane label are still in view.
 const selectionState = selection;
-const selectedType = _.get(selectionState, 'type', undefined);
-const selectionViewByFieldName = _.get(selectionState, 'viewByFieldName', '');
+const selectedType = get(selectionState, 'type', undefined);
+const selectionViewByFieldName = get(selectionState, 'viewByFieldName', '');

 // If a selection was done in the other swimlane, add the "masked" classes
 // to de-emphasize the swimlane cells.
@@ -288,8 +290,8 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
 }

 const cellsToSelect: Node[] = [];
-const selectedLanes = _.get(selectionState, 'lanes', []);
-const selectedTimes = _.get(selectionState, 'times', []);
+const selectedLanes = get(selectionState, 'lanes', []);
+const selectedTimes = get(selectionState, 'times', []);
 const selectedTimeExtent = d3.extent(selectedTimes);

 if (

@@ -6,7 +6,9 @@
 // Service for carrying out requests to run ML forecasts and to obtain
 // data on forecasts that have been performed.
-import _ from 'lodash';
+import get from 'lodash/get';
+import find from 'lodash/find';
+import each from 'lodash/each';
 import { map } from 'rxjs/operators';

 import { ml } from './ml_api_service';
@@ -129,8 +131,8 @@ function getForecastDateRange(job, forecastId) {
 },
 })
 .then((resp) => {
-obj.earliest = _.get(resp, 'aggregations.earliest.value', null);
-obj.latest = _.get(resp, 'aggregations.latest.value', null);
+obj.earliest = get(resp, 'aggregations.earliest.value', null);
+obj.latest = get(resp, 'aggregations.latest.value', null);
 if (obj.earliest === null || obj.latest === null) {
 reject(resp);
 } else {
@@ -157,8 +159,8 @@ function getForecastData(
 // Extract the partition, by, over fields on which to filter.
 const criteriaFields = [];
 const detector = job.analysis_config.detectors[detectorIndex];
-if (_.has(detector, 'partition_field_name')) {
-const partitionEntity = _.find(entityFields, { fieldName: detector.partition_field_name });
+if (detector.partition_field_name !== undefined) {
+const partitionEntity = find(entityFields, { fieldName: detector.partition_field_name });
 if (partitionEntity !== undefined) {
 criteriaFields.push(
 { fieldName: 'partition_field_name', fieldValue: partitionEntity.fieldName },
@@ -167,8 +169,8 @@ function getForecastData(
 }
 }

-if (_.has(detector, 'over_field_name')) {
-const overEntity = _.find(entityFields, { fieldName: detector.over_field_name });
+if (detector.over_field_name !== undefined) {
+const overEntity = find(entityFields, { fieldName: detector.over_field_name });
 if (overEntity !== undefined) {
 criteriaFields.push(
 { fieldName: 'over_field_name', fieldValue: overEntity.fieldName },
@@ -177,8 +179,8 @@ function getForecastData(
 }
 }

-if (_.has(detector, 'by_field_name')) {
-const byEntity = _.find(entityFields, { fieldName: detector.by_field_name });
+if (detector.by_field_name !== undefined) {
+const byEntity = find(entityFields, { fieldName: detector.by_field_name });
 if (byEntity !== undefined) {
 criteriaFields.push(
 { fieldName: 'by_field_name', fieldValue: byEntity.fieldName },
@@ -222,7 +224,7 @@ function getForecastData(
 ];

 // Add in term queries for each of the specified criteria.
-_.each(criteriaFields, (criteria) => {
+each(criteriaFields, (criteria) => {
 filterCriteria.push({
 term: {
 [criteria.fieldName]: criteria.fieldValue,
@@ -281,13 +283,13 @@ function getForecastData(
 })
 .pipe(
 map((resp) => {
-const aggregationsByTime = _.get(resp, ['aggregations', 'times', 'buckets'], []);
-_.each(aggregationsByTime, (dataForTime) => {
+const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
+each(aggregationsByTime, (dataForTime) => {
 const time = dataForTime.key;
 obj.results[time] = {
-prediction: _.get(dataForTime, ['prediction', 'value']),
-forecastUpper: _.get(dataForTime, ['forecastUpper', 'value']),
-forecastLower: _.get(dataForTime, ['forecastLower', 'value']),
+prediction: get(dataForTime, ['prediction', 'value']),
+forecastUpper: get(dataForTime, ['forecastUpper', 'value']),
+forecastLower: get(dataForTime, ['forecastLower', 'value']),
 };
 });
@@ -355,7 +357,7 @@ function getForecastRequestStats(job, forecastId) {
 })
 .then((resp) => {
 if (resp.hits.total !== 0) {
-obj.stats = _.first(resp.hits.hits)._source;
+obj.stats = resp.hits.hits[0]._source;
 }
 resolve(obj);
 })

@@ -4,7 +4,11 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import cloneDeep from 'lodash/cloneDeep';
+import each from 'lodash/each';
+import find from 'lodash/find';
+import get from 'lodash/get';
+import isNumber from 'lodash/isNumber';
 import moment from 'moment';
 import { i18n } from '@kbn/i18n';
@@ -135,10 +139,10 @@ class JobService {
 const jobStats = statsResp.jobs[j];
 if (job.job_id === jobStats.job_id) {
 job.state = jobStats.state;
-job.data_counts = _.cloneDeep(jobStats.data_counts);
-job.model_size_stats = _.cloneDeep(jobStats.model_size_stats);
+job.data_counts = cloneDeep(jobStats.data_counts);
+job.model_size_stats = cloneDeep(jobStats.model_size_stats);
 if (jobStats.node) {
-job.node = _.cloneDeep(jobStats.node);
+job.node = cloneDeep(jobStats.node);
 }
 if (jobStats.open_time) {
 job.open_time = jobStats.open_time;
@@ -212,10 +216,10 @@ class JobService {
 newJob.state = statsJob.state;
 newJob.data_counts = {};
 newJob.model_size_stats = {};
-newJob.data_counts = _.cloneDeep(statsJob.data_counts);
-newJob.model_size_stats = _.cloneDeep(statsJob.model_size_stats);
+newJob.data_counts = cloneDeep(statsJob.data_counts);
+newJob.model_size_stats = cloneDeep(statsJob.model_size_stats);
 if (newJob.node) {
-newJob.node = _.cloneDeep(statsJob.node);
+newJob.node = cloneDeep(statsJob.node);
 }

 if (statsJob.open_time) {
@@ -352,7 +356,7 @@ class JobService {
 // create a deep copy of a job object
 // also remove items from the job which are set by the server and not needed
 // in the future this formatting could be optional
-const tempJob = _.cloneDeep(job);
+const tempJob = cloneDeep(job);

 // remove all of the items which should not be copied
 // such as counts, state and times
@@ -375,7 +379,7 @@ class JobService {
 delete tempJob.analysis_config.use_per_partition_normalization;

-_.each(tempJob.analysis_config.detectors, (d) => {
+each(tempJob.analysis_config.detectors, (d) => {
 delete d.detector_index;
 });
@@ -469,7 +473,7 @@ class JobService {
 // find a job based on the id
 getJob(jobId) {
-const job = _.find(jobs, (j) => {
+const job = find(jobs, (j) => {
 return j.job_id === jobId;
 });
@@ -550,7 +554,7 @@ class JobService {
 // get fields from detectors
 if (job.analysis_config.detectors) {
-_.each(job.analysis_config.detectors, (dtr) => {
+each(job.analysis_config.detectors, (dtr) => {
 if (dtr.by_field_name) {
 fields[dtr.by_field_name] = {};
 }
@@ -568,7 +572,7 @@ class JobService {
 // get fields from influencers
 if (job.analysis_config.influencers) {
-_.each(job.analysis_config.influencers, (inf) => {
+each(job.analysis_config.influencers, (inf) => {
 fields[inf] = {};
 });
 }
@@ -659,7 +663,7 @@ class JobService {
 return new Promise((resolve, reject) => {
 // if the end timestamp is a number, add one ms to it to make it
 // inclusive of the end of the data
-if (_.isNumber(end)) {
+if (isNumber(end)) {
 end++;
 }
@@ -780,7 +784,7 @@ class JobService {
 });
 }
 });
-_.each(tempGroups, (js, id) => {
+each(tempGroups, (js, id) => {
 groups.push({ id, jobs: js });
 });
 return groups;
@@ -837,9 +841,9 @@ function processBasicJobInfo(localJobService, jobsList) {
 const customUrlsByJob = {};

 // use cloned copy of jobs list so not to alter the original
-const jobsListCopy = _.cloneDeep(jobsList);
+const jobsListCopy = cloneDeep(jobsList);

-_.each(jobsListCopy, (jobObj) => {
+each(jobsListCopy, (jobObj) => {
 const analysisConfig = jobObj.analysis_config;
 const bucketSpan = parseInterval(analysisConfig.bucket_span);
@@ -848,20 +852,20 @@ function processBasicJobInfo(localJobService, jobsList) {
 bucketSpanSeconds: bucketSpan.asSeconds(),
 };

-if (_.has(jobObj, 'description') && /^\s*$/.test(jobObj.description) === false) {
+if (jobObj.description !== undefined && /^\s*$/.test(jobObj.description) === false) {
 job.description = jobObj.description;
 } else {
 // Just use the id as the description.
 job.description = jobObj.job_id;
 }

-job.detectors = _.get(analysisConfig, 'detectors', []);
+job.detectors = get(analysisConfig, 'detectors', []);
 detectorsByJob[job.id] = job.detectors;

-if (_.has(jobObj, 'custom_settings.custom_urls')) {
+if (jobObj.custom_settings !== undefined && jobObj.custom_settings.custom_urls !== undefined) {
 job.customUrls = [];
-_.each(jobObj.custom_settings.custom_urls, (url) => {
-if (_.has(url, 'url_name') && _.has(url, 'url_value') && isWebUrl(url.url_value)) {
+each(jobObj.custom_settings.custom_urls, (url) => {
+if (url.url_name !== undefined && url.url_value !== undefined && isWebUrl(url.url_value)) {
 // Only make web URLs (i.e. http or https) available in dashboard drilldowns.
 job.customUrls.push(url);
 }
@@ -897,7 +901,7 @@ function createJobStats(jobsList, jobStats) {
 const mlNodes = {};
 let failedJobs = 0;

-_.each(jobsList, (job) => {
+each(jobsList, (job) => {
 if (job.state === 'opened') {
 jobStats.open.value++;
 } else if (job.state === 'closed') {

@@ -4,7 +4,7 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import each from 'lodash/each';

 import { ml } from './ml_api_service';
@@ -16,8 +16,8 @@ export function getFieldTypeFromMapping(index, fieldName) {
 ml.getFieldCaps({ index, fields: [fieldName] })
 .then((resp) => {
 let fieldType = '';
-_.each(resp.fields, (field) => {
-_.each(field, (type) => {
+each(resp.fields, (field) => {
+each(field, (type) => {
 if (fieldType === '') {
 fieldType = type.type;
 }

@@ -13,7 +13,8 @@
 // Returned response contains a results property containing the requested aggregation.
 import { Observable } from 'rxjs';
 import { map } from 'rxjs/operators';
-import _ from 'lodash';
+import each from 'lodash/each';
+import get from 'lodash/get';
 import { Dictionary } from '../../../../common/types/common';
 import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
 import { JobId } from '../../../../common/types/anomaly_detection_jobs';
@@ -237,7 +238,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 ];

 // Add in term queries for each of the specified criteria.
-_.each(criteriaFields, (criteria) => {
+each(criteriaFields, (criteria) => {
 mustCriteria.push({
 term: {
 [criteria.fieldName]: criteria.fieldValue,
@@ -316,12 +317,12 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 })
 .pipe(
 map((resp) => {
-const aggregationsByTime = _.get(resp, ['aggregations', 'times', 'buckets'], []);
-_.each(aggregationsByTime, (dataForTime: any) => {
+const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
+each(aggregationsByTime, (dataForTime: any) => {
 const time = dataForTime.key;
-const modelUpper: number | undefined = _.get(dataForTime, ['modelUpper', 'value']);
-const modelLower: number | undefined = _.get(dataForTime, ['modelLower', 'value']);
-const actual = _.get(dataForTime, ['actual', 'value']);
+const modelUpper: number | undefined = get(dataForTime, ['modelUpper', 'value']);
+const modelLower: number | undefined = get(dataForTime, ['modelLower', 'value']);
+const actual = get(dataForTime, ['actual', 'value']);

 obj.results[time] = {
 actual,
@@ -375,7 +376,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -391,7 +392,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 }

 // Add in term queries for each of the specified criteria.
-_.each(criteriaFields, (criteria) => {
+each(criteriaFields, (criteria) => {
 boolCriteria.push({
 term: {
 [criteria.fieldName]: criteria.fieldValue,
@@ -428,7 +429,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 .pipe(
 map((resp) => {
 if (resp.hits.total !== 0) {
-_.each(resp.hits.hits, (hit: any) => {
+each(resp.hits.hits, (hit: any) => {
 obj.records.push(hit._source);
 });
 }
@@ -473,7 +474,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 jobIdFilterStr += `${i > 0 ? ' OR ' : ''}job_id:${jobId}`;
 });
 boolCriteria.push({
@@ -536,15 +537,15 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
 })
 .pipe(
 map((resp) => {
-const dataByJobId = _.get(resp, ['aggregations', 'jobs', 'buckets'], []);
-_.each(dataByJobId, (dataForJob: any) => {
+const dataByJobId = get(resp, ['aggregations', 'jobs', 'buckets'], []);
+each(dataByJobId, (dataForJob: any) => {
 const jobId: string = dataForJob.key;
 const resultsForTime: Record<string, any> = {};
-const dataByTime = _.get(dataForJob, ['times', 'buckets'], []);
-_.each(dataByTime, (dataForTime: any) => {
+const dataByTime = get(dataForJob, ['times', 'buckets'], []);
+each(dataByTime, (dataForTime: any) => {
 const time: string = dataForTime.key;
-const events: object[] = _.get(dataForTime, ['events', 'buckets']);
-resultsForTime[time] = _.map(events, 'key');
+const events: any[] = get(dataForTime, ['events', 'buckets']);
+resultsForTime[time] = events.map((e) => e.key);
 });
 obj.events[jobId] = resultsForTime;
 });

@@ -4,7 +4,8 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import _ from 'lodash';
+import each from 'lodash/each';
+import get from 'lodash/get';

 import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
 import { escapeForElasticsearchQuery } from '../../util/string_utils';
@@ -50,7 +51,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -131,18 +132,18 @@ export function resultsServiceProvider(mlApiServices) {
 },
 })
 .then((resp) => {
-const dataByJobId = _.get(resp, ['aggregations', 'jobId', 'buckets'], []);
-_.each(dataByJobId, (dataForJob) => {
+const dataByJobId = get(resp, ['aggregations', 'jobId', 'buckets'], []);
+each(dataByJobId, (dataForJob) => {
 const jobId = dataForJob.key;

 const resultsForTime = {};

-const dataByTime = _.get(dataForJob, ['byTime', 'buckets'], []);
-_.each(dataByTime, (dataForTime) => {
-const value = _.get(dataForTime, ['anomalyScore', 'value']);
+const dataByTime = get(dataForJob, ['byTime', 'buckets'], []);
+each(dataByTime, (dataForTime) => {
+const value = get(dataForTime, ['anomalyScore', 'value']);
 if (value !== undefined) {
 const time = dataForTime.key;
-resultsForTime[time] = _.get(dataForTime, ['anomalyScore', 'value']);
+resultsForTime[time] = get(dataForTime, ['anomalyScore', 'value']);
 }
 });
 obj.results[jobId] = resultsForTime;
@@ -198,7 +199,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -305,17 +306,17 @@ export function resultsServiceProvider(mlApiServices) {
 },
 })
 .then((resp) => {
-const fieldNameBuckets = _.get(
+const fieldNameBuckets = get(
 resp,
 ['aggregations', 'influencerFieldNames', 'buckets'],
 []
 );
-_.each(fieldNameBuckets, (nameBucket) => {
+each(fieldNameBuckets, (nameBucket) => {
 const fieldName = nameBucket.key;
 const fieldValues = [];

-const fieldValueBuckets = _.get(nameBucket, ['influencerFieldValues', 'buckets'], []);
-_.each(fieldValueBuckets, (valueBucket) => {
+const fieldValueBuckets = get(nameBucket, ['influencerFieldValues', 'buckets'], []);
+each(fieldValueBuckets, (valueBucket) => {
 const fieldValueResult = {
 influencerFieldValue: valueBucket.key,
 maxAnomalyScore: valueBucket.maxAnomalyScore.value,
@@ -360,7 +361,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -424,8 +425,8 @@ export function resultsServiceProvider(mlApiServices) {
 },
 })
 .then((resp) => {
-const buckets = _.get(resp, ['aggregations', 'influencerFieldValues', 'buckets'], []);
-_.each(buckets, (bucket) => {
+const buckets = get(resp, ['aggregations', 'influencerFieldValues', 'buckets'], []);
+each(buckets, (bucket) => {
 const result = {
 influencerFieldValue: bucket.key,
 maxAnomalyScore: bucket.maxAnomalyScore.value,
@@ -458,9 +459,9 @@ export function resultsServiceProvider(mlApiServices) {
 end: latestMs,
 })
 .then((resp) => {
-const dataByTime = _.get(resp, ['overall_buckets'], []);
-_.each(dataByTime, (dataForTime) => {
-const value = _.get(dataForTime, ['overall_score']);
+const dataByTime = get(resp, ['overall_buckets'], []);
+each(dataByTime, (dataForTime) => {
+const value = get(dataForTime, ['overall_score']);
 if (value !== undefined) {
 obj.results[dataForTime.timestamp] = value;
 }
@@ -517,7 +518,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -537,7 +538,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (influencerFieldValues && influencerFieldValues.length > 0) {
 let influencerFilterStr = '';
-_.each(influencerFieldValues, (value, i) => {
+each(influencerFieldValues, (value, i) => {
 if (i > 0) {
 influencerFilterStr += ' OR ';
 }
@@ -625,17 +626,17 @@ export function resultsServiceProvider(mlApiServices) {
 },
 })
 .then((resp) => {
-const fieldValueBuckets = _.get(
+const fieldValueBuckets = get(
 resp,
 ['aggregations', 'influencerFieldValues', 'buckets'],
 []
 );
-_.each(fieldValueBuckets, (valueBucket) => {
+each(fieldValueBuckets, (valueBucket) => {
 const fieldValue = valueBucket.key;
 const fieldValues = {};

-const timeBuckets = _.get(valueBucket, ['byTime', 'buckets'], []);
-_.each(timeBuckets, (timeBucket) => {
+const timeBuckets = get(valueBucket, ['byTime', 'buckets'], []);
+each(timeBuckets, (timeBucket) => {
 const time = timeBucket.key;
 const score = timeBucket.maxAnomalyScore.value;
 fieldValues[time] = score;
@@ -701,7 +702,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -744,7 +745,7 @@ export function resultsServiceProvider(mlApiServices) {
 })
 .then((resp) => {
 if (resp.hits.total !== 0) {
-_.each(resp.hits.hits, (hit) => {
+each(resp.hits.hits, (hit) => {
 obj.records.push(hit._source);
 });
 }
@@ -797,7 +798,7 @@ export function resultsServiceProvider(mlApiServices) {
 if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
 let jobIdFilterStr = '';
-_.each(jobIds, (jobId, i) => {
+each(jobIds, (jobId, i) => {
 if (i > 0) {
 jobIdFilterStr += ' OR ';
 }
@@ -875,7 +876,7 @@ export function resultsServiceProvider(mlApiServices) {
 })
 .then((resp) => {
 if (resp.hits.total !== 0) {
-_.each(resp.hits.hits, (hit) => {
+each(resp.hits.hits, (hit) => {
 obj.records.push(hit._source);
 });
 }
@@ -1000,7 +1001,7 @@ export function resultsServiceProvider(mlApiServices) {
 })
 .then((resp) => {
 if (resp.hits.total !== 0) {
-_.each(resp.hits.hits, (hit) => {
+each(resp.hits.hits, (hit) => {
 obj.records.push(hit._source);
 });
 }
@@ -1079,8 +1080,8 @@ export function resultsServiceProvider(mlApiServices) {
 },
 })
 .then((resp) => {
-const dataByTimeBucket = _.get(resp, ['aggregations', 'eventRate', 'buckets'], []);
-_.each(dataByTimeBucket, (dataForTime) => {
+const dataByTimeBucket = get(resp, ['aggregations', 'eventRate', 'buckets'], []);
+each(dataByTimeBucket, (dataForTime) => {
 const time = dataForTime.key;
 obj.results[time] = dataForTime.doc_count;
 });
@@ -1227,18 +1228,18 @@ export function resultsServiceProvider(mlApiServices) {
 // Because of the sampling, results of metricFunctions which use sum or count
 // can be significantly skewed. Taking into account totalHits we calculate a
 // a factor to normalize results for these metricFunctions.
-const totalHits = _.get(resp, ['hits', 'total'], 0);
-const successfulShards = _.get(resp, ['_shards', 'successful'], 0);
+const totalHits = get(resp, ['hits', 'total'], 0);
+const successfulShards = get(resp, ['_shards', 'successful'], 0);

 let normalizeFactor = 1;
 if (totalHits > successfulShards * SAMPLER_TOP_TERMS_SHARD_SIZE) {
 normalizeFactor = totalHits / (successfulShards * SAMPLER_TOP_TERMS_SHARD_SIZE);
 }

-const dataByTime = _.get(resp, ['aggregations', 'sample', 'byTime', 'buckets'], []);
+const dataByTime = get(resp, ['aggregations', 'sample', 'byTime', 'buckets'], []);
 const data = dataByTime.reduce((d, dataForTime) => {
 const date = +dataForTime.key;
-const entities = _.get(dataForTime, ['entities', 'buckets'], []);
+const entities = get(dataForTime, ['entities', 'buckets'], []);
 entities.forEach((entity) => {
 let value = metricFunction === 'count' ? entity.doc_count : entity.metric.value;
@@ -1291,7 +1292,7 @@ export function resultsServiceProvider(mlApiServices) {
 { term: { job_id: jobId } },
 ];

-_.each(criteriaFields, (criteria) => {
+each(criteriaFields, (criteria) => {
 mustCriteria.push({
 term: {
 [criteria.fieldName]: criteria.fieldValue,
@@ -1339,11 +1340,11 @@ export function resultsServiceProvider(mlApiServices) {
 },
 })
 .then((resp) => {
-const aggregationsByTime = _.get(resp, ['aggregations', 'times', 'buckets'], []);
-_.each(aggregationsByTime, (dataForTime) => {
+const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
+each(aggregationsByTime, (dataForTime) => {
 const time = dataForTime.key;
 obj.results[time] = {
-score: _.get(dataForTime, ['recordScore', 'value']),
+score: get(dataForTime, ['recordScore', 'value']),
 };
 });

@@ -9,7 +9,7 @@
 */

 import PropTypes from 'prop-types';
-import _ from 'lodash';
+import get from 'lodash/get';

 import React, { Component } from 'react';
@@ -250,8 +250,8 @@ export class ForecastingModalUI extends Component {
 .getForecastRequestStats(this.props.job, forecastId)
 .then((resp) => {
 // Get the progress (stats value is between 0 and 1).
-const progress = _.get(resp, ['stats', 'forecast_progress'], previousProgress);
-const status = _.get(resp, ['stats', 'forecast_status']);
+const progress = get(resp, ['stats', 'forecast_progress'], previousProgress);
+const status = get(resp, ['stats', 'forecast_status']);

 // The requests for forecast stats can get routed to different shards,
 // and if these operate at different speeds there is a chance that a
@@ -263,7 +263,7 @@ export class ForecastingModalUI extends Component {
 }

 // Display any messages returned in the request stats.
-let messages = _.get(resp, ['stats', 'forecast_messages'], []);
+let messages = get(resp, ['stats', 'forecast_messages'], []);
 messages = messages.map((message) => ({ message, status: MESSAGE_LEVEL.WARNING }));
 this.setState({ messages });

@ -12,9 +12,13 @@
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import useObservable from 'react-use/lib/useObservable';
import _ from 'lodash';
import isEqual from 'lodash/isEqual';
import reduce from 'lodash/reduce';
import each from 'lodash/each';
import get from 'lodash/get';
import d3 from 'd3';
import moment from 'moment';
import { i18n } from '@kbn/i18n';

import {
getSeverityWithLow,

@ -49,8 +53,6 @@ import {
unhighlightFocusChartAnnotation,
} from './timeseries_chart_annotations';

import { i18n } from '@kbn/i18n';

const focusZoomPanelHeight = 25;
const focusChartHeight = 310;
const focusHeight = focusZoomPanelHeight + focusChartHeight;

@ -399,7 +401,7 @@ class TimeseriesChartIntl extends Component {
if (zoomFrom) {
focusLoadFrom = zoomFrom.getTime();
} else {
focusLoadFrom = _.reduce(
focusLoadFrom = reduce(
combinedData,
(memo, point) => Math.min(memo, point.date.getTime()),
new Date(2099, 12, 31).getTime()

@ -410,11 +412,7 @@ class TimeseriesChartIntl extends Component {
if (zoomTo) {
focusLoadTo = zoomTo.getTime();
} else {
focusLoadTo = _.reduce(
combinedData,
(memo, point) => Math.max(memo, point.date.getTime()),
0
);
focusLoadTo = reduce(combinedData, (memo, point) => Math.max(memo, point.date.getTime()), 0);
}
focusLoadTo = Math.min(focusLoadTo, contextXMax);

@ -431,7 +429,7 @@ class TimeseriesChartIntl extends Component {
min: moment(new Date(contextXScaleDomain[0])),
max: moment(contextXScaleDomain[1]),
};
if (!_.isEqual(newSelectedBounds, this.selectedBounds)) {
if (!isEqual(newSelectedBounds, this.selectedBounds)) {
this.selectedBounds = newSelectedBounds;
this.setContextBrushExtent(
new Date(contextXScaleDomain[0]),

@ -764,7 +762,7 @@ class TimeseriesChartIntl extends Component {
})
.attr('class', (d) => {
let markerClass = 'metric-value';
if (_.has(d, 'anomalyScore')) {
if (d.anomalyScore !== undefined) {
markerClass += ` anomaly-marker ${getSeverityWithLow(d.anomalyScore).id}`;
}
return markerClass;

@ -887,14 +885,14 @@ class TimeseriesChartIntl extends Component {
);

const zoomOptions = [{ durationMs: autoZoomDuration, label: 'auto' }];
_.each(ZOOM_INTERVAL_OPTIONS, (option) => {
each(ZOOM_INTERVAL_OPTIONS, (option) => {
if (option.duration.asSeconds() > minSecs && option.duration.asSeconds() < boundsSecs) {
zoomOptions.push({ durationMs: option.duration.asMilliseconds(), label: option.label });
}
});
xPos += zoomLabel.node().getBBox().width + 4;

_.each(zoomOptions, (option) => {
each(zoomOptions, (option) => {
const text = zoomGroup
.append('a')
.attr('data-ms', option.durationMs)

@ -960,7 +958,7 @@ class TimeseriesChartIntl extends Component {
const combinedData =
contextForecastData === undefined ? data : data.concat(contextForecastData);
const valuesRange = { min: Number.MAX_VALUE, max: Number.MIN_VALUE };
_.each(combinedData, (item) => {
each(combinedData, (item) => {
valuesRange.min = Math.min(item.value, valuesRange.min);
valuesRange.max = Math.max(item.value, valuesRange.max);
});

@ -973,7 +971,7 @@ class TimeseriesChartIntl extends Component {
(contextForecastData !== undefined && contextForecastData.length > 0)
) {
const boundsRange = { min: Number.MAX_VALUE, max: Number.MIN_VALUE };
_.each(combinedData, (item) => {
each(combinedData, (item) => {
boundsRange.min = Math.min(item.lower, boundsRange.min);
boundsRange.max = Math.max(item.upper, boundsRange.max);
});

@ -1294,7 +1292,7 @@ class TimeseriesChartIntl extends Component {
if (swimlaneData !== undefined && swimlaneData.length > 0) {
// Adjust the earliest back to the time of the first swimlane point
// if this is before the time filter minimum.
earliest = Math.min(_.first(swimlaneData).date.getTime(), bounds.min.valueOf());
earliest = Math.min(swimlaneData[0].date.getTime(), bounds.min.valueOf());
}

const contextAggMs = contextAggregationInterval.asMilliseconds();

@ -1352,7 +1350,7 @@ class TimeseriesChartIntl extends Component {
const formattedDate = formatHumanReadableDateTimeSeconds(marker.date);
const tooltipData = [{ label: formattedDate }];

if (_.has(marker, 'anomalyScore')) {
if (marker.anomalyScore !== undefined) {
const score = parseInt(marker.anomalyScore);
const displayScore = score > 0 ? score : '< 1';
tooltipData.push({

@ -1387,7 +1385,7 @@ class TimeseriesChartIntl extends Component {
// Show actual/typical when available except for rare detectors.
// Rare detectors always have 1 as actual and the probability as typical.
// Exposing those values in the tooltip with actual/typical labels might irritate users.
if (_.has(marker, 'actual') && marker.function !== 'rare') {
if (marker.actual !== undefined && marker.function !== 'rare') {
// Display the record actual in preference to the chart value, which may be
// different depending on the aggregation interval of the chart.
tooltipData.push({

@ -1421,7 +1419,7 @@ class TimeseriesChartIntl extends Component {
},
valueAccessor: 'value',
});
if (_.has(marker, 'byFieldName') && _.has(marker, 'numberOfCauses')) {
if (marker.byFieldName !== undefined && marker.numberOfCauses !== undefined) {
const numberOfCauses = marker.numberOfCauses;
// If numberOfCauses === 1, won't go into this block as actual/typical copied to top level fields.
const byFieldName = mlEscape(marker.byFieldName);

@ -1488,7 +1486,7 @@ class TimeseriesChartIntl extends Component {
}
} else {
// TODO - need better formatting for small decimals.
if (_.get(marker, 'isForecast', false) === true) {
if (get(marker, 'isForecast', false) === true) {
tooltipData.push({
label: i18n.translate(
'xpack.ml.timeSeriesExplorer.timeSeriesChart.withoutAnomalyScore.predictionLabel',

@ -1548,7 +1546,7 @@ class TimeseriesChartIntl extends Component {
}
}

if (_.has(marker, 'scheduledEvents')) {
if (marker.scheduledEvents !== undefined) {
marker.scheduledEvents.forEach((scheduledEvent, i) => {
tooltipData.push({
label: i18n.translate(

@ -1569,7 +1567,7 @@ class TimeseriesChartIntl extends Component {
});
}

if (_.has(marker, 'annotation')) {
if (marker.annotation !== undefined) {
tooltipData.length = 0;
// header
tooltipData.push({
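Several hunks above also swap _.has(obj, 'prop') for a plain obj.prop !== undefined check. A hedged note on the difference, using a hypothetical marker object: lodash has() reports whether the key exists on the object even when its value is undefined, whereas the property check treats an explicitly-undefined value as absent. For these chart markers the two are assumed to be interchangeable because the fields are only ever set to real values.

// hypothetical marker, not taken from the diff
const marker = { anomalyScore: 75 };

// lodash: true only if the key is present on the object
// _.has(marker, 'anomalyScore');

// replacement used in this commit: false if the key is missing
// or if it was explicitly set to undefined
if (marker.anomalyScore !== undefined) {
  // render the anomaly marker
}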
@ -4,7 +4,10 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import each from 'lodash/each';
import find from 'lodash/find';
import get from 'lodash/get';
import filter from 'lodash/filter';

import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';

@ -35,8 +38,8 @@ function getMetricData(
// Extract the partition, by, over fields on which to filter.
const criteriaFields = [];
const detector = job.analysis_config.detectors[detectorIndex];
if (_.has(detector, 'partition_field_name')) {
const partitionEntity: any = _.find(entityFields, {
if (detector.partition_field_name !== undefined) {
const partitionEntity: any = find(entityFields, {
fieldName: detector.partition_field_name,
});
if (partitionEntity !== undefined) {

@ -47,8 +50,8 @@ function getMetricData(
}
}

if (_.has(detector, 'over_field_name')) {
const overEntity: any = _.find(entityFields, { fieldName: detector.over_field_name });
if (detector.over_field_name !== undefined) {
const overEntity: any = find(entityFields, { fieldName: detector.over_field_name });
if (overEntity !== undefined) {
criteriaFields.push(
{ fieldName: 'over_field_name', fieldValue: overEntity.fieldName },

@ -57,8 +60,8 @@ function getMetricData(
}
}

if (_.has(detector, 'by_field_name')) {
const byEntity: any = _.find(entityFields, { fieldName: detector.by_field_name });
if (detector.by_field_name !== undefined) {
const byEntity: any = find(entityFields, { fieldName: detector.by_field_name });
if (byEntity !== undefined) {
criteriaFields.push(
{ fieldName: 'by_field_name', fieldValue: byEntity.fieldName },

@ -97,7 +100,7 @@ function getMetricData(
)
.pipe(
map((resp) => {
_.each(resp.results, (value, time) => {
each(resp.results, (value, time) => {
// @ts-ignore
obj.results[time] = {
actual: value,

@ -134,7 +137,7 @@ function getChartDetails(
}
obj.results.functionLabel = functionLabel;

const blankEntityFields = _.filter(entityFields, (entity) => {
const blankEntityFields = filter(entityFields, (entity) => {
return entity.fieldValue === null;
});

@ -145,7 +148,7 @@ function getChartDetails(
obj.results.entityData.entities = entityFields;
resolve(obj);
} else {
const entityFieldNames: string[] = _.map(blankEntityFields, 'fieldName');
const entityFieldNames: string[] = blankEntityFields.map((f) => f.fieldName);
ml.getCardinalityOfFields({
index: chartConfig.datafeedConfig.indices,
fieldNames: entityFieldNames,

@ -155,12 +158,12 @@ function getChartDetails(
latestMs,
})
.then((results: any) => {
_.each(blankEntityFields, (field) => {
each(blankEntityFields, (field) => {
// results will not contain keys for non-aggregatable fields,
// so store as 0 to indicate over all field values.
obj.results.entityData.entities.push({
fieldName: field.fieldName,
cardinality: _.get(results, field.fieldName, 0),
cardinality: get(results, field.fieldName, 0),
});
});
@ -10,7 +10,9 @@
* Viewer dashboard.
*/

import _ from 'lodash';
import each from 'lodash/each';
import get from 'lodash/get';
import find from 'lodash/find';
import moment from 'moment-timezone';

import { isTimeSeriesViewJob } from '../../../../common/util/job_utils';

@ -41,7 +43,7 @@ export function createTimeSeriesJobData(jobs) {
export function processMetricPlotResults(metricPlotData, modelPlotEnabled) {
const metricPlotChartData = [];
if (modelPlotEnabled === true) {
_.each(metricPlotData, (dataForTime, time) => {
each(metricPlotData, (dataForTime, time) => {
metricPlotChartData.push({
date: new Date(+time),
lower: dataForTime.modelLower,

@ -50,7 +52,7 @@ export function processMetricPlotResults(metricPlotData, modelPlotEnabled) {
});
});
} else {
_.each(metricPlotData, (dataForTime, time) => {
each(metricPlotData, (dataForTime, time) => {
metricPlotChartData.push({
date: new Date(+time),
value: dataForTime.actual,

@ -66,7 +68,7 @@ export function processMetricPlotResults(metricPlotData, modelPlotEnabled) {
// value, lower and upper keys.
export function processForecastResults(forecastData) {
const forecastPlotChartData = [];
_.each(forecastData, (dataForTime, time) => {
each(forecastData, (dataForTime, time) => {
forecastPlotChartData.push({
date: new Date(+time),
isForecast: true,

@ -83,7 +85,7 @@ export function processForecastResults(forecastData) {
// i.e. array of Objects with keys date (JavaScript date) and score.
export function processRecordScoreResults(scoreData) {
const bucketScoreData = [];
_.each(scoreData, (dataForTime, time) => {
each(scoreData, (dataForTime, time) => {
bucketScoreData.push({
date: new Date(+time),
score: dataForTime.score,

@ -153,7 +155,7 @@ export function processDataForFocusAnomalies(
chartPoint.anomalyScore = recordScore;
chartPoint.function = record.function;

if (_.has(record, 'actual')) {
if (record.actual !== undefined) {
// If cannot match chart point for anomaly time
// substitute the value with the record's actual so it won't plot as null/0
if (chartPoint.value === null) {

@ -163,13 +165,13 @@ export function processDataForFocusAnomalies(
chartPoint.actual = record.actual;
chartPoint.typical = record.typical;
} else {
const causes = _.get(record, 'causes', []);
const causes = get(record, 'causes', []);
if (causes.length > 0) {
chartPoint.byFieldName = record.by_field_name;
chartPoint.numberOfCauses = causes.length;
if (causes.length === 1) {
// If only a single cause, copy actual and typical values to the top level.
const cause = _.first(record.causes);
const cause = record.causes[0];
chartPoint.actual = cause.actual;
chartPoint.typical = cause.typical;
// substitute the value with the record's actual so it won't plot as null/0

@ -180,7 +182,7 @@ export function processDataForFocusAnomalies(
}
}

if (_.has(record, 'multi_bucket_impact')) {
if (record.multi_bucket_impact !== undefined) {
chartPoint.multiBucketImpact = record.multi_bucket_impact;
}
}

@ -194,7 +196,7 @@ export function processDataForFocusAnomalies(
// which correspond to times of scheduled events for the job.
export function processScheduledEventsForChart(chartData, scheduledEvents) {
if (scheduledEvents !== undefined) {
_.each(scheduledEvents, (events, time) => {
each(scheduledEvents, (events, time) => {
const chartPoint = findNearestChartPointToTime(chartData, time);
if (chartPoint !== undefined) {
// Note if the scheduled event coincides with an absence of the underlying metric data,

@ -301,7 +303,7 @@ export function calculateAggregationInterval(bounds, bucketsTarget, jobs, select

// Ensure the aggregation interval is always a multiple of the bucket span to avoid strange
// behaviour such as adjacent chart buckets holding different numbers of job results.
const bucketSpanSeconds = _.find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds;
const bucketSpanSeconds = find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds;
let aggInterval = buckets.getIntervalToNearestMultiple(bucketSpanSeconds);

// Set the interval back to the job bucket span if the auto interval is smaller.

@ -324,8 +326,8 @@ export function calculateDefaultFocusRange(

const combinedData =
isForecastData === false ? contextChartData : contextChartData.concat(contextForecastData);
const earliestDataDate = _.first(combinedData).date;
const latestDataDate = _.last(combinedData).date;
const earliestDataDate = combinedData[0].date;
const latestDataDate = combinedData[combinedData.length - 1].date;

let rangeEarliestMs;
let rangeLatestMs;

@ -333,8 +335,8 @@ export function calculateDefaultFocusRange(
if (isForecastData === true) {
// Return a range centred on the start of the forecast range, depending
// on the time range of the forecast and data.
const earliestForecastDataDate = _.first(contextForecastData).date;
const latestForecastDataDate = _.last(contextForecastData).date;
const earliestForecastDataDate = contextForecastData[0].date;
const latestForecastDataDate = contextForecastData[contextForecastData.length - 1].date;

rangeLatestMs = Math.min(
earliestForecastDataDate.getTime() + autoZoomDuration / 2,

@ -379,7 +381,7 @@ export function getAutoZoomDuration(jobs, selectedJob) {
// Calculate the 'auto' zoom duration which shows data at bucket span granularity.
// Get the minimum bucket span of selected jobs.
// TODO - only look at jobs for which data has been returned?
const bucketSpanSeconds = _.find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds;
const bucketSpanSeconds = find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds;

// In most cases the duration can be obtained by simply multiplying the points target
// Check that this duration returns the bucket span when run back through the
@ -9,7 +9,7 @@
* in the source metric data.
*/

import _ from 'lodash';
import get from 'lodash/get';

import { mlFunctionToESAggregation } from '../../../common/util/job_utils';

@ -44,15 +44,16 @@ export function buildConfigFromDetector(job, detectorIndex) {
// aggregations/<agg_name>/aggregations/<summaryCountFieldName>/cardinality/field
// or aggs/<agg_name>/aggs/<summaryCountFieldName>/cardinality/field
let cardinalityField = undefined;
const topAgg = _.get(job.datafeed_config, 'aggregations') || _.get(job.datafeed_config, 'aggs');
if (topAgg !== undefined && _.values(topAgg).length > 0) {
const topAgg = get(job.datafeed_config, 'aggregations') || get(job.datafeed_config, 'aggs');
if (topAgg !== undefined && Object.values(topAgg).length > 0) {
cardinalityField =
_.get(_.values(topAgg)[0], [
get(Object.values(topAgg)[0], [
'aggregations',
summaryCountFieldName,
'cardinality',
'field',
]) || _.get(_.values(topAgg)[0], ['aggs', summaryCountFieldName, 'cardinality', 'field']);
]) ||
get(Object.values(topAgg)[0], ['aggs', summaryCountFieldName, 'cardinality', 'field']);
}

if (detector.function === 'non_zero_count' && cardinalityField !== undefined) {
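The hunk above swaps _.values for the native Object.values; for plain objects both return the same array of own enumerable property values. A small sketch with a hypothetical datafeed aggregation object:

// hypothetical aggregation definition, for illustration only
const aggs = { buckets_over_time: { date_histogram: { field: '@timestamp' } } };

// _.values(aggs)[0] and Object.values(aggs)[0] both yield the first aggregation definition
const firstAgg = Object.values(aggs)[0];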
@ -1,36 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

// create a property descriptor for properties
// that won't change
function describeConst(val) {
return {
writable: false,
enumerable: false,
configurable: false,
value: val,
};
}

/**
* Apply inheritance in the legacy `_.class(SubClass).inherits(SuperClass)`
* @param {Function} SubClass class that should inherit SuperClass
* @param {Function} SuperClass
* @return {Function}
*/
export function inherits(SubClass, SuperClass) {
const prototype = Object.create(SuperClass.prototype, {
constructor: describeConst(SubClass),
superConstructor: describeConst(SuperClass),
});

Object.defineProperties(SubClass, {
prototype: describeConst(prototype),
Super: describeConst(SuperClass),
});

return SubClass;
}
@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import isPlainObject from 'lodash/isPlainObject';
import isString from 'lodash/isString';
import ary from 'lodash/ary';
import sortBy from 'lodash/sortBy';
import assign from 'lodash/assign';
import moment from 'moment';
import dateMath from '@elastic/datemath';

@ -80,16 +84,16 @@ TimeBuckets.prototype.setBounds = function (input) {
if (!input) return this.clearBounds();

let bounds;
if (_.isPlainObject(input)) {
if (isPlainObject(input)) {
// accept the response from timefilter.getActiveBounds()
bounds = [input.min, input.max];
} else {
bounds = Array.isArray(input) ? input : [];
}

const moments = _(bounds).map(_.ary(moment, 1)).sortBy(Number);
const moments = sortBy(bounds.map(ary(moment, 1)), Number);

const valid = moments.size() === 2 && moments.every(isValidMoment);
const valid = moments.length === 2 && moments.every(isValidMoment);
if (!valid) {
this.clearBounds();
throw new Error('invalid bounds set: ' + input);

@ -175,7 +179,7 @@ TimeBuckets.prototype.setInterval = function (input) {
return;
}

if (_.isString(interval)) {
if (isString(interval)) {
input = interval;
interval = parseInterval(interval);
if (+interval === 0) {

@ -256,7 +260,7 @@ TimeBuckets.prototype.getInterval = function () {
if (+scaled === +interval) return interval;

decorateInterval(interval, duration);
return _.assign(scaled, {
return assign(scaled, {
preScaled: interval,
scale: interval / scaled,
scaled: true,

@ -287,7 +291,7 @@ TimeBuckets.prototype.getIntervalToNearestMultiple = function (divisorSecs) {
decorateInterval(nearestMultipleInt, this.getDuration());

// Check to see if the new interval is scaled compared to the original.
const preScaled = _.get(interval, 'preScaled');
const preScaled = interval.preScaled;
if (preScaled !== undefined && preScaled < nearestMultipleInt) {
nearestMultipleInt.preScaled = preScaled;
nearestMultipleInt.scale = preScaled / nearestMultipleInt;
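The setBounds hunk above also drops the implicit lodash chain. A sketch of the equivalence under the assumption that bounds is an array of date-like values: the wrapper's .map().sortBy() plus .size() becomes a native Array#map fed into the standalone sortBy, checked with .length.

import ary from 'lodash/ary';
import sortBy from 'lodash/sortBy';
import moment from 'moment';

// hypothetical input, for illustration only
const bounds = ['2020-01-02', '2020-01-01'];

// Previously: _(bounds).map(_.ary(moment, 1)).sortBy(Number) ... moments.size()
// ary(moment, 1) caps moment to one argument so Array#map's extra index/array args are ignored.
const moments = sortBy(bounds.map(ary(moment, 1)), Number);
const valid = moments.length === 2 && moments.every((m) => m.isValid());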
@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import isEmpty from 'lodash/isEmpty';
import { ISavedObjectsRepository } from 'kibana/server';

import { getInternalRepository } from './internal_repository';

@ -58,7 +58,7 @@ export async function updateTelemetry(internalRepo?: ISavedObjectsRepository) {

let telemetry = await getTelemetry(internalRepository);
// Create if doesn't exist
if (telemetry === null || _.isEmpty(telemetry)) {
if (telemetry === null || isEmpty(telemetry)) {
const newTelemetrySavedObject = await internalRepository.create(
TELEMETRY_DOC_ID,
initTelemetry(),
@ -5,7 +5,8 @@
*/

import Boom from 'boom';
import _ from 'lodash';
import each from 'lodash/each';
import get from 'lodash/get';
import { ILegacyScopedClusterClient } from 'kibana/server';

import { ANNOTATION_EVENT_USER, ANNOTATION_TYPE } from '../../../common/constants/annotations';

@ -190,7 +191,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC

if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = '';
_.each(jobIds, (jobId, i: number) => {
each(jobIds, (jobId, i: number) => {
jobIdFilterStr += `${i! > 0 ? ' OR ' : ''}job_id:${jobId}`;
});
boolCriteria.push({

@ -293,7 +294,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
throw new Error(`Annotations couldn't be retrieved from Elasticsearch.`);
}

const docs: Annotations = _.get(resp, ['hits', 'hits'], []).map((d: EsResult) => {
const docs: Annotations = get(resp, ['hits', 'hits'], []).map((d: EsResult) => {
// get the original source document and the document id, we need it
// to identify the annotation when editing/deleting it.
// if original `event` is undefined then substitute with 'user` by default

@ -305,7 +306,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
} as Annotation;
});

const aggregations = _.get(resp, ['aggregations'], {}) as EsAggregationResult;
const aggregations = get(resp, ['aggregations'], {}) as EsAggregationResult;
if (fields) {
obj.aggregations = aggregations;
}
@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import cloneDeep from 'lodash/cloneDeep';
import each from 'lodash/each';
import remove from 'lodash/remove';
import sortBy from 'lodash/sortBy';
import get from 'lodash/get';

import { mlLog } from '../../client/log';

@ -91,7 +95,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
} else {
// loop over partition values
for (let j = 0; j < this.splitFieldValues.length; j++) {
const queryCopy = _.cloneDeep(this.query);
const queryCopy = cloneDeep(this.query);
// add a term to the query to filter on the partition value
queryCopy.bool.must.push({
term: {

@ -151,7 +155,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
}
};

_.each(this.checkers, (check) => {
each(this.checkers, (check) => {
check.check
.run()
.then((interval) => {

@ -174,7 +178,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
}

processResults() {
const allResults = _.map(this.checkers, 'result');
const allResults = this.checkers.map((c) => c.result);

let reducedResults = [];
const numberOfSplitFields = this.splitFieldValues.length || 1;

@ -185,8 +189,8 @@ export function estimateBucketSpanFactory(mlClusterClient) {
const pos = i * numberOfSplitFields;
let resultsSubset = allResults.slice(pos, pos + numberOfSplitFields);
// remove results of tests which have failed
resultsSubset = _.remove(resultsSubset, (res) => res !== null);
resultsSubset = _.sortBy(resultsSubset, (r) => r.ms);
resultsSubset = remove(resultsSubset, (res) => res !== null);
resultsSubset = sortBy(resultsSubset, (r) => r.ms);

const tempMedian = this.findMedian(resultsSubset);
if (tempMedian !== null) {

@ -194,7 +198,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
}
}

reducedResults = _.sortBy(reducedResults, (r) => r.ms);
reducedResults = sortBy(reducedResults, (r) => r.ms);

return this.findMedian(reducedResults);
}

@ -256,7 +260,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
},
})
.then((resp) => {
const value = _.get(resp, ['aggregations', 'field_count', 'value'], 0);
const value = get(resp, ['aggregations', 'field_count', 'value'], 0);
resolve(value);
})
.catch((resp) => {

@ -293,9 +297,10 @@ export function estimateBucketSpanFactory(mlClusterClient) {
},
})
.then((partitionResp) => {
if (_.has(partitionResp, 'aggregations.fields_bucket_counts.buckets')) {
// eslint-disable-next-line camelcase
if (partitionResp.aggregations?.fields_bucket_counts?.buckets !== undefined) {
const buckets = partitionResp.aggregations.fields_bucket_counts.buckets;
fieldValues = _.map(buckets, (b) => b.key);
fieldValues = buckets.map((b) => b.key);
}
resolve(fieldValues);
})
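The bucket span estimator hunk above replaces a dotted-path _.has with optional chaining. A hedged sketch using a hypothetical response shape: optional chaining short-circuits to undefined if any level of the path is missing, so the !== undefined comparison plays the role the path-based has() used to.

// hypothetical aggregation response, for illustration only
const partitionResp = {
  aggregations: { fields_bucket_counts: { buckets: [{ key: 'a' }, { key: 'b' }] } },
};

// Previously: _.has(partitionResp, 'aggregations.fields_bucket_counts.buckets')
if (partitionResp.aggregations?.fields_bucket_counts?.buckets !== undefined) {
  const keys = partitionResp.aggregations.fields_bucket_counts.buckets.map((b) => b.key);
}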
|
@ -10,7 +10,7 @@
|
|||
* And a minimum bucket span
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import get from 'lodash/get';
|
||||
|
||||
export function polledDataCheckerFactory({ callAsCurrentUser }) {
|
||||
class PolledDataChecker {
|
||||
|
@ -29,7 +29,7 @@ export function polledDataCheckerFactory({ callAsCurrentUser }) {
|
|||
const interval = { name: '1m', ms: 60000 };
|
||||
this.performSearch(interval.ms)
|
||||
.then((resp) => {
|
||||
const fullBuckets = _.get(resp, 'aggregations.non_empty_buckets.buckets', []);
|
||||
const fullBuckets = get(resp, 'aggregations.non_empty_buckets.buckets', []);
|
||||
const result = this.isPolledData(fullBuckets, interval);
|
||||
if (result.pass) {
|
||||
// data is polled, return a flag and the minimumBucketSpan which should be
|
||||
|
|
|
@ -5,7 +5,10 @@
*/

import { ILegacyScopedClusterClient } from 'kibana/server';
import _ from 'lodash';
import get from 'lodash/get';
import each from 'lodash/each';
import last from 'lodash/last';
import find from 'lodash/find';
import { KBN_FIELD_TYPES } from '../../../../../../src/plugins/data/server';
import { ML_JOB_FIELD_TYPES } from '../../../common/constants/field_types';
import { getSafeAggregationName } from '../../../common/util/job_utils';

@ -216,7 +219,7 @@ const getAggIntervals = async (

const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const aggregations =
aggsPath.length > 0 ? _.get(respStats.aggregations, aggsPath) : respStats.aggregations;
aggsPath.length > 0 ? get(respStats.aggregations, aggsPath) : respStats.aggregations;

return Object.keys(aggregations).reduce((p, aggName) => {
const stats = [aggregations[aggName].min, aggregations[aggName].max];

@ -300,9 +303,7 @@ export const getHistogramsForFields = async (

const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const aggregations =
aggsPath.length > 0
? _.get(respChartsData.aggregations, aggsPath)
: respChartsData.aggregations;
aggsPath.length > 0 ? get(respChartsData.aggregations, aggsPath) : respChartsData.aggregations;

const chartsData: ChartData[] = fields.map(
(field): ChartData => {

@ -382,8 +383,8 @@ export class DataVisualizer {
// To avoid checking for the existence of too many aggregatable fields in one request,
// split the check into multiple batches (max 200 fields per request).
const batches: string[][] = [[]];
_.each(aggregatableFields, (field) => {
let lastArray: string[] = _.last(batches) as string[];
each(aggregatableFields, (field) => {
let lastArray: string[] = last(batches) as string[];
if (lastArray.length === AGGREGATABLE_EXISTS_REQUEST_BATCH_SIZE) {
lastArray = [];
batches.push(lastArray);

@ -475,7 +476,7 @@ export class DataVisualizer {
// Batch up fields by type, getting stats for multiple fields at a time.
const batches: Field[][] = [];
const batchedFields: { [key: string]: Field[][] } = {};
_.each(fields, (field) => {
each(fields, (field) => {
if (field.fieldName === undefined) {
// undefined fieldName is used for a document count request.
// getDocumentCountStats requires timeField - don't add to batched requests if not defined

@ -487,7 +488,7 @@ export class DataVisualizer {
if (batchedFields[fieldType] === undefined) {
batchedFields[fieldType] = [[]];
}
let lastArray: Field[] = _.last(batchedFields[fieldType]) as Field[];
let lastArray: Field[] = last(batchedFields[fieldType]) as Field[];
if (lastArray.length === FIELDS_REQUEST_BATCH_SIZE) {
lastArray = [];
batchedFields[fieldType].push(lastArray);

@ -496,7 +497,7 @@ export class DataVisualizer {
}
});

_.each(batchedFields, (lists) => {
each(batchedFields, (lists) => {
batches.push(...lists);
});

@ -636,7 +637,7 @@ export class DataVisualizer {
body,
});
const aggregations = resp.aggregations;
const totalCount = _.get(resp, ['hits', 'total'], 0);
const totalCount = get(resp, ['hits', 'total'], 0);
const stats = {
totalCount,
aggregatableExistsFields: [] as FieldData[],

@ -645,12 +646,12 @@ export class DataVisualizer {

const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const sampleCount =
samplerShardSize > 0 ? _.get(aggregations, ['sample', 'doc_count'], 0) : totalCount;
samplerShardSize > 0 ? get(aggregations, ['sample', 'doc_count'], 0) : totalCount;
aggregatableFields.forEach((field, i) => {
const safeFieldName = getSafeAggregationName(field, i);
const count = _.get(aggregations, [...aggsPath, `${safeFieldName}_count`, 'doc_count'], 0);
const count = get(aggregations, [...aggsPath, `${safeFieldName}_count`, 'doc_count'], 0);
if (count > 0) {
const cardinality = _.get(
const cardinality = get(
aggregations,
[...aggsPath, `${safeFieldName}_cardinality`, 'value'],
0

@ -745,12 +746,12 @@ export class DataVisualizer {
});

const buckets: { [key: string]: number } = {};
const dataByTimeBucket: Array<{ key: string; doc_count: number }> = _.get(
const dataByTimeBucket: Array<{ key: string; doc_count: number }> = get(
resp,
['aggregations', 'eventRate', 'buckets'],
[]
);
_.each(dataByTimeBucket, (dataForTime) => {
each(dataByTimeBucket, (dataForTime) => {
const time = dataForTime.key;
buckets[time] = dataForTime.doc_count;
});

@ -851,12 +852,12 @@ export class DataVisualizer {
const batchStats: NumericFieldStats[] = [];
fields.forEach((field, i) => {
const safeFieldName = getSafeAggregationName(field.fieldName, i);
const docCount = _.get(
const docCount = get(
aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'doc_count'],
0
);
const fieldStatsResp = _.get(
const fieldStatsResp = get(
aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'actual_stats'],
{}

@ -867,20 +868,20 @@ export class DataVisualizer {
topAggsPath.push('top');
}

const topValues: Bucket[] = _.get(aggregations, [...topAggsPath, 'buckets'], []);
const topValues: Bucket[] = get(aggregations, [...topAggsPath, 'buckets'], []);

const stats: NumericFieldStats = {
fieldName: field.fieldName,
count: docCount,
min: _.get(fieldStatsResp, 'min', 0),
max: _.get(fieldStatsResp, 'max', 0),
avg: _.get(fieldStatsResp, 'avg', 0),
min: get(fieldStatsResp, 'min', 0),
max: get(fieldStatsResp, 'max', 0),
avg: get(fieldStatsResp, 'avg', 0),
isTopValuesSampled:
field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD || samplerShardSize > 0,
topValues,
topValuesSampleSize: topValues.reduce(
(acc, curr) => acc + curr.doc_count,
_.get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0)
get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0)
),
topValuesSamplerShardSize:
field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD

@ -889,12 +890,12 @@ export class DataVisualizer {
};

if (stats.count > 0) {
const percentiles = _.get(
const percentiles = get(
aggregations,
[...aggsPath, `${safeFieldName}_percentiles`, 'values'],
[]
);
const medianPercentile: { value: number; key: number } | undefined = _.find(percentiles, {
const medianPercentile: { value: number; key: number } | undefined = find(percentiles, {
key: 50,
});
stats.median = medianPercentile !== undefined ? medianPercentile!.value : 0;

@ -978,7 +979,7 @@ export class DataVisualizer {
topAggsPath.push('top');
}

const topValues: Bucket[] = _.get(aggregations, [...topAggsPath, 'buckets'], []);
const topValues: Bucket[] = get(aggregations, [...topAggsPath, 'buckets'], []);

const stats = {
fieldName: field.fieldName,

@ -987,7 +988,7 @@ export class DataVisualizer {
topValues,
topValuesSampleSize: topValues.reduce(
(acc, curr) => acc + curr.doc_count,
_.get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0)
get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0)
),
topValuesSamplerShardSize:
field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD

@ -1046,12 +1047,12 @@ export class DataVisualizer {
const batchStats: DateFieldStats[] = [];
fields.forEach((field, i) => {
const safeFieldName = getSafeAggregationName(field.fieldName, i);
const docCount = _.get(
const docCount = get(
aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'doc_count'],
0
);
const fieldStatsResp = _.get(
const fieldStatsResp = get(
aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'actual_stats'],
{}

@ -1059,8 +1060,8 @@ export class DataVisualizer {
batchStats.push({
fieldName: field.fieldName,
count: docCount,
earliest: _.get(fieldStatsResp, 'min', 0),
latest: _.get(fieldStatsResp, 'max', 0),
earliest: get(fieldStatsResp, 'min', 0),
latest: get(fieldStatsResp, 'max', 0),
});
});

@ -1115,17 +1116,17 @@ export class DataVisualizer {
const safeFieldName = getSafeAggregationName(field.fieldName, i);
const stats: BooleanFieldStats = {
fieldName: field.fieldName,
count: _.get(aggregations, [...aggsPath, `${safeFieldName}_value_count`, 'doc_count'], 0),
count: get(aggregations, [...aggsPath, `${safeFieldName}_value_count`, 'doc_count'], 0),
trueCount: 0,
falseCount: 0,
};

const valueBuckets: Array<{ [key: string]: number }> = _.get(
const valueBuckets: Array<{ [key: string]: number }> = get(
aggregations,
[...aggsPath, `${safeFieldName}_values`, 'buckets'],
[]
);
_.forEach(valueBuckets, (bucket) => {
valueBuckets.forEach((bucket) => {
stats[`${bucket.key_as_string}Count`] = bucket.doc_count;
});

@ -1182,8 +1183,8 @@ export class DataVisualizer {
// If the field is not in the _source (as will happen if the
// field is populated using copy_to in the index mapping),
// there will be no example to add.
// Use lodash _.get() to support field names containing dots.
const example: any = _.get(hits[i]._source, field);
// Use lodash get() to support field names containing dots.
const example: any = get(hits[i]._source, field);
if (example !== undefined && stats.examples.indexOf(example) === -1) {
stats.examples.push(example);
if (stats.examples.length === maxExamples) {

@ -1216,7 +1217,7 @@ export class DataVisualizer {

// Look ahead to the last percentiles and process these too if
// they don't add more than 50% to the value range.
const lastValue = (_.last(percentileBuckets) as any).value;
const lastValue = (last(percentileBuckets) as any).value;
const upperBound = lowerBound + 1.5 * (lastValue - lowerBound);
const filteredLength = percentileBuckets.length;
for (let i = filteredLength; i < percentiles.length; i++) {

@ -1237,7 +1238,7 @@ export class DataVisualizer {

// Add in 0-5 and 95-100% if they don't add more
// than 25% to the value range at either end.
const lastValue: number = (_.last(percentileBuckets) as any).value;
const lastValue: number = (last(percentileBuckets) as any).value;
const maxDiff = 0.25 * (lastValue - lowerBound);
if (lowerBound - dataMin < maxDiff) {
percentileBuckets.splice(0, 0, percentiles[0]);
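The DataVisualizer hunks above keep the lodash last() helper, now imported from its own module. A small sketch of the batching pattern it feeds, assuming a hypothetical batch size of 3 (the real constants differ):

import last from 'lodash/last';

const BATCH_SIZE = 3; // hypothetical; the real request batch sizes are larger
const batches = [[]];
['a', 'b', 'c', 'd'].forEach((field) => {
  let lastArray = last(batches);
  if (lastArray.length === BATCH_SIZE) {
    lastArray = [];
    batches.push(lastArray);
  }
  lastArray.push(field);
});
// batches => [['a', 'b', 'c'], ['d']]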
@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import cloneDeep from 'lodash/cloneDeep';

import { ILegacyScopedClusterClient } from 'kibana/server';

@ -145,7 +145,7 @@ describe('ML - validateCardinality', () => {
test: (ids: string[]) => void
) => {
const job = getJobConfig(fieldName);
const mockCardinality = _.cloneDeep(mockResponses);
const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(
mlClusterClientFactory(mockCardinality),

@ -250,7 +250,7 @@ describe('ML - validateCardinality', () => {
it(`disabled model_plot, over field cardinality of ${cardinality} doesn't trigger a warning`, () => {
const job = (getJobConfig('over_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: false };
const mockCardinality = _.cloneDeep(mockResponses);
const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id);

@ -261,7 +261,7 @@ describe('ML - validateCardinality', () => {
it(`enabled model_plot, over field cardinality of ${cardinality} triggers a model plot warning`, () => {
const job = (getJobConfig('over_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: true };
const mockCardinality = _.cloneDeep(mockResponses);
const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id);

@ -272,7 +272,7 @@ describe('ML - validateCardinality', () => {
it(`disabled model_plot, by field cardinality of ${cardinality} triggers a field cardinality warning`, () => {
const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: false };
const mockCardinality = _.cloneDeep(mockResponses);
const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id);

@ -283,7 +283,7 @@ describe('ML - validateCardinality', () => {
it(`enabled model_plot, by field cardinality of ${cardinality} triggers a model plot warning and field cardinality warning`, () => {
const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: true };
const mockCardinality = _.cloneDeep(mockResponses);
const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id);

@ -294,7 +294,7 @@ describe('ML - validateCardinality', () => {
it(`enabled model_plot with terms, by field cardinality of ${cardinality} triggers just field cardinality warning`, () => {
const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: true, terms: 'AAL,AAB' };
const mockCardinality = _.cloneDeep(mockResponses);
const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id);
@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import cloneDeep from 'lodash/cloneDeep';

import { ILegacyScopedClusterClient } from 'kibana/server';

@ -144,7 +144,7 @@ describe('ML - validateTimeRange', () => {
});

it('invalid time field', () => {
const mockSearchResponseInvalid = _.cloneDeep(mockSearchResponse);
const mockSearchResponseInvalid = cloneDeep(mockSearchResponse);
mockSearchResponseInvalid.fieldCaps = undefined;
const duration = { start: 0, end: 1 };
return validateTimeRange(
@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import sortBy from 'lodash/sortBy';
import each from 'lodash/each';
import moment from 'moment-timezone';

import {

@ -55,7 +56,7 @@ export function buildAnomalyTableItems(anomalyRecords, aggregationInterval, date

if (source.influencers !== undefined) {
const influencers = [];
const sourceInfluencers = _.sortBy(source.influencers, 'influencer_field_name');
const sourceInfluencers = sortBy(source.influencers, 'influencer_field_name');
sourceInfluencers.forEach((influencer) => {
const influencerFieldName = influencer.influencer_field_name;
influencer.influencer_field_values.forEach((influencerFieldValue) => {

@ -172,10 +173,10 @@ function aggregateAnomalies(anomalyRecords, interval, dateFormatTz) {
// Flatten the aggregatedData to give a list of records with
// the highest score per bucketed time / jobId / detectorIndex.
const summaryRecords = [];
_.each(aggregatedData, (times, roundedTime) => {
_.each(times, (jobIds) => {
_.each(jobIds, (entityDetectors) => {
_.each(entityDetectors, (record) => {
each(aggregatedData, (times, roundedTime) => {
each(times, (jobIds) => {
each(jobIds, (entityDetectors) => {
each(entityDetectors, (record) => {
summaryRecords.push({
time: +roundedTime,
source: record,
@ -4,7 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/

import _ from 'lodash';
import sortBy from 'lodash/sortBy';
import slice from 'lodash/slice';
import get from 'lodash/get';
import moment from 'moment';
import { SearchResponse } from 'elasticsearch';
import { ILegacyScopedClusterClient } from 'kibana/server';

@ -175,7 +177,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
});

// Sort anomalies in ascending time order.
records = _.sortBy(records, 'timestamp');
records = sortBy(records, 'timestamp');
tableData.interval = aggregationInterval;
if (aggregationInterval === 'auto') {
// Determine the actual interval to use if aggregating.

@ -197,7 +199,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie

const categoryIdsByJobId: { [key: string]: any } = {};
categoryAnomalies.forEach((anomaly) => {
if (!_.has(categoryIdsByJobId, anomaly.jobId)) {
if (categoryIdsByJobId[anomaly.jobId] === undefined) {
categoryIdsByJobId[anomaly.jobId] = [];
}
if (categoryIdsByJobId[anomaly.jobId].indexOf(anomaly.entityValue) === -1) {

@ -289,7 +291,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
};

const resp = await callAsInternalUser('search', query);
const maxScore = _.get(resp, ['aggregations', 'max_score', 'value'], null);
const maxScore = get(resp, ['aggregations', 'max_score', 'value'], null);

return { maxScore };
}

@ -353,7 +355,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
},
});

const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = _.get(
const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = get(
resp,
['aggregations', 'byJobId', 'buckets'],
[]

@ -387,7 +389,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
if (resp.hits.total !== 0) {
resp.hits.hits.forEach((hit: any) => {
if (maxExamples) {
examplesByCategoryId[hit._source.category_id] = _.slice(
examplesByCategoryId[hit._source.category_id] = slice(
hit._source.examples,
0,
Math.min(hit._source.examples.length, maxExamples)