Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)
[ML] AIOps: Identify spike/dips with change point detection for log rate analysis (#178338)
## Summary

- Uses change point detection to identify the point in time for the deviation timestamp.
- Expands the deviation timestamp into a time range covering the whole deviation area of interest and highlights that time range in the date histogram chart.
- When clicking on the detected deviation time range, the selected deviation will be exactly the detected time range.
- If no change point is detected, we fall back to the previous behavior.

### Checklist

- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [x] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed
- [x] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
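To make the behavior described above concrete, here is a minimal usage sketch of the helpers exported from `@kbn/aiops-utils` that this PR wires together. Only the function signatures are taken from the diff below; the interval, time range, change point values, and the clicked timestamp are made up for illustration:

```ts
import {
  getSnappedTimestamps,
  getSnappedWindowParameters,
  getWindowParametersForTrigger,
  type DocumentCountStatsChangePoint,
} from '@kbn/aiops-utils';

// Hypothetical document count histogram metadata.
const interval = 5 * 60 * 1000; // 5 minute buckets
const timeRangeEarliest = 1654566600000;
const timeRangeLatest = 1654588800000;

// Change point as detected via the `change_point` aggregation and expanded
// into a time range with `getExtendedChangePoint`.
const changePoint: DocumentCountStatsChangePoint = {
  key: 1654586400000,
  startTs: 1654586100000,
  endTs: 1654587000000,
  type: 'spike',
};

// A click inside the highlighted change point range derives the deviation
// window from the detected range; without a change point (or when clicking
// outside of it) the previous click-position heuristics are used as a fallback.
const clickedTimestamp = 1654586500000;
const wp = getWindowParametersForTrigger(
  clickedTimestamp,
  interval,
  timeRangeEarliest,
  timeRangeLatest,
  changePoint
);

// Snap baseline/deviation to the histogram bucket boundaries.
const snapTimestamps = getSnappedTimestamps(timeRangeEarliest, timeRangeLatest, interval);
const windowParameters = getSnappedWindowParameters(wp, snapTimestamps);
```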
Parent: ad8163db82
Commit: 176ea275a8

48 changed files with 809 additions and 276 deletions
@@ -19,13 +19,16 @@ import type {
  BarStyleAccessor,
  RectAnnotationSpec,
} from '@elastic/charts/dist/chart_types/xy_chart/utils/specs';

import { getTimeZone } from '@kbn/visualization-utils';
import { i18n } from '@kbn/i18n';
import type { IUiSettingsClient } from '@kbn/core/public';
import {
  getLogRateAnalysisType,
  getSnappedTimestamps,
  getSnappedWindowParameters,
  getWindowParameters,
  getWindowParametersForTrigger,
  type DocumentCountStatsChangePoint,
  type LogRateAnalysisType,
  type LogRateHistogramItem,
  type WindowParameters,

@@ -129,6 +132,8 @@ export interface DocumentCountChartProps {
  baselineBrush?: BrushSettings;
  /** Optional data-test-subject */
  dataTestSubj?: string;
  /** Optional change point metadata */
  changePoint?: DocumentCountStatsChangePoint;
}

const SPEC_ID = 'document_count';

@@ -163,6 +168,7 @@ function getBaselineBadgeOverflow(
 */
export const DocumentCountChart: FC<DocumentCountChartProps> = (props) => {
  const {
    changePoint,
    dataTestSubj,
    dependencies,
    brushSelectionUpdateHandler,

@@ -250,17 +256,10 @@ export const DocumentCountChart: FC<DocumentCountChartProps> = (props) => {
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [chartPointsSplit, timeRangeEarliest, timeRangeLatest, interval]);

  const snapTimestamps = useMemo(() => {
    const timestamps: number[] = [];
    let n = timeRangeEarliest;

    while (n <= timeRangeLatest + interval) {
      timestamps.push(n);
      n += interval;
    }

    return timestamps;
  }, [timeRangeEarliest, timeRangeLatest, interval]);
  const snapTimestamps = useMemo(
    () => getSnappedTimestamps(timeRangeEarliest, timeRangeLatest, interval),
    [timeRangeEarliest, timeRangeLatest, interval]
  );

  const timefilterUpdateHandler = useCallback(
    (range: TimeFilterRange) => {

@@ -306,14 +305,13 @@ export const DocumentCountChart: FC<DocumentCountChartProps> = (props) => {
        windowParameters === undefined &&
        adjustedChartPoints !== undefined
      ) {
        const wp =
          typeof startRange === 'number'
            ? getWindowParameters(
                startRange + interval / 2,
                timeRangeEarliest,
                timeRangeLatest + interval
              )
            : startRange;
        const wp = getWindowParametersForTrigger(
          startRange,
          interval,
          timeRangeEarliest,
          timeRangeLatest,
          changePoint
        );
        const wpSnap = getSnappedWindowParameters(wp, snapTimestamps);
        setOriginalWindowParameters(wpSnap);
        setWindowParameters(wpSnap);

@@ -329,6 +327,7 @@ export const DocumentCountChart: FC<DocumentCountChartProps> = (props) => {
        }
      },
      [
        changePoint,
        interval,
        timeRangeEarliest,
        timeRangeLatest,
@@ -5,7 +5,7 @@
 * 2.0.
 */

import type { ItemSet } from '@kbn/aiops-utils/types';
import type { ItemSet } from '@kbn/aiops-utils/log_rate_analysis/types';

export const filteredFrequentItemSets: ItemSet[] = [
  {

@@ -5,7 +5,7 @@
 * 2.0.
 */

import type { ItemSet } from '@kbn/aiops-utils/types';
import type { ItemSet } from '@kbn/aiops-utils/log_rate_analysis/types';

export const frequentItemSets: ItemSet[] = [
  {
@@ -0,0 +1,60 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const kibanaSampleDataLogsSignificantTermsBase = [
  {
    fieldName: 'agent.keyword',
    fieldValue:
      'Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.50 Safari/534.24',
    pValue: '5.82e-12',
  },
  {
    fieldName: 'clientip',
    fieldValue: '30.156.16.164',
    pValue: '2.81e-53',
  },
  {
    fieldName: 'extension.keyword',
    fieldValue: '',
    pValue: '5.72e-12',
  },
  {
    fieldName: 'geo.dest',
    fieldValue: 'IN',
    pValue: '8.35e-21',
  },
  {
    fieldName: 'geo.srcdest',
    fieldValue: 'US:IN',
    pValue: '8.35e-21',
  },
  {
    fieldName: 'host.keyword',
    fieldValue: 'elastic-elastic-elastic.org',
    pValue: '3.94e-45',
  },
  {
    fieldName: 'ip',
    fieldValue: '30.156.16.163',
    pValue: '9.50e-54',
  },
  {
    fieldName: 'machine.os.keyword',
    fieldValue: 'win xp',
    pValue: '4.25e-18',
  },
  {
    fieldName: 'referer',
    fieldValue: 'http://www.elastic-elastic-elastic.com/success/timothy-l-kopra',
    pValue: '1.41e-53',
  },
  {
    fieldName: 'response.keyword',
    fieldValue: '404',
    pValue: '2.10e-35',
  },
];
@@ -5,11 +5,17 @@
 * 2.0.
 */

export { getLogRateAnalysisType } from './get_log_rate_analysis_type';
export { LOG_RATE_ANALYSIS_TYPE, type LogRateAnalysisType } from './log_rate_analysis_type';
export { type LogRateHistogramItem } from './log_rate_histogram_item';
export { LOG_RATE_ANALYSIS_HIGHLIGHT_COLOR } from './log_rate_analysis/constants';
export { getLogRateAnalysisType } from './log_rate_analysis/get_log_rate_analysis_type';
export {
  getSnappedWindowParameters,
  getWindowParameters,
  type WindowParameters,
} from './window_parameters';
  LOG_RATE_ANALYSIS_TYPE,
  type LogRateAnalysisType,
} from './log_rate_analysis/log_rate_analysis_type';
export type { LogRateHistogramItem } from './log_rate_analysis/log_rate_histogram_item';
export type { DocumentCountStatsChangePoint } from './log_rate_analysis/types';
export type { WindowParameters } from './log_rate_analysis/window_parameters';
export { getSnappedTimestamps } from './log_rate_analysis/get_snapped_timestamps';
export { getSnappedWindowParameters } from './log_rate_analysis/get_snapped_window_parameters';
export { getWindowParameters } from './log_rate_analysis/get_window_parameters';
export { getWindowParametersForTrigger } from './log_rate_analysis/get_window_parameters_for_trigger';
export { getExtendedChangePoint } from './log_rate_analysis/get_extended_change_point';
@@ -0,0 +1,84 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const getDateHistogramBuckets = (): Record<string, number> => ({
  1654566600000: 4929,
  1654566900000: 4686,
  1654567200000: 5243,
  1654567500000: 4186,
  1654567800000: 5529,
  1654568100000: 6071,
  1654568400000: 4500,
  1654568700000: 6157,
  1654569000000: 4886,
  1654569300000: 5886,
  1654569600000: 4843,
  1654569900000: 4871,
  1654570200000: 5129,
  1654570500000: 4529,
  1654570800000: 5171,
  1654571100000: 6357,
  1654571400000: 4100,
  1654571700000: 4714,
  1654572000000: 5029,
  1654572300000: 4100,
  1654572600000: 5057,
  1654572900000: 5129,
  1654573200000: 4871,
  1654573500000: 4914,
  1654573800000: 4586,
  1654574100000: 3857,
  1654574400000: 3886,
  1654574700000: 5286,
  1654575000000: 4543,
  1654575300000: 5800,
  1654575600000: 4943,
  1654575900000: 5071,
  1654576200000: 6486,
  1654576500000: 5914,
  1654576800000: 5643,
  1654577100000: 6500,
  1654577400000: 7014,
  1654577700000: 5300,
  1654578000000: 6086,
  1654578300000: 5829,
  1654578600000: 6743,
  1654578900000: 7457,
  1654579200000: 5729,
  1654579500000: 6871,
  1654579800000: 7457,
  1654580100000: 6657,
  1654580400000: 8543,
  1654580700000: 8629,
  1654581000000: 8586,
  1654581300000: 7043,
  1654581600000: 8071,
  1654581900000: 8471,
  1654582200000: 12243,
  1654582500000: 10171,
  1654582800000: 10143,
  1654583100000: 11529,
  1654583400000: 10986,
  1654583700000: 10757,
  1654584000000: 12614,
  1654584300000: 11771,
  1654584600000: 11771,
  1654584900000: 11543,
  1654585200000: 10671,
  1654585500000: 14914,
  1654585800000: 12500,
  1654586100000: 15029,
  1654586400000: 99900,
  1654586700000: 78971,
  1654587000000: 20600,
  1654587300000: 4300,
  1654587600000: 11671,
  1654587900000: 2629,
  1654588200000: 2200,
  1654588500000: 13157,
  1654588800000: 2714,
});
@@ -0,0 +1,9 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

/** Highlighting color for charts */
export const LOG_RATE_ANALYSIS_HIGHLIGHT_COLOR = 'orange';
@@ -0,0 +1,19 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { getDateHistogramBuckets } from './__mocks__/date_histogram';
import { getExtendedChangePoint } from './get_extended_change_point';

describe('getExtendedChangePoint', () => {
  test('returns the extended change point', () => {
    const changePointTs = 1654586400000;
    expect(getExtendedChangePoint(getDateHistogramBuckets(), changePointTs)).toEqual({
      endTs: 1654587000000,
      startTs: 1654586100000,
    });
  });
});
@@ -0,0 +1,44 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { mean } from 'd3-array';

/**
 * Calculates and returns an extended change point range based on the specified change point timestamp.
 *
 * @param buckets - An object where keys are bucket timestamps as strings
 *   and values are numeric values associated with each bucket.
 * @param changePointTs - The timestamp of the change point as a number. This timestamp must
 *   be one of the keys in the `buckets` object.
 * @returns An object containing two properties: `startTs` and `endTs`.
 */
export const getExtendedChangePoint = (buckets: Record<string, number>, changePointTs: number) => {
  const bucketKeys = Object.keys(buckets);
  const bucketValues = Object.values(buckets);
  const meanValue = Math.round(mean(bucketValues) ?? 0);
  const cpIndex = bucketKeys.findIndex((d) => +d === changePointTs);
  const cpValue = buckets[changePointTs];

  let lIndex = cpIndex - 1;
  let uIndex = cpIndex + 1;

  while (
    lIndex >= 0 &&
    Math.abs(bucketValues[lIndex] - meanValue) > Math.abs(bucketValues[lIndex] - cpValue)
  ) {
    lIndex--;
  }

  while (
    uIndex < bucketValues.length &&
    Math.abs(bucketValues[uIndex] - meanValue) > Math.abs(bucketValues[uIndex] - cpValue)
  ) {
    uIndex++;
  }

  return { startTs: +bucketKeys[lIndex], endTs: +bucketKeys[uIndex] };
};
@@ -0,0 +1,30 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

/**
 * Generates an array of timestamps evenly spaced within a given time range.
 *
 * @param timeRangeEarliest The earliest timestamp in the time range.
 * @param timeRangeLatest The latest timestamp in the time range.
 * @param interval The interval between timestamps in milliseconds.
 * @returns Array of timestamps spaced by the specified interval within the given range.
 */
export const getSnappedTimestamps = (
  timeRangeEarliest: number,
  timeRangeLatest: number,
  interval: number
) => {
  const timestamps: number[] = [];
  let n = timeRangeEarliest;

  while (n <= timeRangeLatest + interval) {
    timestamps.push(n);
    n += interval;
  }

  return timestamps;
};
@@ -0,0 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { getDateHistogramBuckets } from './__mocks__/date_histogram';
import { getSnappedWindowParameters } from './get_snapped_window_parameters';

const windowParameters = {
  baselineMin: 1654579807500,
  baselineMax: 1654586107500,
  deviationMin: 1654586400000,
  deviationMax: 1654587007500,
};

const snapTimestamps = Object.keys(getDateHistogramBuckets()).map((d) => +d);

describe('getSnappedWindowParameters', () => {
  test('returns the snapped window parameters', () => {
    const snappedWindowParameters = getSnappedWindowParameters(windowParameters, snapTimestamps);

    expect(getSnappedWindowParameters(windowParameters, snapTimestamps)).toEqual({
      baselineMax: 1654586100000,
      baselineMin: 1654579800000,
      deviationMax: 1654587000000,
      deviationMin: 1654586400000,
    });

    expect(snapTimestamps.includes(snappedWindowParameters.baselineMin)).toBe(true);
    expect(snapTimestamps.includes(snappedWindowParameters.baselineMax)).toBe(true);
    expect(snapTimestamps.includes(snappedWindowParameters.deviationMin)).toBe(true);
    expect(snapTimestamps.includes(snappedWindowParameters.deviationMax)).toBe(true);
  });
});
@@ -0,0 +1,68 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { WindowParameters } from './window_parameters';

/**
 *
 * Converts window parameters from the brushes to “snap” the brushes to the chart histogram bar width and ensure timestamps
 * correspond to bucket timestamps
 *
 * @param windowParameters time range definition for baseline and deviation to be used by log rate analysis
 * @param snapTimestamps time range definition that always corresponds to histogram bucket timestamps
 * @returns WindowParameters
 */
export const getSnappedWindowParameters = (
  windowParameters: WindowParameters,
  snapTimestamps: number[]
): WindowParameters => {
  const snappedBaselineMin = snapTimestamps.reduce((pts, cts) => {
    if (
      Math.abs(cts - windowParameters.baselineMin) < Math.abs(pts - windowParameters.baselineMin)
    ) {
      return cts;
    }
    return pts;
  }, snapTimestamps[0]);
  const baselineMaxTimestamps = snapTimestamps.filter((ts) => ts > snappedBaselineMin);

  const snappedBaselineMax = baselineMaxTimestamps.reduce((pts, cts) => {
    if (
      Math.abs(cts - windowParameters.baselineMax) < Math.abs(pts - windowParameters.baselineMax)
    ) {
      return cts;
    }
    return pts;
  }, baselineMaxTimestamps[0]);
  const deviationMinTss = baselineMaxTimestamps.filter((ts) => ts > snappedBaselineMax);

  const snappedDeviationMin = deviationMinTss.reduce((pts, cts) => {
    if (
      Math.abs(cts - windowParameters.deviationMin) < Math.abs(pts - windowParameters.deviationMin)
    ) {
      return cts;
    }
    return pts;
  }, deviationMinTss[0]);
  const deviationMaxTss = deviationMinTss.filter((ts) => ts > snappedDeviationMin);

  const snappedDeviationMax = deviationMaxTss.reduce((pts, cts) => {
    if (
      Math.abs(cts - windowParameters.deviationMax) < Math.abs(pts - windowParameters.deviationMax)
    ) {
      return cts;
    }
    return pts;
  }, deviationMaxTss[0]);

  return {
    baselineMin: snappedBaselineMin,
    baselineMax: snappedBaselineMax,
    deviationMin: snappedDeviationMin,
    deviationMax: snappedDeviationMax,
  };
};
@@ -0,0 +1,64 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { WindowParameters } from './window_parameters';

/**
 * Given a point in time (e.g. where a user clicks), use simple heuristics to compute:
 *
 * 1. The time window around the click to evaluate for changes
 * 2. The historical time window prior to the click to use as a baseline.
 *
 * The philosophy here is that charts are displayed with different granularities according to their
 * overall time window. We select the log deviation and historical time windows in line with the
 * overall time window.
 *
 * The algorithm for doing this is based on the typical granularities that exist in machine data.
 *
 * @param clickTime timestamp of the clicked log rate deviation.
 * @param minTime minimum timestamp of the time window to be analysed
 * @param maxTime maximum timestamp of the time window to be analysed
 * @param clickTimeUpper optional timestamp to treat clickTime and clickTimeUpper
 *   as a time range instead of a point in time
 * @param windowGapOverride optional override for the baseline/deviation gap
 * @returns WindowParameters
 */
export const getWindowParameters = (
  clickTime: number,
  minTime: number,
  maxTime: number,
  clickTimeUpper?: number,
  windowGapOverride?: number
): WindowParameters => {
  const totalWindow = maxTime - minTime;

  // min deviation window
  const minDeviationWindow = 10 * 60 * 1000; // 10min
  const minBaselineWindow = 30 * 60 * 1000; // 30min
  const minWindowGap = 5 * 60 * 1000; // 5min

  // work out bounds as done in the original notebooks,
  // with the deviation window aiming to be a 1/10
  // of the size of the total window and the baseline window
  // being 3.5/10 of the total window.
  const deviationWindow = Math.max(totalWindow / 10, minDeviationWindow);
  const baselineWindow = Math.max(totalWindow / 3.5, minBaselineWindow);
  const windowGap = windowGapOverride ?? Math.max(totalWindow / 10, minWindowGap);

  const deviationMin = clickTimeUpper ? clickTime : clickTime - deviationWindow / 2;
  const deviationMax = clickTimeUpper ? clickTimeUpper : clickTime + deviationWindow / 2;

  const baselineMax = deviationMin - windowGap;
  const baselineMin = baselineMax - baselineWindow;

  return {
    baselineMin: Math.round(baselineMin),
    baselineMax: Math.round(baselineMax),
    deviationMin: Math.round(deviationMin),
    deviationMax: Math.round(deviationMax),
  };
};
@@ -0,0 +1,58 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { DocumentCountStatsChangePoint } from './types';
import { getWindowParameters } from './get_window_parameters';
import type { WindowParameters } from './window_parameters';

/**
 * Calculates window parameters, adjusting the window based on a
 * change point and interval. If a change point is specified and falls within
 * the startRange, the window is adjusted around the change point. Otherwise,
 * the window is determined by the startRange and interval.
 *
 * @param startRange The start timestamp or window parameters. If a number,
 *   it's the start timestamp; if an object, it's assumed to be
 *   window parameters and is returned directly.
 * @param interval Interval in milliseconds for extending the window or
 *   adjusting the start range.
 * @param timeRangeEarliest Earliest timestamp in milliseconds in the time range.
 * @param timeRangeLatest Latest timestamp in milliseconds in the time range.
 * @param changePoint Optional change point with `startTs` and `endTs`
 *   properties. Adjusts window parameters if within `startRange`.
 * @returns Window parameters
 */
export function getWindowParametersForTrigger(
  startRange: number | WindowParameters,
  interval: number,
  timeRangeEarliest: number,
  timeRangeLatest: number,
  changePoint?: DocumentCountStatsChangePoint
): WindowParameters {
  if (
    typeof startRange === 'number' &&
    changePoint &&
    startRange >= changePoint.startTs &&
    startRange <= changePoint.endTs
  ) {
    return getWindowParameters(
      changePoint.startTs + interval,
      timeRangeEarliest,
      timeRangeLatest + interval,
      changePoint.endTs,
      interval
    );
  } else if (typeof startRange === 'number') {
    return getWindowParameters(
      startRange + interval / 2,
      timeRangeEarliest,
      timeRangeLatest + interval
    );
  }

  return startRange;
}
@@ -44,3 +44,18 @@ export interface SimpleHierarchicalTreeNode {
  children: SimpleHierarchicalTreeNode[];
  addNode: (node: SimpleHierarchicalTreeNode) => void;
}

/**
 * Represents a change point in document count statistics,
 * identifying a significant change over time.
 */
export interface DocumentCountStatsChangePoint {
  /** Key is the timestamp of the change point. */
  key: number;
  /** The start timestamp of the change point period. */
  startTs: number;
  /** The end timestamp of the change point period. */
  endTs: number;
  /** The type of change point. */
  type: string;
}
@@ -0,0 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { isPopulatedObject } from '@kbn/ml-is-populated-object';

/**
 * Time range definition for baseline and deviation to be used by log rate analysis.
 *
 * @export
 * @interface WindowParameters
 * @typedef {WindowParameters}
 */
export interface WindowParameters {
  /** Baseline minimum value */
  baselineMin: number;
  /** Baseline maximum value */
  baselineMax: number;
  /** Deviation minimum value */
  deviationMin: number;
  /** Deviation maximum value */
  deviationMax: number;
}

/**
 * Type guard for WindowParameters
 *
 * @param {unknown} arg - The argument to be checked.
 * @returns {arg is WindowParameters}
 */
export const isWindowParameters = (arg: unknown): arg is WindowParameters =>
  isPopulatedObject(arg, ['baselineMin', 'baselineMax', 'deviationMin', 'deviationMax']) &&
  Object.values(arg).every((d) => typeof d === 'number');
@@ -1,147 +0,0 @@
[Deleted file: the previous combined module containing the WindowParameters interface, the isWindowParameters type guard, getWindowParameters (without the new clickTimeUpper and windowGapOverride parameters), and getSnappedWindowParameters. Its contents are otherwise identical to the new log_rate_analysis/ modules shown above.]
@@ -113,6 +113,7 @@ export const DocumentCountContent: FC<DocumentCountContentProps> = ({
        autoAnalysisStart={initialAnalysisStart}
        barColorOverride={barColorOverride}
        barHighlightColorOverride={barHighlightColorOverride}
        changePoint={documentCountStats.changePoint}
        {...docCountChartProps}
      />
    </EuiFlexItem>
@@ -6,16 +6,21 @@
 */

import { isEqual } from 'lodash';
import React, { useEffect, useMemo, useRef, useState, type FC } from 'react';
import { EuiEmptyPrompt, EuiHorizontalRule, EuiPanel } from '@elastic/eui';
import React, { useCallback, useEffect, useMemo, useRef, useState, type FC } from 'react';
import { EuiButton, EuiEmptyPrompt, EuiHorizontalRule, EuiPanel } from '@elastic/eui';
import type { Moment } from 'moment';

import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { BarStyleAccessor } from '@elastic/charts/dist/chart_types/xy_chart/utils/specs';

import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import type { DataView } from '@kbn/data-views-plugin/public';
import {
  getWindowParametersForTrigger,
  getSnappedTimestamps,
  getSnappedWindowParameters,
  LOG_RATE_ANALYSIS_HIGHLIGHT_COLOR,
  LOG_RATE_ANALYSIS_TYPE,
  type LogRateAnalysisType,
  type WindowParameters,
@@ -181,6 +186,47 @@ export const LogRateAnalysisContent: FC<LogRateAnalysisContentProps> = ({
      setInitialAnalysisStart(undefined);
    }

  const barStyle = {
    rect: {
      opacity: 1,
      fill: LOG_RATE_ANALYSIS_HIGHLIGHT_COLOR,
    },
  };

  // Used to highlight an auto-detected change point in the date histogram.
  const barStyleAccessor: BarStyleAccessor | undefined =
    isBrushCleared && documentCountStats?.changePoint
      ? (d, g) => {
          return g.specId === 'document_count' &&
            documentCountStats?.changePoint &&
            d.x > documentCountStats.changePoint.startTs &&
            d.x < documentCountStats.changePoint.endTs
            ? barStyle
            : null;
        }
      : undefined;

  const triggerAnalysis = useCallback(() => {
    if (documentCountStats) {
      const { interval, timeRangeEarliest, timeRangeLatest, changePoint } = documentCountStats;

      if (changePoint && interval && timeRangeEarliest && timeRangeLatest) {
        const wp = getWindowParametersForTrigger(
          changePoint.startTs,
          interval,
          timeRangeEarliest,
          timeRangeLatest,
          changePoint
        );

        const snapTimestamps = getSnappedTimestamps(timeRangeEarliest, timeRangeLatest, interval);
        const wpSnap = getSnappedWindowParameters(wp, snapTimestamps);

        setInitialAnalysisStart(wpSnap);
      }
    }
  }, [documentCountStats]);

  return (
    <EuiPanel hasBorder={false} hasShadow={false}>
      {documentCountStats !== undefined && (
@@ -198,6 +244,7 @@ export const LogRateAnalysisContent: FC<LogRateAnalysisContentProps> = ({
          initialAnalysisStart={initialAnalysisStart}
          barColorOverride={barColorOverride}
          barHighlightColorOverride={barHighlightColorOverride}
          barStyleAccessor={barStyleAccessor}
        />
      )}
      <EuiHorizontalRule />
@@ -219,7 +266,59 @@ export const LogRateAnalysisContent: FC<LogRateAnalysisContentProps> = ({
          embeddingOrigin={embeddingOrigin}
        />
      )}
      {windowParameters === undefined && (
      {windowParameters === undefined && documentCountStats?.changePoint && (
        <EuiEmptyPrompt
          color="subdued"
          hasShadow={false}
          hasBorder={false}
          css={{ minWidth: '100%' }}
          title={
            <h2>
              {documentCountStats?.changePoint.type === LOG_RATE_ANALYSIS_TYPE.SPIKE && (
                <FormattedMessage
                  id="xpack.aiops.logRateAnalysis.page.changePointSpikePromptTitle"
                  defaultMessage="Log rate spike detected"
                />
              )}
              {documentCountStats?.changePoint.type === LOG_RATE_ANALYSIS_TYPE.DIP && (
                <FormattedMessage
                  id="xpack.aiops.logRateAnalysis.page.changePointDipPromptTitle"
                  defaultMessage="Log rate dip detected"
                />
              )}
              {documentCountStats?.changePoint.type !== LOG_RATE_ANALYSIS_TYPE.SPIKE &&
                documentCountStats?.changePoint.type !== LOG_RATE_ANALYSIS_TYPE.DIP && (
                  <FormattedMessage
                    id="xpack.aiops.logRateAnalysis.page.changePointOtherPromptTitle"
                    defaultMessage="Log rate change point detected"
                  />
                )}
            </h2>
          }
          titleSize="xs"
          body={
            <>
              <p>
                <FormattedMessage
                  id="xpack.aiops.logRateAnalysis.page.changePointPromptBody"
                  defaultMessage="The log rate analysis feature identifies statistically significant field/value combinations that contribute to a log rate spike or dip."
                />
              </p>
              <EuiButton
                data-test-subj="aiopsLogRateAnalysisContentRunAnalysisButton"
                onClick={triggerAnalysis}
              >
                <FormattedMessage
                  id="xpack.aiops.logRateAnalysis.page.changePointPromptRunAnalysisButton"
                  defaultMessage="Run analysis"
                />
              </EuiButton>
            </>
          }
          data-test-subj="aiopsChangePointDetectedPrompt"
        />
      )}
      {windowParameters === undefined && documentCountStats?.changePoint === undefined && (
        <EuiEmptyPrompt
          color="subdued"
          hasShadow={false}
@@ -7,21 +7,64 @@

import { finalSignificantItemGroups } from '@kbn/aiops-test-utils/artificial_logs/final_significant_item_groups';
import { significantTerms } from '@kbn/aiops-test-utils/artificial_logs/significant_terms';
import { kibanaSampleDataLogsSignificantTermsBase } from '@kbn/aiops-test-utils/kibana_sample_data_logs/significant_terms';
import type { SignificantItem, SignificantItemGroup } from '@kbn/ml-agg-utils';

import { getGroupTableItems } from './get_group_table_items';
import { getTableItemAsKQL } from './get_table_item_as_kql';

const kibanaSampleDataLogsSignificantTerms: SignificantItem[] =
  kibanaSampleDataLogsSignificantTermsBase.map((d) => ({
    ...d,
    key: `${d.fieldName}:${d.fieldValue}`,
    type: 'keyword',
    doc_count: 1981,
    bg_count: 553,
    total_doc_count: 4669,
    total_bg_count: 1975,
    score: 47.38899434932384,
    normalizedScore: 0.8328439168064725,
    pValue: +d.pValue,
  }));

const kibanaSampleDataLogsGroups: SignificantItemGroup[] = [
  {
    id: 'the-group-id',
    group: kibanaSampleDataLogsSignificantTermsBase.map((d) => ({
      ...d,
      key: `${d.fieldName}:${d.fieldValue}`,
      type: 'keyword',
      docCount: 1981,
      pValue: +d.pValue,
    })),
    docCount: 792,
    pValue: 0.00974308761016614,
  },
];

describe('getTableItemAsKQL', () => {
  it('returns a KQL syntax for a significant item', () => {
    expect(getTableItemAsKQL(significantTerms[0])).toBe('user:Peter');
    expect(getTableItemAsKQL(significantTerms[1])).toBe('response_code:500');
    expect(getTableItemAsKQL(significantTerms[2])).toBe('url:home.php');
    expect(getTableItemAsKQL(significantTerms[3])).toBe('url:login.php');
    expect(getTableItemAsKQL(significantTerms[0])).toBe('user:"Peter"');
    expect(getTableItemAsKQL(significantTerms[1])).toBe('response_code:"500"');
    expect(getTableItemAsKQL(significantTerms[2])).toBe('url:"home.php"');
    expect(getTableItemAsKQL(significantTerms[3])).toBe('url:"login.php"');

    expect(getTableItemAsKQL(kibanaSampleDataLogsSignificantTerms[0])).toBe(
      'agent.keyword:"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.50 Safari/534.24"'
    );
  });
  it('returns a KQL syntax for a group of significant items', () => {

  it('returns a KQL syntax for a group of significant items for the artificial logs dataset', () => {
    const groupTableItems = getGroupTableItems(finalSignificantItemGroups);
    expect(getTableItemAsKQL(groupTableItems[0])).toBe('response_code:500 AND url:home.php');
    expect(getTableItemAsKQL(groupTableItems[1])).toBe('url:login.php AND response_code:500');
    expect(getTableItemAsKQL(groupTableItems[2])).toBe('user:Peter AND url:home.php');
    expect(getTableItemAsKQL(groupTableItems[0])).toBe('response_code:"500" AND url:"home.php"');
    expect(getTableItemAsKQL(groupTableItems[1])).toBe('url:"login.php" AND response_code:"500"');
    expect(getTableItemAsKQL(groupTableItems[2])).toBe('user:"Peter" AND url:"home.php"');
  });

  it('returns a KQL syntax for a group of significant items for the Kibana logs dataset', () => {
    const groupTableItems = getGroupTableItems(kibanaSampleDataLogsGroups);
    expect(getTableItemAsKQL(groupTableItems[0])).toBe(
      'agent.keyword:"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.50 Safari/534.24" AND clientip:"30.156.16.164" AND extension.keyword:"" AND geo.dest:"IN" AND geo.srcdest:"US:IN" AND host.keyword:"elastic-elastic-elastic.org" AND ip:"30.156.16.163" AND machine.os.keyword:"win xp" AND referer:"http://www.elastic-elastic-elastic.com/success/timothy-l-kopra" AND response.keyword:"404"'
    );
  });
});
@@ -5,19 +5,20 @@
 * 2.0.
 */

import { escapeKuery } from '@kbn/es-query';
import { escapeKuery, escapeQuotes } from '@kbn/es-query';
import { isSignificantItem, type SignificantItem } from '@kbn/ml-agg-utils';

import type { GroupTableItem } from './types';

export const getTableItemAsKQL = (tableItem: GroupTableItem | SignificantItem) => {
  if (isSignificantItem(tableItem)) {
    return `${escapeKuery(tableItem.fieldName)}:${escapeKuery(String(tableItem.fieldValue))}`;
    return `${escapeKuery(tableItem.fieldName)}:"${escapeQuotes(String(tableItem.fieldValue))}"`;
  }

  return [
    ...tableItem.groupItemsSortedByUniqueness.map(
      ({ fieldName, fieldValue }) => `${escapeKuery(fieldName)}:${escapeKuery(String(fieldValue))}`
      ({ fieldName, fieldValue }) =>
        `${escapeKuery(fieldName)}:"${escapeQuotes(String(fieldValue))}"`
    ),
  ].join(' AND ');
};
@@ -13,10 +13,11 @@ import type { PartialTheme } from '@elastic/charts';
import { Chart, BarSeries, ScaleType, Settings, Tooltip, TooltipType } from '@elastic/charts';
import { EuiLoadingChart, EuiTextColor } from '@elastic/eui';

import { LOG_RATE_ANALYSIS_HIGHLIGHT_COLOR } from '@kbn/aiops-utils';
import { FormattedMessage } from '@kbn/i18n-react';
import type { SignificantItemHistogramItem } from '@kbn/ml-agg-utils';

import { i18n } from '@kbn/i18n';

import { useAiopsAppContext } from '../../hooks/use_aiops_app_context';
import { useEuiTheme } from '../../hooks/use_eui_theme';

@@ -94,7 +95,9 @@ export const MiniHistogram: FC<MiniHistogramProps> = ({
  }

  const barColor = barColorOverride ? [barColorOverride] : undefined;
  const barHighlightColor = barHighlightColorOverride ? [barHighlightColorOverride] : ['orange'];
  const barHighlightColor = barHighlightColorOverride
    ? [barHighlightColorOverride]
    : [LOG_RATE_ANALYSIS_HIGHLIGHT_COLOR];

  return (
    <div css={cssChartSize}>
@@ -8,8 +8,9 @@
import { get } from 'lodash';

import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import dateMath from '@kbn/datemath';

import dateMath from '@kbn/datemath';
import { getExtendedChangePoint, type DocumentCountStatsChangePoint } from '@kbn/aiops-utils';
import { isPopulatedObject } from '@kbn/ml-is-populated-object';
import type { SignificantItem } from '@kbn/ml-agg-utils';
import type { Query } from '@kbn/es-query';

@@ -21,6 +22,7 @@ import type { GroupTableItem } from './components/log_rate_analysis_results_tabl
export interface DocumentCountStats {
  interval?: number;
  buckets?: { [key: string]: number };
  changePoint?: DocumentCountStatsChangePoint;
  timeRangeEarliest?: number;
  timeRangeLatest?: number;
  totalCount: number;

@@ -45,7 +47,8 @@ export interface DocumentStatsSearchStrategyParams {
export const getDocumentCountStatsRequest = (
  params: DocumentStatsSearchStrategyParams,
  randomSamplerWrapper?: RandomSamplerWrapper,
  skipAggs = false
  skipAggs = false,
  changePoints = false
) => {
  const {
    index,

@@ -88,6 +91,16 @@ export const getDocumentCountStatsRequest = (
        : {}),
      },
    },
    ...(changePoints
      ? {
          change_point_request: {
            // @ts-expect-error missing from ES spec
            change_point: {
              buckets_path: 'eventRate>_count',
            },
          },
        }
      : {}),
  };

  const aggs = randomSamplerWrapper ? randomSamplerWrapper.wrap(rawAggs) : rawAggs;
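For context, a sketch of the (trimmed) shape of the `change_point_request` aggregation response that the processing code in the next hunk relies on — it reads `bucket.key` and the first key of `type`. The concrete values here are invented:

```ts
// Hypothetical, abbreviated aggregation response for illustration only.
const changePointAggResponseSketch = {
  change_point_request: {
    bucket: { key: '2022-06-07T06:40:00.000Z', doc_count: 99900 },
    type: { spike: { p_value: 1.5e-20, change_point: 66 } },
  },
};
```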
@@ -152,6 +165,18 @@ export const processDocumentCountStats = (
    []
  );

  const changePointRaw = get(
    randomSamplerWrapper && body.aggregations !== undefined
      ? randomSamplerWrapper.unwrap(body.aggregations)
      : body.aggregations,
    ['change_point_request']
  );

  const changePointBase =
    changePointRaw && changePointRaw.bucket && Object.keys(changePointRaw.type).length > 0
      ? { key: Date.parse(changePointRaw.bucket.key), type: Object.keys(changePointRaw.type)[0] }
      : undefined;

  const buckets = dataByTimeBucket.reduce<Record<string, number>>((acc, cur) => {
    acc[cur.key] = cur.doc_count;
    return acc;

@@ -168,5 +193,13 @@ export const processDocumentCountStats = (
    timeRangeLatest: params.latest,
    totalCount,
    lastDocTimeStampMs,
    ...(changePointBase
      ? {
          changePoint: {
            ...changePointBase,
            ...getExtendedChangePoint(buckets, changePointBase?.key),
          },
        }
      : {}),
  };
};
@@ -115,7 +115,9 @@ export function useDocumentCountStats<TParams extends DocumentStatsSearchStrateg
        {
          params: getDocumentCountStatsRequest(
            { ...searchParams, trackTotalHits: false },
            randomSamplerWrapper
            randomSamplerWrapper,
            false,
            searchParamsCompare === undefined
          ),
        },
        { abortSignal: abortCtrl.current.signal }
@@ -18,7 +18,7 @@ import type {
  SignificantItemDuplicateGroup,
  ItemSet,
  FetchFrequentItemSetsResponse,
} from '@kbn/aiops-utils/types';
} from '@kbn/aiops-utils/log_rate_analysis/types';

import { RANDOM_SAMPLER_SEED, LOG_RATE_ANALYSIS_SETTINGS } from '../../../../common/constants';

@@ -12,7 +12,10 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import type { Logger } from '@kbn/logging';
import type { FieldValuePair, SignificantItem } from '@kbn/ml-agg-utils';
import type { FetchFrequentItemSetsResponse, ItemSet } from '@kbn/aiops-utils/types';
import type {
  FetchFrequentItemSetsResponse,
  ItemSet,
} from '@kbn/aiops-utils/log_rate_analysis/types';
import { isPopulatedObject } from '@kbn/ml-is-populated-object';

import type { AiopsLogRateAnalysisSchema } from '../../../../common/api/log_rate_analysis/schema';
@@ -6,7 +6,7 @@
 */

import type { SignificantItemGroup } from '@kbn/ml-agg-utils';
import type { FieldValuePairCounts } from '@kbn/aiops-utils/types';
import type { FieldValuePairCounts } from '@kbn/aiops-utils/log_rate_analysis/types';

/**
 * Get a nested record of field/value pairs with counts

@@ -8,7 +8,7 @@
import { uniqWith, isEqual } from 'lodash';

import type { SignificantItemGroup } from '@kbn/ml-agg-utils';
import type { SignificantItemDuplicateGroup } from '@kbn/aiops-utils/types';
import type { SignificantItemDuplicateGroup } from '@kbn/aiops-utils/log_rate_analysis/types';

export function getGroupsWithReaddedDuplicates(
  groups: SignificantItemGroup[],

@@ -6,7 +6,7 @@
 */

import type { SignificantItemGroup } from '@kbn/ml-agg-utils';
import type { FieldValuePairCounts } from '@kbn/aiops-utils/types';
import type { FieldValuePairCounts } from '@kbn/aiops-utils/log_rate_analysis/types';

/**
 * Analyse duplicate field/value pairs in groups.

@@ -8,7 +8,7 @@
import { uniqBy } from 'lodash';

import type { SignificantItem, SignificantItemGroup } from '@kbn/ml-agg-utils';
import type { ItemSet } from '@kbn/aiops-utils/types';
import type { ItemSet } from '@kbn/aiops-utils/log_rate_analysis/types';

import { duplicateIdentifier } from './duplicate_identifier';
import { groupDuplicates } from './fetch_frequent_item_sets';

@@ -6,7 +6,7 @@
 */

import type { SignificantItem } from '@kbn/ml-agg-utils';
import type { ItemSet, SimpleHierarchicalTreeNode } from '@kbn/aiops-utils/types';
import type { ItemSet, SimpleHierarchicalTreeNode } from '@kbn/aiops-utils/log_rate_analysis/types';

import { getValueCounts } from './get_value_counts';
import { getValuesDescending } from './get_values_descending';

@@ -8,7 +8,7 @@
import { orderBy } from 'lodash';
import type { SignificantItemGroup } from '@kbn/ml-agg-utils';
import { stringHash } from '@kbn/ml-string-hash';
import type { SimpleHierarchicalTreeNode } from '@kbn/aiops-utils/types';
import type { SimpleHierarchicalTreeNode } from '@kbn/aiops-utils/log_rate_analysis/types';

/**
 * Get leaves from hierarchical tree.

@@ -5,7 +5,7 @@
 * 2.0.
 */

import type { ItemSet } from '@kbn/aiops-utils/types';
import type { ItemSet } from '@kbn/aiops-utils/log_rate_analysis/types';

export function getValueCounts(df: ItemSet[], field: string) {
  return df.reduce<Record<string, number>>((p, c) => {

@@ -5,7 +5,7 @@
 * 2.0.
 */

import type { ItemSet } from '@kbn/aiops-utils/types';
import type { ItemSet } from '@kbn/aiops-utils/log_rate_analysis/types';

import { getValueCounts } from './get_value_counts';

@@ -7,7 +7,7 @@

import { stringHash } from '@kbn/ml-string-hash';
import type { SignificantItem, SignificantItemGroup } from '@kbn/ml-agg-utils';
import type { SignificantItemDuplicateGroup } from '@kbn/aiops-utils/types';
import type { SignificantItemDuplicateGroup } from '@kbn/aiops-utils/log_rate_analysis/types';

export function transformSignificantItemToGroup(
  significantItem: SignificantItem,
@@ -15,7 +15,7 @@ import { DataView } from '@kbn/data-views-plugin/common';
import {
  LOG_RATE_ANALYSIS_TYPE,
  type LogRateAnalysisType,
} from '@kbn/aiops-utils/log_rate_analysis_type';
} from '@kbn/aiops-utils/log_rate_analysis/log_rate_analysis_type';
import { LogRateAnalysisContent, type LogRateAnalysisResultsData } from '@kbn/aiops-plugin/public';
import { Rule } from '@kbn/alerting-plugin/common';
import { TopAlert } from '@kbn/observability-plugin/public';

@@ -12,7 +12,7 @@ import { EuiFlexGroup, EuiFlexItem, EuiPanel, EuiTitle } from '@elastic/eui';
import {
  LOG_RATE_ANALYSIS_TYPE,
  type LogRateAnalysisType,
} from '@kbn/aiops-utils/log_rate_analysis_type';
} from '@kbn/aiops-utils/log_rate_analysis/log_rate_analysis_type';
import { LogRateAnalysisContent, type LogRateAnalysisResultsData } from '@kbn/aiops-plugin/public';
import { Rule } from '@kbn/alerting-plugin/common';
import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
@@ -69,8 +69,14 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) {
      );
      await aiops.logRateAnalysisPage.assertSearchPanelExists();

      await ml.testExecution.logTestStep('displays empty prompt');
      await aiops.logRateAnalysisPage.assertNoWindowParametersEmptyPromptExists();
      await ml.testExecution.logTestStep('displays prompt');
      if (testData.expected.prompt === 'empty') {
        await aiops.logRateAnalysisPage.assertNoWindowParametersEmptyPromptExists();
      } else if (testData.expected.prompt === 'change-point') {
        await aiops.logRateAnalysisPage.assertChangePointDetectedPromptExists();
      } else {
        throw new Error('Invalid prompt');
      }

      await ml.testExecution.logTestStep('clicks the document count chart to start analysis');
      await aiops.logRateAnalysisPage.clickDocumentCountChart(testData.chartClickCoordinates);
@@ -161,6 +161,7 @@ export const getArtificialLogDataViewTestData = ({
          wp: getWindowParameters(),
        },
      },
      prompt: 'change-point',
    },
  };
};

@@ -42,5 +42,6 @@ export const farequoteDataViewTestData: TestData = {
        },
      },
    },
    prompt: 'empty',
  },
};

@@ -187,5 +187,6 @@ export const farequoteDataViewTestDataWithQuery: TestData = {
        },
      },
    },
    prompt: 'change-point',
  },
};
@@ -6,6 +6,7 @@
 */

import { LOG_RATE_ANALYSIS_TYPE } from '@kbn/aiops-utils';
import { kibanaSampleDataLogsSignificantTermsBase } from '@kbn/aiops-test-utils/kibana_sample_data_logs/significant_terms';

import type { TestData } from '../../types';
@@ -21,10 +22,10 @@ export const kibanaLogsDataViewTestData: TestData = {
  fieldSelectorApplyAvailable: true,
  action: {
    type: 'LogPatternAnalysis',
    tableRowId: '1064853178',
    tableRowId: '822370508',
    expected: {
      queryBar:
        'clientip:30.156.16.164 AND host.keyword:elastic-elastic-elastic.org AND ip:30.156.16.163 AND response.keyword:404 AND machine.os.keyword:win xp AND geo.dest:IN AND geo.srcdest:US\\:IN',
        'clientip:"30.156.16.164" AND geo.dest:"IN" AND geo.srcdest:"US:IN" AND host.keyword:"elastic-elastic-elastic.org" AND response.keyword:"404" AND ip:"30.156.16.163" AND machine.os.keyword:"win xp" AND agent.keyword:"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.50 Safari/534.24" AND tags.keyword:"info" AND extension.keyword:""',
      totalDocCount: 100,
    },
  },
@@ -41,87 +42,36 @@ export const kibanaLogsDataViewTestData: TestData = {
    searchQueryLanguage: 'kuery',
    searchString: '',
    wp: {
      bMax: 1684368000000,
      bMin: 1682899200000,
      dMax: 1685491200000,
      dMin: 1684886400000,
      bMax: 1685059200000,
      bMin: 1683590400000,
      dMax: 1685232000000,
      dMin: 1685145600000,
    },
  },
},
analysisGroupsTable: [
  {
    group:
      '* clientip: 30.156.16.164* host.keyword: elastic-elastic-elastic.org* ip: 30.156.16.163* referer: http://www.elastic-elastic-elastic.com/success/timothy-l-kopra* response.keyword: 404Showing 5 out of 8 items. 8 items unique to this group.',
      '* clientip: 30.156.16.164* geo.dest: IN* geo.srcdest: US:IN* host.keyword: elastic-elastic-elastic.org* response.keyword: 404Showing 5 out of 11 items. 11 items unique to this group.',
    docCount: '100',
  },
],
filteredAnalysisGroupsTable: [
  {
    group:
      '* clientip: 30.156.16.164* host.keyword: elastic-elastic-elastic.org* ip: 30.156.16.163* response.keyword: 404* machine.os.keyword: win xpShowing 5 out of 7 items. 7 items unique to this group.',
      '* clientip: 30.156.16.164* geo.dest: IN* geo.srcdest: US:IN* host.keyword: elastic-elastic-elastic.org* response.keyword: 404Showing 5 out of 10 items. 10 items unique to this group.',
    docCount: '100',
  },
],
analysisTable: [
  {
    fieldName: 'clientip',
    fieldValue: '30.156.16.164',
    logRate: 'Chart type:bar chart',
    pValue: '3.10e-13',
    impact: 'High',
  },
  {
    fieldName: 'geo.dest',
    fieldValue: 'IN',
    logRate: 'Chart type:bar chart',
    pValue: '0.000716',
    impact: 'Medium',
  },
  {
    fieldName: 'geo.srcdest',
    fieldValue: 'US:IN',
    logRate: 'Chart type:bar chart',
    pValue: '0.000716',
    impact: 'Medium',
  },
  {
    fieldName: 'host.keyword',
    fieldValue: 'elastic-elastic-elastic.org',
    logRate: 'Chart type:bar chart',
    pValue: '7.14e-9',
    impact: 'High',
  },
  {
    fieldName: 'ip',
    fieldValue: '30.156.16.163',
    logRate: 'Chart type:bar chart',
    pValue: '3.28e-13',
    impact: 'High',
  },
  {
    fieldName: 'machine.os.keyword',
    fieldValue: 'win xp',
    logRate: 'Chart type:bar chart',
    pValue: '0.0000997',
    impact: 'Medium',
  },
  {
    fieldName: 'referer',
    fieldValue: 'http://www.elastic-elastic-elastic.com/success/timothy-l-kopra',
    logRate: 'Chart type:bar chart',
    pValue: '4.74e-13',
    impact: 'High',
  },
  {
    fieldName: 'response.keyword',
    fieldValue: '404',
    logRate: 'Chart type:bar chart',
    pValue: '0.00000604',
    impact: 'Medium',
  },
],
analysisTable: kibanaSampleDataLogsSignificantTermsBase.map((d) => ({
  ...d,
  logRate: 'Chart type:bar chart',
  impact: 'High',
})),
fieldSelectorPopover: [
  'agent.keyword',
  'clientip',
  'extension.keyword',
  'geo.dest',
  'geo.srcdest',
  'host.keyword',
@@ -129,6 +79,8 @@ export const kibanaLogsDataViewTestData: TestData = {
      'machine.os.keyword',
      'referer',
      'response.keyword',
      'tags.keyword',
    ],
    prompt: 'change-point',
  },
};
@@ -25,6 +25,7 @@ interface TestDataExpectedWithSampleProbability {
  appState: object;
  sampleProbabilityFormatted: string;
  fieldSelectorPopover: string[];
  prompt: 'empty' | 'change-point';
}

export function isTestDataExpectedWithSampleProbability(

@@ -47,6 +48,7 @@ interface TestDataExpectedWithoutSampleProbability {
    impact: string;
  }>;
  fieldSelectorPopover: string[];
  prompt: 'empty' | 'change-point';
}

export interface TestData {
@@ -108,6 +108,10 @@ export function LogRateAnalysisPageProvider({ getService, getPageObject }: FtrPr
      await testSubjects.existOrFail(`aiopsSearchPanel`);
    },

    async assertChangePointDetectedPromptExists() {
      await testSubjects.existOrFail(`aiopsChangePointDetectedPrompt`);
    },

    async assertNoWindowParametersEmptyPromptExists() {
      await testSubjects.existOrFail(`aiopsNoWindowParametersEmptyPrompt`);
    },