mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
[Security solution] Fix a couple spots where we had _source still, use fields api (#162278)
This commit is contained in:
parent
5d25e4d225
commit
3be8f627d7
14 changed files with 77 additions and 1702 deletions
|
@ -19,8 +19,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '100',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['100'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -34,8 +34,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '200',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['200'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -49,8 +49,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '300',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['300'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -64,8 +64,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '400',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['400'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -79,8 +79,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '500',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['500'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -94,8 +94,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '600',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['600'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -109,8 +109,8 @@ export const mockAlertCountByRuleResult = {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '700',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['700'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
@ -117,6 +117,8 @@ export const useAlertCountByRuleByStatus: UseAlertCountByRuleByStatus = ({
|
|||
return { items, isLoading, updatedAt };
|
||||
};
|
||||
|
||||
export const KIBANA_RULE_ID = 'kibana.alert.rule.uuid';
|
||||
|
||||
export const buildRuleAlertsByEntityQuery = ({
|
||||
additionalFilters = [],
|
||||
from,
|
||||
|
@ -133,6 +135,8 @@ export const buildRuleAlertsByEntityQuery = ({
|
|||
value: string;
|
||||
}) => ({
|
||||
size: 0,
|
||||
_source: false,
|
||||
fields: [KIBANA_RULE_ID],
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
|
@ -145,11 +149,15 @@ export const buildRuleAlertsByEntityQuery = ({
|
|||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
terms: {
|
||||
'kibana.alert.workflow_status': statuses,
|
||||
},
|
||||
},
|
||||
...(statuses?.length > 0
|
||||
? [
|
||||
{
|
||||
terms: {
|
||||
'kibana.alert.workflow_status': statuses,
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
term: {
|
||||
[field]: value,
|
||||
|
@ -167,7 +175,8 @@ export const buildRuleAlertsByEntityQuery = ({
|
|||
aggs: {
|
||||
ruleUuid: {
|
||||
top_hits: {
|
||||
_source: ['kibana.alert.rule.uuid'],
|
||||
_source: false,
|
||||
fields: [KIBANA_RULE_ID],
|
||||
size: 1,
|
||||
},
|
||||
},
|
||||
|
@ -181,8 +190,8 @@ interface RuleUuidData extends GenericBuckets {
|
|||
hits: {
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': string;
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': string[];
|
||||
};
|
||||
}
|
||||
];
|
||||
|
@ -201,7 +210,8 @@ const parseAlertCountByRuleItems = (
|
|||
): AlertCountByRuleByStatusItem[] => {
|
||||
const buckets = aggregations?.[ALERTS_BY_RULE_AGG].buckets ?? [];
|
||||
return buckets.map<AlertCountByRuleByStatusItem>((bucket) => {
|
||||
const uuid = bucket.ruleUuid.hits?.hits[0]?._source['kibana.alert.rule.uuid'] || '';
|
||||
const uuid =
|
||||
firstNonNullValue(bucket.ruleUuid.hits?.hits[0]?.fields['kibana.alert.rule.uuid']) ?? '';
|
||||
return {
|
||||
ruleName: firstNonNullValue(bucket.key) ?? '-',
|
||||
count: bucket.doc_count,
|
||||
|
|
|
@ -125,7 +125,7 @@ function formatResultData(
|
|||
): AnomaliesCount[] {
|
||||
const unsortedAnomalies: AnomaliesCount[] = anomaliesJobs.map((job) => {
|
||||
const bucket = buckets.find(({ key }) => key === job?.id);
|
||||
const hasUserName = has("entity.hits.hits[0]._source['user.name']", bucket);
|
||||
const hasUserName = has("entity.hits.hits[0].fields['user.name']", bucket);
|
||||
|
||||
return {
|
||||
name: job?.customSettings?.security_app_display_name ?? job.id,
|
||||
|
|
|
@ -1,259 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { EqlSearchStrategyResponse } from '@kbn/data-plugin/common';
|
||||
import type { Source } from './types';
|
||||
import type { EqlSearchResponse } from '../../../../common/detection_engine/types';
|
||||
import type { Connection } from '@elastic/elasticsearch';
|
||||
|
||||
export const getMockEqlResponse = (): EqlSearchStrategyResponse<EqlSearchResponse<Source>> => ({
|
||||
id: 'some-id',
|
||||
rawResponse: {
|
||||
body: {
|
||||
hits: {
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '1',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:16:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '2',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:50:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '3',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:06:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '4',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:15:54.368707900Z',
|
||||
},
|
||||
},
|
||||
],
|
||||
total: {
|
||||
value: 4,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
took: 300,
|
||||
timed_out: false,
|
||||
},
|
||||
headers: {},
|
||||
warnings: [],
|
||||
meta: {
|
||||
aborted: false,
|
||||
attempts: 0,
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
connection: {} as Connection,
|
||||
request: {
|
||||
params: {
|
||||
body: JSON.stringify({
|
||||
filter: {
|
||||
range: {
|
||||
'@timestamp': {
|
||||
gte: '2020-10-07T00:46:12.414Z',
|
||||
lte: '2020-10-07T01:46:12.414Z',
|
||||
format: 'strict_date_optional_time',
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
method: 'GET',
|
||||
path: '/_eql/search/',
|
||||
querystring: 'some query string',
|
||||
},
|
||||
options: {},
|
||||
id: '',
|
||||
},
|
||||
},
|
||||
statusCode: 200,
|
||||
},
|
||||
});
|
||||
|
||||
export const getMockEndgameEqlResponse = (): EqlSearchStrategyResponse<
|
||||
EqlSearchResponse<Source>
|
||||
> => ({
|
||||
id: 'some-id',
|
||||
rawResponse: {
|
||||
body: {
|
||||
hits: {
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '1',
|
||||
_source: {
|
||||
'@timestamp': 1601824614000,
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '2',
|
||||
_source: {
|
||||
'@timestamp': 1601826654368,
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '3',
|
||||
_source: {
|
||||
'@timestamp': 1601824014368,
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '4',
|
||||
_source: {
|
||||
'@timestamp': 1601824554368,
|
||||
},
|
||||
},
|
||||
],
|
||||
total: {
|
||||
value: 4,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
took: 300,
|
||||
timed_out: false,
|
||||
},
|
||||
headers: {},
|
||||
warnings: [],
|
||||
meta: {
|
||||
aborted: false,
|
||||
attempts: 0,
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
connection: {} as Connection,
|
||||
request: {
|
||||
params: {
|
||||
body: JSON.stringify({
|
||||
filter: {
|
||||
range: {
|
||||
'@timestamp': {
|
||||
gte: '2020-10-07T00:46:12.414Z',
|
||||
lte: '2020-10-07T01:46:12.414Z',
|
||||
format: 'strict_date_optional_time',
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
method: 'GET',
|
||||
path: '/_eql/search/',
|
||||
querystring: 'some query string',
|
||||
},
|
||||
options: {},
|
||||
id: '',
|
||||
},
|
||||
},
|
||||
statusCode: 200,
|
||||
},
|
||||
});
|
||||
|
||||
export const getMockEqlSequenceResponse = (): EqlSearchStrategyResponse<
|
||||
EqlSearchResponse<Source>
|
||||
> => ({
|
||||
id: 'some-id',
|
||||
rawResponse: {
|
||||
body: {
|
||||
hits: {
|
||||
sequences: [
|
||||
{
|
||||
join_keys: [],
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '1',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:16:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '2',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:50:54.368707900Z',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
join_keys: [],
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '3',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:06:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '4',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:15:54.368707900Z',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
total: {
|
||||
value: 4,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
took: 300,
|
||||
timed_out: false,
|
||||
},
|
||||
headers: {},
|
||||
warnings: [],
|
||||
meta: {
|
||||
aborted: false,
|
||||
attempts: 0,
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
connection: {} as Connection,
|
||||
request: {
|
||||
params: {
|
||||
body: JSON.stringify({
|
||||
filter: {
|
||||
range: {
|
||||
'@timestamp': {
|
||||
gte: '2020-10-07T00:46:12.414Z',
|
||||
lte: '2020-10-07T01:46:12.414Z',
|
||||
format: 'strict_date_optional_time',
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
method: 'GET',
|
||||
path: '/_eql/search/',
|
||||
querystring: 'some query string',
|
||||
},
|
||||
options: {},
|
||||
id: '',
|
||||
},
|
||||
},
|
||||
statusCode: 200,
|
||||
},
|
||||
});
|
|
@ -1,781 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import moment from 'moment';
|
||||
|
||||
import type { EqlSearchStrategyResponse } from '@kbn/data-plugin/common';
|
||||
import type { Source } from './types';
|
||||
import type { EqlSearchResponse } from '../../../../common/detection_engine/types';
|
||||
import type { inputsModel } from '../../store';
|
||||
|
||||
import {
|
||||
calculateBucketForHour,
|
||||
calculateBucketForDay,
|
||||
getEqlAggsData,
|
||||
createIntervalArray,
|
||||
getInterval,
|
||||
formatInspect,
|
||||
getEventsToBucket,
|
||||
} from './helpers';
|
||||
import {
|
||||
getMockEndgameEqlResponse,
|
||||
getMockEqlResponse,
|
||||
getMockEqlSequenceResponse,
|
||||
} from './eql_search_response.mock';
|
||||
|
||||
describe('eql/helpers', () => {
|
||||
describe('calculateBucketForHour', () => {
|
||||
test('returns 2 if the difference in times is 2 minutes', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T05:56:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(2);
|
||||
});
|
||||
|
||||
test('returns 10 if the difference in times is 8-10 minutes', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T05:48:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(10);
|
||||
});
|
||||
|
||||
test('returns 16 if the difference in times is 10-15 minutes', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T05:42:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(16);
|
||||
});
|
||||
|
||||
test('returns 60 if the difference in times is 58-60 minutes', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T04:58:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(60);
|
||||
});
|
||||
|
||||
test('returns exact time difference if it is a multiple of 2', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T05:37:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(20);
|
||||
});
|
||||
|
||||
test('returns 0 if times are equal', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T05:57:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(0);
|
||||
});
|
||||
|
||||
test('returns 2 if the difference in times is 2 minutes but arguments are flipped', () => {
|
||||
const diff = calculateBucketForHour(
|
||||
Date.parse('2020-02-20T05:57:54.037Z'),
|
||||
Date.parse('2020-02-20T05:56:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateBucketForDay', () => {
|
||||
test('returns 0 if two dates are equivalent', () => {
|
||||
const diff = calculateBucketForDay(
|
||||
Date.parse('2020-02-20T05:57:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(0);
|
||||
});
|
||||
|
||||
test('returns 1 if the difference in times is 60 minutes', () => {
|
||||
const diff = calculateBucketForDay(
|
||||
Date.parse('2020-02-20T05:17:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(1);
|
||||
});
|
||||
|
||||
test('returns 2 if the difference in times is 60-120 minutes', () => {
|
||||
const diff = calculateBucketForDay(
|
||||
Date.parse('2020-02-20T03:57:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(2);
|
||||
});
|
||||
|
||||
test('returns 3 if the difference in times is 120-180 minutes', () => {
|
||||
const diff = calculateBucketForDay(
|
||||
Date.parse('2020-02-20T03:56:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(3);
|
||||
});
|
||||
|
||||
test('returns 4 if the difference in times is 180-240 minutes', () => {
|
||||
const diff = calculateBucketForDay(
|
||||
Date.parse('2020-02-20T02:15:54.037Z'),
|
||||
Date.parse('2020-02-20T05:57:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(4);
|
||||
});
|
||||
|
||||
test('returns 2 if the difference in times is 60-120 minutes but arguments are flipped', () => {
|
||||
const diff = calculateBucketForDay(
|
||||
Date.parse('2020-02-20T05:57:54.037Z'),
|
||||
Date.parse('2020-02-20T03:59:54.037Z')
|
||||
);
|
||||
|
||||
expect(diff).toEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEqlAggsData', () => {
|
||||
describe('non-sequence', () => {
|
||||
// NOTE: We previously expected @timestamp to be a string, however,
|
||||
// date can also be a number (like for endgame-*)
|
||||
test('it works when @timestamp is a number', () => {
|
||||
const mockResponse = getMockEndgameEqlResponse();
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
mockResponse,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
false
|
||||
);
|
||||
|
||||
const date1 = moment(aggs.data[0].x);
|
||||
const date2 = moment(aggs.data[1].x);
|
||||
// This will be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
expect(diff).toEqual(120000);
|
||||
expect(aggs.data).toHaveLength(31);
|
||||
expect(aggs.data).toEqual([
|
||||
{ g: 'hits', x: 1601827200368, y: 0 },
|
||||
{ g: 'hits', x: 1601827080368, y: 0 },
|
||||
{ g: 'hits', x: 1601826960368, y: 0 },
|
||||
{ g: 'hits', x: 1601826840368, y: 0 },
|
||||
{ g: 'hits', x: 1601826720368, y: 0 },
|
||||
{ g: 'hits', x: 1601826600368, y: 1 },
|
||||
{ g: 'hits', x: 1601826480368, y: 0 },
|
||||
{ g: 'hits', x: 1601826360368, y: 0 },
|
||||
{ g: 'hits', x: 1601826240368, y: 0 },
|
||||
{ g: 'hits', x: 1601826120368, y: 0 },
|
||||
{ g: 'hits', x: 1601826000368, y: 0 },
|
||||
{ g: 'hits', x: 1601825880368, y: 0 },
|
||||
{ g: 'hits', x: 1601825760368, y: 0 },
|
||||
{ g: 'hits', x: 1601825640368, y: 0 },
|
||||
{ g: 'hits', x: 1601825520368, y: 0 },
|
||||
{ g: 'hits', x: 1601825400368, y: 0 },
|
||||
{ g: 'hits', x: 1601825280368, y: 0 },
|
||||
{ g: 'hits', x: 1601825160368, y: 0 },
|
||||
{ g: 'hits', x: 1601825040368, y: 0 },
|
||||
{ g: 'hits', x: 1601824920368, y: 0 },
|
||||
{ g: 'hits', x: 1601824800368, y: 0 },
|
||||
{ g: 'hits', x: 1601824680368, y: 0 },
|
||||
{ g: 'hits', x: 1601824560368, y: 2 },
|
||||
{ g: 'hits', x: 1601824440368, y: 0 },
|
||||
{ g: 'hits', x: 1601824320368, y: 0 },
|
||||
{ g: 'hits', x: 1601824200368, y: 0 },
|
||||
{ g: 'hits', x: 1601824080368, y: 0 },
|
||||
{ g: 'hits', x: 1601823960368, y: 1 },
|
||||
{ g: 'hits', x: 1601823840368, y: 0 },
|
||||
{ g: 'hits', x: 1601823720368, y: 0 },
|
||||
{ g: 'hits', x: 1601823600368, y: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
test('it returns results bucketed into 2 min intervals when range is "h"', () => {
|
||||
const mockResponse = getMockEqlResponse();
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
mockResponse,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
false
|
||||
);
|
||||
|
||||
const date1 = moment(aggs.data[0].x);
|
||||
const date2 = moment(aggs.data[1].x);
|
||||
// This will be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
expect(diff).toEqual(120000);
|
||||
expect(aggs.data).toHaveLength(31);
|
||||
expect(aggs.data).toEqual([
|
||||
{ g: 'hits', x: 1601827200368, y: 0 },
|
||||
{ g: 'hits', x: 1601827080368, y: 0 },
|
||||
{ g: 'hits', x: 1601826960368, y: 0 },
|
||||
{ g: 'hits', x: 1601826840368, y: 0 },
|
||||
{ g: 'hits', x: 1601826720368, y: 0 },
|
||||
{ g: 'hits', x: 1601826600368, y: 1 },
|
||||
{ g: 'hits', x: 1601826480368, y: 0 },
|
||||
{ g: 'hits', x: 1601826360368, y: 0 },
|
||||
{ g: 'hits', x: 1601826240368, y: 0 },
|
||||
{ g: 'hits', x: 1601826120368, y: 0 },
|
||||
{ g: 'hits', x: 1601826000368, y: 0 },
|
||||
{ g: 'hits', x: 1601825880368, y: 0 },
|
||||
{ g: 'hits', x: 1601825760368, y: 0 },
|
||||
{ g: 'hits', x: 1601825640368, y: 0 },
|
||||
{ g: 'hits', x: 1601825520368, y: 0 },
|
||||
{ g: 'hits', x: 1601825400368, y: 0 },
|
||||
{ g: 'hits', x: 1601825280368, y: 0 },
|
||||
{ g: 'hits', x: 1601825160368, y: 0 },
|
||||
{ g: 'hits', x: 1601825040368, y: 0 },
|
||||
{ g: 'hits', x: 1601824920368, y: 0 },
|
||||
{ g: 'hits', x: 1601824800368, y: 0 },
|
||||
{ g: 'hits', x: 1601824680368, y: 0 },
|
||||
{ g: 'hits', x: 1601824560368, y: 2 },
|
||||
{ g: 'hits', x: 1601824440368, y: 0 },
|
||||
{ g: 'hits', x: 1601824320368, y: 0 },
|
||||
{ g: 'hits', x: 1601824200368, y: 0 },
|
||||
{ g: 'hits', x: 1601824080368, y: 0 },
|
||||
{ g: 'hits', x: 1601823960368, y: 1 },
|
||||
{ g: 'hits', x: 1601823840368, y: 0 },
|
||||
{ g: 'hits', x: 1601823720368, y: 0 },
|
||||
{ g: 'hits', x: 1601823600368, y: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
test('it returns results bucketed into 1 hour intervals when range is "d"', () => {
|
||||
const mockResponse = getMockEqlResponse();
|
||||
const response: EqlSearchStrategyResponse<EqlSearchResponse<Source>> = {
|
||||
...mockResponse,
|
||||
rawResponse: {
|
||||
...mockResponse.rawResponse,
|
||||
body: {
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
timed_out: false,
|
||||
took: 15,
|
||||
hits: {
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '1',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:16:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '2',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T05:50:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '3',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T18:06:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '4',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T23:15:54.368707900Z',
|
||||
},
|
||||
},
|
||||
],
|
||||
total: {
|
||||
value: 4,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
response,
|
||||
'd',
|
||||
'2020-10-04T23:50:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
false
|
||||
);
|
||||
const date1 = moment(aggs.data[0].x);
|
||||
const date2 = moment(aggs.data[1].x);
|
||||
// This'll be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
expect(diff).toEqual(3600000);
|
||||
expect(aggs.data).toHaveLength(25);
|
||||
expect(aggs.data).toEqual([
|
||||
{ g: 'hits', x: 1601855400368, y: 0 },
|
||||
{ g: 'hits', x: 1601851800368, y: 1 },
|
||||
{ g: 'hits', x: 1601848200368, y: 0 },
|
||||
{ g: 'hits', x: 1601844600368, y: 0 },
|
||||
{ g: 'hits', x: 1601841000368, y: 0 },
|
||||
{ g: 'hits', x: 1601837400368, y: 0 },
|
||||
{ g: 'hits', x: 1601833800368, y: 1 },
|
||||
{ g: 'hits', x: 1601830200368, y: 0 },
|
||||
{ g: 'hits', x: 1601826600368, y: 0 },
|
||||
{ g: 'hits', x: 1601823000368, y: 1 },
|
||||
{ g: 'hits', x: 1601819400368, y: 0 },
|
||||
{ g: 'hits', x: 1601815800368, y: 0 },
|
||||
{ g: 'hits', x: 1601812200368, y: 0 },
|
||||
{ g: 'hits', x: 1601808600368, y: 0 },
|
||||
{ g: 'hits', x: 1601805000368, y: 0 },
|
||||
{ g: 'hits', x: 1601801400368, y: 0 },
|
||||
{ g: 'hits', x: 1601797800368, y: 0 },
|
||||
{ g: 'hits', x: 1601794200368, y: 0 },
|
||||
{ g: 'hits', x: 1601790600368, y: 1 },
|
||||
{ g: 'hits', x: 1601787000368, y: 0 },
|
||||
{ g: 'hits', x: 1601783400368, y: 0 },
|
||||
{ g: 'hits', x: 1601779800368, y: 0 },
|
||||
{ g: 'hits', x: 1601776200368, y: 0 },
|
||||
{ g: 'hits', x: 1601772600368, y: 0 },
|
||||
{ g: 'hits', x: 1601769000368, y: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
test('it correctly returns total hits', () => {
|
||||
const mockResponse = getMockEqlResponse();
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
mockResponse,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
false
|
||||
);
|
||||
|
||||
expect(aggs.totalCount).toEqual(4);
|
||||
});
|
||||
|
||||
test('it returns array with each item having a "total" of 0 if response returns no hits', () => {
|
||||
const mockResponse = getMockEqlResponse();
|
||||
const response: EqlSearchStrategyResponse<EqlSearchResponse<Source>> = {
|
||||
...mockResponse,
|
||||
rawResponse: {
|
||||
...mockResponse.rawResponse,
|
||||
body: {
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
timed_out: false,
|
||||
took: 15,
|
||||
hits: {
|
||||
total: {
|
||||
value: 0,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
response,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
false
|
||||
);
|
||||
|
||||
expect(aggs.data.every(({ y }) => y === 0)).toBeTruthy();
|
||||
expect(aggs.totalCount).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sequence', () => {
|
||||
test('it returns results bucketed into 2 min intervals when range is "h"', () => {
|
||||
const mockResponse = getMockEqlSequenceResponse();
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
mockResponse,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
true
|
||||
);
|
||||
|
||||
const date1 = moment(aggs.data[0].x);
|
||||
const date2 = moment(aggs.data[1].x);
|
||||
// This will be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
expect(diff).toEqual(120000);
|
||||
expect(aggs.data).toHaveLength(31);
|
||||
expect(aggs.data).toEqual([
|
||||
{ g: 'hits', x: 1601827200368, y: 0 },
|
||||
{ g: 'hits', x: 1601827080368, y: 0 },
|
||||
{ g: 'hits', x: 1601826960368, y: 0 },
|
||||
{ g: 'hits', x: 1601826840368, y: 0 },
|
||||
{ g: 'hits', x: 1601826720368, y: 0 },
|
||||
{ g: 'hits', x: 1601826600368, y: 1 },
|
||||
{ g: 'hits', x: 1601826480368, y: 0 },
|
||||
{ g: 'hits', x: 1601826360368, y: 0 },
|
||||
{ g: 'hits', x: 1601826240368, y: 0 },
|
||||
{ g: 'hits', x: 1601826120368, y: 0 },
|
||||
{ g: 'hits', x: 1601826000368, y: 0 },
|
||||
{ g: 'hits', x: 1601825880368, y: 0 },
|
||||
{ g: 'hits', x: 1601825760368, y: 0 },
|
||||
{ g: 'hits', x: 1601825640368, y: 0 },
|
||||
{ g: 'hits', x: 1601825520368, y: 0 },
|
||||
{ g: 'hits', x: 1601825400368, y: 0 },
|
||||
{ g: 'hits', x: 1601825280368, y: 0 },
|
||||
{ g: 'hits', x: 1601825160368, y: 0 },
|
||||
{ g: 'hits', x: 1601825040368, y: 0 },
|
||||
{ g: 'hits', x: 1601824920368, y: 0 },
|
||||
{ g: 'hits', x: 1601824800368, y: 0 },
|
||||
{ g: 'hits', x: 1601824680368, y: 0 },
|
||||
{ g: 'hits', x: 1601824560368, y: 1 },
|
||||
{ g: 'hits', x: 1601824440368, y: 0 },
|
||||
{ g: 'hits', x: 1601824320368, y: 0 },
|
||||
{ g: 'hits', x: 1601824200368, y: 0 },
|
||||
{ g: 'hits', x: 1601824080368, y: 0 },
|
||||
{ g: 'hits', x: 1601823960368, y: 0 },
|
||||
{ g: 'hits', x: 1601823840368, y: 0 },
|
||||
{ g: 'hits', x: 1601823720368, y: 0 },
|
||||
{ g: 'hits', x: 1601823600368, y: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
test('it returns results bucketed into 1 hour intervals when range is "d"', () => {
|
||||
const mockResponse = getMockEqlSequenceResponse();
|
||||
const response: EqlSearchStrategyResponse<EqlSearchResponse<Source>> = {
|
||||
...mockResponse,
|
||||
rawResponse: {
|
||||
...mockResponse.rawResponse,
|
||||
body: {
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
timed_out: false,
|
||||
took: 15,
|
||||
hits: {
|
||||
sequences: [
|
||||
{
|
||||
join_keys: [],
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '1',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T15:16:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '2',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T05:50:54.368707900Z',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
join_keys: [],
|
||||
events: [
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '3',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T18:06:54.368707900Z',
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: 'index',
|
||||
_id: '4',
|
||||
_source: {
|
||||
'@timestamp': '2020-10-04T23:15:54.368707900Z',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
total: {
|
||||
value: 4,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
response,
|
||||
'd',
|
||||
'2020-10-04T23:50:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
true
|
||||
);
|
||||
const date1 = moment(aggs.data[0].x);
|
||||
const date2 = moment(aggs.data[1].x);
|
||||
// This'll be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
expect(diff).toEqual(3600000);
|
||||
expect(aggs.data).toHaveLength(25);
|
||||
expect(aggs.data).toEqual([
|
||||
{ g: 'hits', x: 1601855400368, y: 0 },
|
||||
{ g: 'hits', x: 1601851800368, y: 1 },
|
||||
{ g: 'hits', x: 1601848200368, y: 0 },
|
||||
{ g: 'hits', x: 1601844600368, y: 0 },
|
||||
{ g: 'hits', x: 1601841000368, y: 0 },
|
||||
{ g: 'hits', x: 1601837400368, y: 0 },
|
||||
{ g: 'hits', x: 1601833800368, y: 0 },
|
||||
{ g: 'hits', x: 1601830200368, y: 0 },
|
||||
{ g: 'hits', x: 1601826600368, y: 0 },
|
||||
{ g: 'hits', x: 1601823000368, y: 0 },
|
||||
{ g: 'hits', x: 1601819400368, y: 0 },
|
||||
{ g: 'hits', x: 1601815800368, y: 0 },
|
||||
{ g: 'hits', x: 1601812200368, y: 0 },
|
||||
{ g: 'hits', x: 1601808600368, y: 0 },
|
||||
{ g: 'hits', x: 1601805000368, y: 0 },
|
||||
{ g: 'hits', x: 1601801400368, y: 0 },
|
||||
{ g: 'hits', x: 1601797800368, y: 0 },
|
||||
{ g: 'hits', x: 1601794200368, y: 0 },
|
||||
{ g: 'hits', x: 1601790600368, y: 1 },
|
||||
{ g: 'hits', x: 1601787000368, y: 0 },
|
||||
{ g: 'hits', x: 1601783400368, y: 0 },
|
||||
{ g: 'hits', x: 1601779800368, y: 0 },
|
||||
{ g: 'hits', x: 1601776200368, y: 0 },
|
||||
{ g: 'hits', x: 1601772600368, y: 0 },
|
||||
{ g: 'hits', x: 1601769000368, y: 0 },
|
||||
]);
|
||||
});
|
||||
|
||||
test('it correctly returns total hits', () => {
|
||||
const mockResponse = getMockEqlSequenceResponse();
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
mockResponse,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
true
|
||||
);
|
||||
|
||||
expect(aggs.totalCount).toEqual(4);
|
||||
});
|
||||
|
||||
test('it returns array with each item having a "total" of 0 if response returns no hits', () => {
|
||||
const mockResponse = getMockEqlSequenceResponse();
|
||||
const response: EqlSearchStrategyResponse<EqlSearchResponse<Source>> = {
|
||||
...mockResponse,
|
||||
rawResponse: {
|
||||
...mockResponse.rawResponse,
|
||||
body: {
|
||||
is_partial: false,
|
||||
is_running: false,
|
||||
timed_out: false,
|
||||
took: 15,
|
||||
hits: {
|
||||
total: {
|
||||
value: 0,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const aggs = getEqlAggsData(
|
||||
response,
|
||||
'h',
|
||||
'2020-10-04T16:00:00.368707900Z',
|
||||
jest.fn() as inputsModel.Refetch,
|
||||
['foo-*'],
|
||||
true
|
||||
);
|
||||
|
||||
expect(aggs.data.every(({ y }) => y === 0)).toBeTruthy();
|
||||
expect(aggs.totalCount).toEqual(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('createIntervalArray', () => {
|
||||
test('returns array of 12 numbers from 0 to 60 by 5', () => {
|
||||
const arrayOfNumbers = createIntervalArray(0, 12, 5);
|
||||
expect(arrayOfNumbers).toEqual([0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60]);
|
||||
});
|
||||
|
||||
test('returns array of 5 numbers from 0 to 10 by 2', () => {
|
||||
const arrayOfNumbers = createIntervalArray(0, 5, 2);
|
||||
expect(arrayOfNumbers).toEqual([0, 2, 4, 6, 8, 10]);
|
||||
});
|
||||
|
||||
test('returns array of numbers from start param to end param if multiplier is 1', () => {
|
||||
const arrayOfNumbers = createIntervalArray(0, 12, 1);
|
||||
expect(arrayOfNumbers).toEqual([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getInterval', () => {
|
||||
test('returns object with 2 minute interval timestamps if range is "h"', () => {
|
||||
const intervals = getInterval('h', 1601856270140);
|
||||
|
||||
const allAre2MinApart = Object.keys(intervals).every((int) => {
|
||||
const interval1 = intervals[int];
|
||||
const interval2 = intervals[`${Number(int) + 2}`];
|
||||
if (interval1 != null && interval2 != null) {
|
||||
const date1 = moment(Number(interval1.timestamp));
|
||||
const date2 = moment(Number(interval2.timestamp));
|
||||
// This'll be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
return diff === 120000;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
expect(allAre2MinApart).toBeTruthy();
|
||||
});
|
||||
|
||||
test('returns object with 1 hour interval timestamps if range is "d"', () => {
|
||||
const intervals = getInterval('d', 1601856270140);
|
||||
|
||||
const allAre1HourApart = Object.keys(intervals).every((int) => {
|
||||
const interval1 = intervals[int];
|
||||
const interval2 = intervals[`${Number(int) + 1}`];
|
||||
if (interval1 != null && interval2 != null) {
|
||||
const date1 = moment(Number(interval1.timestamp));
|
||||
const date2 = moment(Number(interval2.timestamp));
|
||||
// This'll be in ms
|
||||
const diff = date1.diff(date2);
|
||||
|
||||
return diff === 3600000;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
expect(allAre1HourApart).toBeTruthy();
|
||||
});
|
||||
|
||||
test('returns error if range is anything other than "h" or "d"', () => {
|
||||
expect(() => getInterval('m', 1601856270140)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatInspect', () => {
|
||||
test('it should return "dsl" with response params and index info', () => {
|
||||
const { dsl } = formatInspect(getMockEqlResponse(), ['foo-*']);
|
||||
|
||||
expect(JSON.parse(dsl[0])).toEqual({
|
||||
body: {
|
||||
filter: {
|
||||
range: {
|
||||
'@timestamp': {
|
||||
format: 'strict_date_optional_time',
|
||||
gte: '2020-10-07T00:46:12.414Z',
|
||||
lte: '2020-10-07T01:46:12.414Z',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
index: ['foo-*'],
|
||||
method: 'GET',
|
||||
path: '/_eql/search/',
|
||||
querystring: 'some query string',
|
||||
});
|
||||
});
|
||||
|
||||
test('it should return "response"', () => {
|
||||
const mockResponse = getMockEqlResponse();
|
||||
const { response } = formatInspect(mockResponse, ['foo-*']);
|
||||
|
||||
expect(JSON.parse(response[0])).toEqual(mockResponse.rawResponse.body);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEventsToBucket', () => {
|
||||
test('returns events for non-sequence queries', () => {
|
||||
const events = getEventsToBucket(false, getMockEqlResponse());
|
||||
|
||||
expect(events).toEqual([
|
||||
{ _id: '1', _index: 'index', _source: { '@timestamp': '2020-10-04T15:16:54.368707900Z' } },
|
||||
{ _id: '2', _index: 'index', _source: { '@timestamp': '2020-10-04T15:50:54.368707900Z' } },
|
||||
{ _id: '3', _index: 'index', _source: { '@timestamp': '2020-10-04T15:06:54.368707900Z' } },
|
||||
{ _id: '4', _index: 'index', _source: { '@timestamp': '2020-10-04T15:15:54.368707900Z' } },
|
||||
]);
|
||||
});
|
||||
|
||||
test('returns empty array if no hits', () => {
|
||||
const resp = getMockEqlResponse();
|
||||
const mockResponse: EqlSearchStrategyResponse<EqlSearchResponse<Source>> = {
|
||||
...resp,
|
||||
rawResponse: {
|
||||
...resp.rawResponse,
|
||||
body: {
|
||||
...resp.rawResponse.body,
|
||||
hits: {
|
||||
total: {
|
||||
value: 0,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
const events = getEventsToBucket(false, mockResponse);
|
||||
|
||||
expect(events).toEqual([]);
|
||||
});
|
||||
|
||||
test('returns events for sequence queries', () => {
|
||||
const events = getEventsToBucket(true, getMockEqlSequenceResponse());
|
||||
|
||||
expect(events).toEqual([
|
||||
{ _id: '2', _index: 'index', _source: { '@timestamp': '2020-10-04T15:50:54.368707900Z' } },
|
||||
{ _id: '4', _index: 'index', _source: { '@timestamp': '2020-10-04T15:15:54.368707900Z' } },
|
||||
]);
|
||||
});
|
||||
|
||||
test('returns empty array if no sequences', () => {
|
||||
const resp = getMockEqlSequenceResponse();
|
||||
const mockResponse: EqlSearchStrategyResponse<EqlSearchResponse<Source>> = {
|
||||
...resp,
|
||||
rawResponse: {
|
||||
...resp.rawResponse,
|
||||
body: {
|
||||
...resp.rawResponse.body,
|
||||
hits: {
|
||||
total: {
|
||||
value: 0,
|
||||
relation: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
const events = getEventsToBucket(true, mockResponse);
|
||||
|
||||
expect(events).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,183 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import moment from 'moment';
|
||||
import type { Unit } from '@kbn/datemath';
|
||||
import type { EqlSearchStrategyResponse } from '@kbn/data-plugin/common';
|
||||
import type { inputsModel } from '../../store';
|
||||
|
||||
import type { InspectResponse } from '../../../types';
|
||||
import type { EqlPreviewResponse, Source } from './types';
|
||||
import type { BaseHit, EqlSearchResponse } from '../../../../common/detection_engine/types';
|
||||
|
||||
type EqlAggBuckets = Record<string, { timestamp: string; total: number }>;
|
||||
|
||||
/**
|
||||
* Calculates which 2 min bucket segment, event should be sorted into
|
||||
* @param eventTimestamp The event to be bucketed timestamp
|
||||
* @param relativeNow The timestamp we are using to calculate how far from 'now' event occurred
|
||||
*/
|
||||
export const calculateBucketForHour = (eventTimestamp: number, relativeNow: number): number => {
|
||||
const diff = Math.abs(relativeNow - eventTimestamp);
|
||||
const minutes = Math.floor(diff / 60000);
|
||||
return Math.ceil(minutes / 2) * 2;
|
||||
};
|
||||
|
||||
/**
|
||||
* Calculates which 1 hour bucket segment, event should be sorted into
|
||||
* @param eventTimestamp The event to be bucketed timestamp
|
||||
* @param relativeNow The timestamp we are using to calculate how far from 'now' event occurred
|
||||
*/
|
||||
export const calculateBucketForDay = (eventTimestamp: number, relativeNow: number): number => {
|
||||
const diff = Math.abs(relativeNow - eventTimestamp);
|
||||
const minutes = Math.floor(diff / 60000);
|
||||
return Math.ceil(minutes / 60);
|
||||
};
|
||||
|
||||
/**
|
||||
* Formats the response for the UI inspect modal
|
||||
* @param response The query search response
|
||||
* @param indices The indices the query searched
|
||||
* TODO: Update eql search strategy to return index in it's meta
|
||||
* params info, currently not being returned, but expected for
|
||||
* inspect modal display
|
||||
*/
|
||||
export const formatInspect = (
|
||||
response: EqlSearchStrategyResponse<EqlSearchResponse<Source>>,
|
||||
indices: string[]
|
||||
): InspectResponse => {
|
||||
const body = response.rawResponse.meta.request.params.body;
|
||||
const bodyParse: Record<string, unknown> | undefined =
|
||||
typeof body === 'string' ? JSON.parse(body) : body;
|
||||
return {
|
||||
dsl: [
|
||||
JSON.stringify(
|
||||
{ ...response.rawResponse.meta.request.params, index: indices, body: bodyParse },
|
||||
null,
|
||||
2
|
||||
),
|
||||
],
|
||||
response: [JSON.stringify(response.rawResponse.body, null, 2)],
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the events out of the response based on type of query
|
||||
* @param isSequence Is the eql query a sequence query
|
||||
* @param response The query search response
|
||||
*/
|
||||
export const getEventsToBucket = (
|
||||
isSequence: boolean,
|
||||
response: EqlSearchStrategyResponse<EqlSearchResponse<Source>>
|
||||
): Array<BaseHit<Source>> => {
|
||||
const hits = response.rawResponse.body.hits ?? [];
|
||||
if (isSequence) {
|
||||
return (
|
||||
hits.sequences?.map((seq) => {
|
||||
return seq.events[seq.events.length - 1];
|
||||
}) ?? []
|
||||
);
|
||||
} else {
|
||||
return hits.events ?? [];
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Eql does not support aggregations, this is an in-memory
 * hand-spun aggregation for the events to give the user a visual
 * representation of their query results
 * @param response The query search response
 * @param range User chosen timeframe (last hour, day)
 * @param to Based on range chosen
 * @param refetch Callback used in inspect button, ref just passed through
 * @param indices Indices searched by query
 * @param isSequence Is the eql query a sequence query
 * @returns chart points, total hit count, and inspect strings for the preview UI
 */
export const getEqlAggsData = (
  response: EqlSearchStrategyResponse<EqlSearchResponse<Source>>,
  range: Unit,
  to: string,
  refetch: inputsModel.Refetch,
  indices: string[],
  isSequence: boolean
): EqlPreviewResponse => {
  const { dsl, response: inspectResponse } = formatInspect(response, indices);
  // 'to' is the anchor ("now") every bucket offset is measured from.
  const relativeNow = Date.parse(to);
  // Pre-seeded buckets (totals all 0) keyed by minute/hour offset from relativeNow.
  const accumulator = getInterval(range, relativeNow);
  const events = getEventsToBucket(isSequence, response);
  const totalCount = response.rawResponse.body.hits.total.value;

  const buckets = events.reduce<EqlAggBuckets>((acc, hit) => {
    const timestamp = hit._source['@timestamp'];
    // Events without a timestamp cannot be placed in a bucket; skip them.
    if (timestamp == null) {
      return acc;
    }
    const eventDate = new Date(timestamp).toISOString();
    const eventTimestamp = Date.parse(eventDate);
    const bucket =
      range === 'h'
        ? calculateBucketForHour(eventTimestamp, relativeNow)
        : calculateBucketForDay(eventTimestamp, relativeNow);
    // Only count events that land in a pre-seeded bucket; anything falling
    // outside the selected window is ignored.
    if (acc[bucket] != null) {
      acc[bucket].total += 1;
    }
    return acc;
  }, accumulator);
  // Shape buckets into chart points: x = bucket timestamp (ms), y = hit count.
  const data = Object.keys(buckets).map((key) => {
    return { x: Number(buckets[key].timestamp), y: buckets[key].total, g: 'hits' };
  });

  // If every bucket is empty, none of the returned hits fell inside the
  // requested window, so report the preview total as zero.
  const isAllZeros = data.every(({ y }) => y === 0);

  return {
    data,
    totalCount: isAllZeros ? 0 : totalCount,
    inspect: {
      dsl,
      response: inspectResponse,
    },
    refetch,
  };
};
|
||||
|
||||
/**
|
||||
* Helper method to create an array to be used for calculating bucket intervals
|
||||
* @param start
|
||||
* @param end
|
||||
* @param multiplier
|
||||
*/
|
||||
export const createIntervalArray = (start: number, end: number, multiplier: number): number[] => {
|
||||
return Array(end - start + 1)
|
||||
.fill(0)
|
||||
.map((_, idx) => start + idx * multiplier);
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper method to create an array to be used for calculating bucket intervals
|
||||
* @param range User chosen timeframe (last hour, day)
|
||||
* @param relativeNow Based on range chosen
|
||||
*/
|
||||
export const getInterval = (range: Unit, relativeNow: number): EqlAggBuckets => {
|
||||
switch (range) {
|
||||
case 'h':
|
||||
return createIntervalArray(0, 30, 2).reduce((acc, int) => {
|
||||
return {
|
||||
...acc,
|
||||
[int]: { timestamp: moment(relativeNow).subtract(int, 'm').format('x'), total: 0 },
|
||||
};
|
||||
}, {});
|
||||
case 'd':
|
||||
return createIntervalArray(0, 24, 1).reduce((acc, int) => {
|
||||
return {
|
||||
...acc,
|
||||
[int]: { timestamp: moment(relativeNow).subtract(int, 'h').format('x'), total: 0 },
|
||||
};
|
||||
}, {});
|
||||
default:
|
||||
throw new RangeError('Invalid time range selected. Must be "Last hour" or "Last day".');
|
||||
}
|
||||
};
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
export { useEqlPreview } from './use_eql_preview';
|
|
@ -1,31 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { Unit } from '@kbn/datemath';
|
||||
|
||||
import type { InspectResponse } from '../../../types';
|
||||
import type { ChartData } from '../../components/charts/common';
|
||||
import type { inputsModel } from '../../store';
|
||||
|
||||
/** Parameters used to run an EQL query preview. */
export interface EqlPreviewRequest {
  /** End of the search window (date string). */
  to: string;
  /** Start of the search window (date string). */
  from: string;
  /** Timeframe unit chosen by the user (e.g. 'h' or 'd'). */
  interval: Unit;
  /** The EQL query text. */
  query: string;
  /** Index patterns to search. */
  index: string[];
}

/** Aggregated preview results plus inspect/refetch plumbing for the UI. */
export interface EqlPreviewResponse {
  /** Bucketed hit counts for charting. */
  data: ChartData[];
  /** Total number of matching events. */
  totalCount: number;
  /** Request/response strings for the inspect modal. */
  inspect: InspectResponse;
  /** Callback to re-run the search. */
  refetch: inputsModel.Refetch;
}

/** Minimal `_source` shape required from EQL hits for bucketing. */
export interface Source {
  '@timestamp': string | number;
}
|
|
@ -1,203 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { Unit } from '@kbn/datemath';
|
||||
import { renderHook, act } from '@testing-library/react-hooks';
|
||||
import { of, throwError } from 'rxjs';
|
||||
import { delay } from 'rxjs/operators';
|
||||
|
||||
import * as i18n from '../translations';
|
||||
import type { EqlSearchStrategyResponse } from '@kbn/data-plugin/common';
|
||||
import type { Source } from './types';
|
||||
import type { EqlSearchResponse } from '../../../../common/detection_engine/types';
|
||||
import { useKibana } from '../../lib/kibana';
|
||||
import { useEqlPreview } from '.';
|
||||
import { getMockEqlResponse } from './eql_search_response.mock';
|
||||
import { useAppToasts } from '../use_app_toasts';
|
||||
|
||||
jest.mock('../../lib/kibana');
|
||||
jest.mock('../use_app_toasts');
|
||||
|
||||
describe('useEqlPreview', () => {
|
||||
const params = {
|
||||
to: '2020-10-04T16:00:54.368707900Z',
|
||||
query: 'file where true',
|
||||
index: ['foo-*', 'bar-*'],
|
||||
interval: 'h' as Unit,
|
||||
from: '2020-10-04T15:00:54.368707900Z',
|
||||
};
|
||||
|
||||
let addErrorMock: jest.Mock;
|
||||
let addSuccessMock: jest.Mock;
|
||||
let addWarningMock: jest.Mock;
|
||||
|
||||
beforeEach(() => {
|
||||
addErrorMock = jest.fn();
|
||||
addSuccessMock = jest.fn();
|
||||
addWarningMock = jest.fn();
|
||||
(useAppToasts as jest.Mock).mockImplementation(() => ({
|
||||
addError: addErrorMock,
|
||||
addWarning: addWarningMock,
|
||||
addSuccess: addSuccessMock,
|
||||
}));
|
||||
|
||||
(useKibana().services.data.search.search as jest.Mock).mockReturnValue(
|
||||
of(getMockEqlResponse())
|
||||
);
|
||||
});
|
||||
|
||||
it('should initiate hook', async () => {
|
||||
await act(async () => {
|
||||
const { result, waitForNextUpdate } = renderHook(() => useEqlPreview());
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(result.current[0]).toBeFalsy();
|
||||
expect(typeof result.current[1]).toEqual('function');
|
||||
expect(result.current[2]).toEqual({
|
||||
data: [],
|
||||
inspect: { dsl: [], response: [] },
|
||||
refetch: result.current[2].refetch,
|
||||
totalCount: 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should invoke search with passed in params', async () => {
|
||||
await act(async () => {
|
||||
const { result, waitForNextUpdate } = renderHook(() => useEqlPreview());
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
result.current[1](params);
|
||||
|
||||
const mockCalls = (useKibana().services.data.search.search as jest.Mock).mock.calls;
|
||||
|
||||
expect(mockCalls.length).toEqual(1);
|
||||
expect(mockCalls[0][0].params.body.query).toEqual('file where true');
|
||||
expect(mockCalls[0][0].params.body.filter).toEqual({
|
||||
range: {
|
||||
'@timestamp': {
|
||||
format: 'strict_date_optional_time',
|
||||
gte: '2020-10-04T15:00:54.368707900Z',
|
||||
lte: '2020-10-04T16:00:54.368707900Z',
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockCalls[0][0].params.index).toBe('foo-*,bar-*');
|
||||
});
|
||||
});
|
||||
|
||||
it('should resolve values after search is invoked', async () => {
|
||||
await act(async () => {
|
||||
const { result, waitForNextUpdate } = renderHook(() => useEqlPreview());
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
result.current[1](params);
|
||||
|
||||
expect(result.current[0]).toBeFalsy();
|
||||
expect(typeof result.current[1]).toEqual('function');
|
||||
expect(result.current[2].totalCount).toEqual(4);
|
||||
expect(result.current[2].data.length).toBeGreaterThan(0);
|
||||
expect(result.current[2].inspect.dsl.length).toBeGreaterThan(0);
|
||||
expect(result.current[2].inspect.response.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
it('should not resolve values after search is invoked if component unmounted', async () => {
|
||||
await act(async () => {
|
||||
(useKibana().services.data.search.search as jest.Mock).mockReturnValue(
|
||||
of(getMockEqlResponse()).pipe(delay(5000))
|
||||
);
|
||||
const { result, waitForNextUpdate, unmount } = renderHook(() => useEqlPreview());
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
result.current[1](params);
|
||||
|
||||
unmount();
|
||||
|
||||
expect(result.current[0]).toBeTruthy();
|
||||
expect(result.current[2].totalCount).toEqual(0);
|
||||
expect(result.current[2].data.length).toEqual(0);
|
||||
expect(result.current[2].inspect.dsl.length).toEqual(0);
|
||||
expect(result.current[2].inspect.response.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
it('should not resolve new values on search if response is error response', async () => {
|
||||
await act(async () => {
|
||||
(useKibana().services.data.search.search as jest.Mock).mockReturnValue(
|
||||
of<EqlSearchStrategyResponse<EqlSearchResponse<Source>>>({
|
||||
...getMockEqlResponse(),
|
||||
isRunning: false,
|
||||
isPartial: true,
|
||||
})
|
||||
);
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() => useEqlPreview());
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
result.current[1](params);
|
||||
|
||||
expect(result.current[0]).toBeFalsy();
|
||||
expect(addWarningMock.mock.calls[0][0]).toEqual(i18n.EQL_PREVIEW_FETCH_FAILURE);
|
||||
});
|
||||
});
|
||||
|
||||
// TODO: Determine why eql search strategy returns null for meta.params.body
|
||||
// in complete responses, but not in partial responses
|
||||
it('should update inspect information on partial response', async () => {
|
||||
const mockResponse = getMockEqlResponse();
|
||||
await act(async () => {
|
||||
(useKibana().services.data.search.search as jest.Mock).mockReturnValue(
|
||||
of<EqlSearchStrategyResponse<EqlSearchResponse<Source>>>({
|
||||
isRunning: true,
|
||||
isPartial: true,
|
||||
rawResponse: mockResponse.rawResponse,
|
||||
})
|
||||
);
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() => useEqlPreview());
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
result.current[1](params);
|
||||
|
||||
expect(result.current[2].inspect.dsl.length).toEqual(1);
|
||||
expect(result.current[2].inspect.response.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should add error toast if search throws', async () => {
|
||||
await act(async () => {
|
||||
(useKibana().services.data.search.search as jest.Mock).mockReturnValue(
|
||||
throwError('This is an error!')
|
||||
);
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() => useEqlPreview());
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
result.current[1](params);
|
||||
|
||||
expect(result.current[0]).toBeFalsy();
|
||||
expect(addErrorMock.mock.calls[0][0]).toEqual('This is an error!');
|
||||
});
|
||||
});
|
||||
|
||||
it('returns a memoized value', async () => {
|
||||
const { result, rerender } = renderHook(() => useEqlPreview());
|
||||
|
||||
const result1 = result.current[1];
|
||||
act(() => rerender());
|
||||
const result2 = result.current[1];
|
||||
|
||||
expect(result1).toBe(result2);
|
||||
});
|
||||
});
|
|
@ -1,183 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { noop } from 'lodash/fp';
|
||||
import { Subject } from 'rxjs';
|
||||
import { takeUntil } from 'rxjs/operators';
|
||||
|
||||
import { parseScheduleDates } from '@kbn/securitysolution-io-ts-utils';
|
||||
import type { EqlSearchStrategyRequest, EqlSearchStrategyResponse } from '@kbn/data-plugin/common';
|
||||
import {
|
||||
isCompleteResponse,
|
||||
isErrorResponse,
|
||||
isPartialResponse,
|
||||
EQL_SEARCH_STRATEGY,
|
||||
} from '@kbn/data-plugin/common';
|
||||
import { AbortError } from '@kbn/kibana-utils-plugin/common';
|
||||
import * as i18n from '../translations';
|
||||
import { useKibana } from '../../lib/kibana';
|
||||
import { formatInspect, getEqlAggsData } from './helpers';
|
||||
import type { EqlPreviewResponse, EqlPreviewRequest, Source } from './types';
|
||||
import { hasEqlSequenceQuery } from '../../../../common/detection_engine/utils';
|
||||
import type { EqlSearchResponse } from '../../../../common/detection_engine/types';
|
||||
import type { inputsModel } from '../../store';
|
||||
import { useAppToasts } from '../use_app_toasts';
|
||||
|
||||
/**
 * React hook that previews an EQL query via the EQL search strategy and
 * aggregates the hits into chart buckets.
 * Returns a tuple of [loading flag, search trigger, latest preview response].
 */
export const useEqlPreview = (): [
  boolean,
  (arg: EqlPreviewRequest) => void,
  EqlPreviewResponse
] => {
  const { data } = useKibana().services;
  // Refetch ref is handed to consumers (inspect button) and repointed at the
  // latest asyncSearch closure on every search.
  const refetch = useRef<inputsModel.Refetch>(noop);
  // Abort controller for the in-flight request; replaced on each new search.
  const abortCtrl = useRef(new AbortController());
  // Subject used with takeUntil to tear down the search observable.
  const unsubscribeStream = useRef(new Subject<void>());
  const [loading, setLoading] = useState(false);
  // Flipped on unmount so late responses don't update state.
  const didCancel = useRef(false);
  const { addError, addWarning } = useAppToasts();

  const [response, setResponse] = useState<EqlPreviewResponse>({
    data: [],
    inspect: {
      dsl: [],
      response: [],
    },
    refetch: refetch.current,
    totalCount: 0,
  });

  const searchEql = useCallback(
    ({ from, to, query, index, interval }: EqlPreviewRequest) => {
      // Bail out early when the time window cannot be parsed.
      if (parseScheduleDates(to) == null || parseScheduleDates(from) == null) {
        addWarning(i18n.EQL_TIME_INTERVAL_NOT_DEFINED);
        return;
      }

      const asyncSearch = async () => {
        abortCtrl.current = new AbortController();
        setLoading(true);
        // Reset previous results (but keep refetch) before kicking off the search.
        setResponse((prevResponse) => ({
          ...prevResponse,
          data: [],
          inspect: {
            dsl: [],
            response: [],
          },
          totalCount: 0,
        }));

        data.search
          .search<EqlSearchStrategyRequest, EqlSearchStrategyResponse<EqlSearchResponse<Source>>>(
            {
              params: {
                index: index.join(),
                body: {
                  filter: {
                    range: {
                      '@timestamp': {
                        gte: from,
                        lte: to,
                        format: 'strict_date_optional_time',
                      },
                    },
                  },
                  query,
                  // EQL requires a cap, otherwise it defaults to 10
                  // It also sorts on ascending order, capping it at
                  // something smaller like 20, made it so that some of
                  // the more recent events weren't returned
                  size: 100,
                },
              },
            },
            {
              strategy: EQL_SEARCH_STRATEGY,
              abortSignal: abortCtrl.current.signal,
            }
          )
          .pipe(takeUntil(unsubscribeStream.current))
          .subscribe({
            next: (res) => {
              if (isCompleteResponse(res)) {
                if (!didCancel.current) {
                  setLoading(false);

                  setResponse((prev) => {
                    const { inspect, ...rest } = getEqlAggsData(
                      res,
                      interval,
                      to,
                      refetch.current,
                      index,
                      hasEqlSequenceQuery(query)
                    );
                    // Prefer inspect info captured from the partial response
                    // (see TODO below); fall back to the complete response's.
                    const inspectDsl = prev.inspect.dsl[0] ? prev.inspect.dsl : inspect.dsl;
                    const inspectResp = prev.inspect.response[0]
                      ? prev.inspect.response
                      : inspect.response;

                    return {
                      ...prev,
                      ...rest,
                      inspect: {
                        dsl: inspectDsl,
                        response: inspectResp,
                      },
                    };
                  });
                }

                unsubscribeStream.current.next();
              } else if (isPartialResponse(res)) {
                // TODO: Eql search strategy partial responses return a value under meta.params.body
                // but the final/complete response does not, that's why the inspect values are set here
                setResponse((prev) => ({ ...prev, inspect: formatInspect(res, index) }));
              } else if (isErrorResponse(res)) {
                setLoading(false);
                addWarning(i18n.EQL_PREVIEW_FETCH_FAILURE);
                unsubscribeStream.current.next();
              }
            },
            error: (err) => {
              // Aborts are expected (new search or unmount); only surface real errors.
              if (!(err instanceof AbortError)) {
                setLoading(false);
                setResponse({
                  data: [],
                  inspect: {
                    dsl: [],
                    response: [],
                  },
                  refetch: refetch.current,
                  totalCount: 0,
                });
                addError(err, {
                  title: i18n.EQL_PREVIEW_FETCH_FAILURE,
                });
              }
            },
          });
      };

      // Cancel any in-flight search before starting the new one.
      abortCtrl.current.abort();
      asyncSearch();
      refetch.current = asyncSearch;
    },
    [data.search, addError, addWarning]
  );

  useEffect((): (() => void) => {
    // Cleanup on unmount: block late state updates, abort the request,
    // and complete the teardown subject.
    return (): void => {
      didCancel.current = true;
      abortCtrl.current.abort();
      // eslint-disable-next-line react-hooks/exhaustive-deps
      unsubscribeStream.current.complete();
    };
  }, []);

  return [loading, searchEql, response];
};
|
|
@ -67,7 +67,7 @@ import { AlertCountByRuleByStatus } from '../../../../common/components/alert_co
|
|||
import { useLicense } from '../../../../common/hooks/use_license';
|
||||
import { ResponderActionButton } from '../../../../detections/components/endpoint_responder/responder_action_button';
|
||||
|
||||
const ES_HOST_FIELD = 'host.hostname';
|
||||
const ES_HOST_FIELD = 'host.name';
|
||||
const HostOverviewManage = manageQuery(HostOverview);
|
||||
|
||||
const HostDetailsComponent: React.FC<HostDetailsProps> = ({ detailName, hostDetailsPagePath }) => {
|
||||
|
|
|
@ -6,12 +6,20 @@
|
|||
*/
|
||||
|
||||
import type { RuleAlertsItem, SeverityRuleAlertsAggsResponse } from './use_rule_alerts_items';
|
||||
import {
|
||||
KIBANA_ALERT_SEVERITY,
|
||||
KIBANA_RULE_ID,
|
||||
KIBANA_RULE_NAME,
|
||||
TIMESTAMP,
|
||||
} from './use_rule_alerts_items';
|
||||
|
||||
export const from = '2022-04-05T12:00:00.000Z';
|
||||
export const to = '2022-04-08T12:00:00.000Z';
|
||||
|
||||
export const severityRuleAlertsQuery = {
|
||||
size: 0,
|
||||
_source: false,
|
||||
fields: [KIBANA_RULE_NAME, KIBANA_RULE_ID, KIBANA_ALERT_SEVERITY, TIMESTAMP],
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
|
@ -62,11 +70,11 @@ export const mockSeverityRuleAlertsResponse: { aggregations: SeverityRuleAlertsA
|
|||
},
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.name': 'RULE_1',
|
||||
'kibana.alert.rule.uuid': '79ec0270-b4c5-11ec-970e-8f7c5a7144f7',
|
||||
'@timestamp': '2022-04-05T15:58:35.079Z',
|
||||
'kibana.alert.severity': 'critical',
|
||||
fields: {
|
||||
'kibana.alert.rule.name': ['RULE_1'],
|
||||
'kibana.alert.rule.uuid': ['79ec0270-b4c5-11ec-970e-8f7c5a7144f7'],
|
||||
'@timestamp': ['2022-04-05T15:58:35.079Z'],
|
||||
'kibana.alert.severity': ['critical'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -83,11 +91,11 @@ export const mockSeverityRuleAlertsResponse: { aggregations: SeverityRuleAlertsA
|
|||
},
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.uuid': '955c79d0-b403-11ec-b5a7-6dc1ed01bdd7',
|
||||
'kibana.alert.rule.name': 'RULE_2',
|
||||
'@timestamp': '2022-04-05T15:58:47.164Z',
|
||||
'kibana.alert.severity': 'high',
|
||||
fields: {
|
||||
'kibana.alert.rule.uuid': ['955c79d0-b403-11ec-b5a7-6dc1ed01bdd7'],
|
||||
'kibana.alert.rule.name': ['RULE_2'],
|
||||
'@timestamp': ['2022-04-05T15:58:47.164Z'],
|
||||
'kibana.alert.severity': ['high'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -104,11 +112,11 @@ export const mockSeverityRuleAlertsResponse: { aggregations: SeverityRuleAlertsA
|
|||
},
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'kibana.alert.rule.name': 'RULE_3',
|
||||
'kibana.alert.rule.uuid': '13bc7bc0-b1d6-11ec-a799-67811b37527a',
|
||||
'@timestamp': '2022-04-05T15:56:16.606Z',
|
||||
'kibana.alert.severity': 'low',
|
||||
fields: {
|
||||
'kibana.alert.rule.name': ['RULE_3'],
|
||||
'kibana.alert.rule.uuid': ['13bc7bc0-b1d6-11ec-a799-67811b37527a'],
|
||||
'@timestamp': ['2022-04-05T15:56:16.606Z'],
|
||||
'kibana.alert.severity': ['low'],
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
@ -12,6 +12,7 @@ import { useQueryAlerts } from '../../../../detections/containers/detection_engi
|
|||
import { ALERTS_QUERY_NAMES } from '../../../../detections/containers/detection_engine/alerts/constants';
|
||||
import { useQueryInspector } from '../../../../common/components/page/manage_query';
|
||||
import type { ESBoolQuery } from '../../../../../common/typed_json';
|
||||
import { firstNonNullValue } from '../../../../../common/endpoint/models/ecs_safety_helpers';
|
||||
|
||||
// Formatted item result
|
||||
export interface RuleAlertsItem {
|
||||
|
@ -35,11 +36,11 @@ export interface SeverityRuleAlertsAggsResponse {
|
|||
};
|
||||
hits: [
|
||||
{
|
||||
_source: {
|
||||
'@timestamp': string;
|
||||
'kibana.alert.rule.name': string;
|
||||
'kibana.alert.rule.uuid': string;
|
||||
'kibana.alert.severity': Severity;
|
||||
fields: {
|
||||
'@timestamp': string[];
|
||||
'kibana.alert.rule.name': string[];
|
||||
'kibana.alert.rule.uuid': string[];
|
||||
'kibana.alert.severity': Severity[];
|
||||
};
|
||||
}
|
||||
];
|
||||
|
@ -48,6 +49,10 @@ export interface SeverityRuleAlertsAggsResponse {
|
|||
}>;
|
||||
};
|
||||
}
|
||||
export const KIBANA_RULE_NAME = 'kibana.alert.rule.name';
|
||||
export const KIBANA_RULE_ID = 'kibana.alert.rule.uuid';
|
||||
export const KIBANA_ALERT_SEVERITY = 'kibana.alert.severity';
|
||||
export const TIMESTAMP = '@timestamp';
|
||||
|
||||
const getSeverityRuleAlertsQuery = ({
|
||||
from,
|
||||
|
@ -58,6 +63,8 @@ const getSeverityRuleAlertsQuery = ({
|
|||
to: string;
|
||||
filterQuery?: ESBoolQuery;
|
||||
}) => ({
|
||||
_source: false,
|
||||
fields: [KIBANA_RULE_NAME, KIBANA_RULE_ID, KIBANA_ALERT_SEVERITY, TIMESTAMP],
|
||||
size: 0,
|
||||
query: {
|
||||
bool: {
|
||||
|
@ -101,14 +108,13 @@ const getRuleAlertsItemsFromAggs = (
|
|||
): RuleAlertsItem[] => {
|
||||
const buckets = aggregations?.alertsByRule.buckets ?? [];
|
||||
return buckets.map<RuleAlertsItem>((bucket) => {
|
||||
const lastAlert = bucket.lastRuleAlert.hits.hits[0]._source;
|
||||
|
||||
const lastAlert = bucket.lastRuleAlert.hits.hits[0].fields;
|
||||
return {
|
||||
id: lastAlert['kibana.alert.rule.uuid'],
|
||||
id: firstNonNullValue(lastAlert[KIBANA_RULE_ID]) ?? '',
|
||||
alert_count: bucket.lastRuleAlert.hits.total.value,
|
||||
name: lastAlert['kibana.alert.rule.name'],
|
||||
last_alert_at: lastAlert['@timestamp'],
|
||||
severity: lastAlert['kibana.alert.severity'],
|
||||
name: firstNonNullValue(lastAlert[KIBANA_RULE_NAME]) ?? '',
|
||||
last_alert_at: firstNonNullValue(lastAlert[TIMESTAMP]) ?? '',
|
||||
severity: firstNonNullValue(lastAlert[KIBANA_ALERT_SEVERITY]) ?? 'low',
|
||||
};
|
||||
});
|
||||
};
|
||||
|
|
|
@ -59,9 +59,8 @@ export const getAggregatedAnomaliesQuery = ({
|
|||
aggs: {
|
||||
entity: {
|
||||
top_hits: {
|
||||
_source: {
|
||||
includes: ['host.name', 'user.name'],
|
||||
},
|
||||
_source: false,
|
||||
fields: ['host.name', 'user.name'],
|
||||
size: 1,
|
||||
},
|
||||
},
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue