Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 01:13:23 -04:00)
Exclude muted alerts from alert summaries (#147664)
Resolves https://github.com/elastic/kibana/issues/147531. In this PR, I'm making the alert summary actions exclude muted alerts.

## To verify

**Scenario 1 (summary per rule run)**

1. Install the sample web logs by visiting `/app/home#/tutorial_directory/sampleData`, clicking `Other sample data sets`, and clicking `Add data` for `Sample web logs`.
2. Add the `kibana_sample_data_logs` index pattern to the Observability settings by visiting `/app/metrics/explorer`, clicking `Settings` on the top right, appending `,kibana_sample_data_logs` (with a leading comma) to the `Metrics indices` field, and clicking `Apply`.
3. Create a metric threshold rule that generates multiple alerts by using the following curl command (adjust the URL and credentials to your environment):
   ```
   curl -XPOST -H "Content-type: application/json" -H "kbn-xsrf: foo" -d '{"params":{"criteria":[{"metric":"bytes","comparator":">","threshold":[0],"timeSize":1,"timeUnit":"h","aggType":"avg"}],"sourceId":"default","alertOnNoData":true,"alertOnGroupDisappear":true,"groupBy":["agent.keyword"]},"consumer":"infrastructure","schedule":{"interval":"10s"},"tags":[],"name":"test","rule_type_id":"metrics.alert.threshold","actions":[{"frequency":{"summary":true,"notify_when":"onActiveAlert"},"group":"metrics.threshold.fired","id":"preconfigured-server-log","params":{"level":"info","message":"Found {{alerts.all.count}} alerts. {{alerts.new.count}} new, {{alerts.ongoing.count}} ongoing, {{alerts.recovered.count}} recovered."}}]}' 'http://elastic:changeme@localhost:5601/api/alerting/rule'
   ```
4. Observe 3 alerts in the summary (new, then ongoing).
5. Mute one of the alerts from the alerts tab of the rule details page.
6. Observe that only 2 alerts are now in the summary.

**Scenario 2 (summary over a time span)**

Same steps as above, except in step 3 use the following curl command (summaries will be generated every 30s):
```
curl -XPOST -H "Content-type: application/json" -H "kbn-xsrf: foo" -d '{"params":{"criteria":[{"metric":"bytes","comparator":">","threshold":[0],"timeSize":1,"timeUnit":"h","aggType":"avg"}],"sourceId":"default","alertOnNoData":true,"alertOnGroupDisappear":true,"groupBy":["agent.keyword"]},"consumer":"infrastructure","schedule":{"interval":"10s"},"tags":[],"name":"test","rule_type_id":"metrics.alert.threshold","actions":[{"frequency":{"summary":true,"notify_when":"onThrottleInterval","throttle":"30s"},"group":"metrics.threshold.fired","id":"preconfigured-server-log","params":{"level":"info","message":"Found {{alerts.all.count}} alerts. {{alerts.new.count}} new, {{alerts.ongoing.count}} ongoing, {{alerts.recovered.count}} recovered."}}]}' 'http://elastic:changeme@localhost:5601/api/alerting/rule'
```
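For context on what changes under the hood: the summarized-alerts queries now receive the rule's muted alert instance IDs (`rule.mutedInstanceIds`, passed through as `excludedAlertInstanceIds`) and drop those alerts with a `must_not` clause. A minimal sketch of that clause, using the same `ALERT_INSTANCE_ID` field constant the diff below imports (`buildMutedAlertsFilter` is a hypothetical name for illustration, not a function added by this PR):

```ts
import { ALERT_INSTANCE_ID } from '@kbn/rule-data-utils';

// Sketch only: the shape of the extra filter this PR appends to the
// summarized-alerts queries. `excludedAlertInstanceIds` comes from the
// rule's mutedInstanceIds via the execution handler.
const buildMutedAlertsFilter = (excludedAlertInstanceIds: string[]) =>
  excludedAlertInstanceIds.length
    ? [
        {
          bool: {
            must_not: {
              terms: { [ALERT_INSTANCE_ID]: excludedAlertInstanceIds },
            },
          },
        },
      ]
    : [];
```

When the array is empty (no muted alerts), no clause is added and the query is unchanged.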
Parent f83c49b275, commit 794e721cc0.
7 changed files with 323 additions and 31 deletions.
```
@@ -846,6 +846,7 @@ describe('Execution Handler', () => {
generateExecutionParams({
rule: {
...defaultExecutionParams.rule,
mutedInstanceIds: ['foo'],
actions: [
{
id: '1',
@@ -872,6 +873,7 @@ describe('Execution Handler', () => {
executionUuid: '5f6aa57d-3e22-484e-bae8-cbed868f4d28',
ruleId: '1',
spaceId: 'test1',
excludedAlertInstanceIds: ['foo'],
});
expect(actionsClient.bulkEnqueueExecution).toHaveBeenCalledTimes(1);
expect(actionsClient.bulkEnqueueExecution.mock.calls[0]).toMatchInlineSnapshot(`
@@ -959,6 +961,7 @@ describe('Execution Handler', () => {
generateExecutionParams({
rule: {
...defaultExecutionParams.rule,
mutedInstanceIds: ['foo'],
actions: [
{
id: '1',
@@ -986,6 +989,7 @@ describe('Execution Handler', () => {
end: new Date(),
ruleId: '1',
spaceId: 'test1',
excludedAlertInstanceIds: ['foo'],
});
expect(result).toEqual({
throttledActions: {
```
```
@@ -201,7 +201,11 @@ export class ExecutionHandler<
if (isSummaryActionPerRuleRun(action) && !this.hasAlerts(alerts)) {
continue;
}
const summarizedAlerts = await this.getSummarizedAlerts({ action, spaceId, ruleId });
const summarizedAlerts = await this.getSummarizedAlerts({
action,
spaceId,
ruleId,
});
const actionToRun = {
...action,
params: injectActionParams({
@@ -525,12 +529,14 @@ export class ExecutionHandler<
end: new Date(),
ruleId,
spaceId,
excludedAlertInstanceIds: this.rule.mutedInstanceIds,
};
} else {
options = {
executionUuid: this.executionId,
ruleId,
spaceId,
excludedAlertInstanceIds: this.rule.mutedInstanceIds,
};
}

```
```
@@ -1370,6 +1370,7 @@ describe('Task Runner', () => {
executionUuid: '5f6aa57d-3e22-484e-bae8-cbed868f4d28',
ruleId: '1',
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(enqueueFunction).toHaveBeenCalledTimes(1);
expect(enqueueFunction).toHaveBeenCalledWith(
@@ -1445,6 +1446,7 @@ describe('Task Runner', () => {
end: new Date(DATE_1970),
ruleId: '1',
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(enqueueFunction).toHaveBeenCalledTimes(1);
expect(enqueueFunction).toHaveBeenCalledWith(
```
```
@@ -131,6 +131,7 @@ export interface GetSummarizedAlertsFnOpts {
executionUuid?: string;
ruleId: string;
spaceId: string;
excludedAlertInstanceIds: string[];
}

// TODO - add type for these alerts when we determine which alerts-as-data
```
```
@@ -43,7 +43,12 @@ describe('createGetSummarizedAlertsFn', () => {
isLifecycleAlert: false,
})();

await getSummarizedAlertsFn({ executionUuid: 'abc', ruleId: 'rule-id', spaceId: 'space-id' });
await getSummarizedAlertsFn({
executionUuid: 'abc',
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
});
expect(ruleDataClientMock.getReader).toHaveBeenCalledWith({ namespace: 'space-id' });
});

@@ -54,7 +59,12 @@ describe('createGetSummarizedAlertsFn', () => {
isLifecycleAlert: false,
})();

await getSummarizedAlertsFn({ executionUuid: 'abc', ruleId: 'rule-id', spaceId: 'space-id' });
await getSummarizedAlertsFn({
executionUuid: 'abc',
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
});
expect(ruleDataClientMock.getReader).toHaveBeenCalledWith();
});

@@ -156,6 +166,7 @@ describe('createGetSummarizedAlertsFn', () => {
executionUuid: 'abc',
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: ['TEST_ALERT_10'],
});
expect(ruleDataClientMock.getReader).toHaveBeenCalledWith();
expect(ruleDataClientMock.getReader().search).toHaveBeenCalledTimes(3);
@@ -181,6 +192,15 @@ describe('createGetSummarizedAlertsFn', () => {
[EVENT_ACTION]: 'open',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
],
},
},
@@ -208,6 +228,15 @@ describe('createGetSummarizedAlertsFn', () => {
[EVENT_ACTION]: 'active',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
],
},
},
@@ -235,6 +264,15 @@ describe('createGetSummarizedAlertsFn', () => {
[EVENT_ACTION]: 'close',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
],
},
},
@@ -405,6 +443,7 @@ describe('createGetSummarizedAlertsFn', () => {
end: new Date('2020-01-01T12:25:00.000Z'),
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: ['TEST_ALERT_10'],
});
expect(ruleDataClientMock.getReader).toHaveBeenCalledWith();
expect(ruleDataClientMock.getReader().search).toHaveBeenCalledTimes(3);
@@ -428,6 +467,15 @@ describe('createGetSummarizedAlertsFn', () => {
[ALERT_RULE_UUID]: 'rule-id',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
{
range: {
[ALERT_START]: {
@@ -460,6 +508,15 @@ describe('createGetSummarizedAlertsFn', () => {
[ALERT_RULE_UUID]: 'rule-id',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
{
range: {
[ALERT_START]: {
@@ -501,6 +558,15 @@ describe('createGetSummarizedAlertsFn', () => {
[ALERT_RULE_UUID]: 'rule-id',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
{
range: {
[ALERT_END]: {
@@ -655,6 +721,7 @@ describe('createGetSummarizedAlertsFn', () => {
executionUuid: 'abc',
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: ['TEST_ALERT_10'],
});
expect(ruleDataClientMock.getReader).toHaveBeenCalledWith({ namespace: 'space-id' });
expect(ruleDataClientMock.getReader().search).toHaveBeenCalledTimes(1);
@@ -675,6 +742,15 @@ describe('createGetSummarizedAlertsFn', () => {
[ALERT_RULE_UUID]: 'rule-id',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
],
},
},
@@ -807,6 +883,7 @@ describe('createGetSummarizedAlertsFn', () => {
end: new Date('2020-01-01T12:25:00.000Z'),
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: ['TEST_ALERT_10'],
});
expect(ruleDataClientMock.getReader).toHaveBeenCalledWith({ namespace: 'space-id' });
expect(ruleDataClientMock.getReader().search).toHaveBeenCalledTimes(1);
@@ -830,6 +907,15 @@ describe('createGetSummarizedAlertsFn', () => {
[ALERT_RULE_UUID]: 'rule-id',
},
},
{
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
},
},
},
},
],
},
},
@@ -897,7 +983,12 @@ describe('createGetSummarizedAlertsFn', () => {
})();

await expect(
getSummarizedAlertsFn({ executionUuid: 'abc', ruleId: 'rule-id', spaceId: 'space-id' })
getSummarizedAlertsFn({
executionUuid: 'abc',
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
})
).rejects.toThrowErrorMatchingInlineSnapshot(`"search error"`);
});

@@ -909,7 +1000,11 @@ describe('createGetSummarizedAlertsFn', () => {
})();

await expect(
getSummarizedAlertsFn({ ruleId: 'rule-id', spaceId: 'space-id' })
getSummarizedAlertsFn({
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
})
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Must specify either execution UUID or time range for summarized alert query."`
);
@@ -929,6 +1024,7 @@ describe('createGetSummarizedAlertsFn', () => {
end: new Date(),
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
})
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Must specify either execution UUID or time range for summarized alert query."`
@@ -943,7 +1039,12 @@ describe('createGetSummarizedAlertsFn', () => {
})();

await expect(
getSummarizedAlertsFn({ start: new Date(), ruleId: 'rule-id', spaceId: 'space-id' })
getSummarizedAlertsFn({
start: new Date(),
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
})
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Must specify either execution UUID or time range for summarized alert query."`
);
@@ -957,7 +1058,12 @@ describe('createGetSummarizedAlertsFn', () => {
})();

await expect(
getSummarizedAlertsFn({ end: new Date(), ruleId: 'rule-id', spaceId: 'space-id' })
getSummarizedAlertsFn({
end: new Date(),
ruleId: 'rule-id',
spaceId: 'space-id',
excludedAlertInstanceIds: [],
})
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Must specify either execution UUID or time range for summarized alert query."`
);
```
```
@@ -15,6 +15,7 @@ import {
ALERT_START,
EVENT_ACTION,
TIMESTAMP,
ALERT_INSTANCE_ID,
} from '@kbn/rule-data-utils';
import {
QueryDslQueryContainer,
@@ -36,7 +37,14 @@ interface CreateGetSummarizedAlertsFnOpts {
export const createGetSummarizedAlertsFn =
(opts: CreateGetSummarizedAlertsFnOpts) =>
() =>
async ({ start, end, executionUuid, ruleId, spaceId }: GetSummarizedAlertsFnOpts) => {
async ({
start,
end,
executionUuid,
ruleId,
spaceId,
excludedAlertInstanceIds,
}: GetSummarizedAlertsFnOpts) => {
if (!ruleId || !spaceId) {
throw new Error(`Must specify both rule ID and space ID for summarized alert query.`);
}
@@ -64,6 +72,7 @@ export const createGetSummarizedAlertsFn =
ruleId,
executionUuid: executionUuid!,
isLifecycleAlert: opts.isLifecycleAlert,
excludedAlertInstanceIds,
});
}

@@ -73,6 +82,7 @@ export const createGetSummarizedAlertsFn =
start: start!,
end: end!,
isLifecycleAlert: opts.isLifecycleAlert,
excludedAlertInstanceIds,
});
};

@@ -81,6 +91,7 @@ interface GetAlertsByExecutionUuidOpts {
ruleId: string;
ruleDataClientReader: IRuleDataReader;
isLifecycleAlert: boolean;
excludedAlertInstanceIds: string[];
}

const getAlertsByExecutionUuid = async ({
@@ -88,28 +99,41 @@ const getAlertsByExecutionUuid = async ({
ruleId,
ruleDataClientReader,
isLifecycleAlert,
excludedAlertInstanceIds,
}: GetAlertsByExecutionUuidOpts) => {
if (isLifecycleAlert) {
return getLifecycleAlertsByExecutionUuid({ executionUuid, ruleId, ruleDataClientReader });
return getLifecycleAlertsByExecutionUuid({
executionUuid,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
});
}

return getPersistentAlertsByExecutionUuid({ executionUuid, ruleId, ruleDataClientReader });
return getPersistentAlertsByExecutionUuid({
executionUuid,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
});
};

interface GetAlertsByExecutionUuidHelperOpts {
executionUuid: string;
ruleId: string;
ruleDataClientReader: IRuleDataReader;
excludedAlertInstanceIds: string[];
}

const getPersistentAlertsByExecutionUuid = async <TSearchRequest extends ESSearchRequest>({
executionUuid,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
}: GetAlertsByExecutionUuidHelperOpts) => {
// persistent alerts only create new alerts so query by execution UUID to
// get all alerts created during an execution
const request = getQueryByExecutionUuid(executionUuid, ruleId);
const request = getQueryByExecutionUuid(executionUuid, ruleId, excludedAlertInstanceIds);
const response = (await ruleDataClientReader.search(request)) as ESSearchResponse<
AlertDocument,
TSearchRequest
@@ -132,15 +156,16 @@ const getLifecycleAlertsByExecutionUuid = async ({
executionUuid,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
}: GetAlertsByExecutionUuidHelperOpts) => {
// lifecycle alerts assign a different action to an alert depending
// on whether it is new/ongoing/recovered. query for each action in order
// to get the count of each action type as well as up to the maximum number
// of each type of alert.
const requests = [
getQueryByExecutionUuid(executionUuid, ruleId, 'open'),
getQueryByExecutionUuid(executionUuid, ruleId, 'active'),
getQueryByExecutionUuid(executionUuid, ruleId, 'close'),
getQueryByExecutionUuid(executionUuid, ruleId, excludedAlertInstanceIds, 'open'),
getQueryByExecutionUuid(executionUuid, ruleId, excludedAlertInstanceIds, 'active'),
getQueryByExecutionUuid(executionUuid, ruleId, excludedAlertInstanceIds, 'close'),
];

const responses = await Promise.all(
@@ -163,7 +188,12 @@ const getHitsWithCount = <TSearchRequest extends ESSearchRequest>(
};
};

const getQueryByExecutionUuid = (executionUuid: string, ruleId: string, action?: string) => {
const getQueryByExecutionUuid = (
executionUuid: string,
ruleId: string,
excludedAlertInstanceIds: string[],
action?: string
) => {
const filter: QueryDslQueryContainer[] = [
{
term: {
@@ -183,6 +213,17 @@ const getQueryByExecutionUuid = (executionUuid: string, ruleId: string, action?:
},
});
}
if (excludedAlertInstanceIds.length) {
filter.push({
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: excludedAlertInstanceIds,
},
},
},
});
}

return {
body: {
@@ -203,6 +244,7 @@ interface GetAlertsByTimeRangeOpts {
ruleId: string;
ruleDataClientReader: IRuleDataReader;
isLifecycleAlert: boolean;
excludedAlertInstanceIds: string[];
}

const getAlertsByTimeRange = async ({
@@ -211,12 +253,25 @@ const getAlertsByTimeRange = async ({
ruleId,
ruleDataClientReader,
isLifecycleAlert,
excludedAlertInstanceIds,
}: GetAlertsByTimeRangeOpts) => {
if (isLifecycleAlert) {
return getLifecycleAlertsByTimeRange({ start, end, ruleId, ruleDataClientReader });
return getLifecycleAlertsByTimeRange({
start,
end,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
});
}

return getPersistentAlertsByTimeRange({ start, end, ruleId, ruleDataClientReader });
return getPersistentAlertsByTimeRange({
start,
end,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
});
};

interface GetAlertsByTimeRangeHelperOpts {
@@ -224,6 +279,7 @@ interface GetAlertsByTimeRangeHelperOpts {
end: Date;
ruleId: string;
ruleDataClientReader: IRuleDataReader;
excludedAlertInstanceIds: string[];
}

enum AlertTypes {
@@ -237,10 +293,11 @@ const getPersistentAlertsByTimeRange = async <TSearchRequest extends ESSearchReq
end,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
}: GetAlertsByTimeRangeHelperOpts) => {
// persistent alerts only create new alerts so query for all alerts within the time
// range and treat them as NEW
const request = getQueryByTimeRange(start, end, ruleId);
const request = getQueryByTimeRange(start, end, ruleId, excludedAlertInstanceIds);
const response = (await ruleDataClientReader.search(request)) as ESSearchResponse<
AlertDocument,
TSearchRequest
@@ -264,11 +321,12 @@ const getLifecycleAlertsByTimeRange = async ({
end,
ruleId,
ruleDataClientReader,
excludedAlertInstanceIds,
}: GetAlertsByTimeRangeHelperOpts) => {
const requests = [
getQueryByTimeRange(start, end, ruleId, AlertTypes.NEW),
getQueryByTimeRange(start, end, ruleId, AlertTypes.ONGOING),
getQueryByTimeRange(start, end, ruleId, AlertTypes.RECOVERED),
getQueryByTimeRange(start, end, ruleId, excludedAlertInstanceIds, AlertTypes.NEW),
getQueryByTimeRange(start, end, ruleId, excludedAlertInstanceIds, AlertTypes.ONGOING),
getQueryByTimeRange(start, end, ruleId, excludedAlertInstanceIds, AlertTypes.RECOVERED),
];

const responses = await Promise.all(
@@ -282,7 +340,13 @@ const getLifecycleAlertsByTimeRange = async ({
};
};

const getQueryByTimeRange = (start: Date, end: Date, ruleId: string, type?: AlertTypes) => {
const getQueryByTimeRange = (
start: Date,
end: Date,
ruleId: string,
excludedAlertInstanceIds: string[],
type?: AlertTypes
) => {
// base query filters the alert documents for a rule by the given time range
let filter: QueryDslQueryContainer[] = [
{
@@ -300,6 +364,18 @@ const getQueryByTimeRange = (start: Date, end: Date, ruleId: string, type?: Aler
},
];

if (excludedAlertInstanceIds.length) {
filter.push({
bool: {
must_not: {
terms: {
[ALERT_INSTANCE_ID]: excludedAlertInstanceIds,
},
},
},
});
}

if (type === AlertTypes.NEW) {
// alerts are considered NEW within the time range if they started after
// the query start time
```
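For orientation before the functional test below, here is a condensed usage sketch of the updated helper. It assumes `createGetSummarizedAlertsFn`, a `ruleDataClient`, and the rule/execution IDs are already in scope (exactly as the test sets them up); it is a distillation of that test, not new API surface.

```ts
// Assumes createGetSummarizedAlertsFn, ruleDataClient, ruleId and
// execution1Uuid are in scope, as in the functional test below.
const getSummarizedAlerts = createGetSummarizedAlertsFn({
  ruleDataClient,
  useNamespace: false,
  isLifecycleAlert: true,
})();

// Muted alert instance IDs are passed explicitly and filtered out of the
// new/ongoing/recovered buckets returned by the helper.
const summary = await getSummarizedAlerts({
  ruleId,
  executionUuid: execution1Uuid,
  spaceId: 'default',
  excludedAlertInstanceIds: ['host-01'],
});
```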
```
@@ -14,6 +14,7 @@ import { mappingFromFieldMap } from '@kbn/rule-registry-plugin/common/mapping_fr
import {
AlertConsumers,
ALERT_REASON,
ALERT_INSTANCE_ID,
} from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
import {
createLifecycleExecutor,
@@ -192,13 +193,11 @@ export default function createGetSummarizedAlertsTest({ getService }: FtrProvide
executionId: execution1Uuid,
});

// Refresh the index so the data is available for reading
await es.indices.refresh({ index: `${ruleDataClient.indexName}*` });

const execution1SummarizedAlerts = await getSummarizedAlerts({
ruleId: id,
executionUuid: execution1Uuid,
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(execution1SummarizedAlerts.new.count).to.eql(1);
expect(execution1SummarizedAlerts.ongoing.count).to.eql(0);
@@ -214,13 +213,11 @@ export default function createGetSummarizedAlertsTest({ getService }: FtrProvide
executionId: execution2Uuid,
});

// Refresh the index so the data is available for reading
await es.indices.refresh({ index: `${ruleDataClient.indexName}*` });

const execution2SummarizedAlerts = await getSummarizedAlerts({
ruleId: id,
executionUuid: execution2Uuid,
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(execution2SummarizedAlerts.new.count).to.eql(0);
expect(execution2SummarizedAlerts.ongoing.count).to.eql(1);
@@ -235,13 +232,11 @@ export default function createGetSummarizedAlertsTest({ getService }: FtrProvide
executionId: execution3Uuid,
});

// Refresh the index so the data is available for reading
await es.indices.refresh({ index: `${ruleDataClient.indexName}*` });

const execution3SummarizedAlerts = await getSummarizedAlerts({
ruleId: id,
executionUuid: execution3Uuid,
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(execution3SummarizedAlerts.new.count).to.eql(0);
expect(execution3SummarizedAlerts.ongoing.count).to.eql(0);
@@ -255,6 +250,7 @@ export default function createGetSummarizedAlertsTest({ getService }: FtrProvide
start: preExecution1Start,
end: new Date(),
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(timeRangeSummarizedAlerts1.new.count).to.eql(1);
expect(timeRangeSummarizedAlerts1.ongoing.count).to.eql(0);
@@ -268,10 +264,111 @@ export default function createGetSummarizedAlertsTest({ getService }: FtrProvide
start: preExecution2Start,
end: new Date(),
spaceId: 'default',
excludedAlertInstanceIds: [],
});
expect(timeRangeSummarizedAlerts2.new.count).to.eql(0);
expect(timeRangeSummarizedAlerts2.ongoing.count).to.eql(0);
expect(timeRangeSummarizedAlerts2.recovered.count).to.eql(1);
});

it(`shouldn't return muted alerts`, async () => {
const ruleId = uuid.v4();
const id1 = 'host-01';
const id2 = 'host-02';

// This creates the function that will wrap the solution's rule executor with the RuleRegistry lifecycle
const createLifecycleRuleExecutor = createLifecycleExecutor(logger, ruleDataClient);
const createGetSummarizedAlerts = createGetSummarizedAlertsFn({
ruleDataClient,
useNamespace: false,
isLifecycleAlert: true,
});

// This creates the executor that is passed to the Alerting framework.
const executor = createLifecycleRuleExecutor<
MockRuleParams,
{ shouldTriggerAlert: boolean },
MockAlertState,
MockAlertContext,
MockAllowedActionGroups
>(async function (options) {
const { services } = options;
const { alertWithLifecycle } = services;

alertWithLifecycle({
id: id1,
fields: {
[ALERT_REASON]: 'Test alert is firing',
},
});
alertWithLifecycle({
id: id2,
fields: {
[ALERT_REASON]: 'Test alert is firing',
},
});
});

const getSummarizedAlerts = createGetSummarizedAlerts();

// Create the options with the minimal amount of values to test the lifecycle executor
const options = {
spaceId: 'default',
rule: {
id: ruleId,
name: 'test rule',
ruleTypeId: 'observability.test.fake',
ruleTypeName: 'test',
consumer: 'observability',
producer: 'observability.test',
},
services: {
alertFactory: { create: sinon.stub() },
shouldWriteAlerts: sinon.stub().returns(true),
},
} as unknown as RuleExecutorOptions<
MockRuleParams,
WrappedLifecycleRuleState<{ shouldTriggerAlert: boolean }>,
{ [x: string]: unknown },
{ [x: string]: unknown },
string
>;

const getState = (
shouldTriggerAlert: boolean,
alerts: Record<string, TrackedLifecycleAlertState>
) => ({ wrapped: { shouldTriggerAlert }, trackedAlerts: alerts, trackedAlertsRecovered: {} });

// Execute the rule the first time - this creates a new alert
const execution1Uuid = uuid.v4();
await executor({
...options,
startedAt: new Date(),
state: getState(true, {}),
executionId: execution1Uuid,
});

const summarizedAlertsExcludingId1 = await getSummarizedAlerts({
ruleId,
executionUuid: execution1Uuid,
spaceId: 'default',
excludedAlertInstanceIds: [id1],
});
expect(summarizedAlertsExcludingId1.new.count).to.eql(1);
expect(summarizedAlertsExcludingId1.ongoing.count).to.eql(0);
expect(summarizedAlertsExcludingId1.recovered.count).to.eql(0);
expect(summarizedAlertsExcludingId1.new.data[0][ALERT_INSTANCE_ID]).to.eql(id2);

const summarizedAlertsExcludingId2 = await getSummarizedAlerts({
ruleId,
executionUuid: execution1Uuid,
spaceId: 'default',
excludedAlertInstanceIds: [id2],
});
expect(summarizedAlertsExcludingId2.new.count).to.eql(1);
expect(summarizedAlertsExcludingId2.ongoing.count).to.eql(0);
expect(summarizedAlertsExcludingId2.recovered.count).to.eql(0);
expect(summarizedAlertsExcludingId2.new.data[0][ALERT_INSTANCE_ID]).to.eql(id1);
});
});
}
```