[RAM] Maintenance Window Task Runner Integration + New AAD/Event Log Fields (#154761)

## Summary

Resolves: https://github.com/elastic/kibana/issues/153468
Maintenance window API PR: https://github.com/elastic/kibana/pull/153411

This PR does the following: 
- Skip alert notifications for rules in maintenance
- Add `maintenance_window_ids` field to alert events in the event log
- Add `maintenance_window_ids` attribute to AAD

### Checklist
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
Jiawei Wu 2023-04-19 08:48:23 -07:00 committed by GitHub
parent e35a1d46d9
commit 14f01672c7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 892 additions and 2 deletions

View file

@ -13,6 +13,7 @@ import {
ALERT_END,
ALERT_FLAPPING,
ALERT_FLAPPING_HISTORY,
ALERT_MAINTENANCE_WINDOW_IDS,
ALERT_INSTANCE_ID,
ALERT_LAST_DETECTED,
ALERT_REASON,
@ -67,6 +68,11 @@ export const alertFieldMap = {
array: true,
required: false,
},
[ALERT_MAINTENANCE_WINDOW_IDS]: {
type: 'keyword',
array: true,
required: false,
},
[ALERT_INSTANCE_ID]: {
type: 'keyword',
array: false,

View file

@ -40,6 +40,9 @@ const ALERT_FLAPPING = `${ALERT_NAMESPACE}.flapping` as const;
// kibana.alert.flapping_history - history of the alert's flapping state changes over recent executions
const ALERT_FLAPPING_HISTORY = `${ALERT_NAMESPACE}.flapping_history` as const;
// kibana.alert.maintenance_window_ids - IDs of maintenance windows that are affecting this alert
const ALERT_MAINTENANCE_WINDOW_IDS = `${ALERT_NAMESPACE}.maintenance_window_ids` as const;
// kibana.alert.instance.id - alert ID, also known as alert instance ID
const ALERT_INSTANCE_ID = `${ALERT_NAMESPACE}.instance.id` as const;
@ -107,6 +110,7 @@ const fields = {
ALERT_END,
ALERT_FLAPPING,
ALERT_FLAPPING_HISTORY,
ALERT_MAINTENANCE_WINDOW_IDS,
ALERT_INSTANCE_ID,
ALERT_LAST_DETECTED,
ALERT_REASON,
@ -143,6 +147,7 @@ export {
ALERT_END,
ALERT_FLAPPING,
ALERT_FLAPPING_HISTORY,
ALERT_MAINTENANCE_WINDOW_IDS,
ALERT_INSTANCE_ID,
ALERT_LAST_DETECTED,
ALERT_REASON,

View file

@ -16,6 +16,7 @@ import {
ALERT_DURATION,
ALERT_END,
ALERT_FLAPPING,
ALERT_MAINTENANCE_WINDOW_IDS,
ALERT_INSTANCE_ID,
ALERT_REASON,
ALERT_RULE_CATEGORY,
@ -125,6 +126,7 @@ const fields = {
ALERT_EVALUATION_THRESHOLD,
ALERT_EVALUATION_VALUE,
ALERT_FLAPPING,
ALERT_MAINTENANCE_WINDOW_IDS,
ALERT_INSTANCE_ID,
ALERT_RULE_CONSUMER,
ALERT_RULE_PRODUCER,

View file

@ -216,6 +216,9 @@ describe('mappingFromFieldMap', () => {
flapping_history: {
type: 'boolean',
},
maintenance_window_ids: {
type: 'keyword',
},
instance: {
properties: {
id: {

View file

@ -236,6 +236,7 @@ describe('Legacy Alerts Client', () => {
shouldLogAndScheduleActionsForAlerts: true,
flappingSettings: DEFAULT_FLAPPING_SETTINGS,
notifyWhen: RuleNotifyWhen.CHANGE,
maintenanceWindowIds: ['window-id1', 'window-id2'],
});
expect(processAlerts).toHaveBeenCalledWith({
@ -284,6 +285,7 @@ describe('Legacy Alerts Client', () => {
ruleRunMetricsStore,
canSetRecoveryContext: false,
shouldPersistAlerts: true,
maintenanceWindowIds: ['window-id1', 'window-id2'],
});
expect(alertsClient.getProcessedAlerts('active')).toEqual({

View file

@ -117,6 +117,7 @@ export class LegacyAlertsClient<
shouldLogAndScheduleActionsForAlerts,
flappingSettings,
notifyWhen,
maintenanceWindowIds,
}: {
eventLogger: AlertingEventLogger;
ruleLabel: string;
@ -124,6 +125,7 @@ export class LegacyAlertsClient<
ruleRunMetricsStore: RuleRunMetricsStore;
flappingSettings: RulesSettingsFlappingProperties;
notifyWhen: RuleNotifyWhenType | null;
maintenanceWindowIds?: string[];
}) {
const {
newAlerts: processedAlertsNew,
@ -176,6 +178,7 @@ export class LegacyAlertsClient<
ruleRunMetricsStore,
canSetRecoveryContext: this.options.ruleType.doesSetRecoveryContext ?? false,
shouldPersistAlerts: shouldLogAndScheduleActionsForAlerts,
maintenanceWindowIds,
});
}

View file

@ -68,6 +68,7 @@ const alert = {
duration: '2343252346',
},
flapping: false,
maintenanceWindowIds: ['window-id1', 'window-id2'],
};
const action = {
@ -1068,6 +1069,7 @@ describe('createAlertRecord', () => {
expect(record.kibana?.alert?.rule?.rule_type_id).toEqual(contextWithName.ruleType.id);
expect(record.kibana?.alert?.rule?.consumer).toEqual(contextWithName.consumer);
expect(record.kibana?.alert?.rule?.execution?.uuid).toEqual(contextWithName.executionId);
expect(record.kibana?.alert?.maintenance_window_ids).toEqual(alert.maintenanceWindowIds);
expect(record.kibana?.alerting?.instance_id).toEqual(alert.id);
expect(record.kibana?.alerting?.action_group_id).toEqual(alert.group);
expect(record.kibana?.saved_objects).toEqual([

View file

@ -50,6 +50,7 @@ interface AlertOpts {
group?: string;
state?: AlertInstanceState;
flapping: boolean;
maintenanceWindowIds?: string[];
}
interface ActionOpts {
@ -256,6 +257,7 @@ export function createAlertRecord(context: RuleContextOpts, alert: AlertOpts) {
],
ruleName: context.ruleName,
flapping: alert.flapping,
maintenanceWindowIds: alert.maintenanceWindowIds,
});
}

View file

@ -9,6 +9,8 @@ import { createAlertEventLogRecordObject } from './create_alert_event_log_record
import { UntypedNormalizedRuleType } from '../rule_type_registry';
import { RecoveredActionGroup } from '../types';
// Shared fixture: maintenance window IDs asserted throughout this test suite.
const MAINTENANCE_WINDOW_IDS = ['test-1', 'test-2'];
describe('createAlertEventLogRecordObject', () => {
const ruleType: jest.Mocked<UntypedNormalizedRuleType> = {
id: 'test',
@ -44,6 +46,7 @@ describe('createAlertEventLogRecordObject', () => {
},
],
spaceId: 'default',
maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
})
).toStrictEqual({
'@timestamp': '1970-01-01T00:00:00.000Z',
@ -61,6 +64,7 @@ describe('createAlertEventLogRecordObject', () => {
},
rule_type_id: 'test',
},
maintenance_window_ids: MAINTENANCE_WINDOW_IDS,
},
saved_objects: [
{
@ -113,6 +117,7 @@ describe('createAlertEventLogRecordObject', () => {
},
],
spaceId: 'default',
maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
})
).toStrictEqual({
event: {
@ -132,6 +137,7 @@ describe('createAlertEventLogRecordObject', () => {
},
rule_type_id: 'test',
},
maintenance_window_ids: MAINTENANCE_WINDOW_IDS,
},
alerting: {
action_group_id: 'group 1',
@ -196,6 +202,7 @@ describe('createAlertEventLogRecordObject', () => {
ongoing: 3,
recovered: 1,
},
maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
})
).toStrictEqual({
event: {
@ -215,6 +222,7 @@ describe('createAlertEventLogRecordObject', () => {
},
rule_type_id: 'test',
},
maintenance_window_ids: MAINTENANCE_WINDOW_IDS,
},
alerting: {
action_group_id: 'group 1',

View file

@ -42,6 +42,7 @@ interface CreateAlertEventLogRecordParams {
ongoing: number;
recovered: number;
};
maintenanceWindowIds?: string[];
}
export function createAlertEventLogRecordObject(params: CreateAlertEventLogRecordParams): Event {
@ -60,6 +61,7 @@ export function createAlertEventLogRecordObject(params: CreateAlertEventLogRecor
flapping,
alertUuid,
alertSummary,
maintenanceWindowIds,
} = params;
const alerting =
params.instanceId || group || alertSummary
@ -92,6 +94,7 @@ export function createAlertEventLogRecordObject(params: CreateAlertEventLogRecor
kibana: {
alert: {
...(flapping !== undefined ? { flapping } : {}),
...(maintenanceWindowIds ? { maintenance_window_ids: maintenanceWindowIds } : {}),
...(alertUuid ? { uuid: alertUuid } : {}),
rule: {
rule_type_id: ruleType.id,

View file

@ -17,7 +17,7 @@ const createMaintenanceWindowClientMock = () => {
find: jest.fn(),
get: jest.fn(),
archive: jest.fn(),
getActiveMaintenanceWindows: jest.fn(),
getActiveMaintenanceWindows: jest.fn().mockResolvedValue([]),
finish: jest.fn(),
delete: jest.fn(),
};

View file

@ -502,6 +502,10 @@ export class AlertingPlugin {
return rulesSettingsClientFactory!.create(request);
};
const getMaintenanceWindowClientWithRequest = (request: KibanaRequest) => {
return maintenanceWindowClientFactory!.create(request);
};
taskRunnerFactory.initialize({
logger,
data: plugins.data,
@ -528,6 +532,7 @@ export class AlertingPlugin {
actionsConfigMap: getActionsConfigMap(this.config.rules.run.actions),
usageCounter: this.usageCounter,
getRulesSettingsClientWithRequest,
getMaintenanceWindowClientWithRequest,
});
this.eventLogService!.registerSavedObjectProvider('alert', (request) => {

View file

@ -233,7 +233,13 @@ export const mockTaskInstance = () => ({
ownerId: null,
});
export const generateAlertOpts = ({ action, group, state, id }: GeneratorParams = {}) => {
export const generateAlertOpts = ({
action,
group,
state,
id,
maintenanceWindowIds = [],
}: GeneratorParams = {}) => {
id = id ?? '1';
let message: string = '';
switch (action) {
@ -255,6 +261,7 @@ export const generateAlertOpts = ({ action, group, state, id }: GeneratorParams
state,
...(group ? { group } : {}),
flapping: false,
maintenanceWindowIds,
};
};

View file

@ -362,4 +362,87 @@ describe('logAlerts', () => {
uuid: expect.any(String),
});
});
// Verifies that when maintenanceWindowIds is passed to logAlerts(), the alert
// counts are still tracked in ruleRunMetricsStore and every logged alert event
// (recovered, new, and active) carries the maintenance window IDs.
test('should correctly set maintenance window in ruleRunMetricsStore and call alertingEventLogger.logAlert', () => {
  jest.clearAllMocks();
  const MAINTENANCE_WINDOW_IDS = ['window-id-1', 'window-id-2'];
  logAlerts({
    logger,
    alertingEventLogger,
    // Alert '4' appears in both newAlerts and activeAlerts; '1' is ongoing
    // active; '7' and '8' have recovered.
    newAlerts: {
      '4': new Alert<{}, {}, DefaultActionGroupId>('4'),
    },
    activeAlerts: {
      '1': new Alert<{}, {}, DefaultActionGroupId>('1'),
      '4': new Alert<{}, {}, DefaultActionGroupId>('4'),
    },
    recoveredAlerts: {
      '7': new Alert<{}, {}, DefaultActionGroupId>('7'),
      '8': new Alert<{}, {}, DefaultActionGroupId>('8'),
    },
    ruleLogPrefix: `test-rule-type-id:123: 'test rule'`,
    ruleRunMetricsStore,
    canSetRecoveryContext: false,
    shouldPersistAlerts: true,
    maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
  });
  expect(ruleRunMetricsStore.getNumberOfNewAlerts()).toEqual(1);
  expect(ruleRunMetricsStore.getNumberOfActiveAlerts()).toEqual(2);
  expect(ruleRunMetricsStore.getNumberOfRecoveredAlerts()).toEqual(2);
  // One event per alert: 2 recovered + 1 new + 2 active = 5 logAlert calls.
  expect(alertingEventLogger.logAlert).toHaveBeenCalledTimes(5);
  // Recovered alerts are logged first...
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(1, {
    action: 'recovered-instance',
    id: '7',
    message: "test-rule-type-id:123: 'test rule' alert '7' has recovered",
    state: {},
    flapping: false,
    group: undefined,
    uuid: expect.any(String),
    maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
  });
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(2, {
    action: 'recovered-instance',
    id: '8',
    message: "test-rule-type-id:123: 'test rule' alert '8' has recovered",
    state: {},
    flapping: false,
    group: undefined,
    uuid: expect.any(String),
    maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
  });
  // ...then new alerts...
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(3, {
    action: 'new-instance',
    id: '4',
    message: "test-rule-type-id:123: 'test rule' created new alert: '4'",
    state: {},
    flapping: false,
    group: undefined,
    uuid: expect.any(String),
    maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
  });
  // ...and finally active alerts.
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(4, {
    action: 'active-instance',
    id: '1',
    message: "test-rule-type-id:123: 'test rule' active alert: '1' in actionGroup: 'undefined'",
    state: {},
    flapping: false,
    group: undefined,
    uuid: expect.any(String),
    maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
  });
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(5, {
    action: 'active-instance',
    id: '4',
    message: "test-rule-type-id:123: 'test rule' active alert: '4' in actionGroup: 'undefined'",
    state: {},
    flapping: false,
    group: undefined,
    uuid: expect.any(String),
    maintenanceWindowIds: MAINTENANCE_WINDOW_IDS,
  });
});
});

View file

@ -28,6 +28,7 @@ export interface LogAlertsParams<
ruleRunMetricsStore: RuleRunMetricsStore;
canSetRecoveryContext: boolean;
shouldPersistAlerts: boolean;
maintenanceWindowIds?: string[];
}
export function logAlerts<
@ -45,6 +46,7 @@ export function logAlerts<
ruleRunMetricsStore,
canSetRecoveryContext,
shouldPersistAlerts,
maintenanceWindowIds,
}: LogAlertsParams<State, Context, ActionGroupIds, RecoveryActionGroupId>) {
const newAlertIds = Object.keys(newAlerts);
const activeAlertIds = Object.keys(activeAlerts);
@ -104,6 +106,7 @@ export function logAlerts<
message,
state,
flapping: recoveredAlerts[id].getFlapping(),
maintenanceWindowIds,
});
}
@ -121,6 +124,7 @@ export function logAlerts<
message,
state,
flapping: activeAlerts[id].getFlapping(),
maintenanceWindowIds,
});
}
@ -138,6 +142,7 @@ export function logAlerts<
message,
state,
flapping: activeAlerts[id].getFlapping(),
maintenanceWindowIds,
});
}
}

View file

@ -16,6 +16,7 @@ import {
RuleExecutionStatusWarningReasons,
Rule,
RuleAction,
MaintenanceWindow,
} from '../types';
import { ConcreteTaskInstance, isUnrecoverableError } from '@kbn/task-manager-plugin/server';
import { TaskRunnerContext } from './task_runner_factory';
@ -77,7 +78,9 @@ import { SharePluginStart } from '@kbn/share-plugin/server';
import { dataViewPluginMocks } from '@kbn/data-views-plugin/public/mocks';
import { DataViewsServerPluginStart } from '@kbn/data-views-plugin/server';
import { rulesSettingsClientMock } from '../rules_settings_client.mock';
import { maintenanceWindowClientMock } from '../maintenance_window_client.mock';
import { alertsServiceMock } from '../alerts_service/alerts_service.mock';
import { getMockMaintenanceWindow } from '../maintenance_window_client/methods/test_helpers';
jest.mock('uuid', () => ({
v4: () => '5f6aa57d-3e22-484e-bae8-cbed868f4d28',
@ -130,6 +133,7 @@ describe('Task Runner', () => {
dataViewsServiceFactory: jest.fn().mockResolvedValue(dataViewPluginMocks.createStartContract()),
} as DataViewsServerPluginStart;
const alertsService = alertsServiceMock.create();
const maintenanceWindowClient = maintenanceWindowClientMock.create();
type TaskRunnerFactoryInitializerParamsType = jest.Mocked<TaskRunnerContext> & {
actionsPlugin: jest.Mocked<ActionsPluginStart>;
@ -167,6 +171,7 @@ describe('Task Runner', () => {
},
},
getRulesSettingsClientWithRequest: jest.fn().mockReturnValue(rulesSettingsClientMock.create()),
getMaintenanceWindowClientWithRequest: jest.fn().mockReturnValue(maintenanceWindowClient),
};
const ephemeralTestParams: Array<
@ -203,6 +208,7 @@ describe('Task Runner', () => {
});
savedObjectsService.getScopedClient.mockReturnValue(services.savedObjectsClient);
elasticsearchService.client.asScoped.mockReturnValue(services.scopedClusterClient);
maintenanceWindowClient.getActiveMaintenanceWindows.mockResolvedValue([]);
taskRunnerFactoryInitializerParams.getRulesClientWithRequest.mockReturnValue(rulesClient);
taskRunnerFactoryInitializerParams.actionsPlugin.getActionsClientWithRequest.mockResolvedValue(
actionsClient
@ -217,6 +223,9 @@ describe('Task Runner', () => {
taskRunnerFactoryInitializerParams.getRulesSettingsClientWithRequest.mockReturnValue(
rulesSettingsClientMock.create()
);
taskRunnerFactoryInitializerParams.getMaintenanceWindowClientWithRequest.mockReturnValue(
maintenanceWindowClient
);
mockedRuleTypeSavedObject.monitoring!.run.history = [];
mockedRuleTypeSavedObject.monitoring!.run.calculated_metrics.success_ratio = 0;
@ -602,6 +611,101 @@ describe('Task Runner', () => {
}
);
// Verifies that the task runner still executes the rule and writes event log
// documents when maintenance windows are active, but skips scheduling actions
// and tags each logged alert with the active maintenance window IDs.
test('skips alert notification if there are active maintenance windows', async () => {
  taskRunnerFactoryInitializerParams.actionsPlugin.isActionTypeEnabled.mockReturnValue(true);
  taskRunnerFactoryInitializerParams.actionsPlugin.isActionExecutable.mockReturnValue(true);
  // Rule executor reports a single active alert in the 'default' action group.
  ruleType.executor.mockImplementation(
    async ({
      services: executorServices,
    }: RuleExecutorOptions<
      RuleTypeParams,
      RuleTypeState,
      AlertInstanceState,
      AlertInstanceContext,
      string
    >) => {
      executorServices.alertFactory.create('1').scheduleActions('default');
      return { state: {} };
    }
  );
  const taskRunner = new TaskRunner(
    ruleType,
    mockedTaskInstance,
    taskRunnerFactoryInitializerParams,
    inMemoryMetrics
  );
  expect(AlertingEventLogger).toHaveBeenCalledTimes(1);
  rulesClient.getAlertFromRaw.mockReturnValue(mockedRuleTypeSavedObject as Rule);
  // Two maintenance windows are active for this run only (mockResolvedValueOnce).
  maintenanceWindowClient.getActiveMaintenanceWindows.mockResolvedValueOnce([
    {
      ...getMockMaintenanceWindow(),
      id: 'test-id-1',
    } as MaintenanceWindow,
    {
      ...getMockMaintenanceWindow(),
      id: 'test-id-2',
    } as MaintenanceWindow,
  ]);
  encryptedSavedObjectsClient.getDecryptedAsInternalUser.mockResolvedValue(SAVED_OBJECT);
  await taskRunner.run();
  // No actions are enqueued while maintenance windows are active.
  expect(actionsClient.ephemeralEnqueuedExecution).toHaveBeenCalledTimes(0);
  expect(logger.debug).toHaveBeenCalledTimes(7);
  expect(logger.debug).nthCalledWith(1, 'executing rule test:1 at 1970-01-01T00:00:00.000Z');
  expect(logger.debug).nthCalledWith(
    2,
    `rule test:1: '${RULE_NAME}' has 1 active alerts: [{\"instanceId\":\"1\",\"actionGroup\":\"default\"}]`
  );
  // The skip reason names the active maintenance window IDs.
  expect(logger.debug).nthCalledWith(
    3,
    `no scheduling of actions for rule test:1: '${RULE_NAME}': has active maintenance windows test-id-1,test-id-2.`
  );
  expect(logger.debug).nthCalledWith(
    4,
    'deprecated ruleRunStatus for test:1: {"lastExecutionDate":"1970-01-01T00:00:00.000Z","status":"active"}'
  );
  expect(logger.debug).nthCalledWith(
    5,
    'ruleRunStatus for test:1: {"outcome":"succeeded","outcomeOrder":0,"outcomeMsg":null,"warning":null,"alertsCount":{"active":1,"new":1,"recovered":0,"ignored":0}}'
  );
  expect(logger.debug).nthCalledWith(
    6,
    'ruleRunMetrics for test:1: {"numSearches":3,"totalSearchDurationMs":23423,"esSearchDurationMs":33,"numberOfTriggeredActions":0,"numberOfGeneratedActions":0,"numberOfActiveAlerts":1,"numberOfRecoveredAlerts":0,"numberOfNewAlerts":1,"hasReachedAlertLimit":false,"triggeredActionsStatus":"complete"}'
  );
  expect(logger.debug).nthCalledWith(
    7,
    'Updating rule task for test rule with id 1 - {"lastExecutionDate":"1970-01-01T00:00:00.000Z","status":"active"} - {"outcome":"succeeded","outcomeOrder":0,"outcomeMsg":null,"warning":null,"alertsCount":{"active":1,"new":1,"recovered":0,"ignored":0}}'
  );
  // Event log still records the run and both alert events (new + active).
  testAlertingEventLogCalls({
    activeAlerts: 1,
    newAlerts: 1,
    status: 'active',
    logAlert: 2,
  });
  // Each logged alert event carries the maintenance window IDs.
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(
    1,
    generateAlertOpts({
      action: EVENT_LOG_ACTIONS.newInstance,
      group: 'default',
      state: { start: DATE_1970, duration: '0' },
      maintenanceWindowIds: ['test-id-1', 'test-id-2'],
    })
  );
  expect(alertingEventLogger.logAlert).toHaveBeenNthCalledWith(
    2,
    generateAlertOpts({
      action: EVENT_LOG_ACTIONS.activeInstance,
      group: 'default',
      state: { start: DATE_1970, duration: '0' },
      maintenanceWindowIds: ['test-id-1', 'test-id-2'],
    })
  );
  expect(mockUsageCounter.incrementCounter).not.toHaveBeenCalled();
});
test.each(ephemeralTestParams)(
'skips firing actions for active alert if alert is muted %s',
async (nameExtension, customTaskRunnerFactoryInitializerParams, enqueueFunction) => {

View file

@ -47,6 +47,7 @@ import {
parseDuration,
RawAlertInstance,
RuleLastRunOutcomeOrderMap,
MaintenanceWindow,
} from '../../common';
import { NormalizedRuleType, UntypedNormalizedRuleType } from '../rule_type_registry';
import { getEsErrorMessage } from '../lib/errors';
@ -315,6 +316,22 @@ export class TaskRunner<
});
const rulesSettingsClient = this.context.getRulesSettingsClientWithRequest(fakeRequest);
const flappingSettings = await rulesSettingsClient.flapping().get();
const maintenanceWindowClient = this.context.getMaintenanceWindowClientWithRequest(fakeRequest);
let activeMaintenanceWindows: MaintenanceWindow[] = [];
try {
activeMaintenanceWindows = await maintenanceWindowClient.getActiveMaintenanceWindows({
interval: rule.schedule.interval,
});
} catch (err) {
this.logger.error(
`error getting active maintenance window for ${ruleTypeId}:${ruleId} ${err.message}`
);
}
const maintenanceWindowIds = activeMaintenanceWindows.map(
(maintenanceWindow) => maintenanceWindow.id
);
const { updatedRuleTypeState } = await this.timer.runWithTimer(
TaskRunnerTimerSpan.RuleTypeRun,
@ -397,6 +414,7 @@ export class TaskRunner<
},
logger: this.logger,
flappingSettings,
...(maintenanceWindowIds.length ? { maintenanceWindowIds } : {}),
})
);
@ -444,6 +462,7 @@ export class TaskRunner<
shouldLogAndScheduleActionsForAlerts: this.shouldLogAndScheduleActionsForAlerts(),
flappingSettings,
notifyWhen,
maintenanceWindowIds,
});
});
@ -470,6 +489,10 @@ export class TaskRunner<
if (isRuleSnoozed(rule)) {
this.logger.debug(`no scheduling of actions for rule ${ruleLabel}: rule is snoozed.`);
} else if (maintenanceWindowIds.length) {
this.logger.debug(
`no scheduling of actions for rule ${ruleLabel}: has active maintenance windows ${maintenanceWindowIds}.`
);
} else if (!this.shouldLogAndScheduleActionsForAlerts()) {
this.logger.debug(
`no scheduling of actions for rule ${ruleLabel}: rule execution has been cancelled.`

View file

@ -54,6 +54,7 @@ import { SharePluginStart } from '@kbn/share-plugin/server';
import { DataViewsServerPluginStart } from '@kbn/data-views-plugin/server';
import { dataViewPluginMocks } from '@kbn/data-views-plugin/public/mocks';
import { rulesSettingsClientMock } from '../rules_settings_client.mock';
import { maintenanceWindowClientMock } from '../maintenance_window_client.mock';
import { alertsServiceMock } from '../alerts_service/alerts_service.mock';
jest.mock('uuid', () => ({
@ -143,6 +144,9 @@ describe('Task Runner Cancel', () => {
},
},
getRulesSettingsClientWithRequest: jest.fn().mockReturnValue(rulesSettingsClientMock.create()),
getMaintenanceWindowClientWithRequest: jest
.fn()
.mockReturnValue(maintenanceWindowClientMock.create()),
};
beforeEach(() => {
@ -173,6 +177,9 @@ describe('Task Runner Cancel', () => {
taskRunnerFactoryInitializerParams.getRulesSettingsClientWithRequest.mockReturnValue(
rulesSettingsClientMock.create()
);
taskRunnerFactoryInitializerParams.getMaintenanceWindowClientWithRequest.mockReturnValue(
maintenanceWindowClientMock.create()
);
rulesClient.getAlertFromRaw.mockReturnValue(mockedRuleTypeSavedObject as Rule);
encryptedSavedObjectsClient.getDecryptedAsInternalUser.mockResolvedValue({

View file

@ -30,6 +30,7 @@ import { SharePluginStart } from '@kbn/share-plugin/server';
import { DataViewsServerPluginStart } from '@kbn/data-views-plugin/server';
import { dataViewPluginMocks } from '@kbn/data-views-plugin/public/mocks';
import { rulesSettingsClientMock } from '../rules_settings_client.mock';
import { maintenanceWindowClientMock } from '../maintenance_window_client.mock';
import { alertsServiceMock } from '../alerts_service/alerts_service.mock';
const inMemoryMetrics = inMemoryMetricsMock.create();
@ -120,6 +121,9 @@ describe('Task Runner Factory', () => {
},
},
getRulesSettingsClientWithRequest: jest.fn().mockReturnValue(rulesSettingsClientMock.create()),
getMaintenanceWindowClientWithRequest: jest
.fn()
.mockReturnValue(maintenanceWindowClientMock.create()),
};
beforeEach(() => {

View file

@ -32,6 +32,7 @@ import {
AlertInstanceContext,
RulesClientApi,
RulesSettingsClientApi,
MaintenanceWindowClientApi,
} from '../types';
import { TaskRunner } from './task_runner';
import { NormalizedRuleType } from '../rule_type_registry';
@ -65,6 +66,7 @@ export interface TaskRunnerContext {
cancelAlertsOnRuleTimeout: boolean;
usageCounter?: UsageCounter;
getRulesSettingsClientWithRequest(request: KibanaRequest): RulesSettingsClientApi;
getMaintenanceWindowClientWithRequest(request: KibanaRequest): MaintenanceWindowClientApi;
}
export class TaskRunnerFactory {

View file

@ -118,6 +118,7 @@ export interface RuleExecutorOptions<
state: State;
namespace?: string;
flappingSettings: RulesSettingsFlappingProperties;
maintenanceWindowIds?: string[];
}
export interface RuleParamsAndRefs<Params extends RuleTypeParams> {

View file

@ -304,6 +304,13 @@
"flapping": {
"type": "boolean"
},
"maintenance_window_ids": {
"type": "keyword",
"ignore_above": 1024,
"meta": {
"isArray": "true"
}
},
"uuid": {
"type": "keyword",
"ignore_above": 1024

View file

@ -140,6 +140,7 @@ export const EventSchema = schema.maybe(
alert: schema.maybe(
schema.object({
flapping: ecsBoolean(),
maintenance_window_ids: ecsStringMulti(),
uuid: ecsString(),
rule: schema.maybe(
schema.object({

View file

@ -86,6 +86,10 @@ exports.EcsCustomPropertyMappings = {
flapping: {
type: 'boolean',
},
maintenance_window_ids: {
type: 'keyword',
ignore_above: 1024,
},
uuid: {
type: 'keyword',
ignore_above: 1024,
@ -274,4 +278,5 @@ exports.EcsEventLogMultiValuedProperties = [
'event.type',
'rule.author',
'kibana.space_ids',
'kibana.alert.maintenance_window_ids',
];

View file

@ -72,6 +72,11 @@ it('matches snapshot', () => {
"required": false,
"type": "date",
},
"kibana.alert.maintenance_window_ids": Object {
"array": true,
"required": false,
"type": "keyword",
},
"kibana.alert.reason": Object {
"array": false,
"required": false,

View file

@ -9,6 +9,7 @@ import { loggerMock } from '@kbn/logging-mocks';
import { pick } from 'lodash';
import {
ALERT_INSTANCE_ID,
ALERT_MAINTENANCE_WINDOW_IDS,
ALERT_RULE_CATEGORY,
ALERT_RULE_CONSUMER,
ALERT_RULE_NAME,
@ -921,6 +922,332 @@ describe('createLifecycleExecutor', () => {
});
});
describe('set maintenance window ids on the document', () => {
const maintenanceWindowIds = ['test-id-1', 'test-id-2'];
// New (first-time firing) alerts: the lifecycle executor writes alert
// documents with event.action 'open' and the maintenance window IDs attached.
it('updates documents with maintenance window ids for newly firing alerts', async () => {
  const logger = loggerMock.create();
  const ruleDataClientMock = createRuleDataClientMock();
  const executor = createLifecycleExecutor(
    logger,
    ruleDataClientMock
  )<{}, TestRuleState, never, never, never>(async ({ services, state }) => {
    services.alertWithLifecycle({
      id: 'TEST_ALERT_0',
      fields: { [TAGS]: ['source-tag1', 'source-tag2'] },
    });
    services.alertWithLifecycle({
      id: 'TEST_ALERT_1',
      fields: { [TAGS]: ['source-tag3', 'source-tag4'] },
    });
    return { state };
  });
  await executor(
    createDefaultAlertExecutorOptions({
      params: {},
      // No tracked alerts: both alerts are new to this execution.
      state: { wrapped: initialRuleState, trackedAlerts: {}, trackedAlertsRecovered: {} },
      logger,
      maintenanceWindowIds,
    })
  );
  expect((await ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledWith(
    expect.objectContaining({
      body: [
        // alert documents
        { index: { _id: expect.any(String) } },
        expect.objectContaining({
          [ALERT_INSTANCE_ID]: 'TEST_ALERT_0',
          [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
          [EVENT_ACTION]: 'open',
          [EVENT_KIND]: 'signal',
          [TAGS]: ['source-tag1', 'source-tag2', 'rule-tag1', 'rule-tag2'],
          [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds,
        }),
        { index: { _id: expect.any(String) } },
        expect.objectContaining({
          [ALERT_INSTANCE_ID]: 'TEST_ALERT_1',
          [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
          [EVENT_ACTION]: 'open',
          [EVENT_KIND]: 'signal',
          [TAGS]: ['source-tag3', 'source-tag4', 'rule-tag1', 'rule-tag2'],
          [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds,
        }),
      ],
    })
  );
  expect((await ruleDataClientMock.getWriter()).bulk).not.toHaveBeenCalledWith(
    expect.objectContaining({
      body: expect.arrayContaining([
        // evaluation documents
        { index: {} },
        expect.objectContaining({
          [EVENT_KIND]: 'event',
        }),
      ]),
    })
  );
});
// Ongoing (repeatedly firing) alerts: existing alert documents are re-indexed
// with event.action 'active' and the maintenance window IDs attached.
it('updates documents with maintenance window ids for repeatedly firing alerts', async () => {
  const logger = loggerMock.create();
  const ruleDataClientMock = createRuleDataClientMock();
  // Seed the reader with previously written alert documents for both alerts.
  ruleDataClientMock.getReader().search.mockResolvedValue({
    hits: {
      hits: [
        {
          _source: {
            '@timestamp': '',
            [ALERT_INSTANCE_ID]: 'TEST_ALERT_0',
            [ALERT_UUID]: 'ALERT_0_UUID',
            [ALERT_RULE_CATEGORY]: 'RULE_TYPE_NAME',
            [ALERT_RULE_CONSUMER]: 'CONSUMER',
            [ALERT_RULE_NAME]: 'NAME',
            [ALERT_RULE_PRODUCER]: 'PRODUCER',
            [ALERT_RULE_TYPE_ID]: 'RULE_TYPE_ID',
            [ALERT_RULE_UUID]: 'RULE_UUID',
            [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
            [ALERT_WORKFLOW_STATUS]: 'closed',
            [SPACE_IDS]: ['fake-space-id'],
            labels: { LABEL_0_KEY: 'LABEL_0_VALUE' }, // this must show up in the written doc
          },
        },
        {
          _source: {
            '@timestamp': '',
            [ALERT_INSTANCE_ID]: 'TEST_ALERT_1',
            [ALERT_UUID]: 'ALERT_1_UUID',
            [ALERT_RULE_CATEGORY]: 'RULE_TYPE_NAME',
            [ALERT_RULE_CONSUMER]: 'CONSUMER',
            [ALERT_RULE_NAME]: 'NAME',
            [ALERT_RULE_PRODUCER]: 'PRODUCER',
            [ALERT_RULE_TYPE_ID]: 'RULE_TYPE_ID',
            [ALERT_RULE_UUID]: 'RULE_UUID',
            [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
            [ALERT_WORKFLOW_STATUS]: 'open',
            [SPACE_IDS]: ['fake-space-id'],
            labels: { LABEL_0_KEY: 'LABEL_0_VALUE' }, // this must not show up in the written doc
          },
        },
      ],
    },
  } as any);
  const executor = createLifecycleExecutor(
    logger,
    ruleDataClientMock
  )<{}, TestRuleState, never, never, never>(async ({ services, state }) => {
    // Both alerts fire again in this execution.
    services.alertWithLifecycle({
      id: 'TEST_ALERT_0',
      fields: {},
    });
    services.alertWithLifecycle({
      id: 'TEST_ALERT_1',
      fields: {},
    });
    return { state };
  });
  await executor(
    createDefaultAlertExecutorOptions({
      alertId: 'TEST_ALERT_0',
      params: {},
      state: {
        wrapped: initialRuleState,
        // Both alerts were already tracked from a prior execution.
        trackedAlerts: {
          TEST_ALERT_0: {
            alertId: 'TEST_ALERT_0',
            alertUuid: 'TEST_ALERT_0_UUID',
            started: '2020-01-01T12:00:00.000Z',
            flappingHistory: [],
            flapping: false,
            pendingRecoveredCount: 0,
          },
          TEST_ALERT_1: {
            alertId: 'TEST_ALERT_1',
            alertUuid: 'TEST_ALERT_1_UUID',
            started: '2020-01-02T12:00:00.000Z',
            flappingHistory: [],
            flapping: false,
            pendingRecoveredCount: 0,
          },
        },
        trackedAlertsRecovered: {},
      },
      logger,
      maintenanceWindowIds,
    })
  );
  expect((await ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledWith(
    expect.objectContaining({
      body: [
        // alert document
        { index: { _id: 'TEST_ALERT_0_UUID' } },
        expect.objectContaining({
          [ALERT_INSTANCE_ID]: 'TEST_ALERT_0',
          [ALERT_WORKFLOW_STATUS]: 'closed',
          [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
          labels: { LABEL_0_KEY: 'LABEL_0_VALUE' },
          [EVENT_ACTION]: 'active',
          [EVENT_KIND]: 'signal',
          [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds,
        }),
        { index: { _id: 'TEST_ALERT_1_UUID' } },
        expect.objectContaining({
          [ALERT_INSTANCE_ID]: 'TEST_ALERT_1',
          [ALERT_WORKFLOW_STATUS]: 'open',
          [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
          [EVENT_ACTION]: 'active',
          [EVENT_KIND]: 'signal',
          [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds,
        }),
      ],
    })
  );
  expect((await ruleDataClientMock.getWriter()).bulk).not.toHaveBeenCalledWith(
    expect.objectContaining({
      body: expect.arrayContaining([
        // evaluation documents
        { index: {} },
        expect.objectContaining({
          [EVENT_KIND]: 'event',
        }),
      ]),
    })
  );
});
// Recovered alerts: the recovered alert's document is closed (event.action
// 'close', status recovered) and still carries the maintenance window IDs;
// the still-active alert is re-indexed as 'active'.
it('updates document with maintenance window ids for recovered alerts', async () => {
  const logger = loggerMock.create();
  const ruleDataClientMock = createRuleDataClientMock();
  // Seed the reader with previously written alert documents for both alerts.
  ruleDataClientMock.getReader().search.mockResolvedValue({
    hits: {
      hits: [
        {
          _source: {
            '@timestamp': '',
            [ALERT_INSTANCE_ID]: 'TEST_ALERT_0',
            [ALERT_UUID]: 'ALERT_0_UUID',
            [ALERT_RULE_CATEGORY]: 'RULE_TYPE_NAME',
            [ALERT_RULE_CONSUMER]: 'CONSUMER',
            [ALERT_RULE_NAME]: 'NAME',
            [ALERT_RULE_PRODUCER]: 'PRODUCER',
            [ALERT_RULE_TYPE_ID]: 'RULE_TYPE_ID',
            [ALERT_RULE_UUID]: 'RULE_UUID',
            [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
            [SPACE_IDS]: ['fake-space-id'],
            labels: { LABEL_0_KEY: 'LABEL_0_VALUE' }, // this must show up in the written doc
            [TAGS]: ['source-tag1', 'source-tag2'],
          },
        },
        {
          _source: {
            '@timestamp': '',
            [ALERT_INSTANCE_ID]: 'TEST_ALERT_1',
            [ALERT_UUID]: 'ALERT_1_UUID',
            [ALERT_RULE_CATEGORY]: 'RULE_TYPE_NAME',
            [ALERT_RULE_CONSUMER]: 'CONSUMER',
            [ALERT_RULE_NAME]: 'NAME',
            [ALERT_RULE_PRODUCER]: 'PRODUCER',
            [ALERT_RULE_TYPE_ID]: 'RULE_TYPE_ID',
            [ALERT_RULE_UUID]: 'RULE_UUID',
            [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
            [SPACE_IDS]: ['fake-space-id'],
            labels: { LABEL_0_KEY: 'LABEL_0_VALUE' }, // this must not show up in the written doc
            [TAGS]: ['source-tag3', 'source-tag4'],
          },
        },
      ],
    },
  } as any);
  const executor = createLifecycleExecutor(
    logger,
    ruleDataClientMock
  )<{}, TestRuleState, never, never, never>(async ({ services, state }) => {
    // TEST_ALERT_0 has recovered
    services.alertWithLifecycle({
      id: 'TEST_ALERT_1',
      fields: {},
    });
    return { state };
  });
  await executor(
    createDefaultAlertExecutorOptions({
      alertId: 'TEST_ALERT_0',
      params: {},
      state: {
        wrapped: initialRuleState,
        // Both alerts were tracked; only TEST_ALERT_1 fires again.
        trackedAlerts: {
          TEST_ALERT_0: {
            alertId: 'TEST_ALERT_0',
            alertUuid: 'TEST_ALERT_0_UUID',
            started: '2020-01-01T12:00:00.000Z',
            flappingHistory: [],
            flapping: false,
            pendingRecoveredCount: 0,
          },
          TEST_ALERT_1: {
            alertId: 'TEST_ALERT_1',
            alertUuid: 'TEST_ALERT_1_UUID',
            started: '2020-01-02T12:00:00.000Z',
            flappingHistory: [],
            flapping: false,
            pendingRecoveredCount: 0,
          },
        },
        trackedAlertsRecovered: {},
      },
      logger,
      maintenanceWindowIds,
    })
  );
  expect((await ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledWith(
    expect.objectContaining({
      body: expect.arrayContaining([
        // alert document
        { index: { _id: 'TEST_ALERT_0_UUID' } },
        expect.objectContaining({
          [ALERT_INSTANCE_ID]: 'TEST_ALERT_0',
          [ALERT_STATUS]: ALERT_STATUS_RECOVERED,
          labels: { LABEL_0_KEY: 'LABEL_0_VALUE' },
          [TAGS]: ['source-tag1', 'source-tag2', 'rule-tag1', 'rule-tag2'],
          [EVENT_ACTION]: 'close',
          [EVENT_KIND]: 'signal',
          [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds,
        }),
        { index: { _id: 'TEST_ALERT_1_UUID' } },
        expect.objectContaining({
          [ALERT_INSTANCE_ID]: 'TEST_ALERT_1',
          [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
          [EVENT_ACTION]: 'active',
          [EVENT_KIND]: 'signal',
          [TAGS]: ['source-tag3', 'source-tag4', 'rule-tag1', 'rule-tag2'],
          [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds,
        }),
      ]),
    })
  );
  expect((await ruleDataClientMock.getWriter()).bulk).not.toHaveBeenCalledWith(
    expect.objectContaining({
      body: expect.arrayContaining([
        // evaluation documents
        { index: {} },
        expect.objectContaining({
          [EVENT_KIND]: 'event',
        }),
      ]),
    })
  );
});
});
describe('set flapping on the document', () => {
const flapping = new Array(16).fill(false).concat([true, true, true, true]);
const notFlapping = new Array(20).fill(false);

View file

@ -43,6 +43,7 @@ import {
TIMESTAMP,
VERSION,
ALERT_FLAPPING,
ALERT_MAINTENANCE_WINDOW_IDS,
} from '../../common/technical_rule_data_field_names';
import { CommonAlertFieldNameLatest, CommonAlertIdFieldNameLatest } from '../../common/schemas';
import { IRuleDataClient } from '../rule_data_client';
@ -131,6 +132,7 @@ export const createLifecycleExecutor =
services: { alertFactory, shouldWriteAlerts },
state: previousState,
flappingSettings,
maintenanceWindowIds,
rule,
} = options;
@ -299,6 +301,9 @@ export const createLifecycleExecutor =
[VERSION]: ruleDataClient.kibanaVersion,
[ALERT_FLAPPING]: flapping,
...(isRecovered ? { [ALERT_END]: commonRuleFields[TIMESTAMP] } : {}),
...(maintenanceWindowIds?.length
? { [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds }
: {}),
};
return {

View file

@ -39,6 +39,7 @@ export const createDefaultAlertExecutorOptions = <
startedAt = new Date(),
updatedAt = new Date(),
shouldWriteAlerts = true,
maintenanceWindowIds,
}: {
alertId?: string;
ruleName?: string;
@ -49,6 +50,7 @@ export const createDefaultAlertExecutorOptions = <
startedAt?: Date;
updatedAt?: Date;
shouldWriteAlerts?: boolean;
maintenanceWindowIds?: string[];
}): RuleExecutorOptions<Params, State, InstanceState, InstanceContext, ActionGroupIds> => ({
startedAt,
rule: {
@ -92,4 +94,5 @@ export const createDefaultAlertExecutorOptions = <
executionId: 'b33f65d7-6e8b-4aae-8d20-c93613deb33f',
logger,
flappingSettings: DEFAULT_FLAPPING_SETTINGS,
...(maintenanceWindowIds ? { maintenanceWindowIds } : {}),
});

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import moment from 'moment';
import expect from '@kbn/expect';
import { IValidatedEvent, nanosToMillis } from '@kbn/event-log-plugin/server';
import { RuleNotifyWhen } from '@kbn/alerting-plugin/common';
@ -1180,6 +1181,128 @@ export default function eventLogTests({ getService }: FtrProviderContext) {
}
}
});
it('should generate expected events affected by active maintenance windows', async () => {
  // Creates a maintenance window through the internal alerting API and
  // registers it with the object remover so the test cleans up after itself.
  // Whether the window is currently "active" is controlled solely by the
  // recurrence start time (`dtstart`).
  const createMaintenanceWindow = async (title: string, dtstart: string) => {
    const { body: window } = await supertest
      .post(`${getUrlPrefix(space.id)}/internal/alerting/rules/maintenance_window`)
      .set('kbn-xsrf', 'foo')
      .send({
        title,
        duration: 60 * 60 * 1000, // 1 hr
        r_rule: {
          dtstart,
          tzid: 'UTC',
          freq: 0, // yearly
          count: 1,
        },
      })
      .expect(200);
    objectRemover.add(space.id, window.id, 'rules/maintenance_window', 'alerting', true);
    return window;
  };

  // Create 2 active maintenance windows (starting now) ...
  const window1 = await createMaintenanceWindow(
    'test-maintenance-window-1',
    moment.utc().toISOString()
  );
  const window2 = await createMaintenanceWindow(
    'test-maintenance-window-2',
    moment.utc().toISOString()
  );
  // ... and 1 inactive maintenance window (starting tomorrow); its id must
  // NOT appear on any alert event below.
  await createMaintenanceWindow(
    'test-maintenance-window-3',
    moment.utc().add(1, 'day').toISOString()
  );

  const { body: createdAction } = await supertest
    .post(`${getUrlPrefix(space.id)}/api/actions/connector`)
    .set('kbn-xsrf', 'foo')
    .send({
      name: 'MY action',
      connector_type_id: 'test.noop',
      config: {},
      secrets: {},
    })
    .expect(200);
  // Register the connector for cleanup as well; previously it was leaked.
  objectRemover.add(space.id, createdAction.id, 'connector', 'actions');

  // pattern of when the alert should fire: new on the 2nd run, active on the
  // 3rd, recovered once the pattern is exhausted
  const pattern = {
    instance: [false, true, true],
  };

  const response = await supertest
    .post(`${getUrlPrefix(space.id)}/api/alerting/rule`)
    .set('kbn-xsrf', 'foo')
    .send(
      getTestRuleData({
        rule_type_id: 'test.patternFiring',
        schedule: { interval: '1s' },
        throttle: null,
        params: {
          pattern,
        },
        actions: [
          {
            id: createdAction.id,
            group: 'default',
            params: {},
          },
        ],
      })
    );
  expect(response.status).to.eql(200);
  const alertId = response.body.id;
  objectRemover.add(space.id, alertId, 'rule', 'alerting');

  // Wait until the event log contains the expected events for this rule.
  const events = await retry.try(async () => {
    return await getEventLog({
      getService,
      spaceId: space.id,
      type: 'alert',
      id: alertId,
      provider: 'alerting',
      actions: new Map([
        // make sure the counts of the # of events per type are as expected
        ['execute-start', { gte: 4 }],
        ['execute', { gte: 4 }],
        ['new-instance', { equal: 1 }],
        ['active-instance', { gte: 1 }],
        ['recovered-instance', { equal: 1 }],
      ]),
    });
  });

  // Every alert-level event must carry exactly the two ACTIVE maintenance
  // window ids; the inactive third window must not be included.
  const actionsToCheck = ['new-instance', 'active-instance', 'recovered-instance'];
  events.forEach((event) => {
    if (actionsToCheck.includes(event?.event?.action || '')) {
      const alertMaintenanceWindowIds = event?.kibana?.alert?.maintenance_window_ids?.sort();
      expect(alertMaintenanceWindowIds).eql([window1.id, window2.id].sort());
    }
  });
});
});
}
});

View file

@ -15,6 +15,7 @@ import {
AlertConsumers,
ALERT_REASON,
ALERT_INSTANCE_ID,
ALERT_MAINTENANCE_WINDOW_IDS,
} from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
import {
createLifecycleExecutor,
@ -380,5 +381,141 @@ export default function createGetSummarizedAlertsTest({ getService }: FtrProvide
expect(summarizedAlertsExcludingId2.recovered.count).to.eql(0);
expect(get(summarizedAlertsExcludingId2.new.data[0], ALERT_INSTANCE_ID)).to.eql(id1);
});
it('should return new, ongoing, and recovered alerts if there are active maintenance windows', async () => {
  // Alert/rule id used throughout this scenario, plus the maintenance window
  // ids we expect to be stamped on every alert document that gets written.
  const hostId = 'host-01';
  const maintenanceWindowIds = ['test-id-1', 'test-id-2'];

  // Wrap the solution's rule executor with the RuleRegistry lifecycle machinery.
  const createLifecycleRuleExecutor = createLifecycleExecutor(logger, ruleDataClient);
  const createGetSummarizedAlerts = createGetSummarizedAlertsFn({
    ruleDataClient,
    useNamespace: false,
    isLifecycleAlert: true,
  });

  // Executor handed to the Alerting framework: reports one alert whenever the
  // wrapped state says it should, and echoes that flag back into state.
  const executor = createLifecycleRuleExecutor<
    MockRuleParams,
    { shouldTriggerAlert: boolean },
    MockAlertState,
    MockAlertContext,
    MockAllowedActionGroups
  >(async function (options) {
    const { services, state: previousState } = options;
    const { alertWithLifecycle } = services;
    const triggerAlert = previousState.shouldTriggerAlert;
    if (triggerAlert) {
      alertWithLifecycle({
        id: hostId,
        fields: {
          [ALERT_REASON]: 'Test alert is firing',
        },
      });
    }
    return Promise.resolve({ state: { shouldTriggerAlert: triggerAlert } });
  });

  const getSummarizedAlerts = createGetSummarizedAlerts();

  // Minimal executor options; maintenanceWindowIds is set here, and the
  // lifecycle executor is expected to propagate it onto each alert.
  const options = {
    spaceId: 'default',
    rule: {
      id: hostId,
      name: 'test rule',
      ruleTypeId: 'observability.test.fake',
      ruleTypeName: 'test',
      consumer: 'observability',
      producer: 'observability.test',
    },
    services: {
      alertFactory: getMockAlertFactory(),
      shouldWriteAlerts: sinon.stub().returns(true),
    },
    flappingSettings: DEFAULT_FLAPPING_SETTINGS,
    maintenanceWindowIds,
  } as unknown as RuleExecutorOptions<
    MockRuleParams,
    WrappedLifecycleRuleState<{ shouldTriggerAlert: boolean }>,
    { [x: string]: unknown },
    { [x: string]: unknown },
    string
  >;

  const getState = (
    shouldTriggerAlert: boolean,
    alerts: Record<string, TrackedLifecycleAlertState>
  ) => ({ wrapped: { shouldTriggerAlert }, trackedAlerts: alerts, trackedAlertsRecovered: {} });

  // Run one rule execution with the given firing flag and tracked alerts.
  const runExecution = (
    shouldTriggerAlert: boolean,
    trackedAlerts: Record<string, TrackedLifecycleAlertState>,
    executionId: string
  ) =>
    executor({
      ...options,
      startedAt: new Date(),
      state: getState(shouldTriggerAlert, trackedAlerts),
      executionId,
    });

  // Fetch the summarized alerts written during a single execution.
  const fetchSummary = (executionUuid: string) =>
    getSummarizedAlerts({
      ruleId: hostId,
      executionUuid,
      spaceId: 'default',
      excludedAlertInstanceIds: [],
    });

  // Execution 1 — creates a brand new alert.
  const execution1Uuid = uuidv4();
  const execution1Result = await runExecution(true, {}, execution1Uuid);
  const summary1 = await fetchSummary(execution1Uuid);
  expect(summary1.new.count).to.eql(1);
  expect(summary1.ongoing.count).to.eql(0);
  expect(summary1.recovered.count).to.eql(0);
  expect(get(summary1.new.data[0], ALERT_MAINTENANCE_WINDOW_IDS)).to.eql(maintenanceWindowIds);

  // Execution 2 — the same alert keeps firing, so it is now ongoing.
  const execution2Uuid = uuidv4();
  const execution2Result = await runExecution(
    true,
    execution1Result.state.trackedAlerts,
    execution2Uuid
  );
  const summary2 = await fetchSummary(execution2Uuid);
  expect(summary2.new.count).to.eql(0);
  expect(summary2.ongoing.count).to.eql(1);
  expect(summary2.recovered.count).to.eql(0);
  expect(get(summary2.ongoing.data[0], ALERT_MAINTENANCE_WINDOW_IDS)).to.eql(maintenanceWindowIds);

  // Execution 3 — the alert stops firing and recovers.
  const execution3Uuid = uuidv4();
  await runExecution(false, execution2Result.state.trackedAlerts, execution3Uuid);
  const summary3 = await fetchSummary(execution3Uuid);
  expect(summary3.new.count).to.eql(0);
  expect(summary3.ongoing.count).to.eql(0);
  expect(summary3.recovered.count).to.eql(1);
  expect(get(summary3.recovered.data[0], ALERT_MAINTENANCE_WINDOW_IDS)).to.eql(
    maintenanceWindowIds
  );
});
});
}