[Response Ops][Alerting] Removing lifecycle executor from rule registry (#192576)

## Summary

All lifecycle rule types have been migrated to use the alerting
framework alerts client so the lifecycle executor in the rule registry
can be removed since it is no longer in use.

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
Ying Mao 2024-10-31 12:30:47 -04:00 committed by GitHub
parent 68c209f96d
commit 322392fb28
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
25 changed files with 74 additions and 4389 deletions

View file

@ -6,14 +6,7 @@
*/
import rison from '@kbn/rison';
import {
AlertInstanceContext as AlertContext,
AlertInstanceState as AlertState,
} from '@kbn/alerting-plugin/server';
import { RuleExecutorServicesMock, alertsMock } from '@kbn/alerting-plugin/server/mocks';
import { LifecycleAlertServices } from '@kbn/rule-registry-plugin/server';
import { ruleRegistryMocks } from '@kbn/rule-registry-plugin/server/mocks';
import { createLifecycleRuleExecutorMock } from '@kbn/rule-registry-plugin/server/utils/create_lifecycle_rule_executor_mock';
import { COMPARATORS } from '@kbn/alerting-comparators';
import { Aggregators, InventoryMetricConditions } from '../../../../common/alerting/metrics';
import type { LogMeta, Logger } from '@kbn/logging';
@ -150,9 +143,7 @@ const mockLibs = {
infraPluginMock.createStartContract(),
],
configuration: createMockStaticConfiguration({}),
metricsRules: {
createLifecycleRuleExecutor: createLifecycleRuleExecutorMock,
},
metricsRules: {},
basePath: {
publicBaseUrl: 'http://localhost:5601',
prepend: (path: string) => path,
@ -165,14 +156,10 @@ const mockLibs = {
logger,
} as unknown as InfraBackendLibs;
const alerts = new Map<string, AlertTestInstance>();
let services: RuleExecutorServicesMock & LifecycleAlertServices<AlertState, AlertContext, string>;
let services: RuleExecutorServicesMock;
const setup = () => {
const alertsServices = alertsMock.createRuleExecutorServices();
services = {
...alertsServices,
...ruleRegistryMocks.createLifecycleAlertServices(alertsServices),
};
services = alertsMock.createRuleExecutorServices();
services.alertsClient.report.mockImplementation((params: any) => {
alerts.set(params.id, { actionGroup: params.actionGroup, context: [], payload: [] });

View file

@ -6,7 +6,6 @@
*/
import { CoreSetup, Logger } from '@kbn/core/server';
import { createLifecycleExecutor } from '@kbn/rule-registry-plugin/server';
import { InfraFeatureId } from '../../../common/constants';
import { createRuleDataClient } from './rule_data_client';
import {
@ -36,12 +35,7 @@ export class RulesService {
ruleDataService: setupDeps.ruleRegistry.ruleDataService,
});
const createLifecycleRuleExecutor = createLifecycleExecutor(this.logger, ruleDataClient);
return {
createLifecycleRuleExecutor,
ruleDataClient,
};
return { ruleDataClient };
}
public start(_startDeps: RulesServiceStartDeps): RulesServiceStart {

View file

@ -6,13 +6,7 @@
*/
import { PluginSetupContract as AlertingPluginSetup } from '@kbn/alerting-plugin/server';
import {
createLifecycleExecutor,
IRuleDataClient,
RuleRegistryPluginSetupContract,
} from '@kbn/rule-registry-plugin/server';
type LifecycleRuleExecutorCreator = ReturnType<typeof createLifecycleExecutor>;
import { IRuleDataClient, RuleRegistryPluginSetupContract } from '@kbn/rule-registry-plugin/server';
export interface RulesServiceSetupDeps {
alerting: AlertingPluginSetup;
ruleRegistry: RuleRegistryPluginSetupContract;
@ -22,7 +16,6 @@ export interface RulesServiceSetupDeps {
export interface RulesServiceStartDeps {}
export interface RulesServiceSetup {
createLifecycleRuleExecutor: LifecycleRuleExecutorCreator;
ruleDataClient: IRuleDataClient;
}

View file

@ -25,13 +25,6 @@ export * from './rule_data_plugin_service';
export * from './rule_data_client';
export * from './alert_data_client/audit_events';
export { createLifecycleRuleTypeFactory } from './utils/create_lifecycle_rule_type_factory';
export type {
LifecycleRuleExecutor,
LifecycleAlertService,
LifecycleAlertServices,
} from './utils/create_lifecycle_executor';
export { createLifecycleExecutor } from './utils/create_lifecycle_executor';
export { createPersistenceRuleTypeWrapper } from './utils/create_persistence_rule_type_wrapper';
export * from './utils/persistence_types';
export type { AlertsClient } from './alert_data_client/alerts_client';

View file

@ -11,10 +11,8 @@ import {
ruleDataServiceMock,
RuleDataServiceMock,
} from './rule_data_plugin_service/rule_data_plugin_service.mock';
import { createLifecycleAlertServicesMock } from './utils/lifecycle_alert_services.mock';
export const ruleRegistryMocks = {
createLifecycleAlertServices: createLifecycleAlertServicesMock,
createRuleDataService: ruleDataServiceMock.create,
createRuleDataClient: createRuleDataClientMock,
createAlertsClientMock: alertsClientMock,

View file

@ -29,7 +29,6 @@ import type {
PluginSetup as DataPluginSetup,
} from '@kbn/data-plugin/server';
import { createLifecycleRuleTypeFactory } from './utils/create_lifecycle_rule_type_factory';
import type { RuleRegistryPluginConfig } from './config';
import { type IRuleDataService, RuleDataService, Dataset } from './rule_data_plugin_service';
import { AlertsClientFactory } from './alert_data_client/alerts_client_factory';
@ -52,7 +51,6 @@ export interface RuleRegistryPluginStartDependencies {
export interface RuleRegistryPluginSetupContract {
ruleDataService: IRuleDataService;
createLifecycleRuleTypeFactory: typeof createLifecycleRuleTypeFactory;
dataset: typeof Dataset;
}
@ -153,7 +151,6 @@ export class RuleRegistryPlugin
return {
ruleDataService: this.ruleDataService,
createLifecycleRuleTypeFactory,
dataset: Dataset,
};
}

View file

@ -1,479 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { Logger } from '@kbn/logging';
import type { PublicContract } from '@kbn/utility-types';
import { getOrElse } from 'fp-ts/lib/Either';
import { v4 } from 'uuid';
import { difference } from 'lodash';
import {
RuleExecutorOptions,
Alert,
AlertInstanceContext,
AlertInstanceState,
RuleTypeParams,
RuleTypeState,
isValidAlertIndexName,
} from '@kbn/alerting-plugin/server';
import { isFlapping } from '@kbn/alerting-plugin/server/lib';
import { wrappedStateRt, WrappedLifecycleRuleState } from '@kbn/alerting-state-types';
export type {
TrackedLifecycleAlertState,
WrappedLifecycleRuleState,
} from '@kbn/alerting-state-types';
import { ParsedExperimentalFields } from '../../common/parse_experimental_fields';
import { ParsedTechnicalFields } from '../../common/parse_technical_fields';
import {
ALERT_TIME_RANGE,
ALERT_DURATION,
ALERT_END,
ALERT_INSTANCE_ID,
ALERT_START,
ALERT_STATUS,
ALERT_STATUS_ACTIVE,
ALERT_STATUS_RECOVERED,
ALERT_UUID,
ALERT_WORKFLOW_STATUS,
EVENT_ACTION,
EVENT_KIND,
TAGS,
TIMESTAMP,
VERSION,
ALERT_FLAPPING,
ALERT_MAINTENANCE_WINDOW_IDS,
} from '../../common/technical_rule_data_field_names';
import { CommonAlertFieldNameLatest, CommonAlertIdFieldNameLatest } from '../../common/schemas';
import { IRuleDataClient } from '../rule_data_client';
import { AlertExecutorOptionsWithExtraServices } from '../types';
import { fetchExistingAlerts } from './fetch_existing_alerts';
import { getCommonAlertFields } from './get_common_alert_fields';
import { getUpdatedFlappingHistory } from './get_updated_flapping_history';
import { fetchAlertByAlertUUID } from './fetch_alert_by_uuid';
import { getAlertsForNotification } from './get_alerts_for_notification';
// Field names that the lifecycle executor populates itself (common rule fields
// plus alert-id fields); rule executors must not set these explicitly.
type ImplicitTechnicalFieldName = CommonAlertFieldNameLatest | CommonAlertIdFieldNameLatest;

// Technical fields an executor is allowed to set: all parsed technical fields
// except the implicit ones above.
type ExplicitTechnicalAlertFields = Partial<
  Omit<ParsedTechnicalFields, ImplicitTechnicalFieldName>
>;

type ExplicitAlertFields = Record<string, unknown> & // every field can have values of arbitrary types
  ExplicitTechnicalAlertFields; // but technical fields must obey their respective type

/**
 * Reports an alert (by id, with any explicit fields) to the lifecycle
 * executor and returns the framework `Alert` created for it.
 */
export type LifecycleAlertService<
  InstanceState extends AlertInstanceState = never,
  InstanceContext extends AlertInstanceContext = never,
  ActionGroupIds extends string = never
> = (alert: {
  id: string;
  fields: ExplicitAlertFields;
}) => Alert<InstanceState, InstanceContext, ActionGroupIds>;

/**
 * Extra services mixed into the rule executor services by the lifecycle
 * executor: alert reporting plus lookups against the tracked alert state
 * and the alerts-as-data indices.
 */
export interface LifecycleAlertServices<
  InstanceState extends AlertInstanceState = never,
  InstanceContext extends AlertInstanceContext = never,
  ActionGroupIds extends string = never
> {
  alertWithLifecycle: LifecycleAlertService<InstanceState, InstanceContext, ActionGroupIds>;
  // Returns the tracked start date for the alert instance, or null when untracked.
  getAlertStartedDate: (alertInstanceId: string) => string | null;
  getAlertUuid: (alertInstanceId: string) => string;
  // Fetches the stored alert document by UUID; resolves to null on fetch failure.
  getAlertByAlertUuid: (
    alertUuid: string
  ) => Promise<Partial<ParsedTechnicalFields & ParsedExperimentalFields> | null> | null;
}

/**
 * Shape of an executor that can be wrapped by `createLifecycleExecutor`:
 * it receives the standard executor options extended with
 * `LifecycleAlertServices` and returns only its own wrapped state.
 */
export type LifecycleRuleExecutor<
  Params extends RuleTypeParams = never,
  State extends RuleTypeState = never,
  InstanceState extends AlertInstanceState = never,
  InstanceContext extends AlertInstanceContext = never,
  ActionGroupIds extends string = never
> = (
  options: AlertExecutorOptionsWithExtraServices<
    Params,
    State,
    InstanceState,
    InstanceContext,
    ActionGroupIds,
    LifecycleAlertServices<InstanceState, InstanceContext, ActionGroupIds>
  >
) => Promise<{ state: State }>;
/**
 * Factory for a "lifecycle" rule executor. Given a logger and a rule data
 * client it returns a wrapper that takes a plain rule executor and adds alert
 * lifecycle bookkeeping: alerts are tracked across executions in the wrapped
 * rule state, lifecycle fields (status, start/end, duration, flapping,
 * maintenance windows) are computed for each alert, and one alert document per
 * alert is bulk-indexed through the rule data client.
 */
export const createLifecycleExecutor =
  (logger: Logger, ruleDataClient: PublicContract<IRuleDataClient>) =>
  <
    Params extends RuleTypeParams = never,
    State extends RuleTypeState = never,
    InstanceState extends AlertInstanceState = never,
    InstanceContext extends AlertInstanceContext = never,
    ActionGroupIds extends string = never
  >(
    wrappedExecutor: LifecycleRuleExecutor<
      Params,
      State,
      InstanceState,
      InstanceContext,
      ActionGroupIds
    >
  ) =>
  async (
    options: RuleExecutorOptions<
      Params,
      WrappedLifecycleRuleState<State>,
      InstanceState,
      InstanceContext,
      ActionGroupIds
    >
  ): Promise<{ state: WrappedLifecycleRuleState<State> }> => {
    const {
      services: { alertFactory, getMaintenanceWindowIds, shouldWriteAlerts },
      state: previousState,
      flappingSettings,
      rule,
    } = options;

    const ruleDataClientWriter = await ruleDataClient.getWriter();

    // Decode the previous wrapped state; on decode failure fall back to an
    // empty lifecycle state that wraps the raw previous state.
    const state = getOrElse(
      (): WrappedLifecycleRuleState<State> => ({
        wrapped: previousState as State,
        trackedAlerts: {},
        trackedAlertsRecovered: {},
      })
    )(wrappedStateRt<State>().decode(previousState));

    const commonRuleFields = getCommonAlertFields(options);

    // Alerts reported during this execution, keyed by alert instance id, and
    // the UUIDs handed out by the alert factory for those ids.
    const currentAlerts: Record<string, ExplicitAlertFields> = {};
    const alertUuidMap: Map<string, string> = new Map();

    const lifecycleAlertServices: LifecycleAlertServices<
      InstanceState,
      InstanceContext,
      ActionGroupIds
    > = {
      // Record the alert's explicit fields and create the framework alert.
      alertWithLifecycle: ({ id, fields }) => {
        currentAlerts[id] = fields;

        const alert = alertFactory.create(id);
        const uuid = alert.getUuid();
        alertUuidMap.set(id, uuid);

        return alert;
      },
      getAlertStartedDate: (alertId: string) => state.trackedAlerts[alertId]?.started ?? null,
      getAlertUuid: (alertId: string) => {
        // Resolution order: UUID assigned this execution, then tracked
        // (active/recovered) state; last resort is a fresh UUID plus a warning
        // because the alert is not tracked anywhere.
        const uuid = alertUuidMap.get(alertId);
        if (uuid) {
          return uuid;
        }

        const trackedAlert = state.trackedAlerts[alertId];
        if (trackedAlert) {
          return trackedAlert.alertUuid;
        }

        const trackedRecoveredAlert = state.trackedAlertsRecovered[alertId];
        if (trackedRecoveredAlert) {
          return trackedRecoveredAlert.alertUuid;
        }

        const alertInfo = `alert ${alertId} of rule ${rule.ruleTypeId}:${rule.id}`;
        logger.warn(
          `[Rule Registry] requesting uuid for ${alertInfo} which is not tracked, generating dynamically`
        );
        return v4();
      },
      // Best effort: fetch failures are swallowed and reported as null.
      getAlertByAlertUuid: async (alertUuid: string) => {
        try {
          return await fetchAlertByAlertUUID(ruleDataClient, alertUuid);
        } catch (err) {
          return null;
        }
      },
    };

    // Run the wrapped executor with the lifecycle services mixed in; it sees
    // only its own wrapped state, never the lifecycle bookkeeping.
    const wrappedExecutorResult = await wrappedExecutor({
      ...options,
      state: state.wrapped != null ? state.wrapped : ({} as State),
      services: {
        ...options.services,
        ...lifecycleAlertServices,
      },
    });

    const currentAlertIds = Object.keys(currentAlerts);
    const trackedAlertIds = Object.keys(state.trackedAlerts);
    const trackedAlertRecoveredIds = Object.keys(state.trackedAlertsRecovered);
    const newAlertIds = difference(currentAlertIds, trackedAlertIds);
    const allAlertIds = [...new Set(currentAlertIds.concat(trackedAlertIds))];

    const trackedAlertStates = Object.values(state.trackedAlerts);

    logger.debug(
      `[Rule Registry] Tracking ${allAlertIds.length} alerts (${newAlertIds.length} new, ${trackedAlertStates.length} previous)`
    );

    // load maintenance window ids if there are new alerts
    const maintenanceWindowIds: string[] = allAlertIds.length
      ? await getMaintenanceWindowIds()
      : [];

    interface TrackedAlertData {
      indexName: string;
      fields: Partial<ParsedTechnicalFields & ParsedExperimentalFields>;
      seqNo: number | undefined;
      primaryTerm: number | undefined;
    }

    const trackedAlertsDataMap: Record<string, TrackedAlertData> = {};

    // Fetch the stored documents for previously tracked alerts so updates can
    // use optimistic concurrency control (seq_no / primary_term).
    if (trackedAlertStates.length) {
      const result = await fetchExistingAlerts(
        ruleDataClient,
        trackedAlertStates,
        commonRuleFields
      );
      result.forEach((hit) => {
        const alertInstanceId = hit._source ? hit._source[ALERT_INSTANCE_ID] : void 0;
        if (alertInstanceId && hit._source) {
          const alertLabel = `${rule.ruleTypeId}:${rule.id} ${alertInstanceId}`;
          if (hit._seq_no == null) {
            logger.error(`missing _seq_no on alert instance ${alertLabel}`);
          } else if (hit._primary_term == null) {
            logger.error(`missing _primary_term on alert instance ${alertLabel}`);
          } else {
            trackedAlertsDataMap[alertInstanceId] = {
              indexName: hit._index,
              fields: hit._source,
              seqNo: hit._seq_no,
              primaryTerm: hit._primary_term,
            };
          }
        }
      });
    }

    // Build the alert event documents (plus flapping bookkeeping) for a set of
    // alert ids, skipping alerts whose documents live in unsupported
    // (partial/restored) indices.
    const makeEventsDataMapFor = (alertIds: string[]) =>
      alertIds
        .filter((alertId) => {
          const alertData = trackedAlertsDataMap[alertId];
          const alertIndex = alertData?.indexName;
          if (!alertIndex) {
            return true;
          } else if (!isValidAlertIndexName(alertIndex)) {
            logger.warn(
              `Could not update alert ${alertId} in ${alertIndex}. Partial and restored alert indices are not supported.`
            );
            return false;
          }
          return true;
        })
        .map((alertId) => {
          const alertData = trackedAlertsDataMap[alertId];
          const currentAlertData = currentAlerts[alertId];
          const trackedAlert = state.trackedAlerts[alertId];

          if (!alertData) {
            logger.debug(`[Rule Registry] Could not find alert data for ${alertId}`);
          }

          // new: not previously tracked; recovered: not reported this run.
          const isNew = !trackedAlert;
          const isRecovered = !currentAlertData;
          const isActive = !isRecovered;

          const flappingHistory = getUpdatedFlappingHistory<State>(
            flappingSettings,
            alertId,
            state,
            isNew,
            isRecovered,
            isActive,
            trackedAlertRecoveredIds
          );

          // Carry lifecycle metadata forward from tracked state, or seed it
          // for a brand new alert (inheriting flapping from a previously
          // recovered alert with the same id, if any).
          const { alertUuid, started, flapping, pendingRecoveredCount, activeCount } = !isNew
            ? state.trackedAlerts[alertId]
            : {
                alertUuid: lifecycleAlertServices.getAlertUuid(alertId),
                started: commonRuleFields[TIMESTAMP],
                flapping: state.trackedAlertsRecovered[alertId]
                  ? state.trackedAlertsRecovered[alertId].flapping
                  : false,
                pendingRecoveredCount: 0,
                activeCount: 0,
              };

          const event: ParsedTechnicalFields & ParsedExperimentalFields = {
            ...alertData?.fields,
            ...commonRuleFields,
            ...currentAlertData,
            [ALERT_DURATION]: (options.startedAt.getTime() - new Date(started).getTime()) * 1000,
            [ALERT_TIME_RANGE]: isRecovered
              ? {
                  gte: started,
                  lte: commonRuleFields[TIMESTAMP],
                }
              : { gte: started },
            [ALERT_INSTANCE_ID]: alertId,
            [ALERT_START]: started,
            [ALERT_UUID]: alertUuid,
            [ALERT_STATUS]: isRecovered ? ALERT_STATUS_RECOVERED : ALERT_STATUS_ACTIVE,
            [ALERT_WORKFLOW_STATUS]: alertData?.fields[ALERT_WORKFLOW_STATUS] ?? 'open',
            [EVENT_KIND]: 'signal',
            [EVENT_ACTION]: isNew ? 'open' : isActive ? 'active' : 'close',
            // Union of executor-provided, previously stored and rule tags.
            [TAGS]: Array.from(
              new Set([
                ...(currentAlertData?.tags ?? []),
                ...(alertData?.fields[TAGS] ?? []),
                ...(options.rule.tags ?? []),
              ])
            ),
            [VERSION]: ruleDataClient.kibanaVersion,
            [ALERT_FLAPPING]: flapping,
            ...(isRecovered ? { [ALERT_END]: commonRuleFields[TIMESTAMP] } : {}),
            ...(isNew && maintenanceWindowIds?.length
              ? { [ALERT_MAINTENANCE_WINDOW_IDS]: maintenanceWindowIds }
              : {}),
          };

          return {
            indexName: alertData?.indexName,
            seqNo: alertData?.seqNo,
            primaryTerm: alertData?.primaryTerm,
            event,
            flappingHistory,
            flapping,
            pendingRecoveredCount,
            activeCount,
          };
        });

    const trackedEventsToIndex = makeEventsDataMapFor(trackedAlertIds);
    const newEventsToIndex = makeEventsDataMapFor(newAlertIds);
    const trackedRecoveredEventsToIndex = makeEventsDataMapFor(trackedAlertRecoveredIds);
    const allEventsToIndex = getAlertsForNotification(
      flappingSettings,
      rule.alertDelay?.active ?? 0,
      trackedEventsToIndex,
      newEventsToIndex,
      { maintenanceWindowIds, timestamp: commonRuleFields[TIMESTAMP] }
    );

    // Only write alerts if:
    // - writing is enabled
    //   AND
    // - rule execution has not been cancelled due to timeout
    //   OR
    // - if execution has been cancelled due to timeout, if feature flags are configured to write alerts anyway
    const writeAlerts = ruleDataClient.isWriteEnabled() && shouldWriteAlerts();

    if (allEventsToIndex.length > 0 && writeAlerts) {
      logger.debug(`[Rule Registry] Preparing to index ${allEventsToIndex.length} alerts.`);

      await ruleDataClientWriter.bulk({
        body: allEventsToIndex.flatMap(({ event, indexName, seqNo, primaryTerm }) => [
          // Existing documents are updated in place guarded by seq_no /
          // primary_term; brand new alerts are created by UUID.
          indexName
            ? {
                index: {
                  _id: event[ALERT_UUID]!,
                  _index: indexName,
                  if_seq_no: seqNo,
                  if_primary_term: primaryTerm,
                  require_alias: false,
                },
              }
            : {
                create: {
                  _id: event[ALERT_UUID]!,
                },
              },
          event,
        ]),
        refresh: true,
      });
    } else {
      logger.debug(
        `[Rule Registry] Not indexing ${allEventsToIndex.length} alerts because writing has been disabled.`
      );
    }

    // Alerts still active after this execution are carried into the next
    // execution's tracked state.
    const nextTrackedAlerts = Object.fromEntries(
      [...newEventsToIndex, ...trackedEventsToIndex]
        .filter(({ event }) => event[ALERT_STATUS] !== ALERT_STATUS_RECOVERED)
        .map(
          ({
            event,
            flappingHistory,
            flapping: isCurrentlyFlapping,
            pendingRecoveredCount,
            activeCount,
          }) => {
            const alertId = event[ALERT_INSTANCE_ID]!;
            const alertUuid = event[ALERT_UUID]!;
            const started = new Date(event[ALERT_START]!).toISOString();
            const flapping = isFlapping(flappingSettings, flappingHistory, isCurrentlyFlapping);
            return [
              alertId,
              {
                alertId,
                alertUuid,
                started,
                flappingHistory,
                flapping,
                pendingRecoveredCount,
                activeCount,
              },
            ];
          }
        )
    );

    const nextTrackedAlertsRecovered = Object.fromEntries(
      [...allEventsToIndex, ...trackedRecoveredEventsToIndex]
        .filter(
          ({ event, flappingHistory, flapping }) =>
            // return recovered alerts if they are flapping or if the flapping array is not at capacity
            // this is a space saving effort that will stop tracking a recovered alert if it wasn't flapping and doesn't have state changes
            // in the last max capacity number of executions
            event[ALERT_STATUS] === ALERT_STATUS_RECOVERED &&
            (flapping || flappingHistory.filter((f: boolean) => f).length > 0)
        )
        .map(
          ({
            event,
            flappingHistory,
            flapping: isCurrentlyFlapping,
            pendingRecoveredCount,
            activeCount,
          }) => {
            const alertId = event[ALERT_INSTANCE_ID]!;
            const alertUuid = event[ALERT_UUID]!;
            const started = new Date(event[ALERT_START]!).toISOString();
            const flapping = isFlapping(flappingSettings, flappingHistory, isCurrentlyFlapping);
            return [
              alertId,
              {
                alertId,
                alertUuid,
                started,
                flappingHistory,
                flapping,
                pendingRecoveredCount,
                activeCount,
              },
            ];
          }
        )
    );

    // When writing is disabled the tracked state is dropped so the next
    // execution starts fresh rather than referencing unwritten documents.
    return {
      state: {
        wrapped: wrappedExecutorResult?.state ?? ({} as State),
        trackedAlerts: writeAlerts ? nextTrackedAlerts : {},
        trackedAlertsRecovered: writeAlerts ? nextTrackedAlertsRecovered : {},
      },
    };
  };

View file

@ -1,38 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
RuleTypeParams,
RuleTypeState,
AlertInstanceState,
AlertInstanceContext,
} from '@kbn/alerting-plugin/server';
import { AlertExecutorOptionsWithExtraServices } from '../types';
import { LifecycleAlertServices, LifecycleRuleExecutor } from './create_lifecycle_executor';
/**
 * Test double for `createLifecycleExecutor`'s wrapper: accepts a lifecycle
 * rule executor and returns an async function that simply forwards the
 * executor options straight through, performing none of the lifecycle
 * bookkeeping of the real executor.
 */
export const createLifecycleRuleExecutorMock = <
  Params extends RuleTypeParams = never,
  State extends RuleTypeState = never,
  InstanceState extends AlertInstanceState = never,
  InstanceContext extends AlertInstanceContext = never,
  ActionGroupIds extends string = never
>(
  executor: LifecycleRuleExecutor<Params, State, InstanceState, InstanceContext, ActionGroupIds>
) => {
  // The returned function has the same shape the real lifecycle executor
  // produces, but delegates directly to the wrapped executor.
  return async (
    options: AlertExecutorOptionsWithExtraServices<
      Params,
      State,
      InstanceState,
      InstanceContext,
      ActionGroupIds,
      LifecycleAlertServices<InstanceState, InstanceContext, ActionGroupIds>
    >
  ) => executor(options);
};

View file

@ -1,512 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { schema } from '@kbn/config-schema';
import {
ALERT_DURATION,
ALERT_STATUS,
ALERT_STATUS_ACTIVE,
ALERT_STATUS_RECOVERED,
ALERT_UUID,
ALERT_TIME_RANGE,
} from '@kbn/rule-data-utils';
import { loggerMock } from '@kbn/logging-mocks';
import { castArray, omit } from 'lodash';
import { createRuleDataClientMock } from '../rule_data_client/rule_data_client.mock';
import { createLifecycleRuleTypeFactory } from './create_lifecycle_rule_type_factory';
import { ISearchStartSearchSource } from '@kbn/data-plugin/common';
import { SharePluginStart } from '@kbn/share-plugin/server';
import { dataViewPluginMocks } from '@kbn/data-views-plugin/public/mocks';
import { DEFAULT_FLAPPING_SETTINGS } from '@kbn/alerting-plugin/common/rules_settings';
type RuleTestHelpers = ReturnType<typeof createRule>;

/**
 * Builds a lifecycle rule type through the factory under test together with a
 * harness for driving executions: `alertWithLifecycle(alerts)` runs one
 * execution that reports the given alerts, threads `state` and
 * `previousStartedAt` across calls, and advances the clock by one minute per
 * execution.
 *
 * @param shouldWriteAlerts value the mocked services.shouldWriteAlerts() returns
 */
function createRule(shouldWriteAlerts: boolean = true) {
  const ruleDataClientMock = createRuleDataClientMock();

  const factory = createLifecycleRuleTypeFactory({
    ruleDataClient: ruleDataClientMock,
    logger: loggerMock.create(),
  });

  // Alerts the next execution's executor will report; cleared after each run.
  let nextAlerts: Array<{ id: string; fields: Record<string, any> }> = [];

  const type = factory({
    actionGroups: [
      {
        id: 'warning',
        name: 'warning',
      },
    ],
    actionVariables: {
      context: [],
      params: [],
      state: [],
    },
    defaultActionGroupId: 'warning',
    // Report whatever alerts the harness queued, then clear the queue.
    executor: async ({ services }) => {
      nextAlerts.forEach((alert) => {
        services.alertWithLifecycle(alert);
      });
      nextAlerts = [];
      return { state: {} };
    },
    id: 'ruleTypeId',
    isExportable: true,
    minimumLicenseRequired: 'basic',
    name: 'ruleTypeName',
    category: 'test',
    producer: 'producer',
    validate: {
      params: schema.object(
        {},
        {
          unknowns: 'allow',
        }
      ),
    },
  });

  // Execution state carried between alertWithLifecycle calls.
  let state: Record<string, any> = {};
  let previousStartedAt: Date | null;
  const createdAt = new Date('2021-06-16T09:00:00.000Z');

  const scheduleActions = jest.fn();

  // Deterministic UUIDs keep the snapshot assertions stable.
  let uuidCounter = 1;
  const getUuid = jest.fn(() => `uuid-${uuidCounter++}`);
  const alertFactory = {
    create: () => {
      return {
        scheduleActions,
        getUuid,
      } as any;
    },
    alertLimit: {
      getValue: () => 1000,
      setLimitReached: () => {},
    },
    done: () => ({ getRecoveredAlerts: () => [] }),
  };

  return {
    alertWithLifecycle: async (alerts: Array<{ id: string; fields: Record<string, any> }>) => {
      nextAlerts = alerts;

      // Each execution starts one minute after the previous one.
      const startedAt = new Date((previousStartedAt ?? createdAt).getTime() + 60000);

      scheduleActions.mockClear();

      ({ state } = ((await type.executor({
        executionId: 'b33f65d7-6e8b-4aae-8d20-c93613dec9f9',
        logger: loggerMock.create(),
        namespace: 'namespace',
        params: { threshold: 1, operator: '>' },
        previousStartedAt,
        rule: {
          id: 'alertId',
          actions: [],
          consumer: 'consumer',
          createdAt,
          createdBy: 'createdBy',
          enabled: true,
          muteAll: false,
          name: 'name',
          notifyWhen: 'onActionGroupChange',
          producer: 'producer',
          revision: 0,
          ruleTypeId: 'ruleTypeId',
          ruleTypeName: 'ruleTypeName',
          schedule: {
            interval: '1m',
          },
          snoozeSchedule: [],
          tags: ['tags'],
          throttle: null,
          updatedAt: createdAt,
          updatedBy: 'updatedBy',
        },
        services: {
          alertsClient: null,
          alertFactory,
          savedObjectsClient: {} as any,
          scopedClusterClient: {} as any,
          search: {} as any,
          getMaintenanceWindowIds: async () => [],
          getSearchSourceClient: async () => ({} as ISearchStartSearchSource),
          shouldStopExecution: () => false,
          shouldWriteAlerts: () => shouldWriteAlerts,
          uiSettingsClient: {} as any,
          share: {} as SharePluginStart,
          getDataViews: async () => dataViewPluginMocks.createStartContract(),
        },
        spaceId: 'spaceId',
        startedAt,
        startedAtOverridden: false,
        state,
        flappingSettings: DEFAULT_FLAPPING_SETTINGS,
        getTimeRange: () => {
          const date = new Date(Date.now()).toISOString();
          return { dateStart: date, dateEnd: date };
        },
      })) ?? {}) as Record<string, any>);

      previousStartedAt = startedAt;
    },
    scheduleActions,
    ruleDataClientMock,
  };
}
describe('createLifecycleRuleTypeFactory', () => {
  describe('with a new rule', () => {
    let helpers: RuleTestHelpers;

    beforeEach(() => {
      helpers = createRule();
    });

    describe('when writing is disabled', () => {
      beforeEach(() => {
        helpers.ruleDataClientMock.isWriteEnabled.mockReturnValue(false);
      });

      it("doesn't persist anything", async () => {
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
        ]);

        // No bulk write when writing is disabled on the rule data client.
        expect((await helpers.ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledTimes(0);
      });
    });

    describe('when rule is cancelled due to timeout and config flags indicate to skip actions', () => {
      beforeEach(() => {
        // shouldWriteAlerts() returns false even though writing is enabled.
        helpers = createRule(false);
        helpers.ruleDataClientMock.isWriteEnabled.mockReturnValue(true);
      });

      it("doesn't persist anything", async () => {
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
        ]);

        expect((await helpers.ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledTimes(0);
      });
    });

    describe('when alerts are new', () => {
      beforeEach(async () => {
        // Single execution reporting two previously unseen alerts.
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
          {
            id: 'opbeans-node',
            fields: {
              'service.name': 'opbeans-node',
            },
          },
        ]);
      });

      it('writes the correct alerts', async () => {
        expect((await helpers.ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledTimes(1);

        const body = (await helpers.ruleDataClientMock.getWriter()).bulk.mock.calls[0][0].body!;

        // Strip the bulk operation descriptors, keeping only documents.
        const documents: any[] = body.filter((op: any) => !isOpDoc(op));

        const evaluationDocuments = documents.filter((doc) => doc['event.kind'] === 'event');
        const alertDocuments = documents.filter((doc) => doc['event.kind'] === 'signal');

        expect(evaluationDocuments.length).toBe(0);
        expect(alertDocuments.length).toBe(2);

        expect(
          alertDocuments.every((doc) => doc[ALERT_STATUS] === ALERT_STATUS_ACTIVE)
        ).toBeTruthy();

        expect(alertDocuments.every((doc) => doc[ALERT_DURATION] === 0)).toBeTruthy();

        expect(alertDocuments.every((doc) => doc['event.action'] === 'open')).toBeTruthy();

        expect(documents.map((doc) => omit(doc, ALERT_UUID))).toMatchInlineSnapshot(`
          Array [
            Object {
              "@timestamp": "2021-06-16T09:01:00.000Z",
              "event.action": "open",
              "event.kind": "signal",
              "kibana.alert.consecutive_matches": 1,
              "kibana.alert.duration.us": 0,
              "kibana.alert.flapping": false,
              "kibana.alert.instance.id": "opbeans-java",
              "kibana.alert.rule.category": "ruleTypeName",
              "kibana.alert.rule.consumer": "consumer",
              "kibana.alert.rule.execution.uuid": "b33f65d7-6e8b-4aae-8d20-c93613dec9f9",
              "kibana.alert.rule.name": "name",
              "kibana.alert.rule.parameters": Object {
                "operator": ">",
                "threshold": 1,
              },
              "kibana.alert.rule.producer": "producer",
              "kibana.alert.rule.revision": 0,
              "kibana.alert.rule.rule_type_id": "ruleTypeId",
              "kibana.alert.rule.tags": Array [
                "tags",
              ],
              "kibana.alert.rule.uuid": "alertId",
              "kibana.alert.start": "2021-06-16T09:01:00.000Z",
              "kibana.alert.status": "active",
              "kibana.alert.time_range": Object {
                "gte": "2021-06-16T09:01:00.000Z",
              },
              "kibana.alert.workflow_status": "open",
              "kibana.space_ids": Array [
                "spaceId",
              ],
              "kibana.version": "7.16.0",
              "service.name": "opbeans-java",
              "tags": Array [
                "tags",
              ],
            },
            Object {
              "@timestamp": "2021-06-16T09:01:00.000Z",
              "event.action": "open",
              "event.kind": "signal",
              "kibana.alert.consecutive_matches": 1,
              "kibana.alert.duration.us": 0,
              "kibana.alert.flapping": false,
              "kibana.alert.instance.id": "opbeans-node",
              "kibana.alert.rule.category": "ruleTypeName",
              "kibana.alert.rule.consumer": "consumer",
              "kibana.alert.rule.execution.uuid": "b33f65d7-6e8b-4aae-8d20-c93613dec9f9",
              "kibana.alert.rule.name": "name",
              "kibana.alert.rule.parameters": Object {
                "operator": ">",
                "threshold": 1,
              },
              "kibana.alert.rule.producer": "producer",
              "kibana.alert.rule.revision": 0,
              "kibana.alert.rule.rule_type_id": "ruleTypeId",
              "kibana.alert.rule.tags": Array [
                "tags",
              ],
              "kibana.alert.rule.uuid": "alertId",
              "kibana.alert.start": "2021-06-16T09:01:00.000Z",
              "kibana.alert.status": "active",
              "kibana.alert.time_range": Object {
                "gte": "2021-06-16T09:01:00.000Z",
              },
              "kibana.alert.workflow_status": "open",
              "kibana.space_ids": Array [
                "spaceId",
              ],
              "kibana.version": "7.16.0",
              "service.name": "opbeans-node",
              "tags": Array [
                "tags",
              ],
            },
          ]
        `);
      });
    });

    describe('when alerts are active', () => {
      beforeEach(async () => {
        // First execution opens both alerts.
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
          {
            id: 'opbeans-node',
            fields: {
              'service.name': 'opbeans-node',
            },
          },
        ]);

        // TODO mock the resolved value before calling alertWithLifecycle again
        const lastOpbeansNodeDoc = (
          await helpers.ruleDataClientMock.getWriter()
        ).bulk.mock.calls[0][0].body
          ?.concat()
          .reverse()
          .find((doc: any) => !isOpDoc(doc) && doc['service.name'] === 'opbeans-node') as Record<
          string,
          any
        >;

        // @ts-ignore 4.3.5 upgrade
        helpers.ruleDataClientMock.getReader().search.mockResolvedValueOnce({
          hits: {
            hits: [{ _source: lastOpbeansNodeDoc } as any],
            total: {
              value: 1,
              relation: 'eq',
            },
          },
          took: 0,
          timed_out: false,
          _shards: {
            failed: 0,
            successful: 1,
            total: 1,
          },
        });

        // Second execution reports the same alerts again (now "active").
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
          {
            id: 'opbeans-node',
            fields: {
              'service.name': 'opbeans-node',
              'kibana.alert.workflow_status': 'closed',
            },
          },
        ]);
      });

      it('writes the correct alerts', async () => {
        expect((await helpers.ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledTimes(2);

        const body = (await helpers.ruleDataClientMock.getWriter()).bulk.mock.calls[1][0].body!;

        const documents: any[] = body.filter((op: any) => !isOpDoc(op));

        const evaluationDocuments = documents.filter((doc) => doc['event.kind'] === 'event');
        const alertDocuments = documents.filter((doc) => doc['event.kind'] === 'signal');

        expect(evaluationDocuments.length).toBe(0);
        expect(alertDocuments.length).toBe(2);

        expect(
          alertDocuments.every((doc) => doc[ALERT_STATUS] === ALERT_STATUS_ACTIVE)
        ).toBeTruthy();
        expect(alertDocuments.every((doc) => doc['event.action'] === 'active')).toBeTruthy();

        expect(alertDocuments.every((doc) => doc[ALERT_DURATION] > 0)).toBeTruthy();
      });
    });

    describe('when alerts recover', () => {
      beforeEach(async () => {
        // First execution opens both alerts.
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
          {
            id: 'opbeans-node',
            fields: {
              'service.name': 'opbeans-node',
            },
          },
        ]);

        const lastOpbeansNodeDoc = (
          await helpers.ruleDataClientMock.getWriter()
        ).bulk.mock.calls[0][0].body
          ?.concat()
          .reverse()
          .find((doc: any) => !isOpDoc(doc) && doc['service.name'] === 'opbeans-node') as Record<
          string,
          any
        >;

        helpers.ruleDataClientMock.getReader().search.mockResolvedValueOnce({
          hits: {
            hits: [
              {
                _source: lastOpbeansNodeDoc,
                _index: '.alerts-a',
                _primary_term: 4,
                _seq_no: 2,
              } as any,
            ],
            total: {
              value: 1,
              relation: 'eq',
            },
          },
          took: 0,
          timed_out: false,
          _shards: {
            failed: 0,
            successful: 1,
            total: 1,
          },
        });

        // Second execution omits opbeans-node, so it should recover.
        await helpers.alertWithLifecycle([
          {
            id: 'opbeans-java',
            fields: {
              'service.name': 'opbeans-java',
            },
          },
        ]);
      });

      it('writes the correct alerts', async () => {
        expect((await helpers.ruleDataClientMock.getWriter()).bulk).toHaveBeenCalledTimes(2);

        const body = (await helpers.ruleDataClientMock.getWriter()).bulk.mock.calls[1][0].body!;

        const documents: any[] = body.filter((op: any) => !isOpDoc(op));

        const opbeansJavaAlertDoc = documents.find(
          (doc) => castArray(doc['service.name'])[0] === 'opbeans-java'
        );
        const opbeansNodeAlertDoc = documents.find(
          (doc) => castArray(doc['service.name'])[0] === 'opbeans-node'
        );

        expect(opbeansJavaAlertDoc['event.action']).toBe('active');
        expect(opbeansJavaAlertDoc[ALERT_STATUS]).toBe(ALERT_STATUS_ACTIVE);

        expect(opbeansNodeAlertDoc['event.action']).toBe('close');
        expect(opbeansNodeAlertDoc[ALERT_STATUS]).toBe(ALERT_STATUS_RECOVERED);
        // Recovered alert closes its time range at the second execution's timestamp.
        expect(opbeansNodeAlertDoc[ALERT_TIME_RANGE]).toEqual({
          gte: '2021-06-16T09:01:00.000Z',
          lte: '2021-06-16T09:02:00.000Z',
        });
      });
    });
  });
});
/**
 * Returns true when the given bulk-body entry is an Elasticsearch bulk action
 * line (`index`/`create` with an `_id`) rather than a document payload.
 */
function isOpDoc(doc: any) {
  return Boolean(doc?.index?._id || doc?.create?._id);
}

View file

@ -1,45 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { Logger } from '@kbn/logging';
import {
AlertInstanceContext,
AlertInstanceState,
RuleTypeParams,
RuleTypeState,
} from '@kbn/alerting-plugin/common';
import { IRuleDataClient } from '../rule_data_client';
import { AlertTypeWithExecutor } from '../types';
import { createLifecycleExecutor, LifecycleAlertServices } from './create_lifecycle_executor';
export const createLifecycleRuleTypeFactory =
({ logger, ruleDataClient }: { logger: Logger; ruleDataClient: IRuleDataClient }) =>
<
TParams extends RuleTypeParams,
TAlertInstanceState extends AlertInstanceState,
TAlertInstanceContext extends AlertInstanceContext,
TActionGroupIds extends string,
TServices extends LifecycleAlertServices<
TAlertInstanceState,
TAlertInstanceContext,
TActionGroupIds
>
>(
type: AlertTypeWithExecutor<TAlertInstanceState, TParams, TAlertInstanceContext, TServices>
): AlertTypeWithExecutor<TAlertInstanceState, TParams, TAlertInstanceContext, any> => {
const createBoundLifecycleExecutor = createLifecycleExecutor(logger, ruleDataClient);
const executor = createBoundLifecycleExecutor<
TParams,
RuleTypeState,
AlertInstanceState,
TAlertInstanceContext,
string
>(type.executor as any);
return {
...type,
executor: executor as any,
};
};

View file

@ -1,207 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
DEFAULT_FLAPPING_SETTINGS,
DISABLE_FLAPPING_SETTINGS,
} from '@kbn/alerting-plugin/common/rules_settings';
import { getUpdatedFlappingHistory } from './get_updated_flapping_history';
// Covers every transition getUpdatedFlappingHistory handles: new, still
// active, recovered->active, active->recovered, still recovered, and the
// flapping-disabled short circuit. Argument order in each call is
// (settings, alertId, state, isNew, isRecovered, isActive, recoveredIds).
describe('getUpdatedFlappingHistory', () => {
  // Minimal wrapped rule state shared by every test case.
  type TestRuleState = Record<string, unknown> & {
    aRuleStateKey: string;
  };
  const initialRuleState: TestRuleState = {
    aRuleStateKey: 'INITIAL_RULE_STATE_VALUE',
  };
  test('sets flapping state to true if the alert is new', () => {
    // Alert is not tracked anywhere -> brand new.
    const state = { wrapped: initialRuleState, trackedAlerts: {}, trackedAlertsRecovered: {} };
    expect(
      getUpdatedFlappingHistory(
        DEFAULT_FLAPPING_SETTINGS,
        'TEST_ALERT_0',
        state,
        true, // isNew
        false, // isRecovered
        false, // isActive
        []
      )
    ).toMatchInlineSnapshot(`
      Array [
        true,
      ]
    `);
  });
  test('sets flapping state to false on an alert that is still active', () => {
    // Alert is in trackedAlerts and reported active again -> no state change.
    const state = {
      wrapped: initialRuleState,
      trackedAlerts: {
        TEST_ALERT_0: {
          alertId: 'TEST_ALERT_0',
          alertUuid: 'TEST_ALERT_0_UUID',
          started: '2020-01-01T12:00:00.000Z',
          flappingHistory: [],
          flapping: false,
          pendingRecoveredCount: 0,
          activeCount: 0,
        },
      },
      trackedAlertsRecovered: {},
    };
    expect(
      getUpdatedFlappingHistory(
        DEFAULT_FLAPPING_SETTINGS,
        'TEST_ALERT_0',
        state,
        false, // isNew
        false, // isRecovered
        true, // isActive
        []
      )
    ).toMatchInlineSnapshot(`
      Array [
        false,
      ]
    `);
  });
  test('sets flapping state to true on an alert that is active and previously recovered', () => {
    // Alert flapped from recovered back to active; it must also be removed
    // from the recoveredIds list.
    const state = {
      wrapped: initialRuleState,
      trackedAlertsRecovered: {
        TEST_ALERT_0: {
          alertId: 'TEST_ALERT_0',
          alertUuid: 'TEST_ALERT_0_UUID',
          started: '2020-01-01T12:00:00.000Z',
          flappingHistory: [],
          flapping: false,
          pendingRecoveredCount: 0,
          activeCount: 0,
        },
      },
      trackedAlerts: {},
    };
    const recoveredIds = ['TEST_ALERT_0'];
    expect(
      getUpdatedFlappingHistory(
        DEFAULT_FLAPPING_SETTINGS,
        'TEST_ALERT_0',
        state,
        true, // isNew
        false, // isRecovered
        true, // isActive
        recoveredIds
      )
    ).toMatchInlineSnapshot(`
      Array [
        true,
      ]
    `);
    // The alert is active again, so it is no longer considered recovered.
    expect(recoveredIds).toEqual([]);
  });
  test('sets flapping state to true on an alert that is recovered and previously active', () => {
    // Alert flapped from active to recovered; recoveredIds is left untouched.
    const state = {
      wrapped: initialRuleState,
      trackedAlerts: {
        TEST_ALERT_0: {
          alertId: 'TEST_ALERT_0',
          alertUuid: 'TEST_ALERT_0_UUID',
          started: '2020-01-01T12:00:00.000Z',
          flappingHistory: [],
          flapping: false,
          pendingRecoveredCount: 0,
          activeCount: 0,
        },
      },
      trackedAlertsRecovered: {},
    };
    const recoveredIds = ['TEST_ALERT_0'];
    expect(
      getUpdatedFlappingHistory(
        DEFAULT_FLAPPING_SETTINGS,
        'TEST_ALERT_0',
        state,
        false, // isNew
        true, // isRecovered
        false, // isActive
        recoveredIds
      )
    ).toMatchInlineSnapshot(`
      Array [
        true,
      ]
    `);
    expect(recoveredIds).toEqual(['TEST_ALERT_0']);
  });
  test('sets flapping state to false on an alert that is still recovered', () => {
    // Alert was already recovered and stays recovered -> no state change.
    const state = {
      wrapped: initialRuleState,
      trackedAlerts: {},
      trackedAlertsRecovered: {
        TEST_ALERT_0: {
          alertId: 'TEST_ALERT_0',
          alertUuid: 'TEST_ALERT_0_UUID',
          started: '2020-01-01T12:00:00.000Z',
          flappingHistory: [],
          flapping: false,
          pendingRecoveredCount: 0,
          activeCount: 0,
        },
      },
    };
    const recoveredIds = ['TEST_ALERT_0'];
    expect(
      getUpdatedFlappingHistory(
        DEFAULT_FLAPPING_SETTINGS,
        'TEST_ALERT_0',
        state,
        false, // isNew
        true, // isRecovered
        false, // isActive
        recoveredIds
      )
    ).toMatchInlineSnapshot(`
      Array [
        false,
      ]
    `);
    expect(recoveredIds).toEqual(['TEST_ALERT_0']);
  });
  test('does not set flapping state if flapping is not enabled', () => {
    // With flapping disabled the history is always empty.
    const state = {
      wrapped: initialRuleState,
      trackedAlerts: {},
      trackedAlertsRecovered: {
        TEST_ALERT_0: {
          alertId: 'TEST_ALERT_0',
          alertUuid: 'TEST_ALERT_0_UUID',
          started: '2020-01-01T12:00:00.000Z',
          flappingHistory: [],
          flapping: false,
          pendingRecoveredCount: 0,
          activeCount: 0,
        },
      },
    };
    expect(
      getUpdatedFlappingHistory(
        DISABLE_FLAPPING_SETTINGS,
        'TEST_ALERT_0',
        state,
        false, // isNew
        true, // isRecovered
        false, // isActive
        ['TEST_ALERT_0']
      )
    ).toMatchInlineSnapshot(`Array []`);
  });
});

View file

@ -1,64 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { RuleTypeState } from '@kbn/alerting-plugin/common';
import { RulesSettingsFlappingProperties } from '@kbn/alerting-plugin/common/rules_settings';
import { updateFlappingHistory } from '@kbn/alerting-plugin/server/lib';
import { remove } from 'lodash';
import { WrappedLifecycleRuleState } from './create_lifecycle_executor';
export function getUpdatedFlappingHistory<State extends RuleTypeState = never>(
flappingSettings: RulesSettingsFlappingProperties,
alertId: string,
state: WrappedLifecycleRuleState<State>,
isNew: boolean,
isRecovered: boolean,
isActive: boolean,
recoveredIds: string[]
) {
// duplicating this logic to determine flapping at this level
let flappingHistory: boolean[] = [];
if (flappingSettings.enabled) {
if (isRecovered) {
if (state.trackedAlerts[alertId]) {
// this alert has flapped from active to recovered
flappingHistory = updateFlappingHistory(
flappingSettings,
state.trackedAlerts[alertId].flappingHistory,
true
);
} else if (state.trackedAlertsRecovered[alertId]) {
// this alert is still recovered
flappingHistory = updateFlappingHistory(
flappingSettings,
state.trackedAlertsRecovered[alertId].flappingHistory,
false
);
}
} else if (isNew) {
if (state.trackedAlertsRecovered[alertId]) {
// this alert has flapped from recovered to active
flappingHistory = updateFlappingHistory(
flappingSettings,
state.trackedAlertsRecovered[alertId].flappingHistory,
true
);
remove(recoveredIds, (id) => id === alertId);
} else {
flappingHistory = updateFlappingHistory(flappingSettings, [], true);
}
} else if (isActive) {
// this alert is still active
flappingHistory = updateFlappingHistory(
flappingSettings,
state.trackedAlerts[alertId].flappingHistory,
false
);
}
}
return flappingHistory;
}

View file

@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { AlertInstanceContext, AlertInstanceState } from '@kbn/alerting-plugin/server';
import { alertsMock } from '@kbn/alerting-plugin/server/mocks';
import { LifecycleAlertServices } from './create_lifecycle_executor';
/**
* This wraps the alerts to enable the preservation of the generic type
* arguments of the factory function.
**/
class AlertsMockWrapper<
  InstanceState extends AlertInstanceState = AlertInstanceState,
  InstanceContext extends AlertInstanceContext = AlertInstanceContext
> {
  // Delegates to the alerting plugin's mock factory; the class exists only so
  // the generic parameters can be captured by the AlertServices type below.
  createAlertServices() {
    return alertsMock.createRuleExecutorServices<InstanceState, InstanceContext>();
  }
}

// Return type of the wrapped mock factory with the generic arguments preserved.
type AlertServices<
  InstanceState extends AlertInstanceState = AlertInstanceState,
  InstanceContext extends AlertInstanceContext = AlertInstanceContext
> = ReturnType<AlertsMockWrapper<InstanceState, InstanceContext>['createAlertServices']>;

/**
 * Builds a mock implementation of the lifecycle alert services on top of the
 * standard rule-executor services mock.
 */
export const createLifecycleAlertServicesMock = <
  InstanceState extends AlertInstanceState = never,
  InstanceContext extends AlertInstanceContext = never,
  ActionGroupIds extends string = never
>(
  alertServices: AlertServices<InstanceState, InstanceContext>
): LifecycleAlertServices<InstanceState, InstanceContext, ActionGroupIds> => ({
  // Lifecycle alert creation maps straight to plain alert-factory creation.
  alertWithLifecycle: ({ id }) => alertServices.alertFactory.create(id),
  // Lifecycle metadata lookups return fixed placeholder values.
  getAlertStartedDate: jest.fn((id: string) => null),
  getAlertUuid: jest.fn((id: string) => 'mock-alert-uuid'),
  getAlertByAlertUuid: jest.fn((id: string) => Promise.resolve(null)),
});

View file

@ -34,7 +34,6 @@
"@kbn/alerts-as-data-utils",
"@kbn/core-http-router-server-mocks",
"@kbn/core-http-server",
"@kbn/alerting-state-types",
"@kbn/alerting-types"
],
"exclude": [

View file

@ -17,7 +17,6 @@ import {
RuleTypeState,
RuleTypeParams,
} from '@kbn/alerting-plugin/server';
import { AlertConsumers } from '@kbn/rule-data-utils';
import { ES_TEST_INDEX_NAME } from '@kbn/alerting-api-integration-helpers';
import { FixtureStartDeps, FixtureSetupDeps } from './plugin';
@ -884,33 +883,14 @@ function getCancellableRuleType() {
return result;
}
function getAlwaysFiringAlertAsDataRuleType(
logger: Logger,
{ ruleRegistry }: Pick<FixtureSetupDeps, 'ruleRegistry'>
) {
function getAlwaysFiringAlertAsDataRuleType() {
const paramsSchema = schema.object({
index: schema.string(),
reference: schema.string(),
});
type ParamsType = TypeOf<typeof paramsSchema>;
const ruleDataClient = ruleRegistry.ruleDataService.initializeIndex({
feature: AlertConsumers.OBSERVABILITY,
registrationContext: 'observability.test.alerts',
dataset: ruleRegistry.dataset.alerts,
componentTemplateRefs: [],
componentTemplates: [
{
name: 'mappings',
},
],
});
const createLifecycleRuleType = ruleRegistry.createLifecycleRuleTypeFactory({
logger,
ruleDataClient,
});
return createLifecycleRuleType({
const result: RuleType<ParamsType, never, {}, {}, {}, 'default'> = {
id: 'test.always-firing-alert-as-data',
name: 'Test: Always Firing Alert As Data',
actionGroups: [{ id: 'default', name: 'Default' }],
@ -926,19 +906,8 @@ function getAlwaysFiringAlertAsDataRuleType(
const { services, params, state, spaceId, namespace, rule } = ruleExecutorOptions;
const ruleInfo = { spaceId, namespace, ...rule };
services
.alertWithLifecycle({
id: '1',
fields: {},
})
.scheduleActions('default');
services
.alertWithLifecycle({
id: '2',
fields: {},
})
.scheduleActions('default');
services.alertsClient?.report({ id: '1', actionGroup: 'default' });
services.alertsClient?.report({ id: '2', actionGroup: 'default' });
await services.scopedClusterClient.asCurrentUser.index({
index: params.index,
@ -960,8 +929,10 @@ function getAlwaysFiringAlertAsDataRuleType(
fieldMap: {},
},
useLegacyAlerts: true,
shouldWrite: true,
},
});
};
return result;
}
function getWaitingRuleType(logger: Logger) {
@ -1393,7 +1364,7 @@ export function defineRuleTypes(
alerting.registerType(getCancellableRuleType());
alerting.registerType(getPatternSuccessOrFailureRuleType());
alerting.registerType(getExceedsAlertLimitRuleType());
alerting.registerType(getAlwaysFiringAlertAsDataRuleType(logger, { ruleRegistry }));
alerting.registerType(getAlwaysFiringAlertAsDataRuleType());
alerting.registerType(getPatternFiringAutoRecoverFalseRuleType());
alerting.registerType(getPatternFiringAlertsAsDataRuleType());
alerting.registerType(getWaitingRuleType(logger));

View file

@ -24,7 +24,6 @@
"@kbn/notifications-plugin",
"@kbn/core-saved-objects-server",
"@kbn/logging",
"@kbn/rule-data-utils",
"@kbn/alerting-api-integration-helpers",
],
"exclude": [

View file

@ -40,6 +40,7 @@ export default function bulkUntrackTests({ getService }: FtrProviderContext) {
match_all: {},
},
conflicts: 'proceed',
ignore_unavailable: true,
});
await objectRemover.removeAll();
});

View file

@ -31,6 +31,7 @@ export default function bulkUntrackByQueryTests({ getService }: FtrProviderConte
match_all: {},
},
conflicts: 'proceed',
ignore_unavailable: true,
});
await objectRemover.removeAll();
});

View file

@ -51,7 +51,11 @@ export default function alertTests({ getService }: FtrProviderContext) {
after(async () => {
await esTestIndexTool.destroy();
await es.indices.delete({ index: authorizationIndex });
await es.deleteByQuery({ index: alertAsDataIndex, query: { match_all: {} } });
await es.deleteByQuery({
index: alertAsDataIndex,
query: { match_all: {} },
ignore_unavailable: true,
});
});
for (const scenario of UserAtSpaceScenarios) {
@ -1493,6 +1497,10 @@ instanceStateValue: true
_index: '.internal.alerts-observability.test.alerts.alerts-default-000001',
kibana: {
alert: {
action_group: 'default',
flapping_history: expectExpect.any(Array),
maintenance_window_ids: [],
severity_improving: false,
rule: {
parameters: {
index: '.kibana-alerting-test-data',
@ -1500,7 +1508,10 @@ instanceStateValue: true
},
category: 'Test: Always Firing Alert As Data',
consumer: 'alertsFixture',
execution: { uuid: expectExpect.any(String) },
execution: {
uuid: expectExpect.any(String),
timestamp: expectExpect.any(String),
},
name: 'abc',
producer: 'alertsFixture',
revision: 0,
@ -1530,6 +1541,10 @@ instanceStateValue: true
_index: '.internal.alerts-observability.test.alerts.alerts-default-000001',
kibana: {
alert: {
action_group: 'default',
flapping_history: expectExpect.any(Array),
maintenance_window_ids: [],
severity_improving: false,
rule: {
parameters: {
index: '.kibana-alerting-test-data',
@ -1537,7 +1552,10 @@ instanceStateValue: true
},
category: 'Test: Always Firing Alert As Data',
consumer: 'alertsFixture',
execution: { uuid: expectExpect.any(String) },
execution: {
timestamp: expectExpect.any(String),
uuid: expectExpect.any(String),
},
name: 'abc',
producer: 'alertsFixture',
revision: 0,
@ -1583,6 +1601,10 @@ instanceStateValue: true
_index: '.internal.alerts-observability.test.alerts.alerts-default-000001',
kibana: {
alert: {
action_group: 'default',
flapping_history: expectExpect.any(Array),
maintenance_window_ids: [],
previous_action_group: 'default',
rule: {
parameters: {
index: '.kibana-alerting-test-data',
@ -1590,7 +1612,10 @@ instanceStateValue: true
},
category: 'Test: Always Firing Alert As Data',
consumer: 'alertsFixture',
execution: { uuid: expectExpect.any(String) },
execution: {
timestamp: expectExpect.any(String),
uuid: expectExpect.any(String),
},
name: 'abc',
producer: 'alertsFixture',
revision: 0,
@ -1620,6 +1645,10 @@ instanceStateValue: true
_index: '.internal.alerts-observability.test.alerts.alerts-default-000001',
kibana: {
alert: {
action_group: 'default',
flapping_history: expectExpect.any(Array),
maintenance_window_ids: [],
previous_action_group: 'default',
rule: {
parameters: {
index: '.kibana-alerting-test-data',
@ -1627,7 +1656,10 @@ instanceStateValue: true
},
category: 'Test: Always Firing Alert As Data',
consumer: 'alertsFixture',
execution: { uuid: expectExpect.any(String) },
execution: {
timestamp: expectExpect.any(String),
uuid: expectExpect.any(String),
},
name: 'abc',
producer: 'alertsFixture',
revision: 0,
@ -1721,6 +1753,10 @@ instanceStateValue: true
_index: '.internal.alerts-observability.test.alerts.alerts-default-000001',
kibana: {
alert: {
action_group: 'default',
flapping_history: expectExpect.any(Array),
maintenance_window_ids: [],
severity_improving: false,
rule: {
parameters: {
index: '.kibana-alerting-test-data',
@ -1728,7 +1764,10 @@ instanceStateValue: true
},
category: 'Test: Always Firing Alert As Data',
consumer: 'alertsFixture',
execution: { uuid: expectExpect.any(String) },
execution: {
uuid: expectExpect.any(String),
timestamp: expectExpect.any(String),
},
name: 'abc',
producer: 'alertsFixture',
revision: 0,
@ -1758,6 +1797,10 @@ instanceStateValue: true
_index: '.internal.alerts-observability.test.alerts.alerts-default-000001',
kibana: {
alert: {
action_group: 'default',
flapping_history: expectExpect.any(Array),
maintenance_window_ids: [],
severity_improving: false,
rule: {
parameters: {
index: '.kibana-alerting-test-data',
@ -1765,7 +1808,10 @@ instanceStateValue: true
},
category: 'Test: Always Firing Alert As Data',
consumer: 'alertsFixture',
execution: { uuid: expectExpect.any(String) },
execution: {
uuid: expectExpect.any(String),
timestamp: expectExpect.any(String),
},
name: 'abc',
producer: 'alertsFixture',
revision: 0,

View file

@ -43,6 +43,7 @@ export default function createDisableRuleTests({ getService }: FtrProviderContex
match_all: {},
},
conflicts: 'proceed',
ignore_unavailable: true,
});
await objectRemover.removeAll();
});

View file

@ -34,7 +34,7 @@ import {
ALERT_CONSECUTIVE_MATCHES,
} from '@kbn/rule-data-utils';
import { RuleNotifyWhen } from '@kbn/alerting-plugin/common';
import { ES_TEST_INDEX_NAME, ESTestIndexTool } from '@kbn/alerting-api-integration-helpers';
import { ESTestIndexTool } from '@kbn/alerting-api-integration-helpers';
import { FtrProviderContext } from '../../../../../common/ftr_provider_context';
import { Spaces } from '../../../../scenarios';
import {
@ -79,6 +79,7 @@ export default function createAlertsAsDataAlertDelayInstallResourcesTest({
index: [alertsAsDataIndex, alwaysFiringAlertsAsDataIndex],
query: { match_all: {} },
conflicts: 'proceed',
ignore_unavailable: true,
});
});
afterEach(async () => {
@ -87,6 +88,7 @@ export default function createAlertsAsDataAlertDelayInstallResourcesTest({
index: [alertsAsDataIndex, alwaysFiringAlertsAsDataIndex],
query: { match_all: {} },
conflicts: 'proceed',
ignore_unavailable: true,
});
});
after(async () => {
@ -395,234 +397,6 @@ export default function createAlertsAsDataAlertDelayInstallResourcesTest({
expect(alertDocsRun6.length).to.equal(1);
});
it('should generate expected events with a alertDelay with AAD (rule registry)', async () => {
const params = {
index: ES_TEST_INDEX_NAME,
reference: 'test',
};
const { body: createdAction } = await supertestWithoutAuth
.post(`${getUrlPrefix(Spaces.space1.id)}/api/actions/connector`)
.set('kbn-xsrf', 'foo')
.send({
name: 'MY action',
connector_type_id: 'test.noop',
config: {},
secrets: {},
})
.expect(200);
const response = await supertestWithoutAuth
.post(`${getUrlPrefix(Spaces.space1.id)}/api/alerting/rule`)
.set('kbn-xsrf', 'foo')
.send(
getTestRuleData({
rule_type_id: 'test.always-firing-alert-as-data',
schedule: { interval: '1d' },
throttle: null,
notify_when: null,
params,
actions: [
{
id: createdAction.id,
group: 'default',
params: {},
frequency: {
summary: false,
throttle: null,
notify_when: RuleNotifyWhen.CHANGE,
},
},
],
alert_delay: {
active: 3,
},
})
);
expect(response.status).to.eql(200);
const ruleId = response.body.id;
objectRemover.add(Spaces.space1.id, ruleId, 'rule', 'alerting');
// --------------------------
// RUN 1 - 0 new alerts
// --------------------------
let events: IValidatedEvent[] = await waitForEventLogDocs(
ruleId,
new Map([['execute', { equal: 1 }]])
);
let executeEvent = events[0];
expect(get(executeEvent, ACTIVE_PATH)).to.be(0);
expect(get(executeEvent, NEW_PATH)).to.be(0);
expect(get(executeEvent, RECOVERED_PATH)).to.be(0);
expect(get(executeEvent, ACTION_PATH)).to.be(0);
expect(get(executeEvent, DELAYED_PATH)).to.be(2);
// Query for alerts
const alertDocsRun1 = await queryForAlertDocs<Alert>(alwaysFiringAlertsAsDataIndex);
// Get alert state from task document
let state: any = await getTaskState(ruleId);
expect(state.alertInstances['1'].meta.activeCount).to.equal(1);
expect(state.alertTypeState.trackedAlerts['1'].activeCount).to.equal(1);
// After the first run, we should have 0 alert docs for the 0 active alerts
expect(alertDocsRun1.length).to.equal(0);
// --------------------------
// RUN 2 - 0 new alerts
// --------------------------
let runSoon = await supertestWithoutAuth
.post(`${getUrlPrefix(Spaces.space1.id)}/internal/alerting/rule/${ruleId}/_run_soon`)
.set('kbn-xsrf', 'foo');
expect(runSoon.status).to.eql(204);
events = await waitForEventLogDocs(ruleId, new Map([['execute', { equal: 2 }]]));
executeEvent = events[1];
expect(get(executeEvent, ACTIVE_PATH)).to.be(0);
expect(get(executeEvent, NEW_PATH)).to.be(0);
expect(get(executeEvent, RECOVERED_PATH)).to.be(0);
expect(get(executeEvent, ACTION_PATH)).to.be(0);
expect(get(executeEvent, DELAYED_PATH)).to.be(2);
// Query for alerts
const alertDocsRun2 = await queryForAlertDocs<Alert>(alwaysFiringAlertsAsDataIndex);
// Get alert state from task document
state = await getTaskState(ruleId);
expect(state.alertInstances['1'].meta.activeCount).to.equal(2);
expect(state.alertTypeState.trackedAlerts['1'].activeCount).to.equal(2);
// After the second run, we should have 0 alert docs for the 0 active alerts
expect(alertDocsRun2.length).to.equal(0);
// --------------------------
// RUN 3 - 1 new alert
// --------------------------
runSoon = await supertestWithoutAuth
.post(`${getUrlPrefix(Spaces.space1.id)}/internal/alerting/rule/${ruleId}/_run_soon`)
.set('kbn-xsrf', 'foo');
expect(runSoon.status).to.eql(204);
events = await waitForEventLogDocs(ruleId, new Map([['execute', { equal: 3 }]]));
executeEvent = events[2];
let executionUuid = get(executeEvent, UUID_PATH);
// Note: the rule creates 2 alerts but we will only look at one
expect(get(executeEvent, ACTIVE_PATH)).to.be(2);
expect(get(executeEvent, NEW_PATH)).to.be(2);
expect(get(executeEvent, RECOVERED_PATH)).to.be(0);
expect(get(executeEvent, ACTION_PATH)).to.be(2);
expect(get(executeEvent, DELAYED_PATH)).to.be(0);
// Query for alerts
const alertDocsRun3 = await queryForAlertDocs<Alert>(alwaysFiringAlertsAsDataIndex);
// Get alert state from task document
state = await getTaskState(ruleId);
expect(state.alertInstances['1'].meta.activeCount).to.equal(3);
expect(state.alertTypeState.trackedAlerts['1'].activeCount).to.equal(3);
// After the third run, we should have 2 alert docs for the 2 active alerts but we will only look at one
expect(alertDocsRun3.length).to.equal(2);
let source: Alert = alertDocsRun3[0]._source!;
// Each doc should have a copy of the rule data
expect(source[ALERT_RULE_CATEGORY]).to.equal('Test: Always Firing Alert As Data');
expect(source[ALERT_RULE_CONSUMER]).to.equal('alertsFixture');
expect(source[ALERT_RULE_NAME]).to.equal('abc');
expect(source[ALERT_RULE_PRODUCER]).to.equal('alertsFixture');
expect(source[ALERT_RULE_TAGS]).to.eql(['foo']);
expect(source[ALERT_RULE_TYPE_ID]).to.equal('test.always-firing-alert-as-data');
expect(source[ALERT_RULE_UUID]).to.equal(ruleId);
expect(source[ALERT_RULE_PARAMETERS]).to.eql(params);
expect(source[SPACE_IDS]).to.eql(['space1']);
expect(source[ALERT_RULE_EXECUTION_UUID]).to.equal(executionUuid);
// alert UUID should equal doc id
expect(source[ALERT_UUID]).to.equal(alertDocsRun3[0]._id);
// duration should be 0 since this is a new alert
expect(source[ALERT_DURATION]).to.equal(0);
// start should be defined
expect(source[ALERT_START]).to.match(timestampPattern);
// time_range.gte should be same as start
expect(source[ALERT_TIME_RANGE]?.gte).to.equal(source[ALERT_START]);
// timestamp should be defined
expect(source['@timestamp']).to.match(timestampPattern);
// status should be active
expect(source[ALERT_STATUS]).to.equal('active');
// workflow status should be 'open'
expect(source[ALERT_WORKFLOW_STATUS]).to.equal('open');
// event.action should be 'open'
expect(source[EVENT_ACTION]).to.equal('open');
// event.kind should be 'signal'
expect(source[EVENT_KIND]).to.equal('signal');
// tags should equal rule tags because rule type doesn't set any tags
expect(source.tags).to.eql(['foo']);
// alert consecutive matches should match the active count
expect(source[ALERT_CONSECUTIVE_MATCHES]).to.equal(3);
// --------------------------
// RUN 4 - 1 active alert
// --------------------------
runSoon = await supertestWithoutAuth
.post(`${getUrlPrefix(Spaces.space1.id)}/internal/alerting/rule/${ruleId}/_run_soon`)
.set('kbn-xsrf', 'foo');
expect(runSoon.status).to.eql(204);
events = await waitForEventLogDocs(ruleId, new Map([['execute', { equal: 4 }]]));
executeEvent = events[3];
executionUuid = get(executeEvent, UUID_PATH);
// Note: the rule creates 2 alerts but we will only look at one
expect(get(executeEvent, ACTIVE_PATH)).to.be(2);
expect(get(executeEvent, NEW_PATH)).to.be(0);
expect(get(executeEvent, RECOVERED_PATH)).to.be(0);
expect(get(executeEvent, ACTION_PATH)).to.be(0);
expect(get(executeEvent, DELAYED_PATH)).to.be(0);
// Query for alerts
const alertDocsRun4 = await queryForAlertDocs<Alert>(alwaysFiringAlertsAsDataIndex);
// Get alert state from task document
state = await getTaskState(ruleId);
expect(state.alertInstances['1'].meta.activeCount).to.equal(4);
expect(state.alertTypeState.trackedAlerts['1'].activeCount).to.equal(4);
// After the fourth run, we should have 2 alert docs for the 2 active alerts but we will only look at one
expect(alertDocsRun4.length).to.equal(2);
source = alertDocsRun4[0]._source!;
const run3Source = alertDocsRun3[0]._source!;
// Each doc should have a copy of the rule data
expect(source[ALERT_RULE_CATEGORY]).to.equal('Test: Always Firing Alert As Data');
expect(source[ALERT_RULE_CONSUMER]).to.equal('alertsFixture');
expect(source[ALERT_RULE_NAME]).to.equal('abc');
expect(source[ALERT_RULE_PRODUCER]).to.equal('alertsFixture');
expect(source[ALERT_RULE_TAGS]).to.eql(['foo']);
expect(source[ALERT_RULE_TYPE_ID]).to.equal('test.always-firing-alert-as-data');
expect(source[ALERT_RULE_UUID]).to.equal(ruleId);
expect(source[ALERT_RULE_PARAMETERS]).to.eql(params);
expect(source[SPACE_IDS]).to.eql(['space1']);
expect(source[ALERT_RULE_EXECUTION_UUID]).to.equal(executionUuid);
expect(source[ALERT_UUID]).to.equal(run3Source[ALERT_UUID]);
// start time should be defined and the same as prior run
expect(source[ALERT_START]).to.match(timestampPattern);
expect(source[ALERT_START]).to.equal(run3Source[ALERT_START]);
// timestamp should be defined and not the same as prior run
expect(source['@timestamp']).to.match(timestampPattern);
expect(source['@timestamp']).not.to.equal(run3Source['@timestamp']);
// status should still be active
expect(source[ALERT_STATUS]).to.equal('active');
// event.action set to active
expect(source[EVENT_ACTION]).to.eql('active');
expect(source.tags).to.eql(['foo']);
// these values should be the same as previous run
expect(source[EVENT_KIND]).to.eql(run3Source[EVENT_KIND]);
expect(source[ALERT_WORKFLOW_STATUS]).to.eql(run3Source[ALERT_WORKFLOW_STATUS]);
expect(source[ALERT_TIME_RANGE]?.gte).to.equal(run3Source[ALERT_TIME_RANGE]?.gte);
// alert consecutive matches should match the active count
expect(source[ALERT_CONSECUTIVE_MATCHES]).to.equal(4);
});
it('should not recover alert if the activeCount did not reach the alertDelay threshold with AAD', async () => {
const { body: createdAction } = await supertestWithoutAuth
.post(`${getUrlPrefix(Spaces.space1.id)}/api/actions/connector`)

View file

@ -63,6 +63,7 @@ export default function createDisableRuleTests({ getService }: FtrProviderContex
match_all: {},
},
conflicts: 'proceed',
ignore_unavailable: true,
});
await objectRemover.removeAll();
});

View file

@ -23,6 +23,5 @@ export default ({ loadTestFile, getService }: FtrProviderContext): void => {
// Trial
loadTestFile(require.resolve('./get_alert_by_id'));
loadTestFile(require.resolve('./update_alert'));
loadTestFile(require.resolve('./lifecycle_executor'));
});
};

View file

@ -1,275 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
// WARNING: This test running in Function Test Runner is building a live
// LifecycleRuleExecutor, feeding it some mock data, but letting it write
// it's various alerts to indices. I suspect it's quite fragile, and I
// added this comment to fix some fragility in the way the alert factory
// was built. I suspect it will suffer more such things in the future.
// I fixed this as a drive-by, but opened an issue to do something later,
// if needed: https://github.com/elastic/kibana/issues/144557
import { type Subject, ReplaySubject, of } from 'rxjs';
import type { ElasticsearchClient, Logger, LogMeta } from '@kbn/core/server';
import sinon from 'sinon';
import expect from '@kbn/expect';
import { mappingFromFieldMap } from '@kbn/alerting-plugin/common';
import {
AlertConsumers,
ALERT_REASON,
ALERT_UUID,
} from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
import {
createLifecycleExecutor,
WrappedLifecycleRuleState,
} from '@kbn/rule-registry-plugin/server/utils/create_lifecycle_executor';
import { Dataset, IRuleDataClient, RuleDataService } from '@kbn/rule-registry-plugin/server';
import { RuleExecutorOptions } from '@kbn/alerting-plugin/server';
import { getDataStreamAdapter } from '@kbn/alerting-plugin/server/alerts_service/lib/data_stream_adapter';
import type { FtrProviderContext } from '../../../common/ftr_provider_context';
import {
MockRuleParams,
MockRuleState,
MockAlertContext,
MockAlertState,
MockAllowedActionGroups,
} from '../../../common/types';
import { cleanupRegistryIndices, getMockAlertFactory } from '../../../common/lib/helpers';
// eslint-disable-next-line import/no-default-export
/**
 * FTR test suite for the rule registry's `createLifecycleExecutor`.
 *
 * Exercises the full lifecycle-executor path against a real Elasticsearch:
 * bootstraps a `RuleDataService` + alerts index, runs a fake rule executor
 * twice, and verifies that object-typed rule state round-trips through the
 * wrapped lifecycle state and is persisted to the alerts index.
 */
export default function createLifecycleExecutorApiTest({ getService }: FtrProviderContext) {
  const es = getService('es');
  const log = getService('log');
  // Routes Kibana `Logger` calls to the FTR log service; the two-arg form is
  // used only when structured metadata is supplied.
  const fakeLogger = <Meta extends LogMeta = LogMeta>(msg: string, meta?: Meta) =>
    meta ? log.debug(msg, meta) : log.debug(msg);
  // Minimal `Logger` implementation: level methods go to the FTR debug log,
  // the remaining interface members are inert sinon stubs.
  const logger = {
    trace: fakeLogger,
    debug: fakeLogger,
    info: fakeLogger,
    warn: fakeLogger,
    error: fakeLogger,
    fatal: fakeLogger,
    log: sinon.stub(),
    get: sinon.stub(),
    isLevelEnabled: sinon.stub(),
  } as Logger;
  // RuleDataService expects an async accessor for the scoped ES client.
  const getClusterClient = () => {
    const client = es as ElasticsearchClient;
    return Promise.resolve(client);
  };
  // `useDataStreamForAlerts: false` selects the alias/index-based write path
  // rather than data streams.
  const dataStreamAdapter = getDataStreamAdapter({ useDataStreamForAlerts: false });
  describe('createLifecycleExecutor', () => {
    let ruleDataClient: IRuleDataClient;
    let pluginStop$: Subject<void>;
    const elasticsearchAndSOAvailability$ = of(true);
    before(async () => {
      // First we need to setup the data service. This happens within the
      // Rule Registry plugin as part of the server side setup phase.
      // ReplaySubject(1) so any late subscriber still observes the stop signal.
      pluginStop$ = new ReplaySubject(1);
      const ruleDataService = new RuleDataService({
        getClusterClient,
        logger,
        kibanaVersion: '8.0.0',
        isWriteEnabled: true,
        isWriterCacheEnabled: false,
        disabledRegistrationContexts: [] as string[],
        // Framework alerts are disabled here, so index resource installation
        // stays with the rule data service for this test.
        frameworkAlerts: {
          enabled: () => false,
          getContextInitializationPromise: async () => ({ result: false }),
        },
        pluginStop$,
        dataStreamAdapter,
        elasticsearchAndSOAvailability$,
      });
      // This initializes the service. This happens immediately after the creation
      // of the RuleDataService in the setup phase of the Rule Registry plugin
      ruleDataService.initializeService();
      // This initializes the index and templates and returns the data client.
      // This happens in each solution plugin before they can register lifecycle
      // executors. The `testObject` mapping is what the test's rule state is
      // written under.
      ruleDataClient = ruleDataService.initializeIndex({
        feature: AlertConsumers.OBSERVABILITY,
        registrationContext: 'observability.test',
        dataset: Dataset.alerts,
        componentTemplateRefs: [],
        componentTemplates: [
          {
            name: 'mappings',
            mappings: mappingFromFieldMap(
              {
                testObject: {
                  type: 'object',
                  required: false,
                  array: false,
                },
              },
              false
            ),
          },
        ],
      });
    });
    after(async () => {
      // Remove the indices created above, then signal shutdown so the
      // service's pluginStop$ subscribers complete.
      // NOTE(review): cleanupRegistryIndices is not awaited — confirm the
      // helper is synchronous (or fire-and-forget by design).
      cleanupRegistryIndices(getService, ruleDataClient);
      pluginStop$.next();
      pluginStop$.complete();
    });
    it('should work with object fields', async () => {
      const id = 'host-01';
      // This creates the function that will wrap the solution's rule executor with the RuleRegistry lifecycle
      const createLifecycleRuleExecutor = createLifecycleExecutor(logger, ruleDataClient);
      // This creates the executor that is passed to the Alerting framework.
      const executor = createLifecycleRuleExecutor<
        MockRuleParams,
        MockRuleState,
        MockAlertState,
        MockAlertContext,
        MockAllowedActionGroups
      >(async function (options) {
        const { services, state: previousState } = options;
        const { alertWithLifecycle } = services;
        // Fake some state updates: on the first run seed `testObject`, on
        // subsequent runs append one more `count` entry so the two executions
        // produce observably different state.
        const state = previousState.testObject
          ? {
              ...previousState,
              testObject: {
                ...previousState.testObject,
                values: [
                  ...previousState.testObject.values,
                  { name: 'count', value: previousState.testObject.values.length + 1 },
                ],
              },
            }
          : {
              ...previousState,
              testObject: {
                id,
                values: [{ name: 'count', value: 1 }],
                host: {
                  name: id,
                },
              },
            };
        // This MUST be called by the solutions executor function
        alertWithLifecycle({
          id,
          fields: {
            [ALERT_REASON]: 'Test alert is firing',
            ...state,
          },
        });
        // Returns the current state of the alert
        return Promise.resolve({ state });
      });
      const ruleId = 'rule-id';
      // Create the options with the minimal amount of values to test the lifecycle executor
      const options = {
        alertId: ruleId,
        spaceId: 'default',
        tags: ['test'],
        startedAt: new Date(),
        rule: {
          id: ruleId,
          name: 'test rule',
          ruleTypeId: 'observability.test.fake',
          ruleTypeName: 'test',
          consumer: 'observability',
          producer: 'observability.test',
        },
        services: {
          alertFactory: getMockAlertFactory(),
          shouldWriteAlerts: sinon.stub().returns(true),
          getMaintenanceWindowIds: async () => [],
        },
        flappingSettings: {
          enabled: false,
          lookBackWindow: 20,
          statusChangeThreshold: 4,
        },
        dataStreamAdapter,
      } as unknown as RuleExecutorOptions<
        MockRuleParams,
        WrappedLifecycleRuleState<MockRuleState>,
        { [x: string]: unknown },
        { [x: string]: unknown },
        string
      >;
      // Execute the rule the first time: `wrapped` should hold the seeded state.
      const executorResult = await executor(options);
      expect(executorResult.state.wrapped).to.eql({
        testObject: {
          host: { name: 'host-01' },
          id: 'host-01',
          values: [{ name: 'count', value: 1 }],
        },
      });
      // The uuid is minted by the mock alert factory — presumably it hands out
      // deterministic ids ('uuid-1', …); verify against getMockAlertFactory.
      const alertUuid = executorResult.state.trackedAlerts['host-01'].alertUuid;
      expect(alertUuid).to.be('uuid-1');
      // We need to refresh the index so the data is available for the next call
      await es.indices.refresh({ index: `${ruleDataClient.indexName}*` });
      // Execute again to ensure that we read the object and write it again with the updated state
      const nextExecutorResult = await executor({ ...options, state: executorResult.state });
      expect(nextExecutorResult.state.wrapped).to.eql({
        testObject: {
          host: { name: 'host-01' },
          id: 'host-01',
          values: [
            { name: 'count', value: 1 },
            { name: 'count', value: 2 },
          ],
        },
      });
      // Refresh again so we can query the data to check it was written properly
      await es.indices.refresh({ index: `${ruleDataClient.indexName}*` });
      // Use the ruleDataClient to read the results from the index, looking up
      // the alert document by the uuid tracked in the executor state.
      const response = await ruleDataClient.getReader().search({
        body: {
          query: {
            bool: {
              filter: [
                {
                  term: {
                    [ALERT_UUID]: nextExecutorResult.state.trackedAlerts['host-01'].alertUuid,
                  },
                },
              ],
            },
          },
        },
      });
      const source = response.hits.hits[0]._source as any;
      // The state in Elasticsearch should match the state returned from the executor
      expect(source.testObject).to.eql(
        nextExecutorResult.state.wrapped && nextExecutorResult.state.wrapped.testObject
      );
    });
  });
}