Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[Response Ops][Alerting] Get summary alert query returns hard-coded 100 alerts (#209025)
Resolves https://github.com/elastic/kibana/issues/208750

## Summary

Removes the hard-coded value in the summarized alerts query and uses the max alerts config instead.

### Checklist

- [ ] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios

### To verify

1. Set `xpack.alerting.rules.run.alerts.max` in kibana.yml and start Kibana.
2. Create a summary alerting rule.
3. Verify that the `size` in the summary alert query is the same value as the max alerts config (see the sketch just below).
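At a glance, a minimal sketch of the change (the helper name and shape below are illustrative, not the actual Kibana source): the summarized-alerts search body takes its `size` from the configured limit instead of a hard-coded 100.

```ts
// Illustrative sketch only — the helper name and shape are hypothetical, not Kibana's code.
interface BuildQueryOpts {
  ruleId: string;
  maxAlertLimit: number; // resolved value of xpack.alerting.rules.run.alerts.max
}

const buildSummarizedAlertsQuery = ({ ruleId, maxAlertLimit }: BuildQueryOpts) => ({
  // previously hard-coded as `size: 100` (MAX_ALERT_DOCS_TO_RETURN)
  size: maxAlertLimit,
  track_total_hits: true,
  query: {
    bool: {
      filter: [{ term: { 'kibana.alert.rule.uuid': ruleId } }],
    },
  },
});

// e.g. with xpack.alerting.rules.run.alerts.max: 1000 set in kibana.yml:
const body = buildSummarizedAlertsQuery({ ruleId: 'my-rule-id', maxAlertLimit: 1000 });
// body.size === 1000
```

With the limit set in kibana.yml, the generated body uses that value as its `size`, which is what the verify steps above check.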
Parent: c53140036a
Commit: 64936f504d
6 changed files with 53 additions and 7 deletions
@@ -376,6 +376,8 @@ export class AlertsClient<
       throw new Error(`Must specify either execution UUID or time range for AAD alert query.`);
     }
 
+    const maxAlertLimit = this.legacyAlertsClient.getMaxAlertLimit();
+
     const getQueryParams = {
       executionUuid,
       start,
@@ -383,6 +385,7 @@ export class AlertsClient<
       ruleId,
       excludedAlertInstanceIds,
       alertsFilter,
+      maxAlertLimit,
     };
 
     const formatAlert = this.ruleType.alerts?.formatAlert;
@@ -641,12 +644,14 @@ export class AlertsClient<
       );
     }
     const isLifecycleAlert = this.ruleType.autoRecoverAlerts ?? false;
+    const maxAlertLimit = this.legacyAlertsClient.getMaxAlertLimit();
 
     const query = getMaintenanceWindowAlertsQuery({
       executionUuid,
       ruleId,
       maintenanceWindows,
       action: isLifecycleAlert ? 'open' : undefined,
+      maxAlertLimit,
     });
 
     const response = await this.search<ScopedQueryAggregationResult>(query);
@@ -219,7 +219,7 @@ export const getExpectedQueryByExecutionUuid = ({
         ],
       },
     },
-    size: 100,
+    size: 1000,
     track_total_hits: true,
   },
   ignore_unavailable: true,
@@ -382,7 +382,7 @@ export const getExpectedQueryByTimeRange = ({
         filter,
       },
     },
-    size: 100,
+    size: 1000,
     track_total_hits: true,
   },
   ignore_unavailable: true,
@@ -243,6 +243,21 @@ describe('Legacy Alerts Client', () => {
     expect(mockCreateAlertFactory.hasReachedAlertLimit).toHaveBeenCalled();
   });
 
+  test('getMaxAlertLimit() should return the maxAlertLimit', async () => {
+    const alertsClient = new LegacyAlertsClient({
+      alertingEventLogger,
+      logger,
+      request: fakeRequest,
+      spaceId: 'space1',
+      ruleType,
+      maintenanceWindowsService,
+    });
+
+    await alertsClient.initializeExecution(defaultExecutionOpts);
+
+    expect(alertsClient.getMaxAlertLimit()).toBe(1000);
+  });
+
   test('processAlerts() should call processAlerts, trimRecoveredAlerts and getAlertsForNotifications', async () => {
     maintenanceWindowsService.getMaintenanceWindows.mockReturnValue({
       maintenanceWindows: [
@@ -260,6 +260,10 @@ export class LegacyAlertsClient<
     return this.alertFactory!.hasReachedAlertLimit();
   }
 
+  public getMaxAlertLimit(): number {
+    return this.maxAlerts;
+  }
+
   public checkLimitUsage() {
     return this.alertFactory!.alertLimit.checkLimitUsage();
   }
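For context, a simplified sketch (not the real implementation) of why the new accessor is sufficient: `LegacyAlertsClient` already holds the resolved per-rule alert limit, so `AlertsClient` can read it at query-build time instead of re-plumbing the configuration.

```ts
// Simplified stand-in for LegacyAlertsClient, for illustration only.
class LegacyAlertsClientSketch {
  // maxAlerts is resolved from xpack.alerting.rules.run.alerts.max when the client is created.
  constructor(private readonly maxAlerts: number) {}

  public getMaxAlertLimit(): number {
    return this.maxAlerts;
  }
}

// A caller (e.g. the summarized-alerts path shown above) reads the limit at
// query-build time instead of re-reading configuration:
const legacyAlertsClient = new LegacyAlertsClientSketch(1000);
const maxAlertLimit = legacyAlertsClient.getMaxAlertLimit(); // 1000
```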
@@ -41,7 +41,6 @@ import { FormatAlert } from '../../types';
 import { expandFlattenedAlert } from './format_alert';
 import { injectAnalyzeWildcard } from './inject_analyze_wildcard';
 
-const MAX_ALERT_DOCS_TO_RETURN = 100;
 enum AlertTypes {
   NEW = 0,
   ONGOING,
@@ -53,6 +52,7 @@ const getLifecycleAlertsQueryByExecutionUuid = ({
   ruleId,
   excludedAlertInstanceIds,
   alertsFilter,
+  maxAlertLimit,
 }: GetLifecycleAlertsQueryByExecutionUuidParams): Array<SearchRequest['body']> => {
   // lifecycle alerts assign a different action to an alert depending
   // on whether it is new/ongoing/recovered. query for each action in order
@@ -65,6 +65,7 @@ const getLifecycleAlertsQueryByExecutionUuid = ({
       excludedAlertInstanceIds,
       action: 'open',
       alertsFilter,
+      maxAlertLimit,
     }),
     getQueryByExecutionUuid({
       executionUuid,
@@ -72,6 +73,7 @@ const getLifecycleAlertsQueryByExecutionUuid = ({
       excludedAlertInstanceIds,
       action: 'active',
       alertsFilter,
+      maxAlertLimit,
     }),
     getQueryByExecutionUuid({
       executionUuid,
@@ -79,6 +81,7 @@ const getLifecycleAlertsQueryByExecutionUuid = ({
       excludedAlertInstanceIds,
       action: 'close',
       alertsFilter,
+      maxAlertLimit,
     }),
   ];
 };
@@ -89,6 +92,7 @@ const getLifecycleAlertsQueryByTimeRange = ({
   ruleId,
   excludedAlertInstanceIds,
   alertsFilter,
+  maxAlertLimit,
 }: GetLifecycleAlertsQueryByTimeRangeParams): Array<SearchRequest['body']> => {
   return [
     getQueryByTimeRange({
@@ -98,6 +102,7 @@ const getLifecycleAlertsQueryByTimeRange = ({
       excludedAlertInstanceIds,
       type: AlertTypes.NEW,
       alertsFilter,
+      maxAlertLimit,
     }),
     getQueryByTimeRange({
       start,
@@ -106,6 +111,7 @@ const getLifecycleAlertsQueryByTimeRange = ({
       excludedAlertInstanceIds,
       type: AlertTypes.ONGOING,
       alertsFilter,
+      maxAlertLimit,
     }),
     getQueryByTimeRange({
       start,
@@ -114,6 +120,7 @@ const getLifecycleAlertsQueryByTimeRange = ({
       excludedAlertInstanceIds,
       type: AlertTypes.RECOVERED,
       alertsFilter,
+      maxAlertLimit,
     }),
   ];
 };
@@ -124,6 +131,7 @@ const getQueryByExecutionUuid = ({
   excludedAlertInstanceIds,
   action,
   alertsFilter,
+  maxAlertLimit,
 }: GetQueryByExecutionUuidParams): SearchRequest['body'] => {
   const filter: QueryDslQueryContainer[] = [
     {
@@ -170,7 +178,7 @@ const getQueryByExecutionUuid = ({
   }
 
   return {
-    size: MAX_ALERT_DOCS_TO_RETURN,
+    size: maxAlertLimit,
     track_total_hits: true,
     query: {
       bool: {
@@ -187,6 +195,7 @@ const getQueryByTimeRange = ({
   excludedAlertInstanceIds,
   type,
   alertsFilter,
+  maxAlertLimit,
 }: GetQueryByTimeRangeParams<AlertTypes>): SearchRequest['body'] => {
   // base query filters the alert documents for a rule by the given time range
   let filter: QueryDslQueryContainer[] = [
@@ -267,7 +276,7 @@ const getQueryByTimeRange = ({
   }
 
   return {
-    size: MAX_ALERT_DOCS_TO_RETURN,
+    size: maxAlertLimit,
     track_total_hits: true,
     query: {
       bool: {
@@ -282,6 +291,7 @@ export const getQueryByScopedQueries = ({
   ruleId,
   action,
   maintenanceWindows,
+  maxAlertLimit,
 }: GetQueryByScopedQueriesParams): SearchRequest['body'] => {
   const filters: QueryDslQueryContainer[] = [
     {
@@ -330,7 +340,7 @@ export const getQueryByScopedQueries = ({
       aggs: {
         alertId: {
           top_hits: {
-            size: MAX_ALERT_DOCS_TO_RETURN,
+            size: maxAlertLimit,
             _source: {
               includes: [ALERT_UUID],
             },
@@ -460,6 +470,7 @@ const getLifecycleAlertsQueries = ({
   ruleId,
   excludedAlertInstanceIds,
   alertsFilter,
+  maxAlertLimit,
 }: GetAlertsQueryParams): Array<SearchRequest['body']> => {
   let queryBodies;
   if (!!executionUuid) {
@@ -468,6 +479,7 @@ const getLifecycleAlertsQueries = ({
       ruleId,
       excludedAlertInstanceIds,
       alertsFilter,
+      maxAlertLimit,
     });
   } else {
     queryBodies = getLifecycleAlertsQueryByTimeRange({
@@ -476,6 +488,7 @@ const getLifecycleAlertsQueries = ({
       ruleId,
       excludedAlertInstanceIds,
       alertsFilter,
+      maxAlertLimit,
     });
   }
 
@@ -489,6 +502,7 @@ const getContinualAlertsQuery = ({
   ruleId,
   excludedAlertInstanceIds,
   alertsFilter,
+  maxAlertLimit,
 }: GetAlertsQueryParams): SearchRequest['body'] => {
   let queryBody;
   if (!!executionUuid) {
@@ -497,6 +511,7 @@ const getContinualAlertsQuery = ({
       ruleId,
       excludedAlertInstanceIds,
       alertsFilter,
+      maxAlertLimit,
     });
   } else {
     queryBody = getQueryByTimeRange({
@@ -505,6 +520,7 @@ const getContinualAlertsQuery = ({
       ruleId,
       excludedAlertInstanceIds,
       alertsFilter,
+      maxAlertLimit,
     });
   }
 
@@ -516,12 +532,14 @@ const getMaintenanceWindowAlertsQuery = ({
   ruleId,
   action,
   maintenanceWindows,
+  maxAlertLimit,
 }: GetMaintenanceWindowAlertsQueryParams): SearchRequest['body'] => {
   return getQueryByScopedQueries({
     executionUuid,
     ruleId,
     action,
     maintenanceWindows,
+    maxAlertLimit,
   });
 };
 
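With `maxAlertLimit` threaded through, the scoped-query path also sizes its per-alert `top_hits` aggregation by the configured limit rather than the old constant. A simplified sketch of that aggregation shape, assuming the rest of the query is unchanged:

```ts
// Simplified sketch (not the exact Kibana query): the scoped-query builder now sizes
// its per-alert top_hits aggregation by the configured limit as well.
const buildScopedQueryAggs = (maxAlertLimit: number) => ({
  alertId: {
    top_hits: {
      size: maxAlertLimit, // previously MAX_ALERT_DOCS_TO_RETURN (100)
      _source: {
        includes: ['kibana.alert.uuid'], // the ALERT_UUID constant in the source
      },
    },
  },
});
```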
@@ -221,12 +221,13 @@ export type UpdateAlertsMaintenanceWindowIdByScopedQueryParams =
 export type GetAlertsQueryParams = Omit<
   GetSummarizedAlertsParams,
   'formatAlert' | 'isLifecycleAlert' | 'spaceId'
->;
+> & { maxAlertLimit: number };
 
 export interface GetLifecycleAlertsQueryByExecutionUuidParams {
   executionUuid: string;
   ruleId: string;
   excludedAlertInstanceIds: string[];
+  maxAlertLimit: number;
   alertsFilter?: AlertsFilter | null;
 }
 
@@ -239,6 +240,7 @@ export interface GetQueryByScopedQueriesParams {
   ruleId: string;
   executionUuid: string;
   maintenanceWindows: MaintenanceWindow[];
+  maxAlertLimit: number;
   action?: string;
 }
 
@@ -246,6 +248,7 @@ export interface GetMaintenanceWindowAlertsQueryParams {
   ruleId: string;
   maintenanceWindows: MaintenanceWindow[];
   executionUuid: string;
+  maxAlertLimit: number;
   action?: string;
 }
 
@@ -254,6 +257,7 @@ export interface GetLifecycleAlertsQueryByTimeRangeParams {
   end: Date;
   ruleId: string;
   excludedAlertInstanceIds: string[];
+  maxAlertLimit: number;
   alertsFilter?: AlertsFilter | null;
 }
 
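Since `maxAlertLimit` is added as a required (non-optional) member of these param types, TypeScript now forces every call site to supply a limit. A hedged example using a local mirror of one of the interfaces above (values are illustrative):

```ts
// Illustrative only — a local mirror of the updated interface, not an import from Kibana.
interface GetQueryByScopedQueriesParamsSketch {
  ruleId: string;
  executionUuid: string;
  maintenanceWindows: unknown[]; // MaintenanceWindow[] in the real type
  maxAlertLimit: number; // newly required
  action?: string;
}

// Omitting maxAlertLimit here would now be a compile-time error.
const params: GetQueryByScopedQueriesParamsSketch = {
  ruleId: 'rule-1',
  executionUuid: '11111111-2222-3333-4444-555555555555',
  maintenanceWindows: [],
  maxAlertLimit: 1000, // e.g. the value the updated unit test expects with default settings
};
```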