mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
# Backport This will backport the following commits from `main` to `8.x`: - [[Synthetics] Improve overview page performance !! (#201275)](https://github.com/elastic/kibana/pull/201275) <!--- Backport version: 9.4.3 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport) <!--BACKPORT [{"author":{"name":"Shahzad","email":"shahzad31comp@gmail.com"},"sourceCommit":{"committedDate":"2024-12-11T19:33:33Z","message":"[Synthetics] Improve overview page performance !! (#201275)\n\n## Summary\r\n\r\nImprove overview page performance !!\r\n\r\nRight now UI works for few hundred to 1000 monitors, but it starts\r\ndegrading after that, this PR makes sure, we refactor queries in such a\r\nway that it scale up to 10k-20k monitors easily.\r\n\r\n\r\n### Queries before\r\nBefore this PR, we were doing 2 steps queries, first fetch all saved\r\nobjects and the fetch all summary documents by passings all ids from\r\nfirst phase. This meant that let's say if we have 20k saved objects,\r\nfirst we will need to page through all of them to even start fetching\r\nsummaries. To fetch summary documents, we were using `top_hits` query\r\nwhich can be memory expensive.\r\n\r\n\r\n### Queries now\r\nIn this PR we fetch summaries and saved objects in parallel, since we\r\nhave space id on documents as well, there was no need to do 2 step\r\nqueries. Now we fetch both things in parallel and then we hydrate saved\r\nobject data from summary data. 
In this PR now we are using top_metrics\r\nquery to fetch each monitor status instead of `top_hits`\r\n\r\n\r\nI tested on about 20k monitors, app performs reasoably well after the PR\r\n<img width=\"1920\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/c143e196-59a4-45b4-86b7-bd22ac4c5d4b\">\r\n\r\n\r\nOn a very slow cluster on which kibana is local against a remote cluster\r\n\r\n### After\r\n<img width=\"1920\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/540d0cdf-2f8c-44d1-af76-81953d9ca0ff\">\r\n\r\n\r\n### Before\r\n<img width=\"1918\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/5fdc314d-bb59-4137-9397-d8aee6bd4806\">\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>","sha":"b4ccb0c205b2df4312edfe7a087e0bca25242d05","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v9.0.0","backport:prev-minor","ci:project-deploy-observability","Team:obs-ux-management"],"title":"[Synthetics] Improve overview page performance !!","number":201275,"url":"https://github.com/elastic/kibana/pull/201275","mergeCommit":{"message":"[Synthetics] Improve overview page performance !! (#201275)\n\n## Summary\r\n\r\nImprove overview page performance !!\r\n\r\nRight now UI works for few hundred to 1000 monitors, but it starts\r\ndegrading after that, this PR makes sure, we refactor queries in such a\r\nway that it scale up to 10k-20k monitors easily.\r\n\r\n\r\n### Queries before\r\nBefore this PR, we were doing 2 steps queries, first fetch all saved\r\nobjects and the fetch all summary documents by passings all ids from\r\nfirst phase. This meant that let's say if we have 20k saved objects,\r\nfirst we will need to page through all of them to even start fetching\r\nsummaries. 
To fetch summary documents, we were using `top_hits` query\r\nwhich can be memory expensive.\r\n\r\n\r\n### Queries now\r\nIn this PR we fetch summaries and saved objects in parallel, since we\r\nhave space id on documents as well, there was no need to do 2 step\r\nqueries. Now we fetch both things in parallel and then we hydrate saved\r\nobject data from summary data. In this PR now we are using top_metrics\r\nquery to fetch each monitor status instead of `top_hits`\r\n\r\n\r\nI tested on about 20k monitors, app performs reasoably well after the PR\r\n<img width=\"1920\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/c143e196-59a4-45b4-86b7-bd22ac4c5d4b\">\r\n\r\n\r\nOn a very slow cluster on which kibana is local against a remote cluster\r\n\r\n### After\r\n<img width=\"1920\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/540d0cdf-2f8c-44d1-af76-81953d9ca0ff\">\r\n\r\n\r\n### Before\r\n<img width=\"1918\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/5fdc314d-bb59-4137-9397-d8aee6bd4806\">\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>","sha":"b4ccb0c205b2df4312edfe7a087e0bca25242d05"}},"sourceBranch":"main","suggestedTargetBranches":[],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/201275","number":201275,"mergeCommit":{"message":"[Synthetics] Improve overview page performance !! 
(#201275)\n\n## Summary\r\n\r\nImprove overview page performance !!\r\n\r\nRight now UI works for few hundred to 1000 monitors, but it starts\r\ndegrading after that, this PR makes sure, we refactor queries in such a\r\nway that it scale up to 10k-20k monitors easily.\r\n\r\n\r\n### Queries before\r\nBefore this PR, we were doing 2 steps queries, first fetch all saved\r\nobjects and the fetch all summary documents by passings all ids from\r\nfirst phase. This meant that let's say if we have 20k saved objects,\r\nfirst we will need to page through all of them to even start fetching\r\nsummaries. To fetch summary documents, we were using `top_hits` query\r\nwhich can be memory expensive.\r\n\r\n\r\n### Queries now\r\nIn this PR we fetch summaries and saved objects in parallel, since we\r\nhave space id on documents as well, there was no need to do 2 step\r\nqueries. Now we fetch both things in parallel and then we hydrate saved\r\nobject data from summary data. In this PR now we are using top_metrics\r\nquery to fetch each monitor status instead of `top_hits`\r\n\r\n\r\nI tested on about 20k monitors, app performs reasoably well after the PR\r\n<img width=\"1920\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/c143e196-59a4-45b4-86b7-bd22ac4c5d4b\">\r\n\r\n\r\nOn a very slow cluster on which kibana is local against a remote cluster\r\n\r\n### After\r\n<img width=\"1920\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/540d0cdf-2f8c-44d1-af76-81953d9ca0ff\">\r\n\r\n\r\n### Before\r\n<img width=\"1918\" alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/5fdc314d-bb59-4137-9397-d8aee6bd4806\">\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>","sha":"b4ccb0c205b2df4312edfe7a087e0bca25242d05"}}]}] BACKPORT--> Co-authored-by: Shahzad <shahzad31comp@gmail.com>
This commit is contained in:
parent
b3ba62a972
commit
d13e80d1e2
12 changed files with 1336 additions and 1820 deletions
|
@ -235,4 +235,7 @@ const commons = {
|
|||
},
|
||||
},
|
||||
},
|
||||
meta: {
|
||||
space_id: 'default',
|
||||
},
|
||||
};
|
||||
|
|
|
@ -1,300 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import times from 'lodash/times';
|
||||
import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import { cloneDeep, intersection } from 'lodash';
|
||||
import { SavedObjectsFindResult } from '@kbn/core-saved-objects-api-server';
|
||||
import { MsearchMultisearchBody } from '@elastic/elasticsearch/lib/api/types';
|
||||
|
||||
import { isStatusEnabled } from '../../common/runtime_types/monitor_management/alert_config';
|
||||
import { FINAL_SUMMARY_FILTER } from '../../common/constants/client_defaults';
|
||||
import {
|
||||
ConfigKey,
|
||||
EncryptedSyntheticsMonitorAttributes,
|
||||
OverviewPing,
|
||||
OverviewStatus,
|
||||
OverviewStatusMetaData,
|
||||
} from '../../common/runtime_types';
|
||||
import { createEsParams, SyntheticsEsClient } from '../lib';
|
||||
|
||||
const DEFAULT_MAX_ES_BUCKET_SIZE = 10000;
|
||||
|
||||
const fields = [
|
||||
'@timestamp',
|
||||
'summary',
|
||||
'monitor',
|
||||
'observer',
|
||||
'config_id',
|
||||
'error',
|
||||
'agent',
|
||||
'url',
|
||||
'state',
|
||||
'tags',
|
||||
];
|
||||
|
||||
const getStatusQuery = ({
|
||||
idSize,
|
||||
idsToQuery,
|
||||
range,
|
||||
monitorLocationIds,
|
||||
}: {
|
||||
idSize: number;
|
||||
monitorLocationIds: string[];
|
||||
range: { from: string; to: string };
|
||||
idsToQuery: string[];
|
||||
}) => {
|
||||
const params = createEsParams({
|
||||
body: {
|
||||
size: 0,
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
FINAL_SUMMARY_FILTER,
|
||||
{
|
||||
range: {
|
||||
'@timestamp': {
|
||||
gte: range.from,
|
||||
lte: range.to,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
terms: {
|
||||
'monitor.id': idsToQuery,
|
||||
},
|
||||
},
|
||||
] as QueryDslQueryContainer[],
|
||||
},
|
||||
},
|
||||
aggs: {
|
||||
id: {
|
||||
terms: {
|
||||
field: 'monitor.id',
|
||||
size: idSize,
|
||||
},
|
||||
aggs: {
|
||||
location: {
|
||||
terms: {
|
||||
field: 'observer.name',
|
||||
size: monitorLocationIds.length || 100,
|
||||
},
|
||||
aggs: {
|
||||
status: {
|
||||
top_hits: {
|
||||
size: 1,
|
||||
sort: [
|
||||
{
|
||||
'@timestamp': {
|
||||
order: 'desc',
|
||||
},
|
||||
},
|
||||
],
|
||||
_source: {
|
||||
includes: fields,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (monitorLocationIds.length > 0) {
|
||||
params.body.query?.bool?.filter.push({
|
||||
terms: {
|
||||
'observer.name': monitorLocationIds,
|
||||
},
|
||||
});
|
||||
}
|
||||
return params;
|
||||
};
|
||||
|
||||
type StatusQueryParams = ReturnType<typeof getStatusQuery>;
|
||||
type OverviewStatusResponse = Omit<
|
||||
OverviewStatus,
|
||||
| 'disabledCount'
|
||||
| 'allMonitorsCount'
|
||||
| 'disabledMonitorsCount'
|
||||
| 'projectMonitorsCount'
|
||||
| 'disabledMonitorQueryIds'
|
||||
| 'allIds'
|
||||
>;
|
||||
|
||||
export async function queryMonitorStatus({
|
||||
esClient,
|
||||
monitorLocationIds,
|
||||
range,
|
||||
monitorQueryIds,
|
||||
monitorLocationsMap,
|
||||
monitorQueryIdToConfigIdMap,
|
||||
monitors,
|
||||
}: {
|
||||
esClient: SyntheticsEsClient;
|
||||
monitorLocationIds: string[];
|
||||
range: { from: string; to: string };
|
||||
monitorQueryIds: string[];
|
||||
monitorLocationsMap: Record<string, string[]>;
|
||||
monitorQueryIdToConfigIdMap: Record<string, string>;
|
||||
monitors: Array<SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>>;
|
||||
}): Promise<OverviewStatusResponse> {
|
||||
const idSize = Math.trunc(DEFAULT_MAX_ES_BUCKET_SIZE / monitorLocationIds.length || 1);
|
||||
const pageCount = Math.ceil(monitorQueryIds.length / idSize);
|
||||
let up = 0;
|
||||
let down = 0;
|
||||
const upConfigs: Record<string, OverviewStatusMetaData> = {};
|
||||
const downConfigs: Record<string, OverviewStatusMetaData> = {};
|
||||
const monitorsWithoutData = new Map(Object.entries(cloneDeep(monitorLocationsMap)));
|
||||
const pendingConfigs: Record<string, OverviewStatusMetaData> = {};
|
||||
const disabledConfigs: Record<string, OverviewStatusMetaData> = {};
|
||||
|
||||
monitors
|
||||
.filter((monitor) => !monitor.attributes[ConfigKey.ENABLED])
|
||||
.forEach((monitor) => {
|
||||
const monitorQueryId = monitor.attributes[ConfigKey.MONITOR_QUERY_ID];
|
||||
monitor.attributes[ConfigKey.LOCATIONS]?.forEach((location) => {
|
||||
disabledConfigs[`${monitorQueryIdToConfigIdMap[monitorQueryId]}-${location.id}`] = {
|
||||
configId: `${monitorQueryIdToConfigIdMap[monitorQueryId]}`,
|
||||
monitorQueryId,
|
||||
status: 'disabled',
|
||||
locationId: location.id,
|
||||
locationLabel: location.label,
|
||||
...getMonitorMeta(monitor),
|
||||
};
|
||||
});
|
||||
});
|
||||
|
||||
const queries: MsearchMultisearchBody[] = times(pageCount).map((i) => {
|
||||
const idsToQuery = (monitorQueryIds as string[]).slice(i * idSize, i * idSize + idSize);
|
||||
return getStatusQuery({
|
||||
idSize,
|
||||
monitorLocationIds,
|
||||
range,
|
||||
idsToQuery,
|
||||
}).body;
|
||||
});
|
||||
|
||||
if (queries.length) {
|
||||
const { responses } = await esClient.msearch<StatusQueryParams, OverviewPing>(
|
||||
queries,
|
||||
'getCurrentStatusOverview'
|
||||
);
|
||||
|
||||
responses.forEach((result) => {
|
||||
result.aggregations?.id.buckets.forEach(({ location, key: queryId }) => {
|
||||
const locationSummaries = location.buckets.map(({ status, key: locationName }) => {
|
||||
const ping = status.hits.hits[0]._source;
|
||||
return { location: locationName, ping };
|
||||
});
|
||||
|
||||
const monitor = monitors.find((m) => m.attributes[ConfigKey.MONITOR_QUERY_ID] === queryId)!;
|
||||
|
||||
// discard any locations that are not in the monitorLocationsMap for the given monitor as well as those which are
|
||||
// in monitorLocationsMap but not in listOfLocations
|
||||
const monLocations = monitorLocationsMap?.[queryId];
|
||||
const monQueriedLocations = intersection(monLocations, monitorLocationIds);
|
||||
monQueriedLocations?.forEach((monLocation) => {
|
||||
const locationSummary = locationSummaries.find(
|
||||
(summary) => summary.location === monLocation
|
||||
);
|
||||
|
||||
if (locationSummary) {
|
||||
const { ping } = locationSummary;
|
||||
const downCount = ping.summary?.down ?? 0;
|
||||
const upCount = ping.summary?.up ?? 0;
|
||||
const configId = ping.config_id;
|
||||
const monitorQueryId = ping.monitor.id;
|
||||
|
||||
const meta = {
|
||||
ping,
|
||||
configId,
|
||||
monitorQueryId,
|
||||
locationId: monLocation,
|
||||
timestamp: ping['@timestamp'],
|
||||
locationLabel: ping.observer.geo!.name!,
|
||||
...getMonitorMeta(monitor),
|
||||
};
|
||||
|
||||
if (downCount > 0) {
|
||||
down += 1;
|
||||
downConfigs[`${configId}-${monLocation}`] = {
|
||||
...meta,
|
||||
status: 'down',
|
||||
};
|
||||
} else if (upCount > 0) {
|
||||
up += 1;
|
||||
upConfigs[`${configId}-${monLocation}`] = {
|
||||
...meta,
|
||||
status: 'up',
|
||||
};
|
||||
}
|
||||
const monitorsMissingData = monitorsWithoutData.get(monitorQueryId) || [];
|
||||
monitorsWithoutData.set(
|
||||
monitorQueryId,
|
||||
monitorsMissingData?.filter((loc) => loc !== monLocation)
|
||||
);
|
||||
if (!monitorsWithoutData.get(monitorQueryId)?.length) {
|
||||
monitorsWithoutData.delete(monitorQueryId);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// identify the remaining monitors without data, to determine pending monitors
|
||||
for (const [queryId, locs] of monitorsWithoutData) {
|
||||
const monitor = monitors.find((m) => m.attributes[ConfigKey.MONITOR_QUERY_ID] === queryId)!;
|
||||
locs.forEach((loc) => {
|
||||
pendingConfigs[`${monitorQueryIdToConfigIdMap[queryId]}-${loc}`] = {
|
||||
configId: `${monitorQueryIdToConfigIdMap[queryId]}`,
|
||||
monitorQueryId: queryId,
|
||||
status: 'unknown',
|
||||
locationId: loc,
|
||||
locationLabel: monitor.attributes[ConfigKey.LOCATIONS]?.find(
|
||||
(location) => location.id === loc
|
||||
)?.label!,
|
||||
name: monitor.attributes[ConfigKey.NAME],
|
||||
schedule: monitor.attributes[ConfigKey.SCHEDULE].number,
|
||||
tags: monitor.attributes[ConfigKey.TAGS],
|
||||
isEnabled: monitor.attributes[ConfigKey.ENABLED],
|
||||
type: monitor.attributes[ConfigKey.MONITOR_TYPE],
|
||||
projectId: monitor.attributes[ConfigKey.PROJECT_ID],
|
||||
isStatusAlertEnabled: isStatusEnabled(monitor.attributes[ConfigKey.ALERT_CONFIG]),
|
||||
updated_at: monitor.updated_at,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
up,
|
||||
down,
|
||||
pending: Object.values(pendingConfigs).length,
|
||||
upConfigs,
|
||||
downConfigs,
|
||||
pendingConfigs,
|
||||
enabledMonitorQueryIds: monitorQueryIds,
|
||||
disabledConfigs,
|
||||
};
|
||||
}
|
||||
|
||||
const getMonitorMeta = (monitor: SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>) => {
|
||||
return {
|
||||
name: monitor.attributes[ConfigKey.NAME],
|
||||
schedule: monitor.attributes[ConfigKey.SCHEDULE].number,
|
||||
tags: monitor.attributes[ConfigKey.TAGS],
|
||||
isEnabled: monitor.attributes[ConfigKey.ENABLED],
|
||||
type: monitor.attributes[ConfigKey.MONITOR_TYPE],
|
||||
projectId: monitor.attributes[ConfigKey.PROJECT_ID],
|
||||
isStatusAlertEnabled: isStatusEnabled(monitor.attributes[ConfigKey.ALERT_CONFIG]),
|
||||
updated_at: monitor.updated_at,
|
||||
spaceId: monitor.namespaces?.[0],
|
||||
};
|
||||
};
|
File diff suppressed because it is too large
Load diff
|
@ -4,126 +4,11 @@
|
|||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { intersection } from 'lodash';
|
||||
import datemath, { Unit } from '@kbn/datemath';
|
||||
import moment from 'moment';
|
||||
import { RouteContext, SyntheticsRestApiRouteFactory } from '../types';
|
||||
import { ConfigKey, OverviewStatusState } from '../../../common/runtime_types';
|
||||
import {
|
||||
getAllMonitors,
|
||||
processMonitors,
|
||||
} from '../../saved_objects/synthetics_monitor/get_all_monitors';
|
||||
import { queryMonitorStatus } from '../../queries/query_monitor_status';
|
||||
import { OverviewStatusService } from './overview_status_service';
|
||||
import { SyntheticsRestApiRouteFactory } from '../types';
|
||||
import { OverviewStatusState } from '../../../common/runtime_types';
|
||||
import { SYNTHETICS_API_URLS } from '../../../common/constants';
|
||||
import { getMonitorFilters, OverviewStatusSchema, OverviewStatusQuery } from '../common';
|
||||
|
||||
/**
|
||||
* Helper function that converts a monitor's schedule to a value to use to generate
|
||||
* an appropriate look-back window for snapshot count.
|
||||
* @param schedule a number/unit pair that represents how often a configured monitor runs
|
||||
* @returns schedule interval in ms
|
||||
*/
|
||||
export function periodToMs(schedule: { number: string; unit: Unit }) {
|
||||
if (Object.keys(datemath.unitsMap).indexOf(schedule.unit) === -1) return 0;
|
||||
|
||||
return parseInt(schedule.number, 10) * datemath.unitsMap[schedule.unit].base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Multi-stage function that first queries all the user's saved object monitor configs.
|
||||
*
|
||||
* Subsequently, fetch the status for each monitor per location in the data streams.
|
||||
* @returns The counts of up/down/disabled monitor by location, and a map of each monitor:location status.
|
||||
*/
|
||||
export async function getStatus(context: RouteContext, params: OverviewStatusQuery) {
|
||||
const { syntheticsEsClient, savedObjectsClient } = context;
|
||||
|
||||
const { query, scopeStatusByLocation = true, showFromAllSpaces } = params;
|
||||
|
||||
/**
|
||||
* Walk through all monitor saved objects, bucket IDs by disabled/enabled status.
|
||||
*
|
||||
* Track max period to make sure the snapshot query should reach back far enough to catch
|
||||
* latest ping for all enabled monitors.
|
||||
*/
|
||||
|
||||
const { filtersStr, locationFilter: queryLocations } = await getMonitorFilters({
|
||||
...params,
|
||||
context,
|
||||
});
|
||||
|
||||
const allMonitors = await getAllMonitors({
|
||||
soClient: savedObjectsClient,
|
||||
showFromAllSpaces,
|
||||
search: query ? `${query}*` : undefined,
|
||||
filter: filtersStr,
|
||||
fields: [
|
||||
ConfigKey.ENABLED,
|
||||
ConfigKey.LOCATIONS,
|
||||
ConfigKey.MONITOR_QUERY_ID,
|
||||
ConfigKey.CONFIG_ID,
|
||||
ConfigKey.SCHEDULE,
|
||||
ConfigKey.MONITOR_SOURCE_TYPE,
|
||||
ConfigKey.MONITOR_TYPE,
|
||||
ConfigKey.NAME,
|
||||
ConfigKey.TAGS,
|
||||
ConfigKey.PROJECT_ID,
|
||||
ConfigKey.ALERT_CONFIG,
|
||||
],
|
||||
});
|
||||
|
||||
const {
|
||||
enabledMonitorQueryIds,
|
||||
disabledMonitorQueryIds,
|
||||
allIds,
|
||||
disabledCount,
|
||||
maxPeriod,
|
||||
monitorLocationIds,
|
||||
monitorLocationsMap,
|
||||
disabledMonitorsCount,
|
||||
projectMonitorsCount,
|
||||
monitorQueryIdToConfigIdMap,
|
||||
} = processMonitors(allMonitors, queryLocations);
|
||||
|
||||
// Account for locations filter
|
||||
const listOfLocationAfterFilter =
|
||||
queryLocations && scopeStatusByLocation
|
||||
? intersection(monitorLocationIds, queryLocations)
|
||||
: monitorLocationIds;
|
||||
|
||||
const range = {
|
||||
from: moment().subtract(maxPeriod, 'milliseconds').subtract(20, 'minutes').toISOString(),
|
||||
to: 'now',
|
||||
};
|
||||
|
||||
const { up, down, pending, upConfigs, downConfigs, pendingConfigs, disabledConfigs } =
|
||||
await queryMonitorStatus({
|
||||
range,
|
||||
monitors: allMonitors,
|
||||
monitorLocationsMap,
|
||||
monitorQueryIdToConfigIdMap,
|
||||
esClient: syntheticsEsClient,
|
||||
monitorLocationIds: listOfLocationAfterFilter,
|
||||
monitorQueryIds: enabledMonitorQueryIds,
|
||||
});
|
||||
|
||||
return {
|
||||
allIds,
|
||||
allMonitorsCount: allMonitors.length,
|
||||
disabledMonitorsCount,
|
||||
projectMonitorsCount,
|
||||
enabledMonitorQueryIds,
|
||||
disabledMonitorQueryIds,
|
||||
disabledCount,
|
||||
up,
|
||||
down,
|
||||
pending,
|
||||
upConfigs,
|
||||
downConfigs,
|
||||
pendingConfigs,
|
||||
disabledConfigs,
|
||||
};
|
||||
}
|
||||
import { OverviewStatusSchema } from '../common';
|
||||
|
||||
// Route factory for the GET overview-status endpoint.
// NOTE(review): this span comes from a diff view that interleaves the removed
// handler (getStatus-based) and the added handler (OverviewStatusService-based)
// without +/- markers, so two return paths appear below; only one exists in
// each real version of the file — confirm against the repository.
export const createGetCurrentStatusRoute: SyntheticsRestApiRouteFactory = () => ({
|
||||
method: 'GET',
|
||||
|
@ -132,9 +17,7 @@ export const createGetCurrentStatusRoute: SyntheticsRestApiRouteFactory = () =>
|
|||
query: OverviewStatusSchema,
|
||||
},
|
||||
handler: async (routeContext): Promise<OverviewStatusState> => {
|
||||
const { request } = routeContext;
|
||||
|
||||
// Old path (removed in this commit): compute status directly from the query.
const params = request.query as OverviewStatusQuery;
|
||||
return await getStatus(routeContext, params);
|
||||
// New path (added in this commit): delegate to the status service.
const statusOverview = new OverviewStatusService(routeContext);
|
||||
return await statusOverview.getOverviewStatus();
|
||||
},
|
||||
});
|
||||
|
|
|
@ -0,0 +1,793 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { SavedObjectsFindResult } from '@kbn/core-saved-objects-api-server';
|
||||
import * as monitorsFns from '../../saved_objects/synthetics_monitor/get_all_monitors';
|
||||
import { EncryptedSyntheticsMonitorAttributes } from '../../../common/runtime_types';
|
||||
import { getUptimeESMockClient } from '../../queries/test_helpers';
|
||||
|
||||
import * as commonLibs from '../common';
|
||||
import * as allLocationsFn from '../../synthetics_service/get_all_locations';
|
||||
import { OverviewStatusService, SUMMARIES_PAGE_SIZE } from './overview_status_service';
|
||||
import times from 'lodash/times';
|
||||
import { flatten } from 'lodash';
|
||||
const japanLoc = {
|
||||
id: 'asia_japan',
|
||||
label: 'Asia/Pacific - Japan',
|
||||
};
|
||||
|
||||
const germanyLoc = {
|
||||
id: 'europe_germany',
|
||||
label: 'Europe - Germany',
|
||||
};
|
||||
|
||||
const allLocations: any = [japanLoc, germanyLoc];
|
||||
jest.spyOn(allLocationsFn, 'getAllLocations').mockResolvedValue({
|
||||
publicLocations: allLocations,
|
||||
privateLocations: [],
|
||||
allLocations,
|
||||
});
|
||||
|
||||
jest.mock('../../saved_objects/synthetics_monitor/get_all_monitors', () => ({
|
||||
...jest.requireActual('../../saved_objects/synthetics_monitor/get_all_monitors'),
|
||||
getAllMonitors: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.spyOn(commonLibs, 'getMonitors').mockResolvedValue({
|
||||
per_page: 10,
|
||||
saved_objects: [
|
||||
{
|
||||
id: 'mon-1',
|
||||
attributes: {
|
||||
enabled: false,
|
||||
locations: [{ id: 'us-east1' }, { id: 'us-west1' }, { id: 'japan' }],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'mon-2',
|
||||
attributes: {
|
||||
enabled: true,
|
||||
locations: [{ id: 'us-east1' }, { id: 'us-west1' }, { id: 'japan' }],
|
||||
schedule: {
|
||||
number: '10',
|
||||
unit: 'm',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
} as any);
|
||||
|
||||
describe('current status route', () => {
|
||||
const testMonitors = [
|
||||
{
|
||||
attributes: {
|
||||
config_id: 'id1',
|
||||
id: 'id1',
|
||||
type: 'browser',
|
||||
enabled: true,
|
||||
name: 'test monitor 1',
|
||||
project_id: 'project-id',
|
||||
tags: ['tag-1', 'tag-2'],
|
||||
schedule: {
|
||||
number: '1',
|
||||
unit: 'm',
|
||||
},
|
||||
locations: [japanLoc],
|
||||
},
|
||||
},
|
||||
{
|
||||
attributes: {
|
||||
id: 'id2',
|
||||
config_id: 'id2',
|
||||
enabled: true,
|
||||
type: 'browser',
|
||||
name: 'test monitor 2',
|
||||
project_id: 'project-id',
|
||||
tags: ['tag-1', 'tag-2'],
|
||||
schedule: {
|
||||
number: '1',
|
||||
unit: 'm',
|
||||
},
|
||||
locations: allLocations,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
describe('OverviewStatusService', () => {
|
||||
it('parses expected agg fields', async () => {
|
||||
const { esClient, syntheticsEsClient } = getUptimeESMockClient();
|
||||
|
||||
esClient.search.mockResponseOnce(
|
||||
getEsResponse({
|
||||
buckets: [
|
||||
{
|
||||
key: {
|
||||
monitorId: 'id1',
|
||||
locationId: japanLoc.id,
|
||||
},
|
||||
status: {
|
||||
key: japanLoc.id,
|
||||
top: [
|
||||
{
|
||||
metrics: {
|
||||
'monitor.status': 'up',
|
||||
},
|
||||
sort: ['2022-09-15T16:19:16.724Z'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
key: {
|
||||
monitorId: 'id2',
|
||||
locationId: japanLoc.id,
|
||||
},
|
||||
status: {
|
||||
key: japanLoc.id,
|
||||
top: [
|
||||
{
|
||||
metrics: {
|
||||
'monitor.status': 'up',
|
||||
},
|
||||
sort: ['2022-09-15T16:19:16.724Z'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
key: {
|
||||
monitorId: 'id2',
|
||||
locationId: germanyLoc.id,
|
||||
},
|
||||
status: {
|
||||
key: germanyLoc.id,
|
||||
top: [
|
||||
{
|
||||
metrics: {
|
||||
'monitor.status': 'down',
|
||||
},
|
||||
sort: ['2022-09-15T16:19:16.724Z'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
);
|
||||
const routeContext: any = {
|
||||
request: {},
|
||||
syntheticsEsClient,
|
||||
};
|
||||
|
||||
const overviewStatusService = new OverviewStatusService(routeContext);
|
||||
overviewStatusService.getMonitorConfigs = jest.fn().mockResolvedValue(testMonitors as any);
|
||||
expect(await overviewStatusService.getOverviewStatus()).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"allIds": Array [
|
||||
"id1",
|
||||
"id2",
|
||||
],
|
||||
"allMonitorsCount": 2,
|
||||
"disabledConfigs": Object {},
|
||||
"disabledCount": 0,
|
||||
"disabledMonitorQueryIds": Array [],
|
||||
"disabledMonitorsCount": 0,
|
||||
"down": 1,
|
||||
"downConfigs": Object {
|
||||
"id2-europe_germany": Object {
|
||||
"configId": "id2",
|
||||
"isEnabled": true,
|
||||
"isStatusAlertEnabled": false,
|
||||
"locationId": "europe_germany",
|
||||
"locationLabel": "Europe - Germany",
|
||||
"monitorQueryId": "id2",
|
||||
"name": "test monitor 2",
|
||||
"projectId": "project-id",
|
||||
"schedule": "1",
|
||||
"spaceId": undefined,
|
||||
"status": "down",
|
||||
"tags": Array [
|
||||
"tag-1",
|
||||
"tag-2",
|
||||
],
|
||||
"timestamp": "2022-09-15T16:19:16.724Z",
|
||||
"type": "browser",
|
||||
"updated_at": undefined,
|
||||
},
|
||||
},
|
||||
"enabledMonitorQueryIds": Array [
|
||||
"id1",
|
||||
"id2",
|
||||
],
|
||||
"pending": 0,
|
||||
"pendingConfigs": Object {},
|
||||
"projectMonitorsCount": 0,
|
||||
"up": 2,
|
||||
"upConfigs": Object {
|
||||
"id1-asia_japan": Object {
|
||||
"configId": "id1",
|
||||
"isEnabled": true,
|
||||
"isStatusAlertEnabled": false,
|
||||
"locationId": "asia_japan",
|
||||
"locationLabel": "Asia/Pacific - Japan",
|
||||
"monitorQueryId": "id1",
|
||||
"name": "test monitor 1",
|
||||
"projectId": "project-id",
|
||||
"schedule": "1",
|
||||
"spaceId": undefined,
|
||||
"status": "up",
|
||||
"tags": Array [
|
||||
"tag-1",
|
||||
"tag-2",
|
||||
],
|
||||
"timestamp": "2022-09-15T16:19:16.724Z",
|
||||
"type": "browser",
|
||||
"updated_at": undefined,
|
||||
},
|
||||
"id2-asia_japan": Object {
|
||||
"configId": "id2",
|
||||
"isEnabled": true,
|
||||
"isStatusAlertEnabled": false,
|
||||
"locationId": "asia_japan",
|
||||
"locationLabel": "Asia/Pacific - Japan",
|
||||
"monitorQueryId": "id2",
|
||||
"name": "test monitor 2",
|
||||
"projectId": "project-id",
|
||||
"schedule": "1",
|
||||
"spaceId": undefined,
|
||||
"status": "up",
|
||||
"tags": Array [
|
||||
"tag-1",
|
||||
"tag-2",
|
||||
],
|
||||
"timestamp": "2022-09-15T16:19:16.724Z",
|
||||
"type": "browser",
|
||||
"updated_at": undefined,
|
||||
},
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it('handles limits with multiple requests', async () => {
|
||||
const { esClient, syntheticsEsClient } = getUptimeESMockClient();
|
||||
esClient.search.mockResponseOnce(
|
||||
getEsResponse({
|
||||
after: {},
|
||||
buckets: flatten(
|
||||
times(SUMMARIES_PAGE_SIZE).map(() => [
|
||||
{
|
||||
key: {
|
||||
monitorId: 'id1',
|
||||
locationId: japanLoc.id,
|
||||
},
|
||||
status: {
|
||||
key: japanLoc.id,
|
||||
top: [
|
||||
{
|
||||
metrics: {
|
||||
'monitor.status': 'up',
|
||||
},
|
||||
sort: ['2022-09-15T16:19:16.724Z'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
key: {
|
||||
monitorId: 'id2',
|
||||
locationId: japanLoc.id,
|
||||
},
|
||||
status: {
|
||||
key: japanLoc.id,
|
||||
top: [
|
||||
{
|
||||
metrics: {
|
||||
'monitor.status': 'up',
|
||||
},
|
||||
sort: ['2022-09-15T16:19:16.724Z'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
key: {
|
||||
monitorId: 'id2',
|
||||
locationId: germanyLoc.id,
|
||||
},
|
||||
status: {
|
||||
key: germanyLoc.id,
|
||||
top: [
|
||||
{
|
||||
metrics: {
|
||||
'monitor.status': 'down',
|
||||
},
|
||||
sort: ['2022-09-15T16:19:16.724Z'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
])
|
||||
),
|
||||
})
|
||||
);
|
||||
|
||||
const routeContext: any = {
|
||||
request: {},
|
||||
syntheticsEsClient,
|
||||
};
|
||||
|
||||
const overviewStatusService = new OverviewStatusService(routeContext);
|
||||
overviewStatusService.getMonitorConfigs = jest.fn().mockResolvedValue(testMonitors as any);
|
||||
|
||||
expect(await overviewStatusService.getOverviewStatus()).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"allIds": Array [
|
||||
"id1",
|
||||
"id2",
|
||||
],
|
||||
"allMonitorsCount": 2,
|
||||
"disabledConfigs": Object {},
|
||||
"disabledCount": 0,
|
||||
"disabledMonitorQueryIds": Array [],
|
||||
"disabledMonitorsCount": 0,
|
||||
"down": 1,
|
||||
"downConfigs": Object {
|
||||
"id2-europe_germany": Object {
|
||||
"configId": "id2",
|
||||
"isEnabled": true,
|
||||
"isStatusAlertEnabled": false,
|
||||
"locationId": "europe_germany",
|
||||
"locationLabel": "Europe - Germany",
|
||||
"monitorQueryId": "id2",
|
||||
"name": "test monitor 2",
|
||||
"projectId": "project-id",
|
||||
"schedule": "1",
|
||||
"spaceId": undefined,
|
||||
"status": "down",
|
||||
"tags": Array [
|
||||
"tag-1",
|
||||
"tag-2",
|
||||
],
|
||||
"timestamp": "2022-09-15T16:19:16.724Z",
|
||||
"type": "browser",
|
||||
"updated_at": undefined,
|
||||
},
|
||||
},
|
||||
"enabledMonitorQueryIds": Array [
|
||||
"id1",
|
||||
"id2",
|
||||
],
|
||||
"pending": 0,
|
||||
"pendingConfigs": Object {},
|
||||
"projectMonitorsCount": 0,
|
||||
"up": 2,
|
||||
"upConfigs": Object {
|
||||
"id1-asia_japan": Object {
|
||||
"configId": "id1",
|
||||
"isEnabled": true,
|
||||
"isStatusAlertEnabled": false,
|
||||
"locationId": "asia_japan",
|
||||
"locationLabel": "Asia/Pacific - Japan",
|
||||
"monitorQueryId": "id1",
|
||||
"name": "test monitor 1",
|
||||
"projectId": "project-id",
|
||||
"schedule": "1",
|
||||
"spaceId": undefined,
|
||||
"status": "up",
|
||||
"tags": Array [
|
||||
"tag-1",
|
||||
"tag-2",
|
||||
],
|
||||
"timestamp": "2022-09-15T16:19:16.724Z",
|
||||
"type": "browser",
|
||||
"updated_at": undefined,
|
||||
},
|
||||
"id2-asia_japan": Object {
|
||||
"configId": "id2",
|
||||
"isEnabled": true,
|
||||
"isStatusAlertEnabled": false,
|
||||
"locationId": "asia_japan",
|
||||
"locationLabel": "Asia/Pacific - Japan",
|
||||
"monitorQueryId": "id2",
|
||||
"name": "test monitor 2",
|
||||
"projectId": "project-id",
|
||||
"schedule": "1",
|
||||
"spaceId": undefined,
|
||||
"status": "up",
|
||||
"tags": Array [
|
||||
"tag-1",
|
||||
"tag-2",
|
||||
],
|
||||
"timestamp": "2022-09-15T16:19:16.724Z",
|
||||
"type": "browser",
|
||||
"updated_at": undefined,
|
||||
},
|
||||
},
|
||||
}
|
||||
`);
|
||||
expect(esClient.search).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
// Verifies that enabled monitor/location pairs with no summary documents at all
// are reported as "pending" with status "unknown" (pending === 3: one location
// for monitor id1, two locations for monitor id2 — see `testMonitors`).
it('handles pending configs', async () => {
  const { esClient, syntheticsEsClient } = getUptimeESMockClient();
  // Empty composite-agg buckets -> no status data for any monitor.
  esClient.search.mockResponseOnce(
    getEsResponse({
      buckets: [],
    })
  );
  const routeContext: any = {
    request: {},
    syntheticsEsClient,
  };

  const overviewStatusService = new OverviewStatusService(routeContext);
  // Stub saved-object fetching so this test exercises only status processing.
  overviewStatusService.getMonitorConfigs = jest.fn().mockResolvedValue(testMonitors as any);
  expect(await overviewStatusService.getOverviewStatus()).toMatchInlineSnapshot(`
    Object {
      "allIds": Array [
        "id1",
        "id2",
      ],
      "allMonitorsCount": 2,
      "disabledConfigs": Object {},
      "disabledCount": 0,
      "disabledMonitorQueryIds": Array [],
      "disabledMonitorsCount": 0,
      "down": 0,
      "downConfigs": Object {},
      "enabledMonitorQueryIds": Array [
        "id1",
        "id2",
      ],
      "pending": 3,
      "pendingConfigs": Object {
        "id1-asia_japan": Object {
          "configId": "id1",
          "isEnabled": true,
          "isStatusAlertEnabled": false,
          "locationId": "asia_japan",
          "locationLabel": "Asia/Pacific - Japan",
          "monitorQueryId": "id1",
          "name": "test monitor 1",
          "projectId": "project-id",
          "schedule": "1",
          "spaceId": undefined,
          "status": "unknown",
          "tags": Array [
            "tag-1",
            "tag-2",
          ],
          "timestamp": undefined,
          "type": "browser",
          "updated_at": undefined,
        },
        "id2-asia_japan": Object {
          "configId": "id2",
          "isEnabled": true,
          "isStatusAlertEnabled": false,
          "locationId": "asia_japan",
          "locationLabel": "Asia/Pacific - Japan",
          "monitorQueryId": "id2",
          "name": "test monitor 2",
          "projectId": "project-id",
          "schedule": "1",
          "spaceId": undefined,
          "status": "unknown",
          "tags": Array [
            "tag-1",
            "tag-2",
          ],
          "timestamp": undefined,
          "type": "browser",
          "updated_at": undefined,
        },
        "id2-europe_germany": Object {
          "configId": "id2",
          "isEnabled": true,
          "isStatusAlertEnabled": false,
          "locationId": "europe_germany",
          "locationLabel": "Europe - Germany",
          "monitorQueryId": "id2",
          "name": "test monitor 2",
          "projectId": "project-id",
          "schedule": "1",
          "spaceId": undefined,
          "status": "unknown",
          "tags": Array [
            "tag-1",
            "tag-2",
          ],
          "timestamp": undefined,
          "type": "browser",
          "updated_at": undefined,
        },
      },
      "projectMonitorsCount": 0,
      "up": 0,
      "upConfigs": Object {},
    }
  `);
});
|
||||
});
|
||||
|
||||
describe('getStatus', () => {
  // Two service-managed locations are available; tests below filter on their labels.
  jest.spyOn(allLocationsFn, 'getAllLocations').mockResolvedValue({
    publicLocations: allLocations,
    privateLocations: [],
    allLocations: [
      {
        id: 'us_central_qa',
        label: 'US Central QA',
      },
      {
        id: 'us_central',
        label: 'North America - US Central',
      },
    ] as any,
  });

  // disabledCount is per monitor/location pair: one disabled monitor with two
  // locations counts 2 unfiltered, 1 when filtered to a single location.
  it.each([
    [['US Central QA'], 1],
    [['North America - US Central'], 1],
    [['North America - US Central', 'US Central QA'], 2],
    [undefined, 2],
  ])('handles disabled count when using location filters', async (locations, disabledCount) => {
    // Single disabled monitor configured in both locations.
    jest.spyOn(monitorsFns, 'getAllMonitors').mockResolvedValue([
      {
        type: 'synthetics-monitor',
        id: 'a9a94f2f-47ba-4fe2-afaa-e5cd29b281f1',
        attributes: {
          enabled: false,
          schedule: {
            number: '3',
            unit: 'm',
          },
          config_id: 'a9a94f2f-47ba-4fe2-afaa-e5cd29b281f1',
          locations: [
            {
              isServiceManaged: true,
              label: 'US Central QA',
              id: 'us_central_qa',
            },
            {
              isServiceManaged: true,
              label: 'North America - US Central',
              id: 'us_central',
            },
          ],
          origin: 'project',
          id: 'a-test2-default',
        },
        references: [],
        migrationVersion: {
          'synthetics-monitor': '8.6.0',
        },
        coreMigrationVersion: '8.0.0',
        updated_at: '2023-02-28T14:31:37.641Z',
        created_at: '2023-02-28T14:31:37.641Z',
        version: 'Wzg0MzkzLDVd',
        namespaces: ['default'],
        score: null,
        sort: ['a', 3013],
      } as unknown as SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>,
    ]);
    const { esClient, syntheticsEsClient } = getUptimeESMockClient();
    // Summary data for unrelated monitors id1/id2; the monitor under test is
    // disabled, so the disabled count comes from the saved object alone.
    esClient.msearch.mockResponseOnce({
      responses: [
        getEsResponse({
          buckets: [
            {
              key: 'id1',
              location: {
                buckets: [
                  {
                    key: 'Asia/Pacific - Japan',
                    status: {
                      hits: {
                        hits: [
                          {
                            _source: {
                              '@timestamp': '2022-09-15T16:08:16.724Z',
                              monitor: {
                                status: 'up',
                                id: 'id1',
                              },
                              summary: {
                                up: 1,
                                down: 0,
                              },
                              config_id: 'id1',
                              observer: {
                                geo: {
                                  name: 'Asia/Pacific - Japan',
                                },
                              },
                            },
                          },
                        ],
                      },
                    },
                  },
                ],
              },
            },
            {
              key: 'id2',
              location: {
                buckets: [
                  {
                    key: 'Asia/Pacific - Japan',
                    status: {
                      hits: {
                        hits: [
                          {
                            _source: {
                              '@timestamp': '2022-09-15T16:09:16.724Z',
                              monitor: {
                                status: 'up',
                                id: 'id2',
                              },
                              summary: {
                                up: 1,
                                down: 0,
                              },
                              config_id: 'id2',
                              observer: {
                                geo: {
                                  name: 'Asia/Pacific - Japan',
                                },
                              },
                            },
                          },
                        ],
                      },
                    },
                  },
                  {
                    key: 'Europe - Germany',
                    status: {
                      hits: {
                        hits: [
                          {
                            _source: {
                              '@timestamp': '2022-09-15T16:19:16.724Z',
                              monitor: {
                                status: 'down',
                                id: 'id2',
                              },
                              summary: {
                                down: 1,
                                up: 0,
                              },
                              config_id: 'id2',
                              observer: {
                                geo: {
                                  name: 'Europe - Germany',
                                },
                              },
                            },
                          },
                        ],
                      },
                    },
                  },
                ],
              },
            },
          ],
        }),
      ],
      took: 605,
    });

    const overviewStatusService = new OverviewStatusService({
      request: {
        query: {
          locations,
        },
      },
      syntheticsEsClient,
    } as any);

    const result = await overviewStatusService.getOverviewStatus();

    expect(result).toEqual(
      expect.objectContaining({
        disabledCount,
      })
    );
  });

  // Same shape as above, but the monitor is enabled and there is no summary
  // data, so each surviving monitor/location pair is counted as pending.
  it.each([
    [['US Central QA'], 1],
    [['North America - US Central'], 1],
    [['North America - US Central', 'US Central QA'], 2],
    [undefined, 2],
  ])('handles pending count when using location filters', async (locations, pending) => {
    jest.spyOn(monitorsFns, 'getAllMonitors').mockResolvedValue([
      {
        type: 'synthetics-monitor',
        id: 'a9a94f2f-47ba-4fe2-afaa-e5cd29b281f1',
        attributes: {
          enabled: true,
          schedule: {
            number: '3',
            unit: 'm',
          },
          config_id: 'a9a94f2f-47ba-4fe2-afaa-e5cd29b281f1',
          locations: [
            {
              isServiceManaged: true,
              label: 'US Central QA',
              id: 'us_central_qa',
            },
            {
              isServiceManaged: true,
              label: 'North America - US Central',
              id: 'us_central',
            },
          ],
          origin: 'project',
          id: 'a-test2-default',
        },
        references: [],
        migrationVersion: {
          'synthetics-monitor': '8.6.0',
        },
        coreMigrationVersion: '8.0.0',
        updated_at: '2023-02-28T14:31:37.641Z',
        created_at: '2023-02-28T14:31:37.641Z',
        version: 'Wzg0MzkzLDVd',
        namespaces: ['default'],
        score: null,
        sort: ['a', 3013],
      } as unknown as SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>,
    ]);
    const { esClient, syntheticsEsClient } = getUptimeESMockClient();
    // No summary buckets -> every monitor/location pair is pending.
    esClient.search.mockResponseOnce(
      getEsResponse({
        buckets: [],
      })
    );

    const overviewStatusService = new OverviewStatusService({
      request: {
        query: {
          locations,
        },
      },
      syntheticsEsClient,
    } as any);

    const result = await overviewStatusService.getOverviewStatus();

    expect(result.pending).toEqual(pending);
  });
});
|
||||
});
|
||||
|
||||
function getEsResponse({ buckets, after }: { buckets: any[]; after?: any }) {
|
||||
return {
|
||||
took: 605,
|
||||
timed_out: false,
|
||||
_shards: {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
skipped: 0,
|
||||
failed: 0,
|
||||
},
|
||||
hits: {
|
||||
hits: [],
|
||||
},
|
||||
aggregations: {
|
||||
monitors: {
|
||||
buckets,
|
||||
after_key: after,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
|
@ -0,0 +1,363 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import moment from 'moment/moment';
|
||||
import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import { SavedObjectsFindResult } from '@kbn/core-saved-objects-api-server';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { withApmSpan } from '@kbn/apm-data-access-plugin/server/utils/with_apm_span';
|
||||
import { asMutableArray } from '../../../common/utils/as_mutable_array';
|
||||
import { getMonitorFilters, OverviewStatusQuery } from '../common';
|
||||
import {
|
||||
getAllMonitors,
|
||||
processMonitors,
|
||||
} from '../../saved_objects/synthetics_monitor/get_all_monitors';
|
||||
import { ConfigKey } from '../../../common/constants/monitor_management';
|
||||
import { RouteContext } from '../types';
|
||||
import {
|
||||
EncryptedSyntheticsMonitorAttributes,
|
||||
OverviewStatusMetaData,
|
||||
} from '../../../common/runtime_types';
|
||||
import { isStatusEnabled } from '../../../common/runtime_types/monitor_management/alert_config';
|
||||
import {
|
||||
FINAL_SUMMARY_FILTER,
|
||||
getRangeFilter,
|
||||
getTimespanFilter,
|
||||
} from '../../../common/constants/client_defaults';
|
||||
|
||||
// Status entries collected for a single monitor: one element per location,
// holding the latest summary's status string and its timestamp.
type LocationStatus = Array<{
  status: string;
  locationId: string;
  timestamp: string;
}>;

// Page size for the composite aggregation over summary documents; also the
// threshold used to decide whether another page must be fetched.
export const SUMMARIES_PAGE_SIZE = 5000;
|
||||
|
||||
/**
 * Computes the synthetics overview page status counts by fetching monitor
 * saved objects and their latest summary documents in parallel, then joining
 * the two in memory (see PR #201275 for the performance rationale).
 */
export class OverviewStatusService {
  // Parsed query filters; populated once per request in getOverviewStatus().
  filterData: {
    locationFilter?: string[] | string;
    filtersStr?: string;
  } = {};
  constructor(
    private readonly routeContext: RouteContext<Record<string, any>, OverviewStatusQuery>
  ) {}

  /**
   * Entry point: resolves filters, fetches configs and status data in
   * parallel, and returns the aggregated overview payload.
   */
  async getOverviewStatus() {
    const { request } = this.routeContext;
    const queryParams = request.query as OverviewStatusQuery;

    this.filterData = await getMonitorFilters({
      ...queryParams,
      context: this.routeContext,
    });

    // Saved objects and ES summaries are independent, so fetch them in parallel.
    const [allConfigs, statusResult] = await Promise.all([
      this.getMonitorConfigs(),
      this.getQueryResult(),
    ]);

    const { up, down, pending, upConfigs, downConfigs, pendingConfigs, disabledConfigs } =
      this.processOverviewStatus(allConfigs, statusResult);

    const {
      enabledMonitorQueryIds,
      disabledMonitorQueryIds,
      allIds,
      disabledCount,
      disabledMonitorsCount,
      projectMonitorsCount,
    } = processMonitors(allConfigs, this.filterData?.locationFilter);

    return {
      allIds,
      allMonitorsCount: allConfigs.length,
      disabledMonitorsCount,
      projectMonitorsCount,
      enabledMonitorQueryIds,
      disabledMonitorQueryIds,
      disabledCount,
      up,
      down,
      pending,
      upConfigs,
      downConfigs,
      pendingConfigs,
      disabledConfigs,
    };
  }

  /**
   * Translates request query params into ES filters for the summary query:
   * space scoping, monitor type / tags / project terms, and (optionally) the
   * resolved location filter.
   */
  getEsDataFilters() {
    const { spaceId, request } = this.routeContext;
    const params = request.query || {};
    const {
      scopeStatusByLocation = true,
      tags,
      monitorTypes,
      projects,
      showFromAllSpaces,
    } = params;
    const { locationFilter } = this.filterData;
    // Emits a `terms` filter for arrays, a `term` filter for scalars, and
    // nothing for empty/missing values.
    const getTermFilter = (field: string, value: string | string[] | undefined) => {
      if (!value || isEmpty(value)) {
        return [];
      }
      if (Array.isArray(value)) {
        return [
          {
            terms: {
              [field]: value,
            },
          },
        ];
      }
      return [
        {
          term: {
            [field]: value,
          },
        },
      ];
    };
    const filters: QueryDslQueryContainer[] = [
      // Documents carry the space id, so no saved-object round trip is needed
      // to scope by space.
      ...(showFromAllSpaces ? [] : [{ term: { 'meta.space_id': spaceId } }]),
      ...getTermFilter('monitor.type', monitorTypes),
      ...getTermFilter('tags', tags),
      ...getTermFilter('monitor.project.id', projects),
    ];

    if (scopeStatusByLocation && !isEmpty(locationFilter) && locationFilter) {
      filters.push({
        terms: {
          'observer.name': locationFilter,
        },
      });
    }
    return filters;
  }

  /**
   * Pages through a composite aggregation over final summary documents,
   * SUMMARIES_PAGE_SIZE (monitorId, locationId) buckets at a time, keeping
   * only the latest status per pair via a `top_metrics` sub-aggregation.
   * Returns a map of monitor id -> per-location status entries.
   */
  async getQueryResult() {
    return withApmSpan('monitor_status_data', async () => {
      const range = {
        // max monitor schedule period is 4 hours, 20 minute subtraction is to be on safe side
        from: moment().subtract(4, 'hours').subtract(20, 'minutes').toISOString(),
        to: 'now',
      };

      let hasMoreData = true;
      const monitorByIds = new Map<string, LocationStatus>();
      let afterKey: any;
      let count = 0;

      do {
        const result = await this.routeContext.syntheticsEsClient.search(
          {
            body: {
              size: 0,
              query: {
                bool: {
                  filter: [
                    FINAL_SUMMARY_FILTER,
                    getRangeFilter({ from: range.from, to: range.to }),
                    getTimespanFilter({ from: 'now-15m', to: 'now' }),
                    ...this.getEsDataFilters(),
                  ] as QueryDslQueryContainer[],
                },
              },
              aggs: {
                monitors: {
                  composite: {
                    size: SUMMARIES_PAGE_SIZE,
                    sources: asMutableArray([
                      {
                        monitorId: {
                          terms: {
                            field: 'monitor.id',
                          },
                        },
                      },
                      {
                        locationId: {
                          terms: {
                            field: 'observer.name',
                          },
                        },
                      },
                    ] as const),
                    after: afterKey,
                  },
                  aggs: {
                    // top_metrics is cheaper than top_hits; we only need the
                    // latest status value and its @timestamp sort key.
                    status: {
                      top_metrics: {
                        metrics: {
                          field: 'monitor.status',
                        },
                        sort: {
                          '@timestamp': 'desc',
                        },
                      },
                    },
                  },
                },
              },
            },
          },
          // Distinct span name per page for inspectability.
          'getCurrentStatusOverview' + count
        );
        count += 1;
        const data = result.body.aggregations?.monitors;

        // A full page implies there may be more buckets to fetch.
        hasMoreData = (data?.buckets ?? []).length >= SUMMARIES_PAGE_SIZE;
        afterKey = data?.after_key;

        data?.buckets.forEach(({ status: statusAgg, key: bKey }) => {
          const monitorId = String(bKey.monitorId);
          const locationId = String(bKey.locationId);
          const status = String(statusAgg.top?.[0].metrics?.['monitor.status']);
          // NOTE(review): `top[0]` is accessed without optional chaining here,
          // unlike the line above — confirm `top` is always non-empty for a
          // returned bucket.
          const timestamp = String(statusAgg.top[0].sort[0]);
          if (!monitorByIds.has(String(monitorId))) {
            monitorByIds.set(monitorId, []);
          }
          monitorByIds.get(monitorId)?.push({ status, locationId, timestamp });
        });
      } while (hasMoreData && afterKey);
      return monitorByIds;
    });
  }

  /**
   * Joins saved-object configs with the per-location status map, bucketing
   * each monitor/location pair into up/down/pending/disabled and building the
   * corresponding config records keyed by `${configId}-${locationId}`.
   */
  processOverviewStatus(
    monitors: Array<SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>>,
    statusData: Map<string, LocationStatus>
  ) {
    let up = 0;
    let down = 0;
    const upConfigs: Record<string, OverviewStatusMetaData> = {};
    const downConfigs: Record<string, OverviewStatusMetaData> = {};
    const pendingConfigs: Record<string, OverviewStatusMetaData> = {};
    const disabledConfigs: Record<string, OverviewStatusMetaData> = {};

    const enabledMonitors = monitors.filter((monitor) => monitor.attributes[ConfigKey.ENABLED]);
    const disabledMonitors = monitors.filter((monitor) => !monitor.attributes[ConfigKey.ENABLED]);

    const queryLocIds = this.filterData?.locationFilter;

    // Disabled monitors never have live status; every location is 'disabled'.
    disabledMonitors.forEach((monitor) => {
      const monitorQueryId = monitor.attributes[ConfigKey.MONITOR_QUERY_ID];
      const meta = this.getMonitorMeta(monitor);
      monitor.attributes[ConfigKey.LOCATIONS]?.forEach((location) => {
        disabledConfigs[`${meta.configId}-${location.id}`] = {
          monitorQueryId,
          status: 'disabled',
          locationId: location.id,
          locationLabel: location.label,
          ...meta,
        };
      });
    });

    enabledMonitors.forEach((monitor) => {
      const monitorId = monitor.attributes[ConfigKey.MONITOR_QUERY_ID];
      const monitorStatus = statusData.get(monitorId);

      // discard any locations that are not in the monitorLocationsMap for the given monitor as well as those which are
      // in monitorLocationsMap but not in listOfLocations
      const monLocations = monitor.attributes[ConfigKey.LOCATIONS];
      monLocations?.forEach((monLocation) => {
        if (!isEmpty(queryLocIds) && !queryLocIds?.includes(monLocation.id)) {
          // filter out location provided via query
          return;
        }
        const locData = monitorStatus?.find((loc) => loc.locationId === monLocation.id);
        const meta = {
          monitorQueryId: monitorId,
          locationId: monLocation.id,
          timestamp: locData?.timestamp,
          locationLabel: monLocation.label,
          ...this.getMonitorMeta(monitor),
        };
        const monLocId = `${meta.configId}-${monLocation.id}`;
        if (locData) {
          if (locData.status === 'down') {
            down += 1;
            downConfigs[monLocId] = {
              ...meta,
              status: 'down',
            };
          } else if (locData.status === 'up') {
            up += 1;
            upConfigs[monLocId] = {
              ...meta,
              status: 'up',
            };
          }
        } else {
          // No summary data for this enabled location -> pending/unknown.
          pendingConfigs[monLocId] = {
            status: 'unknown',
            ...meta,
          };
        }
      });
    });

    return {
      up,
      down,
      pending: Object.values(pendingConfigs).length,
      upConfigs,
      downConfigs,
      pendingConfigs,
      disabledConfigs,
    };
  }

  /**
   * Fetches all monitor saved objects matching the request's search/filter,
   * limited to the fields needed for overview status computation.
   */
  async getMonitorConfigs() {
    const { savedObjectsClient, request } = this.routeContext;
    const { query, showFromAllSpaces } = request.query || {};
    /**
     * Walk through all monitor saved objects, bucket IDs by disabled/enabled status.
     *
     * Track max period to make sure the snapshot query should reach back far enough to catch
     * latest ping for all enabled monitors.
     */

    const { filtersStr } = this.filterData;

    return await getAllMonitors({
      soClient: savedObjectsClient,
      showFromAllSpaces,
      // Trailing wildcard gives prefix-style matching for the free-text query.
      search: query ? `${query}*` : '',
      filter: filtersStr,
      fields: [
        ConfigKey.ENABLED,
        ConfigKey.LOCATIONS,
        ConfigKey.MONITOR_QUERY_ID,
        ConfigKey.CONFIG_ID,
        ConfigKey.SCHEDULE,
        ConfigKey.MONITOR_SOURCE_TYPE,
        ConfigKey.MONITOR_TYPE,
        ConfigKey.NAME,
        ConfigKey.TAGS,
        ConfigKey.PROJECT_ID,
        ConfigKey.ALERT_CONFIG,
      ],
    });
  }

  /**
   * Extracts the per-monitor metadata shared by every status record built in
   * processOverviewStatus().
   */
  getMonitorMeta(monitor: SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>) {
    return {
      name: monitor.attributes[ConfigKey.NAME],
      configId: monitor.attributes[ConfigKey.CONFIG_ID],
      schedule: monitor.attributes[ConfigKey.SCHEDULE].number,
      tags: monitor.attributes[ConfigKey.TAGS],
      isEnabled: monitor.attributes[ConfigKey.ENABLED],
      type: monitor.attributes[ConfigKey.MONITOR_TYPE],
      projectId: monitor.attributes[ConfigKey.PROJECT_ID],
      isStatusAlertEnabled: isStatusEnabled(monitor.attributes[ConfigKey.ALERT_CONFIG]),
      updated_at: monitor.updated_at,
      // First namespace is used as the monitor's space id.
      spaceId: monitor.namespaces?.[0],
    };
  }
}
|
|
@ -0,0 +1,27 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { periodToMs } from './utils';
|
||||
|
||||
describe('periodToMs', () => {
|
||||
it('returns 0 for unsupported unit type', () => {
|
||||
// @ts-expect-error Providing invalid value to test handler in function
|
||||
expect(periodToMs({ number: '10', unit: 'rad' })).toEqual(0);
|
||||
});
|
||||
|
||||
it('converts seconds', () => {
|
||||
expect(periodToMs({ number: '10', unit: 's' })).toEqual(10_000);
|
||||
});
|
||||
|
||||
it('converts minutes', () => {
|
||||
expect(periodToMs({ number: '1', unit: 'm' })).toEqual(60_000);
|
||||
});
|
||||
|
||||
it('converts hours', () => {
|
||||
expect(periodToMs({ number: '1', unit: 'h' })).toEqual(3_600_000);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,20 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Helper function that converts a monitor's schedule to a value to use to generate
|
||||
* an appropriate look-back window for snapshot count.
|
||||
* @param schedule a number/unit pair that represents how often a configured monitor runs
|
||||
* @returns schedule interval in ms
|
||||
*/
|
||||
import datemath, { Unit } from '@kbn/datemath';
|
||||
|
||||
export function periodToMs(schedule: { number: string; unit: Unit }) {
|
||||
if (Object.keys(datemath.unitsMap).indexOf(schedule.unit) === -1) return 0;
|
||||
|
||||
return parseInt(schedule.number, 10) * datemath.unitsMap[schedule.unit].base;
|
||||
}
|
|
@ -11,8 +11,9 @@ import {
|
|||
SavedObjectsFindResult,
|
||||
} from '@kbn/core-saved-objects-api-server';
|
||||
import { intersection } from 'lodash';
|
||||
import { withApmSpan } from '@kbn/apm-data-access-plugin/server/utils';
|
||||
import { periodToMs } from '../../routes/overview_status/utils';
|
||||
import { syntheticsMonitorType } from '../../../common/types/saved_objects';
|
||||
import { periodToMs } from '../../routes/overview_status/overview_status';
|
||||
import {
|
||||
ConfigKey,
|
||||
EncryptedSyntheticsMonitorAttributes,
|
||||
|
@ -34,26 +35,28 @@ export const getAllMonitors = async ({
|
|||
filter?: string;
|
||||
showFromAllSpaces?: boolean;
|
||||
} & Pick<SavedObjectsFindOptions, 'sortField' | 'sortOrder' | 'fields' | 'searchFields'>) => {
|
||||
const finder = soClient.createPointInTimeFinder<EncryptedSyntheticsMonitorAttributes>({
|
||||
type: syntheticsMonitorType,
|
||||
perPage: 1000,
|
||||
search,
|
||||
sortField,
|
||||
sortOrder,
|
||||
fields,
|
||||
filter,
|
||||
searchFields,
|
||||
...(showFromAllSpaces && { namespaces: ['*'] }),
|
||||
return withApmSpan('get_all_monitors', async () => {
|
||||
const finder = soClient.createPointInTimeFinder<EncryptedSyntheticsMonitorAttributes>({
|
||||
type: syntheticsMonitorType,
|
||||
perPage: 5000,
|
||||
search,
|
||||
sortField,
|
||||
sortOrder,
|
||||
fields,
|
||||
filter,
|
||||
searchFields,
|
||||
...(showFromAllSpaces && { namespaces: ['*'] }),
|
||||
});
|
||||
|
||||
const hits: Array<SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>> = [];
|
||||
for await (const result of finder.find()) {
|
||||
hits.push(...result.saved_objects);
|
||||
}
|
||||
|
||||
finder.close().catch(() => {});
|
||||
|
||||
return hits;
|
||||
});
|
||||
|
||||
const hits: Array<SavedObjectsFindResult<EncryptedSyntheticsMonitorAttributes>> = [];
|
||||
for await (const result of finder.find()) {
|
||||
hits.push(...result.saved_objects);
|
||||
}
|
||||
|
||||
finder.close().catch(() => {});
|
||||
|
||||
return hits;
|
||||
};
|
||||
|
||||
export const processMonitors = (
|
||||
|
@ -86,13 +89,13 @@ export const processMonitors = (
|
|||
|
||||
monitorQueryIdToConfigIdMap[attrs[ConfigKey.MONITOR_QUERY_ID]] = attrs[ConfigKey.CONFIG_ID];
|
||||
|
||||
const monitorLocations = attrs[ConfigKey.LOCATIONS].map((location) => location.id);
|
||||
const monitorLocIds = attrs[ConfigKey.LOCATIONS].map((location) => location.id);
|
||||
|
||||
if (attrs[ConfigKey.ENABLED] === false) {
|
||||
const queriedLocations = Array.isArray(queryLocations) ? queryLocations : [queryLocations];
|
||||
const intersectingLocations = intersection(
|
||||
monitorLocations,
|
||||
queryLocations ? queriedLocations : monitorLocations
|
||||
monitorLocIds,
|
||||
queryLocations ? queriedLocations : monitorLocIds
|
||||
);
|
||||
disabledCount += intersectingLocations.length;
|
||||
disabledMonitorsCount += 1;
|
||||
|
@ -101,9 +104,9 @@ export const processMonitors = (
|
|||
enabledMonitorQueryIds.push(attrs[ConfigKey.MONITOR_QUERY_ID]);
|
||||
|
||||
monitorLocationsMap[attrs[ConfigKey.MONITOR_QUERY_ID]] = queryLocations
|
||||
? intersection(monitorLocations, queryLocations)
|
||||
: monitorLocations;
|
||||
listOfLocationsSet = new Set([...listOfLocationsSet, ...monitorLocations]);
|
||||
? intersection(monitorLocIds, queryLocations)
|
||||
: monitorLocIds;
|
||||
listOfLocationsSet = new Set([...listOfLocationsSet, ...monitorLocIds]);
|
||||
|
||||
maxPeriod = Math.max(maxPeriod, periodToMs(attrs[ConfigKey.SCHEDULE]));
|
||||
}
|
||||
|
|
|
@ -4,107 +4,114 @@
|
|||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { withApmSpan } from '@kbn/apm-data-access-plugin/server/utils/with_apm_span';
|
||||
import { DEFAULT_SPACE_ID } from '@kbn/spaces-plugin/common';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { isKibanaResponse } from '@kbn/core-http-server';
|
||||
import { isTestUser, SyntheticsEsClient } from './lib';
|
||||
import { checkIndicesReadPrivileges } from './synthetics_service/authentication/check_has_privilege';
|
||||
import { SYNTHETICS_INDEX_PATTERN } from '../common/constants';
|
||||
import { syntheticsServiceApiKey } from './saved_objects/service_api_key';
|
||||
import { isTestUser, SyntheticsEsClient } from './lib';
|
||||
import { SYNTHETICS_INDEX_PATTERN } from '../common/constants';
|
||||
import { checkIndicesReadPrivileges } from './synthetics_service/authentication/check_has_privilege';
|
||||
import { SyntheticsRouteWrapper } from './routes/types';
|
||||
|
||||
export const syntheticsRouteWrapper: SyntheticsRouteWrapper = (
|
||||
uptimeRoute,
|
||||
syntheticsRoute,
|
||||
server,
|
||||
syntheticsMonitorClient
|
||||
) => ({
|
||||
...uptimeRoute,
|
||||
...syntheticsRoute,
|
||||
options: {
|
||||
...(uptimeRoute.options ?? {}),
|
||||
...(syntheticsRoute.options ?? {}),
|
||||
},
|
||||
security: {
|
||||
authz: {
|
||||
requiredPrivileges: [
|
||||
'uptime-read',
|
||||
...(uptimeRoute.requiredPrivileges ?? []),
|
||||
...(uptimeRoute?.writeAccess ? ['uptime-write'] : []),
|
||||
...(syntheticsRoute.requiredPrivileges ?? []),
|
||||
...(syntheticsRoute?.writeAccess ? ['uptime-write'] : []),
|
||||
],
|
||||
},
|
||||
},
|
||||
handler: async (context, request, response) => {
|
||||
const { elasticsearch, savedObjects, uiSettings } = await context.core;
|
||||
return withApmSpan('synthetics_route_handler', async () => {
|
||||
const { elasticsearch, savedObjects, uiSettings } = await context.core;
|
||||
|
||||
const { client: esClient } = elasticsearch;
|
||||
const savedObjectsClient = savedObjects.getClient({
|
||||
includedHiddenTypes: [syntheticsServiceApiKey.name],
|
||||
});
|
||||
const { client: esClient } = elasticsearch;
|
||||
const savedObjectsClient = savedObjects.getClient({
|
||||
includedHiddenTypes: [syntheticsServiceApiKey.name],
|
||||
});
|
||||
|
||||
// specifically needed for the synthetics service api key generation
|
||||
server.authSavedObjectsClient = savedObjectsClient;
|
||||
// specifically needed for the synthetics service api key generation
|
||||
server.authSavedObjectsClient = savedObjectsClient;
|
||||
|
||||
const syntheticsEsClient = new SyntheticsEsClient(savedObjectsClient, esClient.asCurrentUser, {
|
||||
request,
|
||||
uiSettings,
|
||||
isDev: Boolean(server.isDev) && !isTestUser(server),
|
||||
heartbeatIndices: SYNTHETICS_INDEX_PATTERN,
|
||||
});
|
||||
|
||||
server.syntheticsEsClient = syntheticsEsClient;
|
||||
|
||||
const spaceId = server.spaces?.spacesService.getSpaceId(request) ?? DEFAULT_SPACE_ID;
|
||||
|
||||
try {
|
||||
const res = await uptimeRoute.handler({
|
||||
syntheticsEsClient,
|
||||
const syntheticsEsClient = new SyntheticsEsClient(
|
||||
savedObjectsClient,
|
||||
context,
|
||||
request,
|
||||
response,
|
||||
server,
|
||||
spaceId,
|
||||
syntheticsMonitorClient,
|
||||
});
|
||||
if (isKibanaResponse(res)) {
|
||||
return res;
|
||||
}
|
||||
|
||||
const inspectData = await syntheticsEsClient.getInspectData(uptimeRoute.path);
|
||||
|
||||
if (Array.isArray(res)) {
|
||||
if (isEmpty(inspectData)) {
|
||||
return response.ok({
|
||||
body: res,
|
||||
});
|
||||
} else {
|
||||
return response.ok({
|
||||
body: {
|
||||
result: res,
|
||||
...inspectData,
|
||||
},
|
||||
});
|
||||
esClient.asCurrentUser,
|
||||
{
|
||||
request,
|
||||
uiSettings,
|
||||
isDev: Boolean(server.isDev) && !isTestUser(server),
|
||||
heartbeatIndices: SYNTHETICS_INDEX_PATTERN,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return response.ok({
|
||||
body: {
|
||||
...res,
|
||||
...(await syntheticsEsClient.getInspectData(uptimeRoute.path)),
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
if (e.statusCode === 403) {
|
||||
const privileges = await checkIndicesReadPrivileges(syntheticsEsClient);
|
||||
if (!privileges.has_all_requested) {
|
||||
return response.forbidden({
|
||||
body: {
|
||||
message:
|
||||
'MissingIndicesPrivileges: You do not have permission to read from the synthetics-* indices. Please contact your administrator.',
|
||||
},
|
||||
});
|
||||
server.syntheticsEsClient = syntheticsEsClient;
|
||||
|
||||
const spaceId = server.spaces?.spacesService.getSpaceId(request) ?? DEFAULT_SPACE_ID;
|
||||
|
||||
try {
|
||||
const res = await syntheticsRoute.handler({
|
||||
syntheticsEsClient,
|
||||
savedObjectsClient,
|
||||
context,
|
||||
request,
|
||||
response,
|
||||
server,
|
||||
spaceId,
|
||||
syntheticsMonitorClient,
|
||||
});
|
||||
if (isKibanaResponse(res)) {
|
||||
return res;
|
||||
}
|
||||
|
||||
const inspectData = await syntheticsEsClient.getInspectData(syntheticsRoute.path);
|
||||
|
||||
if (Array.isArray(res)) {
|
||||
if (isEmpty(inspectData)) {
|
||||
return response.ok({
|
||||
body: res,
|
||||
});
|
||||
} else {
|
||||
return response.ok({
|
||||
body: {
|
||||
result: res,
|
||||
...inspectData,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return response.ok({
|
||||
body: {
|
||||
...res,
|
||||
...inspectData,
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
if (e.statusCode === 403) {
|
||||
const privileges = await checkIndicesReadPrivileges(syntheticsEsClient);
|
||||
if (!privileges.has_all_requested) {
|
||||
return response.forbidden({
|
||||
body: {
|
||||
message:
|
||||
'MissingIndicesPrivileges: You do not have permission to read from the synthetics-* indices. Please contact your administrator.',
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
server.logger.error(e);
|
||||
throw e;
|
||||
}
|
||||
server.logger.error(e);
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
|
|
|
@ -356,11 +356,18 @@ export class SyntheticsService {
|
|||
if (output) {
|
||||
this.logger.debug(`1 monitor will be pushed to synthetics service.`);
|
||||
|
||||
this.syncErrors = await this.apiClient.post({
|
||||
monitors,
|
||||
output,
|
||||
license,
|
||||
});
|
||||
this.apiClient
|
||||
.post({
|
||||
monitors,
|
||||
output,
|
||||
license,
|
||||
})
|
||||
.then((res) => {
|
||||
this.syncErrors = res;
|
||||
})
|
||||
.catch((e) => {
|
||||
this.logger.error(e);
|
||||
});
|
||||
}
|
||||
return this.syncErrors;
|
||||
} catch (e) {
|
||||
|
|
|
@ -106,7 +106,8 @@
|
|||
"@kbn/core-chrome-browser",
|
||||
"@kbn/core-rendering-browser",
|
||||
"@kbn/index-lifecycle-management-common-shared",
|
||||
"@kbn/core-http-server-utils"
|
||||
"@kbn/core-http-server-utils",
|
||||
"@kbn/apm-data-access-plugin"
|
||||
],
|
||||
"exclude": ["target/**/*"]
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue