mirror of https://github.com/elastic/kibana.git
[8.x] [Investigation app] add entities route and investigation Contextual Insight (#194432) (#195158)
# Backport

This will backport the following commits from `main` to `8.x`:
- [[Investigation app] add entities route and investigation Contextual Insight (#194432)](https://github.com/elastic/kibana/pull/194432)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport)

## Summary

Adds a route that can be used to fetch entities related to an investigation.

The route fetches associated entities by service name, host name, or container id. It then identifies the associated indices and data streams.

The discovered entities are passed to the contextual insight to inform the LLM.

This PR represents the first step in developing an AI-informed hypothesis at the beginning of the investigation. Over time, further insights will be provided to the LLM to deepen its investigative analysis and propose a more helpful root cause hypothesis.

### Testing

1. Create some APM data. I'm using the otel demo and triggering a failure via the flagd service. Since this is in flux, you can reach out to me about this workflow. However, you can also create APM data via `synth-trace`.
2. Create a custom threshold rule that you expect to trigger an alert. I created mine using `http.response.status_code: 500 / http.response.status_code: *` and set a low threshold based on the amount of failures in my current test data. Be sure to also group the alert by `service.name`.
3. Wait for the alert to fire, then visit the alert details page and start an investigation.
4. Notice the contextual insight. Expand it to see more information.

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Rickyanto Ang <rickyangwyn@gmail.com>
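For reviewers who want to poke at the new endpoint outside the UI, here is a minimal sketch of a direct request, based on the `getEntitiesParamsSchema` and `useFetchEntities` code in this diff. The route path, query keys, and the `2023-10-31` version come from this PR; `checkout-service` is a placeholder service name, and `elastic-api-version` is the standard header for selecting a versioned Kibana API:

```typescript
// Hypothetical example: query the new entities route directly from a browser
// console on a Kibana page. Only one of service.name / host.name / container.id
// is required; all query params are optional per getEntitiesParamsSchema.
const query = new URLSearchParams({ 'service.name': 'checkout-service' });

const response = await fetch(
  `/api/observability/investigation/entities?${query.toString()}`,
  { headers: { 'elastic-api-version': '2023-10-31' } }
);

// The shape matches GetEntitiesResponse: { entities: EntityWithSource[] }
const { entities } = await response.json();
entities.forEach((entity: { displayName: string; type: string }) =>
  console.log(entity.displayName, entity.type)
);
```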
This commit is contained in:
parent
abc351f552
commit
c099f33d32
25 changed files with 1485 additions and 53 deletions
48 packages/kbn-investigation-shared/src/rest_specs/entity.ts (Normal file)
@@ -0,0 +1,48 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { z } from '@kbn/zod';

const metricsSchema = z.object({
  failedTransactionRate: z.number().optional(),
  latency: z.number().optional(),
  throughput: z.number().optional(),
  logErrorRate: z.number().optional(),
  logRate: z.number().optional(),
});

const entitySchema = z.object({
  id: z.string(),
  definitionId: z.string(),
  definitionVersion: z.string(),
  displayName: z.string(),
  firstSeenTimestamp: z.string(),
  lastSeenTimestamp: z.string(),
  identityFields: z.array(z.string()),
  schemaVersion: z.string(),
  type: z.string(),
  metrics: metricsSchema,
});

const entitySourceSchema = z.object({
  dataStream: z.string().optional(),
});

const entityWithSourceSchema = z.intersection(
  entitySchema,
  z.object({
    sources: z.array(entitySourceSchema),
  })
);

type EntityWithSource = z.output<typeof entityWithSourceSchema>;
type EntitySource = z.output<typeof entitySourceSchema>;

export { entitySchema, entityWithSourceSchema };
export type { EntityWithSource, EntitySource };
@@ -0,0 +1,34 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { z } from '@kbn/zod';
import { entityWithSourceSchema } from './entity';

const getEntitiesParamsSchema = z
  .object({
    query: z
      .object({
        'service.name': z.string(),
        'service.environment': z.string(),
        'host.name': z.string(),
        'container.id': z.string(),
      })
      .partial(),
  })
  .partial();

const getEntitiesResponseSchema = z.object({
  entities: z.array(entityWithSourceSchema),
});

type GetEntitiesParams = z.infer<typeof getEntitiesParamsSchema.shape.query>;
type GetEntitiesResponse = z.output<typeof getEntitiesResponseSchema>;

export { getEntitiesParamsSchema, getEntitiesResponseSchema };
export type { GetEntitiesParams, GetEntitiesResponse };
@@ -27,6 +27,8 @@ export type * from './update_item';
export type * from './update_note';
export type * from './event';
export type * from './get_events';
+export type * from './entity';
+export type * from './get_entities';

export * from './create';
export * from './create_item';

@@ -48,3 +50,5 @@ export * from './update_item';
export * from './update_note';
export * from './event';
export * from './get_events';
+export * from './entity';
+export * from './get_entities';
@@ -9,7 +9,6 @@
  "configPath": ["xpack", "investigateApp"],
  "requiredPlugins": [
    "investigate",
-    "observabilityAIAssistant",
    "observabilityShared",
    "lens",
    "dataViews",

@@ -28,7 +27,7 @@
    "kibanaReact",
    "kibanaUtils",
  ],
-  "optionalPlugins": [],
+  "optionalPlugins": ["observabilityAIAssistant"],
  "extraPublicDirs": []
 }
}
@@ -22,6 +22,16 @@ export const investigationKeys = {
    [...investigationKeys.detail(investigationId), 'notes'] as const,
  detailItems: (investigationId: string) =>
    [...investigationKeys.detail(investigationId), 'items'] as const,
+  entities: ({
+    investigationId,
+    ...params
+  }: {
+    investigationId: string;
+    serviceName?: string;
+    serviceEnvironment?: string;
+    hostName?: string;
+    containerId?: string;
+  }) => [...investigationKeys.detail(investigationId), 'entities', params] as const,
};

export type InvestigationKeys = typeof investigationKeys;
@@ -7,10 +7,11 @@

import { useQuery } from '@tanstack/react-query';
import { BASE_RAC_ALERTS_API_PATH, EcsFieldsResponse } from '@kbn/rule-registry-plugin/common';
-import { useKibana } from '../../../hooks/use_kibana';
+import { type GetInvestigationResponse, alertOriginSchema } from '@kbn/investigation-shared';
+import { useKibana } from './use_kibana';

-export interface AlertParams {
-  id?: string;
+export interface UseFetchAlertParams {
+  investigation?: GetInvestigationResponse;
}

export interface UseFetchAlertResponse {

@@ -22,20 +23,22 @@ export interface UseFetchAlertResponse {
  data: EcsFieldsResponse | undefined | null;
}

-export function useFetchAlert({ id }: AlertParams): UseFetchAlertResponse {
+export function useFetchAlert({ investigation }: UseFetchAlertParams): UseFetchAlertResponse {
  const {
    core: {
      http,
      notifications: { toasts },
    },
  } = useKibana();
+  const alertOriginInvestigation = alertOriginSchema.safeParse(investigation?.origin);
+  const alertId = alertOriginInvestigation.success ? alertOriginInvestigation.data.id : undefined;

  const { isInitialLoading, isLoading, isError, isSuccess, isRefetching, data } = useQuery({
-    queryKey: ['fetchAlert', id],
+    queryKey: ['fetchAlert', investigation?.id],
    queryFn: async ({ signal }) => {
      return await http.get<EcsFieldsResponse>(BASE_RAC_ALERTS_API_PATH, {
        query: {
-          id,
+          id: alertId,
        },
        signal,
      });

@@ -47,7 +50,7 @@ export function useFetchAlert({ id }: AlertParams): UseFetchAlertResponse {
        title: 'Something went wrong while fetching alert',
      });
    },
-    enabled: Boolean(id),
+    enabled: Boolean(alertId),
  });

  return {
@@ -0,0 +1,67 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { useQuery } from '@tanstack/react-query';
import { GetEntitiesResponse } from '@kbn/investigation-shared';
import { useKibana } from './use_kibana';
import { investigationKeys } from './query_key_factory';

export interface EntityParams {
  investigationId: string;
  serviceName?: string;
  serviceEnvironment?: string;
  hostName?: string;
  containerId?: string;
}

export function useFetchEntities({
  investigationId,
  serviceName,
  serviceEnvironment,
  hostName,
  containerId,
}: EntityParams) {
  const {
    core: { http },
  } = useKibana();

  const { isInitialLoading, isLoading, isError, isSuccess, isRefetching, data } = useQuery({
    queryKey: investigationKeys.entities({
      investigationId,
      serviceName,
      serviceEnvironment,
      hostName,
      containerId,
    }),
    queryFn: async ({ signal }) => {
      return await http.get<GetEntitiesResponse>('/api/observability/investigation/entities', {
        query: {
          'service.name': serviceName,
          'service.environment': serviceEnvironment,
          'host.name': hostName,
          'container.id': containerId,
        },
        version: '2023-10-31',
        signal,
      });
    },
    refetchOnWindowFocus: false,
    onError: (error: Error) => {
      // ignore error
    },
    enabled: Boolean(investigationId && (serviceName || hostName || containerId)),
  });

  return {
    data,
    isInitialLoading,
    isLoading,
    isRefetching,
    isSuccess,
    isError,
  };
}
@@ -5,13 +5,12 @@
 * 2.0.
 */

-import { alertOriginSchema } from '@kbn/investigation-shared';
import { ALERT_REASON, ALERT_START, ALERT_STATUS } from '@kbn/rule-data-utils';
import type { EcsFieldsResponse } from '@kbn/rule-registry-plugin/common';
import dedent from 'dedent';
import { useEffect } from 'react';
-import { useKibana } from '../../../hooks/use_kibana';
-import { useInvestigation } from '../contexts/investigation_context';
+import { useKibana } from './use_kibana';
+import { useInvestigation } from '../pages/details/contexts/investigation_context';
+import { useFetchAlert } from './use_fetch_alert';

export function useScreenContext() {

@@ -22,9 +21,7 @@ export function useScreenContext() {
  } = useKibana();

  const { investigation } = useInvestigation();
-  const alertOriginInvestigation = alertOriginSchema.safeParse(investigation?.origin);
-  const alertId = alertOriginInvestigation.success ? alertOriginInvestigation.data.id : undefined;
-  const { data: alertDetails, isLoading: isAlertDetailsLoading } = useFetchAlert({ id: alertId });
+  const { data: alertDetails, isLoading: isAlertDetailsLoading } = useFetchAlert({ investigation });

  useEffect(() => {
    if (!investigation || isAlertDetailsLoading) {
@@ -8,7 +8,7 @@ import { EuiLoadingSpinner, EuiFlexItem } from '@elastic/eui';
import { css } from '@emotion/css';
import { ReactEmbeddableRenderer } from '@kbn/embeddable-plugin/public';
import type { GlobalWidgetParameters } from '@kbn/investigate-plugin/public';
-import { useAbortableAsync } from '@kbn/observability-ai-assistant-plugin/public';
+import { useAbortableAsync } from '@kbn/observability-utils/hooks/use_abortable_async';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import { v4 } from 'uuid';
import { ErrorMessage } from '../../components/error_message';

@@ -10,7 +10,7 @@ import type { ESQLSearchResponse } from '@kbn/es-types';
import { i18n } from '@kbn/i18n';
import { type GlobalWidgetParameters } from '@kbn/investigate-plugin/public';
import type { Suggestion } from '@kbn/lens-plugin/public';
-import { useAbortableAsync } from '@kbn/observability-ai-assistant-plugin/public';
+import { useAbortableAsync } from '@kbn/observability-utils/hooks/use_abortable_async';
import React, { useMemo } from 'react';
import { ErrorMessage } from '../../components/error_message';
import { useKibana } from '../../hooks/use_kibana';

@@ -11,7 +11,7 @@ import type { ESQLColumn, ESQLRow } from '@kbn/es-types';
import { GlobalWidgetParameters } from '@kbn/investigate-plugin/public';
import { Item } from '@kbn/investigation-shared';
import type { Suggestion } from '@kbn/lens-plugin/public';
-import { useAbortableAsync } from '@kbn/observability-ai-assistant-plugin/public';
+import { useAbortableAsync } from '@kbn/observability-utils/hooks/use_abortable_async';
import React, { useEffect, useMemo, useState } from 'react';
import { ErrorMessage } from '../../../../components/error_message';
import { SuggestVisualizationList } from '../../../../components/suggest_visualization_list';
@@ -0,0 +1,122 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import dedent from 'dedent';
import {
  ALERT_RULE_PARAMETERS,
  ALERT_START,
  ALERT_RULE_CATEGORY,
  ALERT_REASON,
} from '@kbn/rule-data-utils';
import { i18n } from '@kbn/i18n';
import { EntityWithSource } from '@kbn/investigation-shared';
import React, { useCallback } from 'react';
import { useKibana } from '../../../../hooks/use_kibana';
import { useInvestigation } from '../../contexts/investigation_context';
import { useFetchEntities } from '../../../../hooks/use_fetch_entities';

export interface InvestigationContextualInsight {
  key: string;
  description: string;
  data: unknown;
}

export function AssistantHypothesis({ investigationId }: { investigationId: string }) {
  const { alert } = useInvestigation();
  const {
    dependencies: {
      start: {
        observabilityAIAssistant: {
          ObservabilityAIAssistantContextualInsight,
          getContextualInsightMessages,
        },
      },
    },
  } = useKibana();
  const { data: entitiesData } = useFetchEntities({
    investigationId,
    serviceName: alert?.['service.name'] ? `${alert?.['service.name']}` : undefined,
    serviceEnvironment: alert?.['service.environment']
      ? `${alert?.['service.environment']}`
      : undefined,
    hostName: alert?.['host.name'] ? `${alert?.['host.name']}` : undefined,
    containerId: alert?.['container.id'] ? `${alert?.['container.id']}` : undefined,
  });

  const getAlertContextMessages = useCallback(async () => {
    if (!getContextualInsightMessages || !alert) {
      return [];
    }

    const entities = entitiesData?.entities ?? [];

    const entityContext = entities?.length
      ? `
    Alerts can optionally be associated with entities. Entities can be services, hosts, containers, or other resources. Entities can have metrics associated with them.

    The alert that triggered this investigation is associated with the following entities: ${entities
      .map((entity, index) => {
        return dedent(`
        ## Entity ${index + 1}:
        ${formatEntityMetrics(entity)};
        `);
      })
      .join('\n\n')}`
      : '';

    return getContextualInsightMessages({
      message: `I am investigating a failure in my system. I was made aware of the failure by an alert and I am trying to understand the root cause of the issue.`,
      instructions: dedent(
        `I'm an SRE. I am investigating a failure in my system. I was made aware of the failure via an alert. Your current task is to help me identify the root cause of the failure in my system.

        The rule that triggered the alert is a ${
          alert[ALERT_RULE_CATEGORY]
        } rule. The alert started at ${alert[ALERT_START]}. The alert reason is ${
          alert[ALERT_REASON]
        }. The rule parameters are ${JSON.stringify(alert[ALERT_RULE_PARAMETERS])}.

        ${entityContext}

        Based on the alert details, suggest a root cause and next steps to mitigate the issue.

        I do not have the alert details or entity details in front of me, so be sure to repeat the alert reason (${
          alert[ALERT_REASON]
        }), when the alert was triggered (${
          alert[ALERT_START]
        }), and the entity metrics in your response.

        When displaying the entity metrics, please convert the metrics to a human-readable format. For example, convert "logRate" to "Log Rate" and "errorRate" to "Error Rate".
        `
      ),
    });
  }, [alert, getContextualInsightMessages, entitiesData?.entities]);

  if (!ObservabilityAIAssistantContextualInsight) {
    return null;
  }

  return alert && entitiesData ? (
    <ObservabilityAIAssistantContextualInsight
      title={i18n.translate(
        'xpack.investigateApp.assistantHypothesis.observabilityAIAssistantContextualInsight.helpMeInvestigateThisLabel',
        { defaultMessage: 'Help me investigate this failure' }
      )}
      messages={getAlertContextMessages}
    />
  ) : null;
}
const formatEntityMetrics = (entity: EntityWithSource): string => {
  const entityMetrics = Object.entries(entity.metrics)
    .map(([key, value]) => `${key}: ${value}`)
    .join(', ');
  const entitySources = entity.sources.map((source) => source.dataStream).join(', ');
  return dedent(`
    Entity name: ${entity.displayName};
    Entity type: ${entity.type};
    Entity metrics: ${entityMetrics};
    Entity data streams: ${entitySources}
  `);
};
@@ -16,7 +16,7 @@ import { useInvestigation } from '../../contexts/investigation_context';
import { InvestigationHeader } from '../investigation_header/investigation_header';
import { InvestigationItems } from '../investigation_items/investigation_items';
import { InvestigationNotes } from '../investigation_notes/investigation_notes';
-import { useScreenContext } from '../../hooks/use_screen_context';
+import { useScreenContext } from '../../../../hooks/use_screen_context';

interface Props {
  user: AuthenticatedUser;
@@ -6,12 +6,14 @@
 */

import { EuiButtonEmpty, EuiText } from '@elastic/eui';
-import { alertOriginSchema } from '@kbn/investigation-shared';
-import { ALERT_RULE_CATEGORY } from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
+import {
+  ALERT_RULE_CATEGORY,
+  ALERT_UUID,
+} from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
import React from 'react';
import { useKibana } from '../../../../hooks/use_kibana';
+import { useFetchAlert } from '../../../../hooks/use_fetch_alert';
import { useInvestigation } from '../../contexts/investigation_context';
-import { useFetchAlert } from '../../hooks/use_fetch_alert';

export function AlertDetailsButton() {
  const {

@@ -21,9 +23,7 @@ export function AlertDetailsButton() {
  } = useKibana();
  const { investigation } = useInvestigation();

-  const alertOriginInvestigation = alertOriginSchema.safeParse(investigation?.origin);
-  const alertId = alertOriginInvestigation.success ? alertOriginInvestigation.data.id : undefined;
-  const { data: alertDetails } = useFetchAlert({ id: alertId });
+  const { data: alertDetails } = useFetchAlert({ investigation });

  if (!alertDetails) {
    return null;

@@ -33,7 +33,7 @@ export function AlertDetailsButton() {
      data-test-subj="investigationDetailsAlertLink"
      iconType="arrowLeft"
      size="xs"
-      href={basePath.prepend(`/app/observability/alerts/${alertId}`)}
+      href={basePath.prepend(`/app/observability/alerts/${alertDetails[ALERT_UUID]}`)}
    >
      <EuiText size="s">{`[Alert] ${alertDetails?.[ALERT_RULE_CATEGORY]} breached`}</EuiText>
    </EuiButtonEmpty>
@@ -12,9 +12,10 @@ import { useInvestigation } from '../../contexts/investigation_context';
import { AddInvestigationItem } from '../add_investigation_item/add_investigation_item';
import { InvestigationItemsList } from '../investigation_items_list/investigation_items_list';
import { InvestigationSearchBar } from '../investigation_search_bar/investigation_search_bar';
+import { AssistantHypothesis } from '../assistant_hypothesis/assistant_hypothesis';

export function InvestigationItems() {
-  const { globalParams, updateInvestigationParams } = useInvestigation();
+  const { globalParams, updateInvestigationParams, investigation } = useInvestigation();

  return (
    <EuiFlexGroup direction="column" gutterSize="m">

@@ -33,10 +34,14 @@ export function InvestigationItems() {
        />
      </EuiFlexItem>

+      {investigation?.id && (
+        <EuiFlexItem grow={false}>
+          <AssistantHypothesis investigationId={investigation.id} />
+        </EuiFlexItem>
+      )}
      <EuiFlexItem grow={false}>
        <InvestigationItemsList />
      </EuiFlexItem>

      <AddInvestigationItem />
    </EuiFlexGroup>
  );
@@ -7,6 +7,7 @@

import { i18n } from '@kbn/i18n';
import { type GlobalWidgetParameters } from '@kbn/investigate-plugin/public';
+import { EcsFieldsResponse } from '@kbn/rule-registry-plugin/common';
import { GetInvestigationResponse, InvestigationItem, Item } from '@kbn/investigation-shared';
import { isEqual } from 'lodash';
import React, { createContext, useContext, useEffect, useRef, useState } from 'react';

@@ -15,6 +16,7 @@ import { useAddInvestigationNote } from '../../../hooks/use_add_investigation_no
import { useDeleteInvestigationItem } from '../../../hooks/use_delete_investigation_item';
import { useDeleteInvestigationNote } from '../../../hooks/use_delete_investigation_note';
import { useFetchInvestigation } from '../../../hooks/use_fetch_investigation';
+import { useFetchAlert } from '../../../hooks/use_fetch_alert';
import { useKibana } from '../../../hooks/use_kibana';
import { useUpdateInvestigation } from '../../../hooks/use_update_investigation';
import { useUpdateInvestigationNote } from '../../../hooks/use_update_investigation_note';

@@ -26,6 +28,7 @@ export type RenderedInvestigationItem = InvestigationItem & {

interface InvestigationContextProps {
  investigation?: GetInvestigationResponse;
+  alert?: EcsFieldsResponse;
  renderableItems: RenderedInvestigationItem[];
  globalParams: GlobalWidgetParameters;
  updateInvestigationParams: (params: GlobalWidgetParameters) => Promise<void>;

@@ -81,6 +84,7 @@ export function InvestigationProvider({
    id: initialInvestigation.id,
    initialInvestigation,
  });
+  const { data: alert } = useFetchAlert({ investigation });

  const cache = useRef<
    Record<string, { globalParams: GlobalWidgetParameters; item: RenderedInvestigationItem }>

@@ -211,6 +215,7 @@ export function InvestigationProvider({
        renderableItems,
        updateInvestigationParams,
        investigation,
+        alert: alert ?? undefined,
        globalParams,
        addItem,
        deleteItem,
@@ -4,6 +4,10 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
+import type {
+  ObservabilityAIAssistantPublicSetup,
+  ObservabilityAIAssistantPublicStart,
+} from '@kbn/observability-ai-assistant-plugin/public';
import type { ContentManagementPublicStart } from '@kbn/content-management-plugin/public';
import type { DataPublicPluginSetup, DataPublicPluginStart } from '@kbn/data-plugin/public';
import type {

@@ -27,10 +31,6 @@
import type { SecurityPluginSetup, SecurityPluginStart } from '@kbn/security-plugin/public';
import type { UiActionsSetup, UiActionsStart } from '@kbn/ui-actions-plugin/public';
import type { UnifiedSearchPublicPluginStart } from '@kbn/unified-search-plugin/public';
-import type {
-  ObservabilityAIAssistantPublicSetup,
-  ObservabilityAIAssistantPublicStart,
-} from '@kbn/observability-ai-assistant-plugin/public';

/* eslint-disable @typescript-eslint/no-empty-interface*/

@@ -41,6 +41,7 @@ export interface ConfigSchema {
export interface InvestigateAppSetupDependencies {
  investigate: InvestigatePublicSetup;
  observabilityShared: ObservabilitySharedPluginSetup;
+  observabilityAIAssistant: ObservabilityAIAssistantPublicSetup;
  lens: LensPublicSetup;
  dataViews: DataViewsPublicPluginSetup;
  data: DataPublicPluginSetup;

@@ -50,12 +51,12 @@ export interface InvestigateAppSetupDependencies {
  unifiedSearch: {};
  uiActions: UiActionsSetup;
  security: SecurityPluginSetup;
-  observabilityAIAssistant: ObservabilityAIAssistantPublicSetup;
}

export interface InvestigateAppStartDependencies {
  investigate: InvestigatePublicStart;
  observabilityShared: ObservabilitySharedPluginStart;
+  observabilityAIAssistant: ObservabilityAIAssistantPublicStart;
  lens: LensPublicStart;
  dataViews: DataViewsPublicPluginStart;
  data: DataPublicPluginStart;

@@ -65,7 +66,6 @@ export interface InvestigateAppStartDependencies {
  unifiedSearch: UnifiedSearchPublicPluginStart;
  uiActions: UiActionsStart;
  security: SecurityPluginStart;
-  observabilityAIAssistant: ObservabilityAIAssistantPublicStart;
}

export interface InvestigateAppPublicSetup {}
@@ -0,0 +1,127 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ESSearchRequest, InferSearchResponseOf } from '@kbn/es-types';
import type { KibanaRequest } from '@kbn/core/server';
import { ElasticsearchClient } from '@kbn/core/server';
import { entitiesAliasPattern, ENTITY_LATEST } from '@kbn/entities-schema';
import { unwrapEsResponse } from '@kbn/observability-plugin/common/utils/unwrap_es_response';
import {
  MsearchMultisearchBody,
  MsearchMultisearchHeader,
} from '@elastic/elasticsearch/lib/api/types';

export const SERVICE_ENTITIES_LATEST_ALIAS = entitiesAliasPattern({
  type: 'service',
  dataset: ENTITY_LATEST,
});
export const HOST_ENTITIES_LATEST_ALIAS = entitiesAliasPattern({
  type: 'host',
  dataset: ENTITY_LATEST,
});
export const CONTAINER_ENTITIES_LATEST_ALIAS = entitiesAliasPattern({
  type: 'container',
  dataset: ENTITY_LATEST,
});
type LatestAlias =
  | typeof SERVICE_ENTITIES_LATEST_ALIAS
  | typeof HOST_ENTITIES_LATEST_ALIAS
  | typeof CONTAINER_ENTITIES_LATEST_ALIAS;

export function cancelEsRequestOnAbort<T extends Promise<any>>(
  promise: T,
  request: KibanaRequest,
  controller: AbortController
): T {
  const subscription = request.events.aborted$.subscribe(() => {
    controller.abort();
  });

  return promise.finally(() => subscription.unsubscribe()) as T;
}

export interface EntitiesESClient {
  search<TDocument = unknown, TSearchRequest extends ESSearchRequest = ESSearchRequest>(
    indexName: string,
    searchRequest: TSearchRequest
  ): Promise<InferSearchResponseOf<TDocument, TSearchRequest>>;
  msearch<TDocument = unknown, TSearchRequest extends ESSearchRequest = ESSearchRequest>(
    allSearches: TSearchRequest[]
  ): Promise<{ responses: Array<InferSearchResponseOf<TDocument, TSearchRequest>> }>;
}

export function createEntitiesESClient({
  request,
  esClient,
}: {
  request: KibanaRequest;
  esClient: ElasticsearchClient;
}) {
  function search<TDocument = unknown, TSearchRequest extends ESSearchRequest = ESSearchRequest>(
    indexName: string,
    searchRequest: TSearchRequest
  ): Promise<InferSearchResponseOf<TDocument, TSearchRequest>> {
    const controller = new AbortController();

    const promise = cancelEsRequestOnAbort(
      esClient.search(
        { ...searchRequest, index: [indexName], ignore_unavailable: true },
        {
          signal: controller.signal,
          meta: true,
        }
      ) as unknown as Promise<{
        body: InferSearchResponseOf<TDocument, TSearchRequest>;
      }>,
      request,
      controller
    );

    return unwrapEsResponse(promise);
  }

  return {
    async search<TDocument = unknown, TSearchRequest extends ESSearchRequest = ESSearchRequest>(
      entityIndexAlias: LatestAlias,
      searchRequest: TSearchRequest
    ): Promise<InferSearchResponseOf<TDocument, TSearchRequest>> {
      return search(entityIndexAlias, searchRequest);
    },

    async msearch<TDocument = unknown, TSearchRequest extends ESSearchRequest = ESSearchRequest>(
      allSearches: Array<TSearchRequest & { index: LatestAlias }>
    ): Promise<{ responses: Array<InferSearchResponseOf<TDocument, TSearchRequest>> }> {
      const searches = allSearches
        .map((params) => {
          const searchParams: [MsearchMultisearchHeader, MsearchMultisearchBody] = [
            {
              index: [params.index],
              ignore_unavailable: true,
            },
            {
              ...params.body,
            },
          ];

          return searchParams;
        })
        .flat();

      const promise = esClient.msearch(
        { searches },
        {
          meta: true,
        }
      ) as unknown as Promise<{
        body: { responses: Array<InferSearchResponseOf<TDocument, TSearchRequest>> };
      }>;

      const { body } = await promise;
      return { responses: body.responses };
    },
  };
}
@@ -0,0 +1,493 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ElasticsearchClient } from '@kbn/core/server';
import moment from 'moment';
import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
import { calculateAuto } from '@kbn/calculate-auto';
import {
  type RandomSamplerWrapper,
  createRandomSamplerWrapper,
} from '@kbn/ml-random-sampler-utils';
import { z } from '@kbn/zod';

const isoTimestampFormat = "YYYY-MM-DD'T'HH:mm:ss.SSS'Z'";

export interface LogCategory {
  change: LogCategoryChange;
  documentCount: number;
  histogram: LogCategoryHistogramBucket[];
  terms: string;
}

export type LogCategoryChange =
  | LogCategoryNoChange
  | LogCategoryRareChange
  | LogCategorySpikeChange
  | LogCategoryDipChange
  | LogCategoryStepChange
  | LogCategoryDistributionChange
  | LogCategoryTrendChange
  | LogCategoryOtherChange
  | LogCategoryUnknownChange;

export interface LogCategoryNoChange {
  type: 'none';
}

export interface LogCategoryRareChange {
  type: 'rare';
  timestamp: string;
}

export interface LogCategorySpikeChange {
  type: 'spike';
  timestamp: string;
}

export interface LogCategoryDipChange {
  type: 'dip';
  timestamp: string;
}

export interface LogCategoryStepChange {
  type: 'step';
  timestamp: string;
}

export interface LogCategoryTrendChange {
  type: 'trend';
  timestamp: string;
  correlationCoefficient: number;
}

export interface LogCategoryDistributionChange {
  type: 'distribution';
  timestamp: string;
}

export interface LogCategoryOtherChange {
  type: 'other';
  timestamp?: string;
}

export interface LogCategoryUnknownChange {
  type: 'unknown';
  rawChange: string;
}

export interface LogCategoryHistogramBucket {
  documentCount: number;
  timestamp: string;
}

export interface LogCategorizationParams {
  documentFilters: QueryDslQueryContainer[];
  endTimestamp: string;
  index: string;
  messageField: string;
  startTimestamp: string;
  timeField: string;
}

// the fraction of a category's histogram below which the category is considered rare
const rarityThreshold = 0.2;

export const categorizeDocuments = async ({
  esClient,
  index,
  endTimestamp,
  startTimestamp,
  timeField,
  messageField,
  samplingProbability,
  ignoredCategoryTerms,
  documentFilters = [],
  minDocsPerCategory,
}: {
  esClient: ElasticsearchClient;
  index: string;
  endTimestamp: string;
  startTimestamp: string;
  timeField: string;
  messageField: string;
  samplingProbability: number;
  ignoredCategoryTerms: string[];
  documentFilters?: QueryDslQueryContainer[];
  minDocsPerCategory?: number;
}) => {
  const randomSampler = createRandomSamplerWrapper({
    probability: samplingProbability,
    seed: 1,
  });

  const requestParams = createCategorizationRequestParams({
    index,
    timeField,
    messageField,
    startTimestamp,
    endTimestamp,
    randomSampler,
    additionalFilters: documentFilters,
    ignoredCategoryTerms,
    minDocsPerCategory,
    maxCategoriesCount: 1000,
  });

  const rawResponse = await esClient.search(requestParams);

  if (rawResponse.aggregations == null) {
    throw new Error('No aggregations found in large categories response');
  }

  const logCategoriesAggResult = randomSampler.unwrap(rawResponse.aggregations);

  if (!('categories' in logCategoriesAggResult)) {
    throw new Error('No categorization aggregation found in large categories response');
  }

  const logCategories =
    (logCategoriesAggResult.categories.buckets as unknown[]).map(mapCategoryBucket) ?? [];

  return {
    categories: logCategories,
    hasReachedLimit: logCategories.length >= 1000,
  };
};

const mapCategoryBucket = (bucket: any): LogCategory =>
  esCategoryBucketSchema
    .transform((parsedBucket) => ({
      change: mapChangePoint(parsedBucket),
      documentCount: parsedBucket.doc_count,
      histogram: parsedBucket.histogram,
      terms: parsedBucket.key,
    }))
    .parse(bucket);

const mapChangePoint = ({ change, histogram }: EsCategoryBucket): LogCategoryChange => {
  switch (change.type) {
    case 'stationary':
      if (isRareInHistogram(histogram)) {
        return {
          type: 'rare',
          timestamp: findFirstNonZeroBucket(histogram)?.timestamp ?? histogram[0].timestamp,
        };
      } else {
        return {
          type: 'none',
        };
      }
    case 'dip':
    case 'spike':
      return {
        type: change.type,
        timestamp: change.bucket.key,
      };
    case 'step_change':
      return {
        type: 'step',
        timestamp: change.bucket.key,
      };
    case 'distribution_change':
      return {
        type: 'distribution',
        timestamp: change.bucket.key,
      };
    case 'trend_change':
      return {
        type: 'trend',
        timestamp: change.bucket.key,
        correlationCoefficient: change.details.r_value,
      };
    case 'unknown':
      return {
        type: 'unknown',
        rawChange: change.rawChange,
      };
    case 'non_stationary':
    default:
      return {
        type: 'other',
      };
  }
};

/**
 * The official types are lacking the change_point aggregation
 */
const esChangePointBucketSchema = z.object({
  key: z.string().datetime(),
  doc_count: z.number(),
});

const esChangePointDetailsSchema = z.object({
  p_value: z.number(),
});

const esChangePointCorrelationSchema = esChangePointDetailsSchema.extend({
  r_value: z.number(),
});

const esChangePointSchema = z.union([
  z
    .object({
      bucket: esChangePointBucketSchema,
      type: z.object({
        dip: esChangePointDetailsSchema,
      }),
    })
    .transform(({ bucket, type: { dip: details } }) => ({
      type: 'dip' as const,
      bucket,
      details,
    })),
  z
    .object({
      bucket: esChangePointBucketSchema,
      type: z.object({
        spike: esChangePointDetailsSchema,
      }),
    })
    .transform(({ bucket, type: { spike: details } }) => ({
      type: 'spike' as const,
      bucket,
      details,
    })),
  z
    .object({
      bucket: esChangePointBucketSchema,
      type: z.object({
        step_change: esChangePointDetailsSchema,
      }),
    })
    .transform(({ bucket, type: { step_change: details } }) => ({
      type: 'step_change' as const,
      bucket,
      details,
    })),
  z
    .object({
      bucket: esChangePointBucketSchema,
      type: z.object({
        trend_change: esChangePointCorrelationSchema,
      }),
    })
    .transform(({ bucket, type: { trend_change: details } }) => ({
      type: 'trend_change' as const,
      bucket,
      details,
    })),
  z
    .object({
      bucket: esChangePointBucketSchema,
      type: z.object({
        distribution_change: esChangePointDetailsSchema,
      }),
    })
    .transform(({ bucket, type: { distribution_change: details } }) => ({
      type: 'distribution_change' as const,
      bucket,
      details,
    })),
  z
    .object({
      type: z.object({
        non_stationary: esChangePointCorrelationSchema.extend({
          trend: z.enum(['increasing', 'decreasing']),
        }),
      }),
    })
    .transform(({ type: { non_stationary: details } }) => ({
      type: 'non_stationary' as const,
      details,
    })),
  z
    .object({
      type: z.object({
        stationary: z.object({}),
      }),
    })
    .transform(() => ({ type: 'stationary' as const })),
  z
    .object({
      type: z.object({}),
    })
    .transform((value) => ({ type: 'unknown' as const, rawChange: JSON.stringify(value) })),
]);

const esHistogramSchema = z
  .object({
    buckets: z.array(
      z
        .object({
          key_as_string: z.string(),
          doc_count: z.number(),
        })
        .transform((bucket) => ({
          timestamp: bucket.key_as_string,
          documentCount: bucket.doc_count,
        }))
    ),
  })
  .transform(({ buckets }) => buckets);

type EsHistogram = z.output<typeof esHistogramSchema>;

const esCategoryBucketSchema = z.object({
  key: z.string(),
  doc_count: z.number(),
  change: esChangePointSchema,
  histogram: esHistogramSchema,
});

type EsCategoryBucket = z.output<typeof esCategoryBucketSchema>;

const isRareInHistogram = (histogram: EsHistogram): boolean =>
  histogram.filter((bucket) => bucket.documentCount > 0).length <
  histogram.length * rarityThreshold;

const findFirstNonZeroBucket = (histogram: EsHistogram) =>
  histogram.find((bucket) => bucket.documentCount > 0);

export const createCategorizationRequestParams = ({
  index,
  timeField,
  messageField,
  startTimestamp,
  endTimestamp,
  randomSampler,
  minDocsPerCategory = 0,
  additionalFilters = [],
  ignoredCategoryTerms = [],
  maxCategoriesCount = 1000,
}: {
  startTimestamp: string;
  endTimestamp: string;
  index: string;
  timeField: string;
  messageField: string;
  randomSampler: RandomSamplerWrapper;
  minDocsPerCategory?: number;
  additionalFilters?: QueryDslQueryContainer[];
  ignoredCategoryTerms?: string[];
  maxCategoriesCount?: number;
}) => {
  const startMoment = moment(startTimestamp, isoTimestampFormat);
  const endMoment = moment(endTimestamp, isoTimestampFormat);
  const fixedIntervalDuration = calculateAuto.atLeast(
    24,
    moment.duration(endMoment.diff(startMoment))
  );
  const fixedIntervalSize = `${Math.ceil(fixedIntervalDuration?.asMinutes() ?? 1)}m`;

  return {
    index,
    size: 0,
    track_total_hits: false,
    query: createCategorizationQuery({
      messageField,
      timeField,
      startTimestamp,
      endTimestamp,
      additionalFilters,
      ignoredCategoryTerms,
    }),
    aggs: randomSampler.wrap({
      histogram: {
        date_histogram: {
          field: '@timestamp',
          fixed_interval: fixedIntervalSize,
          extended_bounds: {
            min: startTimestamp,
            max: endTimestamp,
          },
        },
      },
      categories: {
        categorize_text: {
          field: messageField,
          size: maxCategoriesCount,
          categorization_analyzer: {
            tokenizer: 'standard',
          },
          ...(minDocsPerCategory > 0 ? { min_doc_count: minDocsPerCategory } : {}),
        },
        aggs: {
          histogram: {
            date_histogram: {
              field: timeField,
              fixed_interval: fixedIntervalSize,
              extended_bounds: {
                min: startTimestamp,
                max: endTimestamp,
              },
            },
          },
          change: {
            // @ts-expect-error the official types don't support the change_point aggregation
            change_point: {
              buckets_path: 'histogram>_count',
            },
          },
        },
      },
    }),
  };
};

export const createCategoryQuery =
  (messageField: string) =>
  (categoryTerms: string): QueryDslQueryContainer => ({
    match: {
      [messageField]: {
        query: categoryTerms,
        operator: 'AND' as const,
        fuzziness: 0,
        auto_generate_synonyms_phrase_query: false,
      },
    },
  });

export const createCategorizationQuery = ({
  messageField,
  timeField,
  startTimestamp,
  endTimestamp,
  additionalFilters = [],
  ignoredCategoryTerms = [],
}: {
  messageField: string;
  timeField: string;
  startTimestamp: string;
  endTimestamp: string;
  additionalFilters?: QueryDslQueryContainer[];
  ignoredCategoryTerms?: string[];
}): QueryDslQueryContainer => {
  return {
    bool: {
      filter: [
        {
          exists: {
            field: messageField,
          },
        },
        {
          range: {
            [timeField]: {
              gte: startTimestamp,
              lte: endTimestamp,
              format: 'strict_date_time',
            },
          },
        },
        ...additionalFilters,
      ],
      must_not: ignoredCategoryTerms.map(createCategoryQuery(messageField)),
    },
  };
};
@ -0,0 +1,276 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import pLimit from 'p-limit';
|
||||
import { estypes } from '@elastic/elasticsearch';
|
||||
import { castArray, sortBy, uniq, partition, shuffle } from 'lodash';
|
||||
import { truncateList } from '@kbn/inference-plugin/common/util/truncate_list';
|
||||
import { QueryDslQueryContainer } from '@kbn/data-views-plugin/common/types';
|
||||
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
|
||||
import { rangeQuery, excludeFrozenQuery } from './queries';
|
||||
|
||||
export interface DocumentAnalysis {
|
||||
total: number;
|
||||
sampled: number;
|
||||
fields: Array<{
|
||||
name: string;
|
||||
types: string[];
|
||||
cardinality: number | null;
|
||||
values: Array<string | number | boolean>;
|
||||
empty: boolean;
|
||||
}>;
|
||||
}
|
||||
|
||||
export async function getSampleDocuments({
|
||||
esClient,
|
||||
start,
|
||||
end,
|
||||
indexPatterns,
|
||||
count,
|
||||
dslFilter,
|
||||
}: {
|
||||
esClient: ElasticsearchClient;
|
||||
start: number;
|
||||
end: number;
|
||||
indexPatterns: string[];
|
||||
count: number;
|
||||
dslFilter?: QueryDslQueryContainer[];
|
||||
}): Promise<{
|
||||
total: number;
|
||||
samples: Array<Record<string, unknown[]>>;
|
||||
}> {
|
||||
return esClient
|
||||
.search({
|
||||
index: indexPatterns,
|
||||
track_total_hits: true,
|
||||
size: count,
|
||||
body: {
|
||||
query: {
|
||||
bool: {
|
||||
should: [
|
||||
{
|
||||
function_score: {
|
||||
functions: [
|
||||
{
|
||||
random_score: {},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
must: [...rangeQuery(start, end), ...(dslFilter ?? [])],
|
||||
},
|
||||
},
|
||||
sort: {
|
||||
_score: {
|
||||
order: 'desc',
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
fields: ['*' as const],
|
||||
},
|
||||
})
|
||||
.then((response) => {
|
||||
const hits = response.hits.total as estypes.SearchTotalHits;
|
||||
if (hits.value === 0) {
|
||||
return {
|
||||
total: 0,
|
||||
samples: [],
|
||||
};
|
||||
}
|
||||
return {
|
||||
total: hits.value,
|
||||
samples: response.hits.hits.map((hit) => hit.fields ?? {}),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export async function getKeywordAndNumericalFields({
|
||||
indexPatterns,
|
||||
esClient,
|
||||
start,
|
||||
end,
|
||||
}: {
|
||||
indexPatterns: string[];
|
||||
esClient: ElasticsearchClient;
|
||||
start: number;
|
||||
end: number;
|
||||
}): Promise<Array<{ name: string; esTypes: string[] }>> {
|
||||
const fieldCaps = await esClient.fieldCaps({
|
||||
index: indexPatterns,
|
||||
fields: '*',
|
||||
include_empty_fields: false,
|
||||
types: [
|
||||
'constant_keyword',
|
||||
'keyword',
|
||||
'integer',
|
||||
'long',
|
||||
'double',
|
||||
'float',
|
||||
'byte',
|
||||
'boolean',
|
||||
'alias',
|
||||
'flattened',
|
||||
'ip',
|
||||
'aggregate_metric_double',
|
||||
'histogram',
|
||||
],
|
||||
index_filter: {
|
||||
bool: {
|
||||
filter: [...excludeFrozenQuery(), ...rangeQuery(start, end)],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return Object.entries(fieldCaps.fields).map(([fieldName, fieldSpec]) => {
|
||||
return {
|
||||
name: fieldName,
|
||||
esTypes: Object.keys(fieldSpec),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function mergeSampleDocumentsWithFieldCaps({
|
||||
total,
|
||||
samples,
|
||||
fieldCaps,
|
||||
}: {
|
||||
total: number;
|
||||
samples: Array<Record<string, unknown[]>>;
|
||||
fieldCaps: Array<{ name: string; esTypes?: string[] }>;
|
||||
}): DocumentAnalysis {
|
||||
const nonEmptyFields = new Set<string>();
|
||||
const fieldValues = new Map<string, Array<string | number | boolean>>();
|
||||
|
||||
for (const document of samples) {
|
||||
Object.keys(document).forEach((field) => {
|
||||
if (!nonEmptyFields.has(field)) {
|
||||
nonEmptyFields.add(field);
|
||||
}
|
||||
|
||||
const values = castArray(document[field]);
|
||||
|
||||
const currentFieldValues = fieldValues.get(field) ?? [];
|
||||
|
||||
values.forEach((value) => {
|
||||
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
|
||||
currentFieldValues.push(value);
|
||||
}
|
||||
});
|
||||
|
||||
fieldValues.set(field, currentFieldValues);
|
||||
});
|
||||
}
|
||||
|
||||
const fields = fieldCaps.flatMap((spec) => {
|
||||
const values = fieldValues.get(spec.name);
|
||||
|
||||
const countByValues = new Map<string | number | boolean, number>();
|
||||
|
||||
values?.forEach((value) => {
|
||||
const currentCount = countByValues.get(value) ?? 0;
|
||||
countByValues.set(value, currentCount + 1);
|
||||
});
|
||||
|
||||
const sortedValues = sortBy(
|
||||
Array.from(countByValues.entries()).map(([value, count]) => {
|
||||
return {
|
||||
value,
|
||||
count,
|
||||
};
|
||||
}),
|
||||
'count',
|
||||
'desc'
|
||||
);
|
||||
|
||||
return {
|
||||
name: spec.name,
|
||||
types: spec.esTypes ?? [],
|
||||
empty: !nonEmptyFields.has(spec.name),
|
||||
cardinality: countByValues.size || null,
|
||||
values: uniq(sortedValues.flatMap(({ value }) => value)),
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
total,
|
||||
sampled: samples.length,
|
||||
fields,
|
||||
};
|
||||
}
|
||||
|
||||
export function sortAndTruncateAnalyzedFields(analysis: DocumentAnalysis) {
|
||||
const { fields, ...meta } = analysis;
|
||||
const [nonEmptyFields, emptyFields] = partition(analysis.fields, (field) => !field.empty);
|
||||
|
||||
const sortedFields = [...shuffle(nonEmptyFields), ...shuffle(emptyFields)];
|
||||
|
||||
return {
|
||||
...meta,
|
||||
fields: truncateList(
|
||||
sortedFields.map((field) => {
|
||||
let name = `${field.name}:${field.types.join(',')}`;
|
||||
|
||||
if (field.empty) {
|
||||
return `${name} (empty)`;
|
||||
}
|
||||
|
||||
name += ` - ${field.cardinality} distinct values`;
|
||||
|
||||
if (
|
||||
field.values.length &&
|
||||
(field.types.includes('keyword') || field.types.includes('text')) &&
|
||||
field.values.length <= 10
|
||||
) {
|
||||
return `${name} (${truncateList(
|
||||
field.values.map((value) => '`' + value + '`'),
|
||||
field.types.includes('text') ? 2 : 25
|
||||
).join(', ')})`;
|
||||
}
|
||||
|
||||
return name;
|
||||
}),
|
||||
500
|
||||
).sort(),
|
||||
};
|
||||
}

export async function confirmConstantsInDataset({
  esClient,
  constants,
  indexPatterns,
}: {
  esClient: ElasticsearchClient;
  constants: Array<{ field: string }>;
  indexPatterns: string[];
}): Promise<Array<{ field: string; constant: boolean; value?: string | number | boolean | null }>> {
  const limiter = pLimit(5);

  return Promise.all(
    constants.map((constant) => {
      return limiter(async () => {
        return esClient
          .termsEnum({
            index: indexPatterns.join(','),
            field: constant.field,
            index_filter: {
              bool: {
                filter: [...excludeFrozenQuery()],
              },
            },
          })
          .then((response) => {
            const isConstant = response.terms.length === 1;
            return {
              field: constant.field,
              constant: isConstant,
              value: isConstant ? response.terms[0] : undefined,
            };
          });
      });
    })
  );
}
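The `pLimit(5)` limiter bounds how many terms enum calls run concurrently: every constant check is scheduled up front, but at most five are in flight at once. A standalone sketch of that pattern, with a placeholder payload standing in for the `termsEnum` round trip:

```ts
// Sketch of the p-limit concurrency pattern used above; checkField is a stand-in.
import pLimit from 'p-limit';

const limiter = pLimit(5); // at most five checks in flight at once

async function checkField(field: string): Promise<boolean> {
  return field.length > 0; // placeholder for the termsEnum round trip
}

export async function checkAll(fields: string[]): Promise<boolean[]> {
  // Tasks are queued eagerly; the limiter releases them five at a time.
  return Promise.all(fields.map((field) => limiter(() => checkField(field))));
}
```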

@@ -0,0 +1,41 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { estypes } from '@elastic/elasticsearch';

export function rangeQuery(
  start: number,
  end: number,
  field = '@timestamp'
): estypes.QueryDslQueryContainer[] {
  return [
    {
      range: {
        [field]: {
          gte: start,
          lte: end,
          format: 'epoch_millis',
        },
      },
    },
  ];
}

export function excludeFrozenQuery(): estypes.QueryDslQueryContainer[] {
  return [
    {
      bool: {
        must_not: [
          {
            term: {
              _tier: 'data_frozen',
            },
          },
        ],
      },
    },
  ];
}
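Both helpers return arrays so callers can spread them into a single `bool.filter` clause. A minimal composition sketch, assuming a local Elasticsearch client; the import path, index name, and epoch-millis bounds are illustrative:

```ts
// Illustrative composition; client, index, and time window are assumptions.
import { Client } from '@elastic/elasticsearch';
import { estypes } from '@elastic/elasticsearch';
import { rangeQuery, excludeFrozenQuery } from './queries';

const esClient = new Client({ node: 'http://localhost:9200' });

const query: estypes.QueryDslQueryContainer = {
  bool: {
    filter: [
      ...rangeQuery(1727900000000, 1727903600000), // field defaults to '@timestamp'
      ...excludeFrozenQuery(), // skip indices on the data_frozen tier
    ],
  },
};

const response = await esClient.search({ index: 'logs-*', query });
```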

@@ -23,6 +23,8 @@ import {
  updateInvestigationParamsSchema,
  getEventsParamsSchema,
  GetEventsResponse,
  getEntitiesParamsSchema,
  GetEntitiesResponse,
} from '@kbn/investigation-shared';
import { ScopedAnnotationsClient } from '@kbn/observability-plugin/server';
import { createInvestigation } from '../services/create_investigation';

@@ -44,6 +46,8 @@ import { updateInvestigationItem } from '../services/update_investigation_item';
import { updateInvestigationNote } from '../services/update_investigation_note';
import { createInvestigateAppServerRoute } from './create_investigate_app_server_route';
import { getAllInvestigationStats } from '../services/get_all_investigation_stats';
import { getEntitiesWithSource } from '../services/get_entities';
import { createEntitiesESClient } from '../clients/create_entities_es_client';

const createInvestigationRoute = createInvestigateAppServerRoute({
  endpoint: 'POST /api/observability/investigations 2023-10-31',

@@ -344,6 +348,39 @@ const getEventsRoute = createInvestigateAppServerRoute({
  },
});

const getEntitiesRoute = createInvestigateAppServerRoute({
  endpoint: 'GET /api/observability/investigation/entities 2023-10-31',
  options: {
    tags: [],
  },
  params: getEntitiesParamsSchema,
  handler: async ({ params, context, request }): Promise<GetEntitiesResponse> => {
    const core = await context.core;
    const esClient = core.elasticsearch.client.asCurrentUser;
    const entitiesEsClient = createEntitiesESClient({ request, esClient });

    const {
      'service.name': serviceName,
      'service.environment': serviceEnvironment,
      'container.id': containerId,
      'host.name': hostName,
    } = params?.query ?? {};

    const { entities } = await getEntitiesWithSource({
      serviceName,
      serviceEnvironment,
      containerId,
      hostName,
      entitiesEsClient,
      esClient,
    });

    return {
      entities,
    };
  },
});
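Once registered, the route is reachable as a versioned HTTP API. A hedged sketch of a browser-side call, assuming Kibana's `core.http` fetch service and its `version` option; the query values are illustrative, and in the app they would come from the alert's grouping fields:

```ts
// Illustrative client-side call; values and calling context are assumptions.
import type { CoreStart } from '@kbn/core/public';
import type { GetEntitiesResponse } from '@kbn/investigation-shared';

async function fetchEntities(core: CoreStart): Promise<GetEntitiesResponse> {
  return core.http.get<GetEntitiesResponse>('/api/observability/investigation/entities', {
    query: { 'service.name': 'checkout', 'service.environment': 'production' },
    version: '2023-10-31', // matches the version suffix in the endpoint string
  });
}
```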

export function getGlobalInvestigateAppServerRouteRepository() {
  return {
    ...createInvestigationRoute,

@@ -360,6 +397,7 @@ export function getGlobalInvestigateAppServerRouteRepository() {
    ...updateInvestigationItemRoute,
    ...getInvestigationItemsRoute,
    ...getEventsRoute,
    ...getEntitiesRoute,
    ...getAllInvestigationStatsRoute,
    ...getAllInvestigationTagsRoute,
  };

@@ -0,0 +1,175 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { z } from '@kbn/zod';
import { entityLatestSchema } from '@kbn/entities-schema';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import type {
  GetEntitiesResponse,
  EntityWithSource,
  EntitySource,
} from '@kbn/investigation-shared';
import type { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { IndicesIndexState } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { EntitiesESClient } from '../clients/create_entities_es_client';
import {
  SERVICE_ENTITIES_LATEST_ALIAS,
  CONTAINER_ENTITIES_LATEST_ALIAS,
  HOST_ENTITIES_LATEST_ALIAS,
} from '../clients/create_entities_es_client';

// The official types do not explicitly define sourceIndex in the schema, but it is present in the data at the time of writing.
type EntitiesLatest = z.infer<typeof entityLatestSchema> & { sourceIndex: string[] };

export async function getEntitiesWithSource({
  serviceEnvironment,
  serviceName,
  containerId,
  hostName,
  entitiesEsClient,
  esClient,
}: {
  serviceName?: string;
  serviceEnvironment?: string;
  containerId?: string;
  hostName?: string;
  entitiesEsClient: EntitiesESClient;
  esClient: ElasticsearchClient;
}): Promise<GetEntitiesResponse> {
  const entityCategoryPromises = getFetchEntitiesPromises({
    entitiesEsClient,
    serviceName,
    serviceEnvironment,
    hostName,
    containerId,
  });
  const entityResponses = await Promise.all(entityCategoryPromises);
  const entitiesWithSource: EntityWithSource[] = [];

  for (const response of entityResponses) {
    const processedEntities = await Promise.all(
      response.map(async (entity: EntitiesLatest) => {
        const sourceIndex = entity?.sourceIndex;
        if (!sourceIndex || !sourceIndex.length) return null;

        const indices = await esClient.indices.get({ index: sourceIndex });
        const sources = await fetchSources(indices);

        return {
          identityFields: entity?.entity.identityFields,
          id: entity?.entity.id,
          definitionId: entity?.entity.definitionId,
          firstSeenTimestamp: entity?.entity.firstSeenTimestamp,
          lastSeenTimestamp: entity?.entity.lastSeenTimestamp,
          displayName: entity?.entity.displayName,
          metrics: entity?.entity.metrics,
          schemaVersion: entity?.entity.schemaVersion,
          definitionVersion: entity?.entity.definitionVersion,
          type: entity?.entity.type,
          sources,
        };
      })
    );
    entitiesWithSource.push(...(processedEntities.filter(Boolean) as EntityWithSource[]));
  }

  return {
    entities: entitiesWithSource,
  };
}
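The `sourceIndex` values recorded on each latest-entity document are resolved through `esClient.indices.get`, whose response maps each concrete index name to its state, including the `data_stream` it backs. An illustrative sketch of the shape consumed by `fetchSources` below; the index name and data stream are made up:

```ts
// Illustrative indices.get response shape; names are assumptions.
import type { IndicesIndexState } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';

const indices: Record<string, IndicesIndexState> = {
  '.ds-traces-apm-default-2024.10.01-000001': {
    data_stream: 'traces-apm-default', // present when the index backs a data stream
  },
};
// fetchSources(indices) => [{ dataStream: 'traces-apm-default' }]
```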

function fetchSources(indices: Record<string, IndicesIndexState>): EntitySource[] {
  return Object.values(indices).map((index) => {
    return getEntitySource({ index });
  });
}

const getEntitySource = ({ index }: { index: IndicesIndexState }) => {
  const dataStream = index.data_stream;
  const source = {
    dataStream,
  };
  return source;
};

const getFetchEntitiesPromises = ({
  entitiesEsClient,
  serviceName,
  serviceEnvironment,
  hostName,
  containerId,
}: {
  entitiesEsClient: EntitiesESClient;
  serviceName?: string;
  hostName?: string;
  containerId?: string;
  serviceEnvironment?: string;
}): Array<Promise<Array<{ sourceIndex: string[]; entity: EntitiesLatest['entity'] }>>> => {
  const shouldFilterForServiceEnvironment =
    serviceEnvironment &&
    serviceName &&
    serviceEnvironment !== 'ENVIRONMENT_ALL' &&
    serviceEnvironment !== 'ENVIRONMENT_NOT_DEFINED';

  const containersPromise = getFetchEntityPromise({
    index: CONTAINER_ENTITIES_LATEST_ALIAS,
    shouldFetch: Boolean(hostName || containerId),
    shouldMatch: [
      ...(hostName ? [{ term: { 'host.name': hostName } }] : []),
      ...(containerId ? [{ term: { 'container.id': containerId } }] : []),
    ],
    entitiesEsClient,
  });

  const hostsPromise = getFetchEntityPromise({
    index: HOST_ENTITIES_LATEST_ALIAS,
    shouldFetch: Boolean(hostName),
    shouldMatch: hostName ? [{ term: { 'host.name': hostName } }] : [],
    entitiesEsClient,
  });

  const servicesPromise = getFetchEntityPromise({
    index: SERVICE_ENTITIES_LATEST_ALIAS,
    shouldFetch: Boolean(serviceName),
    shouldMatch: [
      ...(serviceName ? [{ term: { 'service.name': serviceName } }] : []),
      ...(shouldFilterForServiceEnvironment
        ? [{ term: { 'service.environment': serviceEnvironment } }]
        : []),
    ],
    entitiesEsClient,
  });

  return [containersPromise, hostsPromise, servicesPromise].filter(
    (promise) => promise !== null
  ) as Array<Promise<Array<{ sourceIndex: string[]; entity: EntitiesLatest['entity'] }>>>;
};

const getFetchEntityPromise = ({
  index,
  shouldFetch,
  shouldMatch,
  entitiesEsClient,
}: {
  index: string;
  shouldFetch: boolean;
  shouldMatch: QueryDslQueryContainer[];
  entitiesEsClient: EntitiesESClient;
}): Promise<Array<{ sourceIndex: string[]; entity: EntitiesLatest['entity'] }>> | null => {
  return shouldFetch
    ? entitiesEsClient
        .search<{ sourceIndex: string[]; entity: EntitiesLatest['entity'] }>(index, {
          body: {
            query: {
              bool: {
                should: shouldMatch,
                minimum_should_match: 1,
              },
            },
          },
        })
        .then((response) => {
          return response.hits.hits.map((hit) => {
            return { sourceIndex: hit?._source.sourceIndex, entity: hit._source.entity };
          });
        })
    : null;
};
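For a concrete picture, this is roughly the search body one `getFetchEntityPromise` call builds for the containers lookup when both `host.name` and `container.id` are known; because of `minimum_should_match: 1`, matching either identity field is enough. The term values are illustrative:

```ts
// Roughly the body built above for the containers lookup; values are assumptions.
import type { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';

const body: { query: QueryDslQueryContainer } = {
  query: {
    bool: {
      should: [
        { term: { 'host.name': 'ip-10-0-0-12' } },
        { term: { 'container.id': 'abc123' } },
      ],
      minimum_should_match: 1, // either identity field may match
    },
  },
};
```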

@@ -6,7 +6,6 @@
 */

 import datemath from '@elastic/datemath';
-import { estypes } from '@elastic/elasticsearch';
 import {
   GetEventsParams,
   GetEventsResponse,

@@ -21,24 +20,7 @@ import {
   ALERT_UUID,
 } from '@kbn/rule-data-utils';
 import { AlertsClient } from './get_alerts_client';
-
-export function rangeQuery(
-  start: number,
-  end: number,
-  field = '@timestamp'
-): estypes.QueryDslQueryContainer[] {
-  return [
-    {
-      range: {
-        [field]: {
-          gte: start,
-          lte: end,
-          format: 'epoch_millis',
-        },
-      },
-    },
-  ];
-}
+import { rangeQuery } from '../lib/queries';

 export async function getAnnotationEvents(
   params: GetEventsParams,

@@ -61,5 +61,11 @@
    "@kbn/observability-plugin",
    "@kbn/licensing-plugin",
    "@kbn/rule-data-utils",
    "@kbn/entities-schema",
    "@kbn/inference-plugin",
    "@kbn/core-elasticsearch-server",
    "@kbn/calculate-auto",
    "@kbn/ml-random-sampler-utils",
    "@kbn/observability-utils",
  ],
}