Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[SLO] [Alert detail page] render log rate analysis for custom kql (#179147)
Fixes https://github.com/elastic/kibana/issues/178446
This PR adds the following AIOps features to the burn rate alert details page:
- Log Rate Analysis
- ML features / AI Assistant
## Implementation details
Log rate analysis is useful when there is a significant dip or spike in the number of logs. It finds significant differences between the logs in the baseline (before the dip/spike) and in the deviation (after the dip/spike). For Log Rate Analysis to work on the alert details page of an SLO burn rate rule we had to specify a few params; here are the most important ones (see the snippet after this list):
- dataView
- esSearchQuery
- timeRange -> still a work in progress; I need a few clarifications regarding the lookBackPeriod
- initialAnalysisStart
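For context, the alert details panel passes these params to the `LogRateAnalysisContent` component roughly as follows (abridged from the new `LogRateAnalysisPanel` in this PR; props such as the color overrides and `appDependencies` are omitted here):
```
<LogRateAnalysisContent
  embeddingOrigin="observability_slo_burn_rate_alert_details"
  dataView={dataView}
  esSearchQuery={esSearchQuery}
  timeRange={timeRange}
  initialAnalysisStart={initialAnalysisStart}
  onAnalysisCompleted={onAnalysisCompleted}
/>
```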
For the `esSearchQuery`, here is what we use:
```
const finalQuery = {
  bool: {
    filter: [customTotalFilter, customFilters, customGroupByFilters],
    must_not: customGoodFilter,
  },
};
```
We add the total query, any optional filters, and any group-by filters to the `filter` clause of the ES query, whereas the good query goes into the `must_not` clause.
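Each of those pieces is itself produced with `buildEsQuery` from the corresponding SLI setting; abridged from the new `getESQueryForLogRateAnalysis` helper added in this PR:
```
const customGoodFilter = buildEsQuery({ kuery: goodKuery, filters: goodFilters });
const customTotalFilter = buildEsQuery({ kuery: totalKuery, filters: totalFilters });
const customFilters = buildEsQuery({ kuery: filterKuery, filters: filterFilters });
const customGroupByFilters = buildEsQuery({ kuery: '', filters: groupByFilters });
```
where each `*Kuery`/`*Filters` pair is extracted from the SLO indicator params (a plain KQL string or a `kqlWithFiltersSchema` object).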
## 🔬 How to test
- Add the following lines to your `config/kibana.dev.yaml`:
- `server.basePath: '/kibana'`
- `server.publicBaseUrl: 'http://localhost:5601/kibana'`
- Run the following command to generate some good events: `node x-pack/scripts/data_forge.js --events-per-cycle 50 --lookback now-1d --dataset fake_stack --install-kibana-assets --kibana-url http://localhost:5601/kibana --event-template good`
- Wait until the log message says `info Waiting 60000ms`
- Create one SLO:
- "Admin Console Availability" using the "Custom Query" SLI with the `Admin Console` DataView, set the "Good query" to `http.response.status_code < 500` and set the "Total query" to `http.response.status_code: *`, using a rolling `7d` time window
- You should have 1 burn rate rule created by default
- Stop the first `data_forge.js` command
- Start `node x-pack/scripts/data_forge.js --events-per-cycle 50
--lookback now --dataset fake_stack --install-kibana-assets --kibana-url
http://localhost:5601/kibana --event-template bad`
- Go to the alert details page of the burn rate rule and verify that the Log Rate Analysis tool detected a spike
Note: you can skip the first step, but in that case you should start Kibana with `yarn start --no-base-path` instead. That way you don't have to pass `--kibana-url` when running the `data_forge` commands.
I included some snapshot tests with different SLOs that can be used to verify the generation of the ES query. Here are a few examples (see the sketch after this list):
- Good query (KQL), no optional filter, no total query
- Good query (filter), no optional filter, no total query
- Good query (KQL), optional filter (KQL), no total query
- Good query (KQL), optional filter (filter), no total query
- Good query (KQL), optional filter (KQL), total query (KQL)
- Good query (filter), optional filter (filter), total query (KQL)
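Each case is a `params` object (plus optional `groupBy`/`groupings`) fed to the helper and compared against a snapshot; abridged from the new test file in this PR:
```
test.each(testData)(
  'should generate correct es query for $title',
  ({ params, groupBy, groupings }) => {
    expect(
      getESQueryForLogRateAnalysis(params as KQLCustomIndicator['params'], groupBy, groupings)
    ).toMatchSnapshot();
  }
);
```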
💀 I will need extra help testing this feature, to make sure that the params provided to the `LogRateAnalysisContent` component are correct, especially `esSearchQuery` and `timeRange` (`timeRange` is still WIP, but I want some early feedback on the `lookbackDuration` and the `intervalFactor`).
## ✔️ Acceptance criteria
- Render the Log Rate Analysis tool only to platinum users and above (the license check is sketched below)
- Render AI assistant contextual insights only to platinum users and above
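The gating lives in the new `CustomKqlPanels` component, which only renders the Log Rate Analysis panel (and, through it, the AI Assistant contextual insight) when the license check passes; abridged:
```
const { hasAtLeast } = useLicense();
const hasLicenseForLogRateAnalysis = hasAtLeast('platinum');
return hasLicenseForLogRateAnalysis ? (
  <LogRateAnalysisPanel slo={slo} alert={alert} rule={rule} />
) : null;
```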
---------
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Cauê Marcondes <55978943+cauemarcondes@users.noreply.github.com>
Co-authored-by: Walter Rafelsberger <walter.rafelsberger@elastic.co>
This commit is contained in: parent 2c4f1a2204, commit 42e24b58ed. 14 changed files with 1206 additions and 2 deletions.
```
@@ -15,6 +15,7 @@ import {
  kqlWithFiltersSchema,
  metricCustomIndicatorSchema,
  querySchema,
  groupingsSchema,
  syntheticsAvailabilityIndicatorSchema,
  timesliceMetricBasicMetricWithField,
  timesliceMetricDocCountMetric,
@@ -41,6 +42,7 @@ type HistogramIndicator = t.OutputOf<typeof histogramIndicatorSchema>;
type KQLCustomIndicator = t.OutputOf<typeof kqlCustomIndicatorSchema>;
type KqlWithFiltersSchema = t.TypeOf<typeof kqlWithFiltersSchema>;
type QuerySchema = t.TypeOf<typeof querySchema>;
type GroupingsSchema = t.TypeOf<typeof groupingsSchema>;

export type {
  APMTransactionDurationIndicator,
@@ -57,4 +59,5 @@ export type {
  KQLCustomIndicator,
  KqlWithFiltersSchema,
  QuerySchema,
  GroupingsSchema,
};
```
```
@@ -0,0 +1,61 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { difference, first } from 'lodash';
import { euiPaletteColorBlind } from '@elastic/eui';

export type Color =
  | 'color0'
  | 'color1'
  | 'color2'
  | 'color3'
  | 'color4'
  | 'color5'
  | 'color6'
  | 'color7'
  | 'color8'
  | 'color9';

export type Palette = {
  [K in Color]: string;
};

const euiPalette = euiPaletteColorBlind();

export const defaultPalette: Palette = {
  color0: euiPalette[1], // (blue)
  color1: euiPalette[2], // (pink)
  color2: euiPalette[0], // (green-ish)
  color3: euiPalette[3], // (purple)
  color4: euiPalette[4], // (light pink)
  color5: euiPalette[5], // (yellow)
  color6: euiPalette[6], // (tan)
  color7: euiPalette[7], // (orange)
  color8: euiPalette[8], // (brown)
  color9: euiPalette[9], // (red)
};

export const createPaletteTransformer = (palette: Palette) => (color: Color) => palette[color];

export const colorTransformer = createPaletteTransformer(defaultPalette);

export const sampleColor = (usedColors: Color[] = []): Color => {
  const allColors: Color[] = [
    'color0',
    'color1',
    'color2',
    'color3',
    'color4',
    'color5',
    'color6',
    'color7',
    'color8',
    'color9',
  ];
  const available = difference(allColors, usedColors);
  return first(available) || allColors[0];
};
```
```
@@ -141,6 +141,7 @@ export {
  SYNTHETICS_WAIT_TIMINGS,
} from './field_names/synthetics';

export { type Color, colorTransformer } from './color_palette';
export { ObservabilityTriggerId } from './trigger_ids';
export { getInspectResponse } from './utils/get_inspect_response';
```
```
@@ -11,6 +11,7 @@
    "slo"
  ],
  "requiredPlugins": [
    "aiops",
    "alerting",
    "cases",
    "charts",
```
```
@@ -15,6 +15,7 @@ import { useFetchSloDetails } from '../../../../hooks/use_fetch_slo_details';
import { BurnRateRuleParams } from '../../../../typings/slo';
import { AlertsHistoryPanel } from './components/alerts_history/alerts_history_panel';
import { ErrorRatePanel } from './components/error_rate/error_rate_panel';
import { CustomAlertDetailsPanel } from './components/custom_panels/custom_panels';

export type BurnRateRule = Rule<BurnRateRuleParams>;
export type BurnRateAlert = TopAlert;
@@ -72,6 +73,7 @@ export default function AlertDetailsAppSection({
  return (
    <EuiFlexGroup direction="column" data-test-subj="overviewSection">
      <ErrorRatePanel alert={alert} slo={slo} isLoading={isLoading} />
      <CustomAlertDetailsPanel alert={alert} slo={slo} rule={rule} />
      <AlertsHistoryPanel alert={alert} rule={rule} slo={slo} isLoading={isLoading} />
    </EuiFlexGroup>
  );
```
```
@@ -0,0 +1,26 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { GetSLOResponse } from '@kbn/slo-schema';
import React from 'react';
import { LogRateAnalysisPanel } from './log_rate_analysis_panel';
import { BurnRateAlert, BurnRateRule } from '../../../alert_details_app_section';
import { useLicense } from '../../../../../../../hooks/use_license';

interface Props {
  slo: GetSLOResponse;
  alert: BurnRateAlert;
  rule: BurnRateRule;
}

export function CustomKqlPanels({ slo, alert, rule }: Props) {
  const { hasAtLeast } = useLicense();
  const hasLicenseForLogRateAnalysis = hasAtLeast('platinum');
  return hasLicenseForLogRateAnalysis ? (
    <LogRateAnalysisPanel slo={slo} alert={alert} rule={rule} />
  ) : null;
}
```
@@ -0,0 +1,480 @@

New Jest snapshot file (`// Jest Snapshot v1, https://goo.gl/fbAQLP`, 480 lines) holding the expected `getESQueryForLogRateAnalysis` output for each of the seven `buildEsQuery` test cases in the test file below. Every snapshot has the same overall shape: the total query, the optional filter and the group-by filters appear as `bool` sub-queries inside the top-level `filter` array, while the good query appears under `must_not`. Concretely, the good query `http.response.status_code < 500` renders as a `range` query (inside a `minimum_should_match: 1` / `should` clause when given as KQL, or as a bare `range` filter when given as a saved filter); the optional filter `host.name: admin-console.prod.001` renders as a `match` (KQL) or `match_phrase` (saved filter); the total query `http.response.status_code: *` renders as an `exists` query; and the group-by case adds `match_phrase` filters for `not_nested_1: "authentication"` and `not_nested_2: "blast-mail.co"`.
@@ -0,0 +1,195 @@

New test file for the ES query helper (195 lines, with the same Elastic License 2.0 header as the other new files). It defines seven SLO configurations and snapshots the query generated for each: the good query, optional filter and total query are given either as KQL strings or as `kqlWithFiltersSchema` objects (saved filters with `$state`, `meta` and `query` fields, e.g. a `range` filter on `http.response.status_code < 500` or a `match_phrase` filter on `host.name: admin-console.prod.001` against the `kbn-data-forge-fake_stack.admin-console-*-id` index), and one case adds `groupBy: ['not_nested_1', 'not_nested_2']` with `groupings: { not_nested_1: 'authentication', not_nested_2: 'blast-mail.co' }`. The skeleton:
```
import { getESQueryForLogRateAnalysis } from './log_rate_analysis_query';
import { KQLCustomIndicator } from '@kbn/slo-schema';

describe('buildEsQuery', () => {
  const testData = [
    {
      title: 'rule with good query (as KQL), no optional filter and no total',
      params: { filter: '', total: '', good: 'http.response.status_code < 500' },
    },
    // ...six more cases covering the good/optional/total queries as KQL or saved
    // filters, plus the group-by case described above
  ];

  test.each(testData)(
    'should generate correct es query for $title',
    ({ params, groupBy, groupings }) => {
      expect(
        getESQueryForLogRateAnalysis(params as KQLCustomIndicator['params'], groupBy, groupings)
      ).toMatchSnapshot();
    }
  );
});
```
```
@@ -0,0 +1,76 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { kqlWithFiltersSchema } from '@kbn/slo-schema';
import { Filter, FilterStateStore } from '@kbn/es-query';
import { buildEsQuery } from '@kbn/observability-plugin/public';
import { KQLCustomIndicator, GroupingsSchema, ALL_VALUE } from '@kbn/slo-schema';
import { isEmpty } from 'lodash';

export const getESQueryForLogRateAnalysis = (
  params: KQLCustomIndicator['params'],
  groupBy?: string | string[],
  groupings?: GroupingsSchema
) => {
  const { filter, good, total } = params;

  const filterKuery = kqlWithFiltersSchema.is(filter) ? filter.kqlQuery : filter;
  const filterFilters: Filter[] = [];

  if (kqlWithFiltersSchema.is(filter)) {
    filter.filters.forEach((i) => filterFilters.push(i));
  }
  const goodKuery = kqlWithFiltersSchema.is(good) ? good.kqlQuery : good;
  const goodFilters = kqlWithFiltersSchema.is(good) ? good.filters : [];
  const totalKuery = kqlWithFiltersSchema.is(total) ? total.kqlQuery : total;
  const totalFilters = kqlWithFiltersSchema.is(total) ? total.filters : [];
  const customGoodFilter = buildEsQuery({ kuery: goodKuery, filters: goodFilters });
  const customTotalFilter = buildEsQuery({ kuery: totalKuery, filters: totalFilters });
  const customFilters = buildEsQuery({ kuery: filterKuery, filters: filterFilters });
  const groupByFilters: Filter[] = [];

  if (groupBy && groupings) {
    const groupByFields = [groupBy].flat();
    if (
      !isEmpty(groupings) &&
      groupByFields &&
      groupByFields.length > 0 &&
      groupByFields.every((field) => field === ALL_VALUE) === false
    ) {
      groupByFields.forEach((field) => {
        groupByFilters.push({
          meta: {
            disabled: false,
            negate: false,
            alias: null,
            key: field,
            params: {
              query: groupings[field],
            },
            type: 'phrase',
            index: params.index,
          },
          $state: {
            store: FilterStateStore.APP_STATE,
          },
          query: {
            match_phrase: {
              [field]: groupings[field],
            },
          },
        });
      });
    }
  }
  const customGroupByFilters = buildEsQuery({ kuery: '', filters: groupByFilters });
  const finalQuery = {
    bool: {
      filter: [customTotalFilter, customFilters, customGroupByFilters],
      must_not: customGoodFilter,
    },
  };
  return finalQuery;
};
```
@@ -0,0 +1,322 @@

New `LogRateAnalysisPanel` component (322 lines, with the same Elastic License 2.0 header as the other new files). It resolves the SLO's data view by matching the indicator's index pattern against the fetched data views, builds `esSearchQuery` with `getESQueryForLogRateAnalysis(params, groupBy, groupings)`, derives the analysis time ranges from the burn rate window that matches the alert's action group, and renders `LogRateAnalysisContent` (with `embeddingOrigin="observability_slo_burn_rate_alert_details"`, `dataView`, `esSearchQuery`, `timeRange`, `initialAnalysisStart`, bar color overrides from `colorTransformer`, `onAnalysisCompleted`, and `appDependencies` picked from the Kibana services) inside an `EuiPanel` titled "Log Rate Analysis". The time-range logic under review:
```
function getDataTimeRange(
  timeRange: { gte: string; lte?: string },
  window: WindowSchema
): TimeRange {
  const windowDurationInMs = window.longWindow.value * 60 * 60 * 1000;
  return {
    from: new Date(new Date(timeRange.gte).getTime() - windowDurationInMs),
    to: timeRange.lte ? new Date(timeRange.lte) : new Date(),
  };
}

// Identify `intervalFactor` to adjust time ranges based on alert settings.
// The default time ranges for `initialAnalysisStart` are suitable for a `1m` lookback;
// a `5m` lookback results in a factor of `5`. The long/short window lookback durations
// come from the burn rate window matching the alert's action group (default `1m`).
// If the available time range before the alert start is shorter than 3x the long window
// (not enough to cover both the deviation lookback and the baseline time range), fall
// back to the short window.
const lookbackDurationAsSeconds =
  longWindowLookbackDurationAsSeconds * 3 < chartStartToAlertStart
    ? longWindowLookbackDurationAsSeconds
    : shortWindowLookbackDurationAsSeconds;
const intervalFactor = Math.max(1, lookbackDurationAsSeconds / 60);

const initialAnalysisStart = {
  baselineMin: alertStart.clone().subtract(13 * intervalFactor, 'minutes').valueOf(),
  baselineMax: alertStart.clone().subtract(2 * intervalFactor, 'minutes').valueOf(),
  deviationMin: alertStart.clone().subtract(1 * intervalFactor, 'minutes').valueOf(),
  // deviationMax: the alert end minus 1 * intervalFactor minutes when the alert has ended,
  // otherwise alert start plus 10 * intervalFactor minutes, capped at "now"
  deviationMax: getDeviationMax(),
};
```
When the analysis completes, `onAnalysisCompleted` keeps the top 50 significant field/value pairs ordered by p-value and doc count, and the component builds a "Possible causes and remediations" prompt via `observabilityAIAssistant.getContextualInsightMessages` describing the detected spike or dip and the significant field/value combinations, rendered through `ObservabilityAIAssistantContextualInsight` when the AI Assistant is available.
```
@@ -0,0 +1,26 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React from 'react';
import { GetSLOResponse } from '@kbn/slo-schema';
import { CustomKqlPanels } from './custom_kql/custom_kql_panels';
import { BurnRateAlert, BurnRateRule } from '../../alert_details_app_section';

interface Props {
  alert: BurnRateAlert;
  rule: BurnRateRule;
  slo?: GetSLOResponse;
}

export function CustomAlertDetailsPanel({ slo, alert, rule }: Props) {
  switch (slo?.indicator.type) {
    case 'sli.kql.custom':
      return <CustomKqlPanels slo={slo} alert={alert} rule={rule} />;
    default:
      return null;
  }
}
```
```
@@ -209,7 +209,14 @@ export function EventsChartPanel({ slo, range }: Props) {
            showLegendExtra={false}
            legendPosition={Position.Left}
            noResults={
-             <EuiIcon type="visualizeApp" size="l" color="subdued" title="no results" />
+             <EuiIcon
+               type="visualizeApp"
+               size="l"
+               color="subdued"
+               title={i18n.translate('xpack.slo.eventsChartPanel.euiIcon.noResultsLabel', {
+                 defaultMessage: 'no results',
+               })}
+             />
            }
            onPointerUpdate={handleCursorUpdate}
            externalPointerEvents={{
```
```
@@ -13,6 +13,7 @@ import type {
  ObservabilitySharedPluginSetup,
  ObservabilitySharedPluginStart,
} from '@kbn/observability-shared-plugin/public';
import { AiopsPluginStart } from '@kbn/aiops-plugin/public/types';
import type { ChartsPluginStart } from '@kbn/charts-plugin/public';
import type { EmbeddableStart } from '@kbn/embeddable-plugin/public';
import type { EmbeddableSetup } from '@kbn/embeddable-plugin/public';
@@ -72,6 +73,7 @@ export interface SloPublicPluginsSetup {

export interface SloPublicPluginsStart {
  actionTypeRegistry: ActionTypeRegistryContract;
  aiops: AiopsPluginStart;
  cases: CasesPublicStart;
  cloud?: CloudStart;
  dataViewEditor: DataViewEditorStart;
```
```
@@ -85,6 +85,9 @@
    "@kbn/discover-plugin",
    "@kbn/field-formats-plugin",
    "@kbn/core-http-server",
    "@kbn/aiops-plugin",
    "@kbn/presentation-publishing",
    "@kbn/aiops-log-rate-analysis",
    "@kbn/test-jest-helpers",
    "@kbn/core-ui-settings-browser-mocks",
    "@kbn/core-i18n-browser-mocks",
@@ -94,6 +97,5 @@
    "@kbn/data-view-field-editor-plugin",
    "@kbn/securitysolution-io-ts-utils",
    "@kbn/core-elasticsearch-server-mocks",
    "@kbn/presentation-publishing"
  ]
}
```