mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
Backports the following commits to 7.x: - [Logs UI] Adapt log entry rate data visualisations (#47558)
This commit is contained in:
parent
fd9f4a4cd6
commit
6d54e58be4
15 changed files with 1076 additions and 175 deletions
|
@ -37,15 +37,17 @@ export const logEntryRateAnomaly = rt.type({
|
|||
typicalLogEntryRate: rt.number,
|
||||
});
|
||||
|
||||
export const logEntryRateDataSetRT = rt.type({
|
||||
export const logEntryRatePartitionRT = rt.type({
|
||||
analysisBucketCount: rt.number,
|
||||
anomalies: rt.array(logEntryRateAnomaly),
|
||||
averageActualLogEntryRate: rt.number,
|
||||
dataSetId: rt.string,
|
||||
maximumAnomalyScore: rt.number,
|
||||
numberOfLogEntries: rt.number,
|
||||
partitionId: rt.string,
|
||||
});
|
||||
|
||||
export const logEntryRateHistogramBucket = rt.type({
|
||||
dataSets: rt.array(logEntryRateDataSetRT),
|
||||
partitions: rt.array(logEntryRatePartitionRT),
|
||||
startTime: rt.number,
|
||||
});
|
||||
|
||||
|
@ -53,6 +55,7 @@ export const getLogEntryRateSuccessReponsePayloadRT = rt.type({
|
|||
data: rt.type({
|
||||
bucketDuration: rt.number,
|
||||
histogramBuckets: rt.array(logEntryRateHistogramBucket),
|
||||
totalNumberOfLogEntries: rt.number,
|
||||
}),
|
||||
});
|
||||
|
||||
|
|
|
@ -9,19 +9,19 @@ import {
|
|||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiPage,
|
||||
EuiPageBody,
|
||||
EuiPageContent,
|
||||
EuiPageContentBody,
|
||||
EuiPanel,
|
||||
EuiSuperDatePicker,
|
||||
EuiBadge,
|
||||
EuiText,
|
||||
} from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import numeral from '@elastic/numeral';
|
||||
import { FormattedMessage } from '@kbn/i18n/react';
|
||||
import moment from 'moment';
|
||||
import React, { useCallback, useMemo, useState } from 'react';
|
||||
|
||||
import euiStyled from '../../../../../../common/eui_styled_components';
|
||||
import { TimeRange } from '../../../../common/http_api/shared/time_range';
|
||||
import { bucketSpan } from '../../../../common/log_analysis';
|
||||
import euiStyled from '../../../../../../common/eui_styled_components';
|
||||
import { LoadingPage } from '../../../components/loading_page';
|
||||
import {
|
||||
StringTimeRange,
|
||||
|
@ -31,6 +31,8 @@ import {
|
|||
import { useTrackPageview } from '../../../hooks/use_track_metric';
|
||||
import { FirstUseCallout } from './first_use';
|
||||
import { LogRateResults } from './sections/log_rate';
|
||||
import { AnomaliesResults } from './sections/anomalies';
|
||||
import { useKibanaUiSetting } from '../../../utils/use_kibana_ui_setting';
|
||||
|
||||
export const AnalysisResultsContent = ({
|
||||
sourceId,
|
||||
|
@ -42,6 +44,8 @@ export const AnalysisResultsContent = ({
|
|||
useTrackPageview({ app: 'infra_logs', path: 'analysis_results' });
|
||||
useTrackPageview({ app: 'infra_logs', path: 'analysis_results', delay: 15000 });
|
||||
|
||||
const [dateFormat] = useKibanaUiSetting('dateFormat', 'MMMM D, YYYY h:mm A');
|
||||
|
||||
const {
|
||||
timeRange: selectedTimeRange,
|
||||
setTimeRange: setSelectedTimeRange,
|
||||
|
@ -56,13 +60,13 @@ export const AnalysisResultsContent = ({
|
|||
const bucketDuration = useMemo(() => {
|
||||
// This function takes the current time range in ms,
|
||||
// works out the bucket interval we'd need to always
|
||||
// display 200 data points, and then takes that new
|
||||
// display 100 data points, and then takes that new
|
||||
// value and works out the nearest multiple of
|
||||
// 900000 (15 minutes) to it, so that we don't end up with
|
||||
// jaggy bucket boundaries between the ML buckets and our
|
||||
// aggregation buckets.
|
||||
const msRange = moment(queryTimeRange.endTime).diff(moment(queryTimeRange.startTime));
|
||||
const bucketIntervalInMs = msRange / 200;
|
||||
const bucketIntervalInMs = msRange / 100;
|
||||
const result = bucketSpan * Math.round(bucketIntervalInMs / bucketSpan);
|
||||
const roundedResult = parseInt(Number(result).toFixed(0), 10);
|
||||
return roundedResult < bucketSpan ? bucketSpan : roundedResult;
|
||||
|
@ -130,28 +134,50 @@ export const AnalysisResultsContent = ({
|
|||
/>
|
||||
) : (
|
||||
<>
|
||||
<EuiPage>
|
||||
<EuiPanel paddingSize="l">
|
||||
<EuiFlexGroup justifyContent="spaceBetween">
|
||||
<EuiFlexItem></EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiSuperDatePicker
|
||||
start={selectedTimeRange.startTime}
|
||||
end={selectedTimeRange.endTime}
|
||||
onTimeChange={handleSelectedTimeRangeChange}
|
||||
isPaused={autoRefresh.isPaused}
|
||||
refreshInterval={autoRefresh.interval}
|
||||
onRefreshChange={handleAutoRefreshChange}
|
||||
onRefresh={handleQueryTimeRangeChange}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiPanel>
|
||||
</EuiPage>
|
||||
<ExpandingPage>
|
||||
<EuiPageBody>
|
||||
<EuiPageContent>
|
||||
<EuiPageContentBody>
|
||||
<ResultsContentPage>
|
||||
<EuiFlexGroup direction="column">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiPanel paddingSize="l">
|
||||
<EuiFlexGroup justifyContent="spaceBetween" alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
{!isLoading && logEntryRate ? (
|
||||
<EuiText size="s">
|
||||
<FormattedMessage
|
||||
id="xpack.infra.logs.analysis.logRateResultsToolbarText"
|
||||
defaultMessage="Analyzed {numberOfLogs} log entries from {startTime} to {endTime}"
|
||||
values={{
|
||||
numberOfLogs: (
|
||||
<EuiBadge color="primary">
|
||||
<EuiText size="s" color="ghost">
|
||||
{numeral(logEntryRate.totalNumberOfLogEntries).format('0.00a')}
|
||||
</EuiText>
|
||||
</EuiBadge>
|
||||
),
|
||||
startTime: (
|
||||
<b>{moment(queryTimeRange.startTime).format(dateFormat)}</b>
|
||||
),
|
||||
endTime: <b>{moment(queryTimeRange.endTime).format(dateFormat)}</b>,
|
||||
}}
|
||||
/>
|
||||
</EuiText>
|
||||
) : null}
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiSuperDatePicker
|
||||
start={selectedTimeRange.startTime}
|
||||
end={selectedTimeRange.endTime}
|
||||
onTimeChange={handleSelectedTimeRangeChange}
|
||||
isPaused={autoRefresh.isPaused}
|
||||
refreshInterval={autoRefresh.interval}
|
||||
onRefreshChange={handleAutoRefreshChange}
|
||||
onRefresh={handleQueryTimeRangeChange}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiPanel>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiPanel paddingSize="l">
|
||||
{isFirstUse && !hasResults ? <FirstUseCallout /> : null}
|
||||
<LogRateResults
|
||||
isLoading={isLoading}
|
||||
|
@ -159,10 +185,20 @@ export const AnalysisResultsContent = ({
|
|||
setTimeRange={handleChartTimeRangeChange}
|
||||
timeRange={queryTimeRange}
|
||||
/>
|
||||
</EuiPageContentBody>
|
||||
</EuiPageContent>
|
||||
</EuiPageBody>
|
||||
</ExpandingPage>
|
||||
</EuiPanel>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiPanel paddingSize="l">
|
||||
<AnomaliesResults
|
||||
isLoading={isLoading}
|
||||
results={logEntryRate}
|
||||
setTimeRange={handleChartTimeRangeChange}
|
||||
timeRange={queryTimeRange}
|
||||
/>
|
||||
</EuiPanel>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</ResultsContentPage>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
|
@ -183,6 +219,10 @@ const stringToNumericTimeRange = (timeRange: StringTimeRange): TimeRange => ({
|
|||
).valueOf(),
|
||||
});
|
||||
|
||||
const ExpandingPage = euiStyled(EuiPage)`
|
||||
flex: 1 0 0%;
|
||||
// This is needed due to the flex-basis: 100% !important; rule that
|
||||
// kicks in on small screens via media queries breaking when using direction="column"
|
||||
export const ResultsContentPage = euiStyled(EuiPage)`
|
||||
.euiFlexGroup--responsive > .euiFlexItem {
|
||||
flex-basis: auto !important;
|
||||
}
|
||||
`;
|
||||
|
|
|
@ -0,0 +1,148 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { RectAnnotationDatum, AnnotationId } from '@elastic/charts';
|
||||
import {
|
||||
Axis,
|
||||
BarSeries,
|
||||
Chart,
|
||||
getAxisId,
|
||||
getSpecId,
|
||||
niceTimeFormatter,
|
||||
Settings,
|
||||
TooltipValue,
|
||||
LIGHT_THEME,
|
||||
DARK_THEME,
|
||||
getAnnotationId,
|
||||
RectAnnotation,
|
||||
} from '@elastic/charts';
|
||||
import numeral from '@elastic/numeral';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import moment from 'moment';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
|
||||
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
|
||||
import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
|
||||
import { MLSeverityScoreCategories } from '../helpers/data_formatters';
|
||||
|
||||
/**
 * Bar chart of log entry rate over time with anomaly annotations overlaid.
 * Brushing a time span on the chart calls `setTimeRange` with the selected
 * start/end, letting the parent page narrow the query range.
 */
export const AnomaliesChart: React.FunctionComponent<{
  chartId: string;
  setTimeRange: (timeRange: TimeRange) => void;
  timeRange: TimeRange;
  series: Array<{ time: number; value: number }>;
  annotations: Record<MLSeverityScoreCategories, RectAnnotationDatum[]>;
  renderAnnotationTooltip?: (details?: string) => JSX.Element;
}> = ({ chartId, series, annotations, setTimeRange, timeRange, renderAnnotationTooltip }) => {
  // User-configured date format (falls back to the given default pattern).
  const [dateFormat] = useKibanaUiSetting('dateFormat', 'Y-MM-DD HH:mm:ss.SSS');
  const [isDarkMode] = useKibanaUiSetting('theme:darkMode');

  // X-axis tick formatter scaled to the currently displayed time range.
  const chartDateFormatter = useMemo(
    () => niceTimeFormatter([timeRange.startTime, timeRange.endTime]),
    [timeRange]
  );

  const logEntryRateSpecId = getSpecId('averageValues');

  // Tooltip header shows the hovered bucket's timestamp in the user's format.
  const tooltipProps = useMemo(
    () => ({
      headerFormatter: (tooltipData: TooltipValue) => moment(tooltipData.value).format(dateFormat),
    }),
    [dateFormat]
  );

  // Propagate a brush selection on the chart up as a new time range.
  const handleBrushEnd = useCallback(
    (startTime: number, endTime: number) => {
      setTimeRange({
        endTime,
        startTime,
      });
    },
    [setTimeRange]
  );

  return (
    <div style={{ height: 160, width: '100%' }}>
      <Chart className="log-entry-rate-chart">
        <Axis
          id={getAxisId('timestamp')}
          position="bottom"
          showOverlappingTicks
          tickFormat={chartDateFormatter}
        />
        <Axis
          id={getAxisId('values')}
          position="left"
          tickFormat={value => numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
        />
        <BarSeries
          id={logEntryRateSpecId}
          name={i18n.translate('xpack.infra.logs.analysis.anomaliesSectionLineSeriesName', {
            defaultMessage: 'Log entries per 15 minutes (avg)',
          })}
          xScaleType="time"
          yScaleType="linear"
          xAccessor={'time'}
          yAccessors={['value']}
          data={series}
          barSeriesStyle={barSeriesStyle}
        />
        {renderAnnotations(annotations, chartId, renderAnnotationTooltip)}
        <Settings
          onBrushEnd={handleBrushEnd}
          tooltip={tooltipProps}
          baseTheme={isDarkMode ? DARK_THEME : LIGHT_THEME}
        />
      </Chart>
    </div>
  );
};
|
||||
|
||||
// Rendering configuration for one ML severity band's annotation layer.
interface SeverityConfig {
  // Stable id elastic-charts uses to identify the annotation layer.
  annotationId: AnnotationId;
  // Fill styling applied to the annotation rectangles of this band.
  style: {
    fill: string;
    opacity: number;
  };
}
|
||||
|
||||
// Annotation layer per ML severity band, keyed by the band name used in the
// annotations record ('warning' | 'minor' | 'major' | 'critical').
// NOTE(review): fill colors are hard-coded RGB values — presumably mirroring
// the ML UI severity palette; confirm before reusing elsewhere.
const severityConfigs: Record<string, SeverityConfig> = {
  warning: {
    annotationId: getAnnotationId(`anomalies-warning`),
    style: { fill: 'rgb(125, 180, 226)', opacity: 0.7 },
  },
  minor: {
    annotationId: getAnnotationId(`anomalies-minor`),
    style: { fill: 'rgb(255, 221, 0)', opacity: 0.7 },
  },
  major: {
    annotationId: getAnnotationId(`anomalies-major`),
    style: { fill: 'rgb(229, 113, 0)', opacity: 0.7 },
  },
  critical: {
    annotationId: getAnnotationId(`anomalies-critical`),
    style: { fill: 'rgb(228, 72, 72)', opacity: 0.7 },
  },
};
|
||||
|
||||
/**
 * Renders one <RectAnnotation> layer per ML severity category so anomalous
 * buckets are shaded with that severity's color on the chart.
 *
 * The `chartId` is folded into each key so annotation layers stay unique
 * when several charts are rendered on the same page.
 */
const renderAnnotations = (
  annotations: Record<MLSeverityScoreCategories, RectAnnotationDatum[]>,
  chartId: string,
  renderAnnotationTooltip?: (details?: string) => JSX.Element
) => {
  // Destructure each [severity, data] pair instead of indexing into the
  // entry tuple; the previously unused `index` callback param is dropped.
  return Object.entries(annotations).map(([severity, dataValues]) => {
    return (
      <RectAnnotation
        key={`${chartId}:${severity}`}
        dataValues={dataValues}
        annotationId={severityConfigs[severity].annotationId}
        style={severityConfigs[severity].style}
        renderTooltip={renderAnnotationTooltip}
      />
    );
  });
};
|
||||
|
||||
// Neutral fill for the rate bars so the severity annotations stand out on top.
const barSeriesStyle = { rect: { fill: '#D3DAE6', opacity: 0.6 } }; // TODO: Acquire this from "theme" as euiColorLightShade
|
|
@ -0,0 +1,89 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import React, { useMemo } from 'react';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import numeral from '@elastic/numeral';
|
||||
import { EuiFlexGroup, EuiFlexItem, EuiStat } from '@elastic/eui';
|
||||
import { AnomaliesChart } from './chart';
|
||||
import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate';
|
||||
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
|
||||
import {
|
||||
getLogEntryRateSeriesForPartition,
|
||||
getAnnotationsForPartition,
|
||||
formatAnomalyScore,
|
||||
getTotalNumberOfLogEntriesForPartition,
|
||||
} from '../helpers/data_formatters';
|
||||
|
||||
/**
 * Expanded-row body for the anomalies table: shows the log-entry-rate chart
 * for a single partition alongside summary statistics for that partition.
 */
export const AnomaliesTableExpandedRow: React.FunctionComponent<{
  partitionId: string;
  topAnomalyScore: number;
  results: GetLogEntryRateSuccessResponsePayload['data'];
  setTimeRange: (timeRange: TimeRange) => void;
  timeRange: TimeRange;
}> = ({ results, timeRange, setTimeRange, topAnomalyScore, partitionId }) => {
  // Per-partition rate series for the chart; empty when no buckets exist.
  const logEntryRateSeries = useMemo(
    () =>
      results && results.histogramBuckets
        ? getLogEntryRateSeriesForPartition(results, partitionId)
        : [],
    [results, partitionId]
  );
  // Severity-grouped annotation rectangles; all groups empty without buckets.
  const anomalyAnnotations = useMemo(
    () =>
      results && results.histogramBuckets
        ? getAnnotationsForPartition(results, partitionId)
        : {
            warning: [],
            minor: [],
            major: [],
            critical: [],
          },
    [results, partitionId]
  );
  // Sum of this partition's log entries across all buckets; undefined when
  // there are no buckets (numeral renders that as a placeholder).
  const totalNumberOfLogEntries = useMemo(
    () =>
      results && results.histogramBuckets
        ? getTotalNumberOfLogEntriesForPartition(results, partitionId)
        : undefined,
    [results, partitionId]
  );
  return (
    <EuiFlexGroup>
      <EuiFlexItem grow={8}>
        <AnomaliesChart
          chartId={`${partitionId}-anomalies`}
          timeRange={timeRange}
          setTimeRange={setTimeRange}
          series={logEntryRateSeries}
          annotations={anomalyAnnotations}
        />
      </EuiFlexItem>
      <EuiFlexItem>
        <EuiStat
          title={numeral(totalNumberOfLogEntries).format('0.00a')}
          description={i18n.translate(
            'xpack.infra.logs.analysis.anomaliesExpandedRowNumberOfLogEntriesDescription',
            {
              defaultMessage: 'Number of log entries',
            }
          )}
          reverse
        />
        <EuiStat
          title={formatAnomalyScore(topAnomalyScore)}
          description={i18n.translate(
            'xpack.infra.logs.analysis.anomaliesExpandedRowTopAnomalyScoreDescription',
            {
              defaultMessage: 'Max anomaly score',
            }
          )}
          reverse
        />
      </EuiFlexItem>
    </EuiFlexGroup>
  );
};
|
|
@ -0,0 +1,217 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import {
|
||||
EuiEmptyPrompt,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiLoadingChart,
|
||||
EuiSpacer,
|
||||
EuiTitle,
|
||||
EuiStat,
|
||||
} from '@elastic/eui';
|
||||
import numeral from '@elastic/numeral';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import React, { useMemo } from 'react';
|
||||
|
||||
import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate';
|
||||
import euiStyled from '../../../../../../../../common/eui_styled_components';
|
||||
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
|
||||
import { AnomaliesChart } from './chart';
|
||||
import { AnomaliesTable } from './table';
|
||||
import {
|
||||
getLogEntryRateCombinedSeries,
|
||||
getAnnotationsForAll,
|
||||
getTopAnomalyScoreAcrossAllPartitions,
|
||||
formatAnomalyScore,
|
||||
} from '../helpers/data_formatters';
|
||||
|
||||
/**
 * "Anomalies" section of the analysis results page. Renders, in order of
 * precedence: a loading spinner, a "no data" prompt (null results or zero
 * histogram buckets), a "no anomalies" prompt, or the overall anomalies
 * chart plus summary stats and the per-partition table.
 */
export const AnomaliesResults = ({
  isLoading,
  results,
  setTimeRange,
  timeRange,
}: {
  isLoading: boolean;
  results: GetLogEntryRateSuccessResponsePayload['data'] | null;
  setTimeRange: (timeRange: TimeRange) => void;
  timeRange: TimeRange;
}) => {
  const title = i18n.translate('xpack.infra.logs.analysis.anomaliesSectionTitle', {
    defaultMessage: 'Anomalies',
  });

  const loadingAriaLabel = i18n.translate(
    'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
    { defaultMessage: 'Loading anomalies' }
  );

  // True when any partition in any bucket carries at least one anomaly.
  const hasAnomalies = useMemo(() => {
    return results && results.histogramBuckets
      ? results.histogramBuckets.some(bucket => {
          return bucket.partitions.some(partition => {
            return partition.anomalies.length > 0;
          });
        })
      : false;
  }, [results]);

  // Combined (all partitions summed) rate series for the overall chart.
  const logEntryRateSeries = useMemo(
    () => (results && results.histogramBuckets ? getLogEntryRateCombinedSeries(results) : []),
    [results]
  );
  // Severity-grouped annotation rectangles; all groups empty without buckets.
  const anomalyAnnotations = useMemo(
    () =>
      results && results.histogramBuckets
        ? getAnnotationsForAll(results)
        : {
            warning: [],
            minor: [],
            major: [],
            critical: [],
          },
    [results]
  );

  const topAnomalyScore = useMemo(
    () =>
      results && results.histogramBuckets
        ? getTopAnomalyScoreAcrossAllPartitions(results)
        : undefined,
    [results]
  );

  return (
    <>
      <EuiTitle size="s" aria-label={title}>
        <h2>{title}</h2>
      </EuiTitle>
      <EuiSpacer size="l" />
      {isLoading ? (
        <EuiFlexGroup justifyContent="center">
          <EuiFlexItem grow={false}>
            <EuiLoadingChart size="xl" aria-label={loadingAriaLabel} />
          </EuiFlexItem>
        </EuiFlexGroup>
      ) : !results || (results && results.histogramBuckets && !results.histogramBuckets.length) ? (
        <EuiEmptyPrompt
          title={
            <h2>
              {i18n.translate('xpack.infra.logs.analysis.anomalySectionNoDataTitle', {
                defaultMessage: 'There is no data to display.',
              })}
            </h2>
          }
          titleSize="m"
          body={
            <p>
              {i18n.translate('xpack.infra.logs.analysis.anomalySectionNoDataBody', {
                defaultMessage: 'You may want to adjust your time range.',
              })}
            </p>
          }
        />
      ) : !hasAnomalies ? (
        <EuiEmptyPrompt
          title={
            <h2>
              {i18n.translate('xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle', {
                defaultMessage: 'No anomalies were detected.',
              })}
            </h2>
          }
          titleSize="m"
        />
      ) : (
        <>
          <EuiFlexGroup>
            <EuiFlexItem grow={8}>
              <AnomaliesChart
                chartId="overall"
                setTimeRange={setTimeRange}
                timeRange={timeRange}
                series={logEntryRateSeries}
                annotations={anomalyAnnotations}
                renderAnnotationTooltip={renderAnnotationTooltip}
              />
            </EuiFlexItem>
            <EuiFlexItem grow={2}>
              <EuiStat
                title={numeral(results.totalNumberOfLogEntries).format('0.00a')}
                description={i18n.translate(
                  'xpack.infra.logs.analysis.overallAnomaliesNumberOfLogEntriesDescription',
                  {
                    defaultMessage: 'Number of log entries',
                  }
                )}
                reverse
              />
              {/* NOTE(review): a topAnomalyScore of 0 renders a null title here — confirm intended. */}
              <EuiStat
                title={topAnomalyScore ? formatAnomalyScore(topAnomalyScore) : null}
                description={i18n.translate(
                  'xpack.infra.logs.analysis.overallAnomaliesTopAnomalyScoreDescription',
                  {
                    defaultMessage: 'Max anomaly score',
                  }
                )}
                reverse
              />
            </EuiFlexItem>
          </EuiFlexGroup>
          <EuiSpacer size="l" />
          <AnomaliesTable results={results} setTimeRange={setTimeRange} timeRange={timeRange} />
        </>
      )}
    </>
  );
};
|
||||
|
||||
// Shape of the JSON-encoded `details` string carried by overall-chart annotations.
interface ParsedAnnotationDetails {
  anomalyScoresByPartition: Array<{ partitionId: string; maximumAnomalyScore: number }>;
}
|
||||
|
||||
// Heading shown inside the overall-chart annotation tooltip.
const overallAnomalyScoreLabel = i18n.translate(
  'xpack.infra.logs.analysis.overallAnomalyChartMaxScoresLabel',
  {
    defaultMessage: 'Max anomaly scores:',
  }
);
|
||||
/**
 * Tooltip body for overall-chart annotations: lists each partition's max
 * anomaly score, decoded from the annotation's JSON `details` payload.
 */
const AnnotationTooltip: React.FunctionComponent<{ details: string }> = ({ details }) => {
  // NOTE(review): `details` is assumed to be produced by our own annotation
  // formatter, so JSON.parse is not validated at runtime — confirm no
  // external input can reach this prop.
  const parsedDetails: ParsedAnnotationDetails = JSON.parse(details);
  return (
    <TooltipWrapper>
      <span>
        <b>{overallAnomalyScoreLabel}</b>
      </span>
      <ul>
        {/* The previously unused `index` callback param is dropped; the key is partition-based. */}
        {parsedDetails.anomalyScoresByPartition.map(({ partitionId, maximumAnomalyScore }) => {
          return (
            <li key={`overall-anomaly-chart-${partitionId}`}>
              <span>
                {`${partitionId}: `}
                <b>{maximumAnomalyScore}</b>
              </span>
            </li>
          );
        })}
      </ul>
    </TooltipWrapper>
  );
};
|
||||
|
||||
const renderAnnotationTooltip = (details?: string) => {
|
||||
// Note: Seems to be necessary to get things typed correctly all the way through to elastic-charts components
|
||||
if (!details) {
|
||||
return <div></div>;
|
||||
}
|
||||
return <AnnotationTooltip details={details} />;
|
||||
};
|
||||
|
||||
// Keeps each "partition: score" tooltip line on a single row.
const TooltipWrapper = euiStyled('div')`
  white-space: nowrap;
`;
|
|
@ -0,0 +1,163 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import React, { useMemo, useState, useCallback } from 'react';
|
||||
import { EuiBasicTable, EuiButtonIcon } from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { RIGHT_ALIGNMENT } from '@elastic/eui/lib/services';
|
||||
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
|
||||
import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate';
|
||||
import { AnomaliesTableExpandedRow } from './expanded_row';
|
||||
import { getTopAnomalyScoresByPartition, formatAnomalyScore } from '../helpers/data_formatters';
|
||||
|
||||
// Row model for the anomalies-by-partition table.
interface TableItem {
  // Row key; empty-string partition ids are replaced with 'unknown' when rows
  // are built, because EUI's expanded-row map cannot key on ''.
  id: string;
  partition: string;
  topAnomalyScore: number;
}
|
||||
|
||||
// Sorting state exchanged with EuiBasicTable's `sorting`/`onChange` props.
interface SortingOptions {
  sort: {
    field: string;
    // Expected values: 'asc' or 'desc'.
    direction: string;
  };
}
|
||||
|
||||
// Static i18n strings for the anomalies table, resolved once at module load.
const collapseAriaLabel = i18n.translate('xpack.infra.logs.analysis.anomaliesTableCollapseLabel', {
  defaultMessage: 'Collapse',
});

const expandAriaLabel = i18n.translate('xpack.infra.logs.analysis.anomaliesTableExpandLabel', {
  defaultMessage: 'Expand',
});

const partitionColumnName = i18n.translate(
  'xpack.infra.logs.analysis.anomaliesTablePartitionColumnName',
  {
    defaultMessage: 'Partition',
  }
);

const maxAnomalyScoreColumnName = i18n.translate(
  'xpack.infra.logs.analysis.anomaliesTableMaxAnomalyScoreColumnName',
  {
    defaultMessage: 'Max anomaly score',
  }
);
|
||||
|
||||
export const AnomaliesTable: React.FunctionComponent<{
|
||||
results: GetLogEntryRateSuccessResponsePayload['data'];
|
||||
setTimeRange: (timeRange: TimeRange) => void;
|
||||
timeRange: TimeRange;
|
||||
}> = ({ results, timeRange, setTimeRange }) => {
|
||||
const tableItems: TableItem[] = useMemo(() => {
|
||||
return Object.entries(getTopAnomalyScoresByPartition(results)).map(([key, value]) => {
|
||||
return {
|
||||
id: key || 'unknown', // Note: EUI's table expanded rows won't work with a key of '' in itemIdToExpandedRowMap
|
||||
partition: key || 'unknown',
|
||||
topAnomalyScore: formatAnomalyScore(value),
|
||||
};
|
||||
});
|
||||
}, [results]);
|
||||
|
||||
const [itemIdToExpandedRowMap, setItemIdToExpandedRowMap] = useState<
|
||||
Record<string, React.ReactNode>
|
||||
>({});
|
||||
|
||||
const [sorting, setSorting] = useState<SortingOptions>({
|
||||
sort: {
|
||||
field: 'topAnomalyScore',
|
||||
direction: 'desc',
|
||||
},
|
||||
});
|
||||
|
||||
const handleTableChange = useCallback(
|
||||
({ sort = {} }) => {
|
||||
const { field, direction } = sort;
|
||||
setSorting({
|
||||
sort: {
|
||||
field,
|
||||
direction,
|
||||
},
|
||||
});
|
||||
},
|
||||
[setSorting]
|
||||
);
|
||||
|
||||
const sortedTableItems = useMemo(() => {
|
||||
let sortedItems: TableItem[] = [];
|
||||
if (sorting.sort.field === 'partition') {
|
||||
sortedItems = tableItems.sort((a, b) => (a.partition > b.partition ? 1 : -1));
|
||||
} else if (sorting.sort.field === 'topAnomalyScore') {
|
||||
sortedItems = tableItems.sort((a, b) => a.topAnomalyScore - b.topAnomalyScore);
|
||||
}
|
||||
return sorting.sort.direction === 'asc' ? sortedItems : sortedItems.reverse();
|
||||
}, [tableItems, sorting]);
|
||||
|
||||
const toggleExpandedItems = useCallback(
|
||||
item => {
|
||||
if (itemIdToExpandedRowMap[item.id]) {
|
||||
const { [item.id]: toggledItem, ...remainingExpandedRowMap } = itemIdToExpandedRowMap;
|
||||
setItemIdToExpandedRowMap(remainingExpandedRowMap);
|
||||
} else {
|
||||
const newItemIdToExpandedRowMap = {
|
||||
...itemIdToExpandedRowMap,
|
||||
[item.id]: (
|
||||
<AnomaliesTableExpandedRow
|
||||
partitionId={item.id}
|
||||
results={results}
|
||||
topAnomalyScore={item.topAnomalyScore}
|
||||
setTimeRange={setTimeRange}
|
||||
timeRange={timeRange}
|
||||
/>
|
||||
),
|
||||
};
|
||||
setItemIdToExpandedRowMap(newItemIdToExpandedRowMap);
|
||||
}
|
||||
},
|
||||
[results, setTimeRange, timeRange, itemIdToExpandedRowMap, setItemIdToExpandedRowMap]
|
||||
);
|
||||
|
||||
const columns = [
|
||||
{
|
||||
field: 'partition',
|
||||
name: partitionColumnName,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
},
|
||||
{
|
||||
field: 'topAnomalyScore',
|
||||
name: maxAnomalyScoreColumnName,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
},
|
||||
{
|
||||
align: RIGHT_ALIGNMENT,
|
||||
width: '40px',
|
||||
isExpander: true,
|
||||
render: (item: TableItem) => (
|
||||
<EuiButtonIcon
|
||||
onClick={() => toggleExpandedItems(item)}
|
||||
aria-label={itemIdToExpandedRowMap[item.id] ? collapseAriaLabel : expandAriaLabel}
|
||||
iconType={itemIdToExpandedRowMap[item.id] ? 'arrowUp' : 'arrowDown'}
|
||||
/>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
return (
|
||||
<EuiBasicTable
|
||||
items={sortedTableItems}
|
||||
itemId="id"
|
||||
itemIdToExpandedRowMap={itemIdToExpandedRowMap}
|
||||
isExpandable={true}
|
||||
hasActions={true}
|
||||
columns={columns}
|
||||
sorting={sorting}
|
||||
onChange={handleTableChange}
|
||||
/>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,259 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { RectAnnotationDatum } from '@elastic/charts';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate';
|
||||
|
||||
// The four ML severity bands used to group anomaly annotations.
export type MLSeverityScoreCategories = 'warning' | 'minor' | 'major' | 'critical';
type MLSeverityScores = Record<MLSeverityScoreCategories, number>;
// Score value associated with each severity band — presumably the lower
// bound of the band's score range (used by the severity lookup further
// down this file); confirm against the ML scoring docs.
const ML_SEVERITY_SCORES: MLSeverityScores = {
  warning: 3,
  minor: 25,
  major: 50,
  critical: 75,
};
|
||||
|
||||
export const getLogEntryRatePartitionedSeries = (
|
||||
results: GetLogEntryRateSuccessResponsePayload['data']
|
||||
) => {
|
||||
return results.histogramBuckets.reduce<Array<{ group: string; time: number; value: number }>>(
|
||||
(buckets, bucket) => {
|
||||
return [
|
||||
...buckets,
|
||||
...bucket.partitions.map(partition => ({
|
||||
group: partition.partitionId === '' ? 'unknown' : partition.partitionId,
|
||||
time: bucket.startTime,
|
||||
value: partition.averageActualLogEntryRate,
|
||||
})),
|
||||
];
|
||||
},
|
||||
[]
|
||||
);
|
||||
};
|
||||
|
||||
export const getLogEntryRateCombinedSeries = (
|
||||
results: GetLogEntryRateSuccessResponsePayload['data']
|
||||
) => {
|
||||
return results.histogramBuckets.reduce<Array<{ time: number; value: number }>>(
|
||||
(buckets, bucket) => {
|
||||
return [
|
||||
...buckets,
|
||||
{
|
||||
time: bucket.startTime,
|
||||
value: bucket.partitions.reduce((accumulatedValue, partition) => {
|
||||
return accumulatedValue + partition.averageActualLogEntryRate;
|
||||
}, 0),
|
||||
},
|
||||
];
|
||||
},
|
||||
[]
|
||||
);
|
||||
};
|
||||
|
||||
export const getLogEntryRateSeriesForPartition = (
|
||||
results: GetLogEntryRateSuccessResponsePayload['data'],
|
||||
partitionId: string
|
||||
) => {
|
||||
return results.histogramBuckets.reduce<Array<{ time: number; value: number }>>(
|
||||
(buckets, bucket) => {
|
||||
const partitionResults = bucket.partitions.find(partition => {
|
||||
return (
|
||||
partition.partitionId === partitionId ||
|
||||
(partition.partitionId === '' && partitionId === 'unknown')
|
||||
);
|
||||
});
|
||||
if (!partitionResults) {
|
||||
return buckets;
|
||||
}
|
||||
return [
|
||||
...buckets,
|
||||
{
|
||||
time: bucket.startTime,
|
||||
value: partitionResults.averageActualLogEntryRate,
|
||||
},
|
||||
];
|
||||
},
|
||||
[]
|
||||
);
|
||||
};
|
||||
|
||||
export const getTopAnomalyScoresByPartition = (
|
||||
results: GetLogEntryRateSuccessResponsePayload['data']
|
||||
) => {
|
||||
return results.histogramBuckets.reduce<Record<string, number>>((topScores, bucket) => {
|
||||
bucket.partitions.forEach(partition => {
|
||||
if (partition.maximumAnomalyScore > 0) {
|
||||
topScores = {
|
||||
...topScores,
|
||||
[partition.partitionId]:
|
||||
!topScores[partition.partitionId] ||
|
||||
partition.maximumAnomalyScore > topScores[partition.partitionId]
|
||||
? partition.maximumAnomalyScore
|
||||
: topScores[partition.partitionId],
|
||||
};
|
||||
}
|
||||
});
|
||||
return topScores;
|
||||
}, {});
|
||||
};
|
||||
|
||||
// Builds rect-annotation data (one rectangle per histogram bucket) for a
// single partition, grouped by ML severity category, so the chart can shade
// anomalous time ranges with the band's color.
export const getAnnotationsForPartition = (
  results: GetLogEntryRateSuccessResponsePayload['data'],
  partitionId: string
) => {
  return results.histogramBuckets.reduce<Record<MLSeverityScoreCategories, RectAnnotationDatum[]>>(
    (annotatedBucketsBySeverity, bucket) => {
      // The '' partition id is surfaced to the UI as 'unknown', so match either.
      const partitionResults = bucket.partitions.find(partition => {
        return (
          partition.partitionId === partitionId ||
          (partition.partitionId === '' && partitionId === 'unknown')
        );
      });
      const severityCategory = partitionResults
        ? getSeverityCategoryForScore(partitionResults.maximumAnomalyScore)
        : null;
      // Skip buckets where this partition is absent, unscored (0/undefined),
      // or falls below every severity band.
      if (!partitionResults || !partitionResults.maximumAnomalyScore || !severityCategory) {
        return annotatedBucketsBySeverity;
      }

      return {
        ...annotatedBucketsBySeverity,
        [severityCategory]: [
          ...annotatedBucketsBySeverity[severityCategory],
          {
            // The rectangle spans the whole bucket on the time (x) axis.
            coordinates: {
              x0: bucket.startTime,
              x1: bucket.startTime + results.bucketDuration,
            },
            details: i18n.translate(
              'xpack.infra.logs.analysis.partitionMaxAnomalyScoreAnnotationLabel',
              {
                defaultMessage: 'Max anomaly score: {maxAnomalyScore}',
                values: {
                  maxAnomalyScore: formatAnomalyScore(partitionResults.maximumAnomalyScore),
                },
              }
            ),
          },
        ],
      };
    },
    {
      warning: [],
      minor: [],
      major: [],
      critical: [],
    }
  );
};
|
||||
|
||||
export const getTotalNumberOfLogEntriesForPartition = (
|
||||
results: GetLogEntryRateSuccessResponsePayload['data'],
|
||||
partitionId: string
|
||||
) => {
|
||||
return results.histogramBuckets.reduce<number>((sumPartitionNumberOfLogEntries, bucket) => {
|
||||
const partitionResults = bucket.partitions.find(partition => {
|
||||
return (
|
||||
partition.partitionId === partitionId ||
|
||||
(partition.partitionId === '' && partitionId === 'unknown')
|
||||
);
|
||||
});
|
||||
if (!partitionResults || !partitionResults.numberOfLogEntries) {
|
||||
return sumPartitionNumberOfLogEntries;
|
||||
} else {
|
||||
return (sumPartitionNumberOfLogEntries += partitionResults.numberOfLogEntries);
|
||||
}
|
||||
}, 0);
|
||||
};
|
||||
|
||||
/**
 * Builds chart rect annotations across ALL partitions, grouped by ML severity
 * category.
 *
 * For each histogram bucket: collects the (rounded) maximum anomaly score per
 * partition, keeping only partitions whose raw score reaches at least the
 * lowest severity threshold; the bucket's overall severity is derived from the
 * highest of those rounded scores, and a single annotation spanning the
 * bucket's time range is appended under that severity. The annotation's
 * `details` is a JSON string of the per-partition scores sorted descending —
 * presumably parsed by the chart tooltip renderer; confirm against consumer.
 */
export const getAnnotationsForAll = (results: GetLogEntryRateSuccessResponsePayload['data']) => {
  return results.histogramBuckets.reduce<Record<MLSeverityScoreCategories, RectAnnotationDatum[]>>(
    (annotatedBucketsBySeverity, bucket) => {
      // Per-partition max scores for this bucket; scores below the lowest
      // severity threshold are filtered out before rounding.
      const maxAnomalyScoresByPartition = bucket.partitions.reduce<
        Array<{ partitionId: string; maximumAnomalyScore: number }>
      >((bucketMaxAnomalyScoresByPartition, partition) => {
        if (!getSeverityCategoryForScore(partition.maximumAnomalyScore)) {
          return bucketMaxAnomalyScoresByPartition;
        }
        return [
          ...bucketMaxAnomalyScoresByPartition,
          {
            // The empty partition id is surfaced to the UI as 'unknown'.
            partitionId: partition.partitionId ? partition.partitionId : 'unknown',
            // NOTE: the score stored (and used for the severity below) is the
            // rounded display value, not the raw score.
            maximumAnomalyScore: formatAnomalyScore(partition.maximumAnomalyScore),
          },
        ];
      }, []);

      // No partition in this bucket reached a severity threshold.
      if (maxAnomalyScoresByPartition.length === 0) {
        return annotatedBucketsBySeverity;
      }
      // Bucket severity follows the single highest (rounded) partition score.
      const severityCategory = getSeverityCategoryForScore(
        Math.max(
          ...maxAnomalyScoresByPartition.map(partitionScore => partitionScore.maximumAnomalyScore)
        )
      );
      if (!severityCategory) {
        return annotatedBucketsBySeverity;
      }
      // Sort descending so the tooltip lists the worst partitions first.
      const sortedMaxAnomalyScoresByPartition = maxAnomalyScoresByPartition.sort(
        (a, b) => b.maximumAnomalyScore - a.maximumAnomalyScore
      );
      return {
        ...annotatedBucketsBySeverity,
        [severityCategory]: [
          ...annotatedBucketsBySeverity[severityCategory],
          {
            // Annotation spans the bucket's full time range.
            coordinates: {
              x0: bucket.startTime,
              x1: bucket.startTime + results.bucketDuration,
            },
            details: JSON.stringify({
              anomalyScoresByPartition: sortedMaxAnomalyScoresByPartition,
            }),
          },
        ],
      };
    },
    {
      warning: [],
      minor: [],
      major: [],
      critical: [],
    }
  );
};
|
||||
|
||||
export const getTopAnomalyScoreAcrossAllPartitions = (
|
||||
results: GetLogEntryRateSuccessResponsePayload['data']
|
||||
) => {
|
||||
const allMaxScores = results.histogramBuckets.reduce<number[]>((scores, bucket) => {
|
||||
const bucketMaxScores = bucket.partitions.reduce<number[]>((bucketScores, partition) => {
|
||||
return [...bucketScores, partition.maximumAnomalyScore];
|
||||
}, []);
|
||||
return [...scores, ...bucketMaxScores];
|
||||
}, []);
|
||||
return Math.max(...allMaxScores);
|
||||
};
|
||||
|
||||
const getSeverityCategoryForScore = (score: number): MLSeverityScoreCategories | undefined => {
|
||||
if (score >= ML_SEVERITY_SCORES.critical) {
|
||||
return 'critical';
|
||||
} else if (score >= ML_SEVERITY_SCORES.major) {
|
||||
return 'major';
|
||||
} else if (score >= ML_SEVERITY_SCORES.minor) {
|
||||
return 'minor';
|
||||
} else if (score >= ML_SEVERITY_SCORES.warning) {
|
||||
return 'warning';
|
||||
} else {
|
||||
// Category is too low to include
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
export const formatAnomalyScore = (score: number) => {
|
||||
return Math.round(score);
|
||||
};
|
|
@ -8,33 +8,27 @@ import {
|
|||
Axis,
|
||||
BarSeries,
|
||||
Chart,
|
||||
getAnnotationId,
|
||||
getAxisId,
|
||||
getSpecId,
|
||||
niceTimeFormatter,
|
||||
RectAnnotation,
|
||||
RectAnnotationDatum,
|
||||
Settings,
|
||||
TooltipValue,
|
||||
LIGHT_THEME,
|
||||
DARK_THEME,
|
||||
} from '@elastic/charts';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import numeral from '@elastic/numeral';
|
||||
import moment from 'moment';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
|
||||
import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate';
|
||||
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
|
||||
import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
|
||||
|
||||
type LogEntryRateHistogramBuckets = GetLogEntryRateSuccessResponsePayload['data']['histogramBuckets'];
|
||||
|
||||
export const LogEntryRateBarChart: React.FunctionComponent<{
|
||||
bucketDuration: number;
|
||||
histogramBuckets: LogEntryRateHistogramBuckets | null;
|
||||
setTimeRange: (timeRange: TimeRange) => void;
|
||||
timeRange: TimeRange;
|
||||
}> = ({ bucketDuration, histogramBuckets, setTimeRange, timeRange }) => {
|
||||
series: Array<{ group: string; time: number; value: number }>;
|
||||
}> = ({ series, setTimeRange, timeRange }) => {
|
||||
const [dateFormat] = useKibanaUiSetting('dateFormat');
|
||||
const [isDarkMode] = useKibanaUiSetting('theme:darkMode');
|
||||
|
||||
|
@ -43,62 +37,7 @@ export const LogEntryRateBarChart: React.FunctionComponent<{
|
|||
[timeRange]
|
||||
);
|
||||
|
||||
const logEntryRateSeries = useMemo(
|
||||
() =>
|
||||
histogramBuckets
|
||||
? histogramBuckets.reduce<Array<{ group: string; time: number; value: number }>>(
|
||||
(buckets, bucket) => {
|
||||
return [
|
||||
...buckets,
|
||||
...bucket.dataSets.map(dataSet => ({
|
||||
group: dataSet.dataSetId === '' ? 'unknown' : dataSet.dataSetId,
|
||||
time: bucket.startTime,
|
||||
value: dataSet.averageActualLogEntryRate,
|
||||
})),
|
||||
];
|
||||
},
|
||||
[]
|
||||
)
|
||||
: [],
|
||||
[histogramBuckets]
|
||||
);
|
||||
|
||||
const logEntryRateAnomalyAnnotations = useMemo(
|
||||
() =>
|
||||
histogramBuckets
|
||||
? histogramBuckets.reduce<RectAnnotationDatum[]>((annotatedBuckets, bucket) => {
|
||||
const anomalies = bucket.dataSets.reduce<typeof bucket['dataSets'][0]['anomalies']>(
|
||||
(accumulatedAnomalies, dataSet) => [...accumulatedAnomalies, ...dataSet.anomalies],
|
||||
[]
|
||||
);
|
||||
if (anomalies.length <= 0) {
|
||||
return annotatedBuckets;
|
||||
}
|
||||
return [
|
||||
...annotatedBuckets,
|
||||
{
|
||||
coordinates: {
|
||||
x0: bucket.startTime,
|
||||
x1: bucket.startTime + bucketDuration,
|
||||
},
|
||||
details: i18n.translate(
|
||||
'xpack.infra.logs.analysis.logRateSectionAnomalyCountTooltipLabel',
|
||||
{
|
||||
defaultMessage: `{anomalyCount, plural, one {# anomaly} other {# anomalies}}`,
|
||||
values: {
|
||||
anomalyCount: anomalies.length,
|
||||
},
|
||||
}
|
||||
),
|
||||
},
|
||||
];
|
||||
}, [])
|
||||
: [],
|
||||
[histogramBuckets]
|
||||
);
|
||||
|
||||
const logEntryRateSpecId = getSpecId('averageValues');
|
||||
const logEntryRateAnomalyAnnotationsId = getAnnotationId('anomalies');
|
||||
|
||||
const tooltipProps = useMemo(
|
||||
() => ({
|
||||
|
@ -119,24 +58,18 @@ export const LogEntryRateBarChart: React.FunctionComponent<{
|
|||
);
|
||||
|
||||
return (
|
||||
<div style={{ height: 400, width: '100%' }}>
|
||||
<div style={{ height: 200, width: '100%' }}>
|
||||
<Chart className="log-entry-rate-chart">
|
||||
<Axis
|
||||
id={getAxisId('timestamp')}
|
||||
title={i18n.translate('xpack.infra.logs.analysis.logRateSectionXaxisTitle', {
|
||||
defaultMessage: 'Time',
|
||||
})}
|
||||
position="bottom"
|
||||
showOverlappingTicks
|
||||
tickFormat={chartDateFormatter}
|
||||
/>
|
||||
<Axis
|
||||
id={getAxisId('values')}
|
||||
title={i18n.translate('xpack.infra.logs.analysis.logRateSectionYaxisTitle', {
|
||||
defaultMessage: 'Log entries per 15 minutes',
|
||||
})}
|
||||
position="left"
|
||||
tickFormat={value => Number(value).toFixed(0)}
|
||||
tickFormat={value => numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
|
||||
/>
|
||||
<BarSeries
|
||||
id={logEntryRateSpecId}
|
||||
|
@ -149,17 +82,14 @@ export const LogEntryRateBarChart: React.FunctionComponent<{
|
|||
yAccessors={['value']}
|
||||
splitSeriesAccessors={['group']}
|
||||
stackAccessors={['time']}
|
||||
data={logEntryRateSeries}
|
||||
/>
|
||||
<RectAnnotation
|
||||
dataValues={logEntryRateAnomalyAnnotations}
|
||||
annotationId={logEntryRateAnomalyAnnotationsId}
|
||||
data={series}
|
||||
/>
|
||||
<Settings
|
||||
onBrushEnd={handleBrushEnd}
|
||||
tooltip={tooltipProps}
|
||||
theme={isDarkMode ? DARK_THEME : LIGHT_THEME}
|
||||
xDomain={{ min: timeRange.startTime, max: timeRange.endTime }}
|
||||
showLegend
|
||||
legendPosition="right"
|
||||
/>
|
||||
</Chart>
|
||||
</div>
|
||||
|
|
|
@ -11,13 +11,15 @@ import {
|
|||
EuiLoadingChart,
|
||||
EuiSpacer,
|
||||
EuiTitle,
|
||||
EuiText,
|
||||
} from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import React from 'react';
|
||||
import React, { useMemo } from 'react';
|
||||
|
||||
import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate';
|
||||
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
|
||||
import { LogEntryRateBarChart } from './bar_chart';
|
||||
import { getLogEntryRatePartitionedSeries } from '../helpers/data_formatters';
|
||||
|
||||
export const LogRateResults = ({
|
||||
isLoading,
|
||||
|
@ -31,7 +33,7 @@ export const LogRateResults = ({
|
|||
timeRange: TimeRange;
|
||||
}) => {
|
||||
const title = i18n.translate('xpack.infra.logs.analysis.logRateSectionTitle', {
|
||||
defaultMessage: 'Log rate',
|
||||
defaultMessage: 'Log entries',
|
||||
});
|
||||
|
||||
const loadingAriaLabel = i18n.translate(
|
||||
|
@ -39,43 +41,66 @@ export const LogRateResults = ({
|
|||
{ defaultMessage: 'Loading log rate results' }
|
||||
);
|
||||
|
||||
const logEntryRateSeries = useMemo(
|
||||
() => (results && results.histogramBuckets ? getLogEntryRatePartitionedSeries(results) : []),
|
||||
[results]
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiTitle size="m" aria-label={title}>
|
||||
<h2>{title}</h2>
|
||||
</EuiTitle>
|
||||
<EuiSpacer size="l" />
|
||||
{isLoading ? (
|
||||
<EuiFlexGroup justifyContent="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiLoadingChart size="xl" aria-label={loadingAriaLabel} />
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
<>
|
||||
<EuiSpacer size="l" />
|
||||
<EuiFlexGroup justifyContent="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiLoadingChart size="xl" aria-label={loadingAriaLabel} />
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</>
|
||||
) : !results || (results && results.histogramBuckets && !results.histogramBuckets.length) ? (
|
||||
<EuiEmptyPrompt
|
||||
title={
|
||||
<h2>
|
||||
{i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataTitle', {
|
||||
defaultMessage: 'There is no data to display.',
|
||||
})}
|
||||
</h2>
|
||||
}
|
||||
titleSize="m"
|
||||
body={
|
||||
<>
|
||||
<EuiSpacer size="l" />
|
||||
<EuiEmptyPrompt
|
||||
title={
|
||||
<h2>
|
||||
{i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataTitle', {
|
||||
defaultMessage: 'There is no data to display.',
|
||||
})}
|
||||
</h2>
|
||||
}
|
||||
titleSize="m"
|
||||
body={
|
||||
<p>
|
||||
{i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataBody', {
|
||||
defaultMessage: 'You may want to adjust your time range.',
|
||||
})}
|
||||
</p>
|
||||
}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<EuiText size="s">
|
||||
<p>
|
||||
{i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataBody', {
|
||||
defaultMessage: 'You may want to adjust your time range.',
|
||||
<b>
|
||||
{i18n.translate('xpack.infra.logs.analysis.logRateSectionBucketSpanLabel', {
|
||||
defaultMessage: 'Bucket span: ',
|
||||
})}
|
||||
</b>
|
||||
{i18n.translate('xpack.infra.logs.analysis.logRateSectionBucketSpanValue', {
|
||||
defaultMessage: '15 minutes',
|
||||
})}
|
||||
</p>
|
||||
}
|
||||
/>
|
||||
) : (
|
||||
<LogEntryRateBarChart
|
||||
bucketDuration={results.bucketDuration}
|
||||
histogramBuckets={results.histogramBuckets}
|
||||
setTimeRange={setTimeRange}
|
||||
timeRange={timeRange}
|
||||
/>
|
||||
</EuiText>
|
||||
<LogEntryRateBarChart
|
||||
setTimeRange={setTimeRange}
|
||||
timeRange={timeRange}
|
||||
series={logEntryRateSeries}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
|
|
|
@ -15,7 +15,7 @@ import {
|
|||
logRateModelPlotResponseRT,
|
||||
createLogEntryRateQuery,
|
||||
LogRateModelPlotBucket,
|
||||
CompositeTimestampDataSetKey,
|
||||
CompositeTimestampPartitionKey,
|
||||
} from './queries';
|
||||
|
||||
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
|
||||
|
@ -43,7 +43,7 @@ export class InfraLogAnalysis {
|
|||
const logRateJobId = this.getJobIds(request, sourceId).logEntryRate;
|
||||
|
||||
let mlModelPlotBuckets: LogRateModelPlotBucket[] = [];
|
||||
let afterLatestBatchKey: CompositeTimestampDataSetKey | undefined;
|
||||
let afterLatestBatchKey: CompositeTimestampPartitionKey | undefined;
|
||||
|
||||
while (true) {
|
||||
const mlModelPlotResponse = await this.libs.framework.callWithRequest(
|
||||
|
@ -67,7 +67,7 @@ export class InfraLogAnalysis {
|
|||
|
||||
const { after_key: afterKey, buckets: latestBatchBuckets } = pipe(
|
||||
logRateModelPlotResponseRT.decode(mlModelPlotResponse),
|
||||
map(response => response.aggregations.timestamp_data_set_buckets),
|
||||
map(response => response.aggregations.timestamp_partition_buckets),
|
||||
fold(throwErrors(createPlainError), identity)
|
||||
);
|
||||
|
||||
|
@ -81,7 +81,7 @@ export class InfraLogAnalysis {
|
|||
|
||||
return mlModelPlotBuckets.reduce<
|
||||
Array<{
|
||||
dataSets: Array<{
|
||||
partitions: Array<{
|
||||
analysisBucketCount: number;
|
||||
anomalies: Array<{
|
||||
actualLogEntryRate: number;
|
||||
|
@ -91,15 +91,17 @@ export class InfraLogAnalysis {
|
|||
typicalLogEntryRate: number;
|
||||
}>;
|
||||
averageActualLogEntryRate: number;
|
||||
dataSetId: string;
|
||||
maximumAnomalyScore: number;
|
||||
numberOfLogEntries: number;
|
||||
partitionId: string;
|
||||
}>;
|
||||
startTime: number;
|
||||
}>
|
||||
>((histogramBuckets, timestampDataSetBucket) => {
|
||||
>((histogramBuckets, timestampPartitionBucket) => {
|
||||
const previousHistogramBucket = histogramBuckets[histogramBuckets.length - 1];
|
||||
const dataSet = {
|
||||
analysisBucketCount: timestampDataSetBucket.filter_model_plot.doc_count,
|
||||
anomalies: timestampDataSetBucket.filter_records.top_hits_record.hits.hits.map(
|
||||
const partition = {
|
||||
analysisBucketCount: timestampPartitionBucket.filter_model_plot.doc_count,
|
||||
anomalies: timestampPartitionBucket.filter_records.top_hits_record.hits.hits.map(
|
||||
({ _source: record }) => ({
|
||||
actualLogEntryRate: record.actual[0],
|
||||
anomalyScore: record.record_score,
|
||||
|
@ -108,26 +110,30 @@ export class InfraLogAnalysis {
|
|||
typicalLogEntryRate: record.typical[0],
|
||||
})
|
||||
),
|
||||
averageActualLogEntryRate: timestampDataSetBucket.filter_model_plot.average_actual.value,
|
||||
dataSetId: timestampDataSetBucket.key.data_set,
|
||||
averageActualLogEntryRate:
|
||||
timestampPartitionBucket.filter_model_plot.average_actual.value || 0,
|
||||
maximumAnomalyScore:
|
||||
timestampPartitionBucket.filter_records.maximum_record_score.value || 0,
|
||||
numberOfLogEntries: timestampPartitionBucket.filter_model_plot.sum_actual.value || 0,
|
||||
partitionId: timestampPartitionBucket.key.partition,
|
||||
};
|
||||
if (
|
||||
previousHistogramBucket &&
|
||||
previousHistogramBucket.startTime === timestampDataSetBucket.key.timestamp
|
||||
previousHistogramBucket.startTime === timestampPartitionBucket.key.timestamp
|
||||
) {
|
||||
return [
|
||||
...histogramBuckets.slice(0, -1),
|
||||
{
|
||||
...previousHistogramBucket,
|
||||
dataSets: [...previousHistogramBucket.dataSets, dataSet],
|
||||
partitions: [...previousHistogramBucket.partitions, partition],
|
||||
},
|
||||
];
|
||||
} else {
|
||||
return [
|
||||
...histogramBuckets,
|
||||
{
|
||||
dataSets: [dataSet],
|
||||
startTime: timestampDataSetBucket.key.timestamp,
|
||||
partitions: [partition],
|
||||
startTime: timestampPartitionBucket.key.timestamp,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@ export const createLogEntryRateQuery = (
|
|||
endTime: number,
|
||||
bucketDuration: number,
|
||||
size: number,
|
||||
afterKey?: CompositeTimestampDataSetKey
|
||||
afterKey?: CompositeTimestampPartitionKey
|
||||
) => ({
|
||||
allowNoIndices: true,
|
||||
body: {
|
||||
|
@ -45,7 +45,7 @@ export const createLogEntryRateQuery = (
|
|||
},
|
||||
},
|
||||
aggs: {
|
||||
timestamp_data_set_buckets: {
|
||||
timestamp_partition_buckets: {
|
||||
composite: {
|
||||
after: afterKey,
|
||||
size,
|
||||
|
@ -60,7 +60,7 @@ export const createLogEntryRateQuery = (
|
|||
},
|
||||
},
|
||||
{
|
||||
data_set: {
|
||||
partition: {
|
||||
terms: {
|
||||
field: 'partition_field_value',
|
||||
order: 'asc',
|
||||
|
@ -82,6 +82,11 @@ export const createLogEntryRateQuery = (
|
|||
field: 'actual',
|
||||
},
|
||||
},
|
||||
sum_actual: {
|
||||
sum: {
|
||||
field: 'actual',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
filter_records: {
|
||||
|
@ -91,6 +96,11 @@ export const createLogEntryRateQuery = (
|
|||
},
|
||||
},
|
||||
aggs: {
|
||||
maximum_record_score: {
|
||||
max: {
|
||||
field: 'record_score',
|
||||
},
|
||||
},
|
||||
top_hits_record: {
|
||||
top_hits: {
|
||||
_source: Object.keys(logRateMlRecordRT.props),
|
||||
|
@ -124,20 +134,21 @@ const logRateMlRecordRT = rt.type({
|
|||
});
|
||||
|
||||
const metricAggregationRT = rt.type({
|
||||
value: rt.number,
|
||||
value: rt.union([rt.number, rt.null]),
|
||||
});
|
||||
|
||||
const compositeTimestampDataSetKeyRT = rt.type({
|
||||
data_set: rt.string,
|
||||
const compositeTimestampPartitionKeyRT = rt.type({
|
||||
partition: rt.string,
|
||||
timestamp: rt.number,
|
||||
});
|
||||
|
||||
export type CompositeTimestampDataSetKey = rt.TypeOf<typeof compositeTimestampDataSetKeyRT>;
|
||||
export type CompositeTimestampPartitionKey = rt.TypeOf<typeof compositeTimestampPartitionKeyRT>;
|
||||
|
||||
export const logRateModelPlotBucketRT = rt.type({
|
||||
key: compositeTimestampDataSetKeyRT,
|
||||
key: compositeTimestampPartitionKeyRT,
|
||||
filter_records: rt.type({
|
||||
doc_count: rt.number,
|
||||
maximum_record_score: metricAggregationRT,
|
||||
top_hits_record: rt.type({
|
||||
hits: rt.type({
|
||||
hits: rt.array(
|
||||
|
@ -151,6 +162,7 @@ export const logRateModelPlotBucketRT = rt.type({
|
|||
filter_model_plot: rt.type({
|
||||
doc_count: rt.number,
|
||||
average_actual: metricAggregationRT,
|
||||
sum_actual: metricAggregationRT,
|
||||
}),
|
||||
});
|
||||
|
||||
|
@ -158,12 +170,12 @@ export type LogRateModelPlotBucket = rt.TypeOf<typeof logRateModelPlotBucketRT>;
|
|||
|
||||
export const logRateModelPlotResponseRT = rt.type({
|
||||
aggregations: rt.type({
|
||||
timestamp_data_set_buckets: rt.intersection([
|
||||
timestamp_partition_buckets: rt.intersection([
|
||||
rt.type({
|
||||
buckets: rt.array(logRateModelPlotBucketRT),
|
||||
}),
|
||||
rt.partial({
|
||||
after_key: compositeTimestampDataSetKeyRT,
|
||||
after_key: compositeTimestampPartitionKeyRT,
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
|
|
|
@ -14,6 +14,7 @@ import {
|
|||
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
|
||||
getLogEntryRateRequestPayloadRT,
|
||||
getLogEntryRateSuccessReponsePayloadRT,
|
||||
GetLogEntryRateSuccessResponsePayload,
|
||||
} from '../../../../common/http_api/log_analysis';
|
||||
import { throwErrors } from '../../../../common/runtime_types';
|
||||
import { NoLogRateResultsIndexError } from '../../../lib/log_analysis';
|
||||
|
@ -52,9 +53,21 @@ export const initLogAnalysisGetLogEntryRateRoute = ({
|
|||
data: {
|
||||
bucketDuration: payload.data.bucketDuration,
|
||||
histogramBuckets: logEntryRateBuckets,
|
||||
totalNumberOfLogEntries: getTotalNumberOfLogEntries(logEntryRateBuckets),
|
||||
},
|
||||
})
|
||||
);
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const getTotalNumberOfLogEntries = (
|
||||
logEntryRateBuckets: GetLogEntryRateSuccessResponsePayload['data']['histogramBuckets']
|
||||
) => {
|
||||
return logEntryRateBuckets.reduce((sumNumberOfLogEntries, bucket) => {
|
||||
const sumPartitions = bucket.partitions.reduce((partitionsTotal, partition) => {
|
||||
return (partitionsTotal += partition.numberOfLogEntries);
|
||||
}, 0);
|
||||
return (sumNumberOfLogEntries += sumPartitions);
|
||||
}, 0);
|
||||
};
|
||||
|
|
|
@ -5246,8 +5246,6 @@
|
|||
"xpack.infra.logs.analysis.logRateSectionNoDataBody": "時間範囲を調整する必要があるかもしれません。",
|
||||
"xpack.infra.logs.analysis.logRateSectionNoDataTitle": "表示するデータがありません。",
|
||||
"xpack.infra.logs.analysis.logRateSectionTitle": "ログレート",
|
||||
"xpack.infra.logs.analysis.logRateSectionXaxisTitle": "時間",
|
||||
"xpack.infra.logs.analysis.logRateSectionYaxisTitle": "15 分ごとのログエントリー",
|
||||
"xpack.infra.logs.analysisPage.loadingMessage": "分析ジョブのステータスを確認中…",
|
||||
"xpack.infra.logs.analysisPage.unavailable.mlAppButton": "機械学習を開く",
|
||||
"xpack.infra.logs.analysisPage.unavailable.mlAppLink": "機械学習アプリ",
|
||||
|
|
|
@ -5248,8 +5248,6 @@
|
|||
"xpack.infra.logs.analysis.logRateSectionNoDataBody": "您可能想调整时间范围。",
|
||||
"xpack.infra.logs.analysis.logRateSectionNoDataTitle": "没有可显示的数据。",
|
||||
"xpack.infra.logs.analysis.logRateSectionTitle": "日志速率",
|
||||
"xpack.infra.logs.analysis.logRateSectionXaxisTitle": "时间",
|
||||
"xpack.infra.logs.analysis.logRateSectionYaxisTitle": "每 15 分钟日志条目数",
|
||||
"xpack.infra.logs.analysisPage.loadingMessage": "正在检查分析作业的状态......",
|
||||
"xpack.infra.logs.analysisPage.unavailable.mlAppButton": "打开 Machine Learning",
|
||||
"xpack.infra.logs.analysisPage.unavailable.mlAppLink": "Machine Learning 应用",
|
||||
|
|
|
@ -66,7 +66,7 @@ export default ({ getService }: FtrProviderContext) => {
|
|||
expect(logEntryRateBuckets.data.histogramBuckets).to.not.be.empty();
|
||||
expect(
|
||||
logEntryRateBuckets.data.histogramBuckets.some(bucket => {
|
||||
return bucket.dataSets.some(dataSet => dataSet.anomalies.length > 0);
|
||||
return bucket.partitions.some(partition => partition.anomalies.length > 0);
|
||||
})
|
||||
).to.be(true);
|
||||
});
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue