Backports the following commits to 7.x:
- [Logs UI] Add categories table to the categorization tab (#53004)

Parent: 05f6d7ba5d
Commit: fd3a8fe575

68 changed files with 2581 additions and 160 deletions
@@ -4,4 +4,6 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+export * from './log_entry_categories';
+export * from './log_entry_category_datasets';
 export * from './log_entry_rate';
@@ -0,0 +1,109 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import {
  badRequestErrorRT,
  forbiddenErrorRT,
  timeRangeRT,
  routeTimingMetadataRT,
} from '../../shared';

export const LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH =
  '/api/infra/log_analysis/results/log_entry_categories';

/**
 * request
 */

const logEntryCategoriesHistogramParametersRT = rt.type({
  id: rt.string,
  timeRange: timeRangeRT,
  bucketCount: rt.number,
});

export type LogEntryCategoriesHistogramParameters = rt.TypeOf<
  typeof logEntryCategoriesHistogramParametersRT
>;

export const getLogEntryCategoriesRequestPayloadRT = rt.type({
  data: rt.intersection([
    rt.type({
      // the number of categories to fetch
      categoryCount: rt.number,
      // the id of the source configuration
      sourceId: rt.string,
      // the time range to fetch the categories from
      timeRange: timeRangeRT,
      // a list of histograms to create
      histograms: rt.array(logEntryCategoriesHistogramParametersRT),
    }),
    rt.partial({
      // the datasets to filter for (optional, unfiltered if not present)
      datasets: rt.array(rt.string),
    }),
  ]),
});

export type GetLogEntryCategoriesRequestPayload = rt.TypeOf<
  typeof getLogEntryCategoriesRequestPayloadRT
>;

/**
 * response
 */

export const logEntryCategoryHistogramBucketRT = rt.type({
  startTime: rt.number,
  bucketDuration: rt.number,
  logEntryCount: rt.number,
});

export type LogEntryCategoryHistogramBucket = rt.TypeOf<typeof logEntryCategoryHistogramBucketRT>;

export const logEntryCategoryHistogramRT = rt.type({
  histogramId: rt.string,
  buckets: rt.array(logEntryCategoryHistogramBucketRT),
});

export type LogEntryCategoryHistogram = rt.TypeOf<typeof logEntryCategoryHistogramRT>;

export const logEntryCategoryRT = rt.type({
  categoryId: rt.number,
  datasets: rt.array(rt.string),
  histograms: rt.array(logEntryCategoryHistogramRT),
  logEntryCount: rt.number,
  maximumAnomalyScore: rt.number,
  regularExpression: rt.string,
});

export type LogEntryCategory = rt.TypeOf<typeof logEntryCategoryRT>;

export const getLogEntryCategoriesSuccessReponsePayloadRT = rt.intersection([
  rt.type({
    data: rt.type({
      categories: rt.array(logEntryCategoryRT),
    }),
  }),
  rt.partial({
    timing: routeTimingMetadataRT,
  }),
]);

export type GetLogEntryCategoriesSuccessResponsePayload = rt.TypeOf<
  typeof getLogEntryCategoriesSuccessReponsePayloadRT
>;

export const getLogEntryCategoriesResponsePayloadRT = rt.union([
  getLogEntryCategoriesSuccessReponsePayloadRT,
  badRequestErrorRT,
  forbiddenErrorRT,
]);

export type GetLogEntryCategoriesReponsePayload = rt.TypeOf<
  typeof getLogEntryCategoriesResponsePayloadRT
>;
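For orientation, here is a minimal sketch of how one of these io-ts codecs is consumed; the payload values are invented, and the codec is assumed to be imported from the module above:

```ts
import { isRight } from 'fp-ts/lib/Either';

import { getLogEntryCategoriesRequestPayloadRT } from './log_entry_categories';

// Invented payload for illustration only.
const payload = {
  data: {
    categoryCount: 25,
    sourceId: 'default',
    timeRange: { startTime: 1576000000000, endTime: 1576086400000 },
    histograms: [
      {
        id: 'history',
        timeRange: { startTime: 1575913600000, endTime: 1576086400000 },
        bucketCount: 10,
      },
    ],
  },
};

// decode() returns an Either: Right on success, Left with validation errors.
const result = getLogEntryCategoriesRequestPayloadRT.decode(payload);

if (isRight(result)) {
  // result.right is now typed as GetLogEntryCategoriesRequestPayload
  console.log(result.right.data.categoryCount); // 25
}
```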
@@ -0,0 +1,63 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import {
  badRequestErrorRT,
  forbiddenErrorRT,
  timeRangeRT,
  routeTimingMetadataRT,
} from '../../shared';

export const LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH =
  '/api/infra/log_analysis/results/log_entry_category_datasets';

/**
 * request
 */

export const getLogEntryCategoryDatasetsRequestPayloadRT = rt.type({
  data: rt.type({
    // the id of the source configuration
    sourceId: rt.string,
    // the time range to fetch the category datasets from
    timeRange: timeRangeRT,
  }),
});

export type GetLogEntryCategoryDatasetsRequestPayload = rt.TypeOf<
  typeof getLogEntryCategoryDatasetsRequestPayloadRT
>;

/**
 * response
 */

export const getLogEntryCategoryDatasetsSuccessReponsePayloadRT = rt.intersection([
  rt.type({
    data: rt.type({
      datasets: rt.array(rt.string),
    }),
  }),
  rt.partial({
    timing: routeTimingMetadataRT,
  }),
]);

export type GetLogEntryCategoryDatasetsSuccessResponsePayload = rt.TypeOf<
  typeof getLogEntryCategoryDatasetsSuccessReponsePayloadRT
>;

export const getLogEntryCategoryDatasetsResponsePayloadRT = rt.union([
  getLogEntryCategoryDatasetsSuccessReponsePayloadRT,
  badRequestErrorRT,
  forbiddenErrorRT,
]);

export type GetLogEntryCategoryDatasetsReponsePayload = rt.TypeOf<
  typeof getLogEntryCategoryDatasetsResponsePayloadRT
>;
@@ -7,3 +7,4 @@
 export * from './errors';
 export * from './metric_statistics';
 export * from './time_range';
+export * from './timing';
x-pack/legacy/plugins/infra/common/http_api/shared/timing.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import { tracingSpanRT } from '../../performance_tracing';

export const routeTimingMetadataRT = rt.type({
  spans: rt.array(tracingSpanRT),
});
@@ -5,4 +5,7 @@
  */
 
 export * from './log_analysis';
 export * from './log_analysis_results';
+export * from './log_entry_rate_analysis';
+export * from './log_entry_categories_analysis';
+export * from './job_parameters';
@@ -4,14 +4,6 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import * as rt from 'io-ts';
-
-export const jobTypeRT = rt.keyof({
-  'log-entry-rate': null,
-});
-
-export type JobType = rt.TypeOf<typeof jobTypeRT>;
-
 // combines and abstracts job and datafeed status
 export type JobStatus =
   | 'unknown'
@@ -0,0 +1,46 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export const ML_SEVERITY_SCORES = {
  warning: 3,
  minor: 25,
  major: 50,
  critical: 75,
};

export type MLSeverityScoreCategories = keyof typeof ML_SEVERITY_SCORES;

export const ML_SEVERITY_COLORS = {
  critical: 'rgb(228, 72, 72)',
  major: 'rgb(229, 113, 0)',
  minor: 'rgb(255, 221, 0)',
  warning: 'rgb(125, 180, 226)',
};

export const getSeverityCategoryForScore = (
  score: number
): MLSeverityScoreCategories | undefined => {
  if (score >= ML_SEVERITY_SCORES.critical) {
    return 'critical';
  } else if (score >= ML_SEVERITY_SCORES.major) {
    return 'major';
  } else if (score >= ML_SEVERITY_SCORES.minor) {
    return 'minor';
  } else if (score >= ML_SEVERITY_SCORES.warning) {
    return 'warning';
  } else {
    // Category is too low to include
    return undefined;
  }
};

export const formatAnomalyScore = (score: number) => {
  return Math.round(score);
};

export const getFriendlyNameForPartitionId = (partitionId: string) => {
  return partitionId !== '' ? partitionId : 'unknown';
};
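A few worked examples of the helpers above (the scores are invented for illustration):

```ts
getSeverityCategoryForScore(80); // 'critical' (80 >= 75)
getSeverityCategoryForScore(60); // 'major'    (60 >= 50, < 75)
getSeverityCategoryForScore(30); // 'minor'    (30 >= 25, < 50)
getSeverityCategoryForScore(1);  // undefined  (below the 'warning' threshold of 3)

formatAnomalyScore(49.6); // 50

getFriendlyNameForPartitionId('');            // 'unknown'
getFriendlyNameForPartitionId('system.auth'); // 'system.auth'
```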
@@ -0,0 +1,17 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

export const logEntryCategoriesJobTypeRT = rt.keyof({
  'log-entry-categories-count': null,
});

export type LogEntryCategoriesJobType = rt.TypeOf<typeof logEntryCategoriesJobTypeRT>;

export const logEntryCategoriesJobTypes: LogEntryCategoriesJobType[] = [
  'log-entry-categories-count',
];
@@ -0,0 +1,15 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

export const logEntryRateJobTypeRT = rt.keyof({
  'log-entry-rate': null,
});

export type LogEntryRateJobType = rt.TypeOf<typeof logEntryRateJobTypeRT>;

export const logEntryRateJobTypes: LogEntryRateJobType[] = ['log-entry-rate'];
x-pack/legacy/plugins/infra/common/performance_tracing.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';
import uuid from 'uuid';

export const tracingSpanRT = rt.type({
  duration: rt.number,
  id: rt.string,
  name: rt.string,
  start: rt.number,
});

export type TracingSpan = rt.TypeOf<typeof tracingSpanRT>;

export type ActiveTrace = (endTime?: number) => TracingSpan;

export const startTracingSpan = (name: string): ActiveTrace => {
  const initialState: TracingSpan = {
    duration: Number.POSITIVE_INFINITY,
    id: uuid.v4(),
    name,
    start: Date.now(),
  };

  return (endTime: number = Date.now()) => ({
    ...initialState,
    duration: endTime - initialState.start,
  });
};
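A minimal usage sketch for startTracingSpan (the span name here is made up):

```ts
const finalizeSpan = startTracingSpan('fetch log entry categories');

// ...do the traced work...

const span = finalizeSpan(); // the end time defaults to Date.now()
// span is a TracingSpan: { duration, id, name: 'fetch log entry categories', start }
```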
@@ -4,11 +4,22 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import { Errors } from 'io-ts';
+import { fold } from 'fp-ts/lib/Either';
+import { identity } from 'fp-ts/lib/function';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { Errors, Type } from 'io-ts';
 import { failure } from 'io-ts/lib/PathReporter';
 
+type ErrorFactory = (message: string) => Error;
+
 export const createPlainError = (message: string) => new Error(message);
 
-export const throwErrors = (createError: (message: string) => Error) => (errors: Errors) => {
+export const throwErrors = (createError: ErrorFactory) => (errors: Errors) => {
   throw createError(failure(errors).join('\n'));
 };
+
+export const decodeOrThrow = <A, O, I>(
+  runtimeType: Type<A, O, I>,
+  createError: ErrorFactory = createPlainError
+) => (inputValue: I) =>
+  pipe(runtimeType.decode(inputValue), fold(throwErrors(createError), identity));
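A usage sketch for decodeOrThrow with a throwaway codec:

```ts
import * as rt from 'io-ts';

const numberArrayRT = rt.array(rt.number);

// decodeOrThrow is curried: pass the runtime type first, then the raw value.
const values = decodeOrThrow(numberArrayRT)([1, 2, 3]); // typed as number[]

// An invalid value throws a plain Error carrying the io-ts failure report:
decodeOrThrow(numberArrayRT)('not an array'); // throws
```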
@@ -27,6 +27,7 @@ import {
+  KibanaContextProvider,
 } from '../../../../../../src/plugins/kibana_react/public';
 import { ROOT_ELEMENT_ID } from '../app';
 
 // NP_TODO: Type plugins
 export async function startApp(libs: InfraFrontendLibs, core: CoreStart, plugins: any) {
   const history = createHashHistory();
@@ -5,3 +5,4 @@
  */
 
 export * from './log_analysis_job_problem_indicator';
+export * from './recreate_job_button';
@@ -17,13 +17,22 @@ export const LogAnalysisJobProblemIndicator: React.FC<{
   onRecreateMlJobForReconfiguration: () => void;
   onRecreateMlJobForUpdate: () => void;
 }> = ({ jobStatus, setupStatus, onRecreateMlJobForReconfiguration, onRecreateMlJobForUpdate }) => {
-  if (jobStatus === 'stopped') {
+  if (isStopped(jobStatus)) {
     return <JobStoppedCallout />;
-  } else if (setupStatus === 'skippedButUpdatable') {
+  } else if (isUpdatable(setupStatus)) {
     return <JobDefinitionOutdatedCallout onRecreateMlJob={onRecreateMlJobForUpdate} />;
-  } else if (setupStatus === 'skippedButReconfigurable') {
+  } else if (isReconfigurable(setupStatus)) {
     return <JobConfigurationOutdatedCallout onRecreateMlJob={onRecreateMlJobForReconfiguration} />;
   }
 
   return null; // no problem to indicate
 };
+
+const isStopped = (jobStatus: JobStatus) => jobStatus === 'stopped';
+
+const isUpdatable = (setupStatus: SetupStatus) => setupStatus === 'skippedButUpdatable';
+
+const isReconfigurable = (setupStatus: SetupStatus) => setupStatus === 'skippedButReconfigurable';
+
+export const jobHasProblem = (jobStatus: JobStatus, setupStatus: SetupStatus) =>
+  isStopped(jobStatus) || isUpdatable(setupStatus) || isReconfigurable(setupStatus);
@@ -0,0 +1,18 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiButton, PropsOf } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import React from 'react';

export const RecreateJobButton: React.FunctionComponent<PropsOf<typeof EuiButton>> = props => (
  <EuiButton {...props}>
    <FormattedMessage
      id="xpack.infra.logs.analysis.recreateJobButtonLabel"
      defaultMessage="Recreate ML job"
    />
  </EuiButton>
);
@@ -5,8 +5,9 @@
  */
 
 import React from 'react';
-import { EuiCallOut, EuiButton } from '@elastic/eui';
-import { FormattedMessage } from '@kbn/i18n/react';
+import { EuiCallOut } from '@elastic/eui';
+
+import { RecreateJobButton } from './recreate_job_button';
 
 export const RecreateJobCallout: React.FC<{
   onRecreateMlJob: () => void;
@@ -14,11 +15,6 @@ export const RecreateJobCallout: React.FC<{
 }> = ({ children, onRecreateMlJob, title }) => (
   <EuiCallOut color="warning" iconType="alert" title={title}>
     <p>{children}</p>
-    <EuiButton color="warning" onClick={onRecreateMlJob}>
-      <FormattedMessage
-        id="xpack.infra.logs.analysis.recreateJobButtonLabel"
-        defaultMessage="Recreate ML job"
-      />
-    </EuiButton>
+    <RecreateJobButton color="warning" onClick={onRecreateMlJob} />
   </EuiCallOut>
 );
@@ -0,0 +1,27 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiCallOut } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import React from 'react';

export const FirstUseCallout = () => {
  return (
    <EuiCallOut
      color="success"
      title={i18n.translate('xpack.infra.logs.analysis.onboardingSuccessTitle', {
        defaultMessage: 'Success!',
      })}
    >
      <p>
        {i18n.translate('xpack.infra.logs.analysis.onboardingSuccessContent', {
          defaultMessage:
            'Please allow a few minutes for our machine learning robots to begin collecting data.',
        })}
      </p>
    </EuiCallOut>
  );
};
@@ -5,3 +5,4 @@
  */
 
 export * from './analyze_in_ml_button';
+export * from './first_use_callout';
@@ -41,6 +41,7 @@ export type FetchJobStatusRequestPayload = rt.TypeOf<typeof fetchJobStatusReques
 const datafeedStateRT = rt.keyof({
   started: null,
   stopped: null,
+  stopping: null,
   '': null,
 });
 
@@ -359,11 +359,7 @@ const isJobRevisionCurrent = (jobId: string, currentRevision: number) => (
   jobSummaries
     .filter(jobSummary => jobSummary.id === jobId)
     .every(
-      jobSummary =>
-        jobSummary.fullJob &&
-        jobSummary.fullJob.custom_settings &&
-        jobSummary.fullJob.custom_settings.job_revision &&
-        jobSummary.fullJob.custom_settings.job_revision >= currentRevision
+      jobSummary => (jobSummary?.fullJob?.custom_settings?.job_revision ?? 0) >= currentRevision
     );
 
 const isJobConfigurationConsistent = (
@@ -8,6 +8,8 @@ import {
   bucketSpan,
   categoriesMessageField,
   getJobId,
+  LogEntryCategoriesJobType,
+  logEntryCategoriesJobTypes,
   partitionField,
 } from '../../../../common/log_analysis';
 
@@ -21,22 +23,19 @@ import { callGetMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml
 import { callSetupMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_setup_module_api';
 import { callValidateIndicesAPI } from '../../../containers/logs/log_analysis/api/validate_indices';
 
-const jobTypes = ['log-entry-categories-count'];
 const moduleId = 'logs_ui_categories';
 
-type JobType = typeof jobTypes[0];
-
 const getJobIds = (spaceId: string, sourceId: string) =>
-  jobTypes.reduce(
+  logEntryCategoriesJobTypes.reduce(
     (accumulatedJobIds, jobType) => ({
       ...accumulatedJobIds,
       [jobType]: getJobId(spaceId, sourceId, jobType),
     }),
-    {} as Record<JobType, string>
+    {} as Record<LogEntryCategoriesJobType, string>
   );
 
 const getJobSummary = async (spaceId: string, sourceId: string) => {
-  const response = await callJobsSummaryAPI(spaceId, sourceId, jobTypes);
+  const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryCategoriesJobTypes);
   const jobIds = Object.values(getJobIds(spaceId, sourceId));
 
   return response.filter(jobSummary => jobIds.includes(jobSummary.id));
@@ -83,7 +82,7 @@ const setUpModule = async (
 };
 
 const cleanUpModule = async (spaceId: string, sourceId: string) => {
-  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, jobTypes);
+  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes);
 };
 
 const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceConfiguration) => {
@@ -103,9 +102,9 @@ const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceCon
   ]);
 };
 
-export const logEntryCategoriesModule: ModuleDescriptor<JobType> = {
+export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobType> = {
   moduleId,
-  jobTypes,
+  jobTypes: logEntryCategoriesJobTypes,
   bucketSpan,
   getJobIds,
   getJobSummary,
@@ -14,6 +14,7 @@ import {
   MlUnavailablePrompt,
 } from '../../../components/logging/log_analysis_setup';
 import { LogAnalysisCapabilities } from '../../../containers/logs/log_analysis';
+import { LogEntryCategoriesResultsContent } from './page_results_content';
 import { LogEntryCategoriesSetupContent } from './page_setup_content';
 import { useLogEntryCategoriesModuleContext } from './use_log_entry_categories_module';
 
@@ -44,8 +45,7 @@ export const LogEntryCategoriesPageContent = () => {
   } else if (setupStatus === 'unknown') {
     return <LogAnalysisSetupStatusUnknownPrompt retry={fetchJobStatus} />;
   } else if (isSetupStatusWithResults(setupStatus)) {
-    return null;
-    // return <LogEntryCategoriesResultsContent />;
+    return <LogEntryCategoriesResultsContent />;
   } else {
     return <LogEntryCategoriesSetupContent />;
   }
@@ -0,0 +1,240 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import datemath from '@elastic/datemath';
import { EuiFlexGroup, EuiFlexItem, EuiPage, EuiPanel, EuiSuperDatePicker } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import moment from 'moment';
import React, { useCallback, useEffect, useMemo, useState } from 'react';

import { useKibana } from '../../../../../../../../src/plugins/kibana_react/public';
import euiStyled from '../../../../../../common/eui_styled_components';
import { TimeRange } from '../../../../common/http_api/shared/time_range';
import {
  LogAnalysisJobProblemIndicator,
  jobHasProblem,
} from '../../../components/logging/log_analysis_job_status';
import { FirstUseCallout } from '../../../components/logging/log_analysis_results';
import { useInterval } from '../../../hooks/use_interval';
import { useTrackPageview } from '../../../hooks/use_track_metric';
import { TopCategoriesSection } from './sections/top_categories';
import { useLogEntryCategoriesModuleContext } from './use_log_entry_categories_module';
import { useLogEntryCategoriesResults } from './use_log_entry_categories_results';
import {
  StringTimeRange,
  useLogEntryCategoriesResultsUrlState,
} from './use_log_entry_categories_results_url_state';

const JOB_STATUS_POLLING_INTERVAL = 30000;

export const LogEntryCategoriesResultsContent: React.FunctionComponent = () => {
  useTrackPageview({ app: 'infra_logs', path: 'log_entry_categories_results' });
  useTrackPageview({ app: 'infra_logs', path: 'log_entry_categories_results', delay: 15000 });

  const {
    fetchJobStatus,
    jobStatus,
    setupStatus,
    viewSetupForReconfiguration,
    viewSetupForUpdate,
    jobIds,
    sourceConfiguration: { sourceId },
  } = useLogEntryCategoriesModuleContext();

  const {
    timeRange: selectedTimeRange,
    setTimeRange: setSelectedTimeRange,
    autoRefresh,
    setAutoRefresh,
  } = useLogEntryCategoriesResultsUrlState();

  const [categoryQueryTimeRange, setCategoryQueryTimeRange] = useState<{
    lastChangedTime: number;
    timeRange: TimeRange;
  }>(() => ({
    lastChangedTime: Date.now(),
    timeRange: stringToNumericTimeRange(selectedTimeRange),
  }));

  const [categoryQueryDatasets, setCategoryQueryDatasets] = useState<string[]>([]);

  const { services } = useKibana<{}>();

  const showLoadDataErrorNotification = useCallback(
    (error: Error) => {
      // eslint-disable-next-line no-unused-expressions
      services.notifications?.toasts.addError(error, {
        title: loadDataErrorTitle,
      });
    },
    [services.notifications]
  );

  const {
    getLogEntryCategoryDatasets,
    getTopLogEntryCategories,
    isLoadingLogEntryCategoryDatasets,
    isLoadingTopLogEntryCategories,
    logEntryCategoryDatasets,
    topLogEntryCategories,
  } = useLogEntryCategoriesResults({
    categoriesCount: 25,
    endTime: categoryQueryTimeRange.timeRange.endTime,
    filteredDatasets: categoryQueryDatasets,
    onGetTopLogEntryCategoriesError: showLoadDataErrorNotification,
    sourceId,
    startTime: categoryQueryTimeRange.timeRange.startTime,
  });

  const handleQueryTimeRangeChange = useCallback(
    ({ start: startTime, end: endTime }: { start: string; end: string }) => {
      setCategoryQueryTimeRange(previousQueryParameters => ({
        ...previousQueryParameters,
        timeRange: stringToNumericTimeRange({ startTime, endTime }),
        lastChangedTime: Date.now(),
      }));
    },
    [setCategoryQueryTimeRange]
  );

  const handleSelectedTimeRangeChange = useCallback(
    (selectedTime: { start: string; end: string; isInvalid: boolean }) => {
      if (selectedTime.isInvalid) {
        return;
      }
      setSelectedTimeRange({
        startTime: selectedTime.start,
        endTime: selectedTime.end,
      });
      handleQueryTimeRangeChange(selectedTime);
    },
    [setSelectedTimeRange, handleQueryTimeRangeChange]
  );

  const handleAutoRefreshChange = useCallback(
    ({ isPaused, refreshInterval: interval }: { isPaused: boolean; refreshInterval: number }) => {
      setAutoRefresh({
        isPaused,
        interval,
      });
    },
    [setAutoRefresh]
  );

  const isFirstUse = useMemo(() => setupStatus === 'hiddenAfterSuccess', [setupStatus]);

  const hasResults = useMemo(() => topLogEntryCategories.length > 0, [
    topLogEntryCategories.length,
  ]);

  useEffect(() => {
    getTopLogEntryCategories();
  }, [getTopLogEntryCategories, categoryQueryDatasets, categoryQueryTimeRange.lastChangedTime]);

  useEffect(() => {
    getLogEntryCategoryDatasets();
  }, [getLogEntryCategoryDatasets, categoryQueryTimeRange.lastChangedTime]);

  useInterval(() => {
    fetchJobStatus();
  }, JOB_STATUS_POLLING_INTERVAL);

  useInterval(
    () => {
      handleQueryTimeRangeChange({
        start: selectedTimeRange.startTime,
        end: selectedTimeRange.endTime,
      });
    },
    autoRefresh.isPaused ? null : autoRefresh.interval
  );

  return (
    <ResultsContentPage>
      <EuiFlexGroup direction="column">
        <EuiFlexItem grow={false}>
          <EuiPanel paddingSize="l">
            <EuiFlexGroup justifyContent="spaceBetween" alignItems="center">
              <EuiFlexItem />
              <EuiFlexItem grow={false}>
                <EuiSuperDatePicker
                  start={selectedTimeRange.startTime}
                  end={selectedTimeRange.endTime}
                  onTimeChange={handleSelectedTimeRangeChange}
                  isPaused={autoRefresh.isPaused}
                  refreshInterval={autoRefresh.interval}
                  onRefreshChange={handleAutoRefreshChange}
                />
              </EuiFlexItem>
            </EuiFlexGroup>
          </EuiPanel>
        </EuiFlexItem>
        {jobHasProblem(jobStatus['log-entry-categories-count'], setupStatus) ? (
          <EuiFlexItem grow={false}>
            <LogAnalysisJobProblemIndicator
              jobStatus={jobStatus['log-entry-categories-count']}
              onRecreateMlJobForReconfiguration={viewSetupForReconfiguration}
              onRecreateMlJobForUpdate={viewSetupForUpdate}
              setupStatus={setupStatus}
            />
          </EuiFlexItem>
        ) : null}
        {isFirstUse && !hasResults ? (
          <EuiFlexItem grow={false}>
            <FirstUseCallout />
          </EuiFlexItem>
        ) : null}
        <EuiFlexItem grow={false}>
          <EuiPanel paddingSize="l">
            <TopCategoriesSection
              availableDatasets={logEntryCategoryDatasets}
              isLoadingDatasets={isLoadingLogEntryCategoryDatasets}
              isLoadingTopCategories={isLoadingTopLogEntryCategories}
              jobId={jobIds['log-entry-categories-count']}
              onChangeDatasetSelection={setCategoryQueryDatasets}
              onRequestRecreateMlJob={viewSetupForReconfiguration}
              selectedDatasets={categoryQueryDatasets}
              timeRange={categoryQueryTimeRange.timeRange}
              topCategories={topLogEntryCategories}
            />
          </EuiPanel>
        </EuiFlexItem>
      </EuiFlexGroup>
    </ResultsContentPage>
  );
};

const stringToNumericTimeRange = (timeRange: StringTimeRange): TimeRange => ({
  startTime: moment(
    datemath.parse(timeRange.startTime, {
      momentInstance: moment,
    })
  ).valueOf(),
  endTime: moment(
    datemath.parse(timeRange.endTime, {
      momentInstance: moment,
      roundUp: true,
    })
  ).valueOf(),
});

// This is needed due to the flex-basis: 100% !important; rule that
// kicks in on small screens via media queries breaking when using direction="column"
export const ResultsContentPage = euiStyled(EuiPage)`
  flex: 1 0 0%;
  flex-direction: column;

  .euiFlexGroup--responsive > .euiFlexItem {
    flex-basis: auto !important;
  }
`;

const loadDataErrorTitle = i18n.translate(
  'xpack.infra.logs.logEntryCategories.loadDataErrorTitle',
  {
    defaultMessage: 'Failed to load category data',
  }
);
@@ -0,0 +1,31 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiHealth } from '@elastic/eui';
import React, { useMemo } from 'react';

import {
  formatAnomalyScore,
  getSeverityCategoryForScore,
  ML_SEVERITY_COLORS,
} from '../../../../../../common/log_analysis';

export const AnomalySeverityIndicator: React.FunctionComponent<{
  anomalyScore: number;
}> = ({ anomalyScore }) => {
  const severityColor = useMemo(() => getColorForAnomalyScore(anomalyScore), [anomalyScore]);

  return <EuiHealth color={severityColor}>{formatAnomalyScore(anomalyScore)}</EuiHealth>;
};

const getColorForAnomalyScore = (anomalyScore: number) => {
  const severityCategory = getSeverityCategoryForScore(anomalyScore);

  if (severityCategory != null && severityCategory in ML_SEVERITY_COLORS) {
    return ML_SEVERITY_COLORS[severityCategory];
  } else {
    return 'subdued';
  }
};
@@ -0,0 +1,65 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { i18n } from '@kbn/i18n';
import React, { memo } from 'react';

import euiStyled from '../../../../../../../../common/eui_styled_components';

export const RegularExpressionRepresentation: React.FunctionComponent<{
  maximumSegmentCount?: number;
  regularExpression: string;
}> = memo(({ maximumSegmentCount = 30, regularExpression }) => {
  const segments = regularExpression.split(collapsedRegularExpressionCharacters);

  return (
    <CategoryPattern>
      {segments
        .slice(0, maximumSegmentCount)
        .map((segment, segmentIndex) => [
          segmentIndex > 0 ? (
            <CategoryPatternWildcard key={`wildcard-${segmentIndex}`}>⁕</CategoryPatternWildcard>
          ) : null,
          <CategoryPatternSegment key={`segment-${segmentIndex}`}>
            {segment.replace(escapedRegularExpressionCharacters, '$1')}
          </CategoryPatternSegment>,
        ])}
      {segments.length > maximumSegmentCount ? (
        <CategoryPatternWildcard
          title={i18n.translate(
            'xpack.infra.logs.logEntryCategories.truncatedPatternSegmentDescription',
            {
              defaultMessage:
                '{extraSegmentCount, plural, one {one more segment} other {# more segments}}',
              values: {
                extraSegmentCount: segments.length - maximumSegmentCount,
              },
            }
          )}
        >
          …
        </CategoryPatternWildcard>
      ) : null}
    </CategoryPattern>
  );
});

const CategoryPattern = euiStyled.span`
  font-family: ${props => props.theme.eui.euiCodeFontFamily};
  word-break: break-all;
`;

const CategoryPatternWildcard = euiStyled.span`
  color: ${props => props.theme.eui.euiColorMediumShade};
`;

const CategoryPatternSegment = euiStyled.span`
  font-weight: bold;
`;

const collapsedRegularExpressionCharacters = /\.[+*]\??/g;

const escapedRegularExpressionCharacters = /\\([\\^$*+?.()\[\]])/g;
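To see how the two regular expressions above cooperate, consider a hypothetical ML category pattern (the pattern itself is invented):

```ts
const categoryRegex = '.*?Starting.+?service\\[main\\].*';

// collapsedRegularExpressionCharacters matches the '.*', '.+', '.*?' and '.+?'
// wildcards, so splitting on it yields the literal segments:
categoryRegex.split(collapsedRegularExpressionCharacters);
// -> ['', 'Starting', 'service\\[main\\]', '']

// escapedRegularExpressionCharacters then strips the escaping backslashes
// before each segment is rendered in bold, with a dimmed '⁕' between segments:
'service\\[main\\]'.replace(escapedRegularExpressionCharacters, '$1');
// -> 'service[main]'
```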
@@ -0,0 +1,20 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import React from 'react';

import { getFriendlyNameForPartitionId } from '../../../../../../common/log_analysis';

export const DatasetsList: React.FunctionComponent<{
  datasets: string[];
}> = ({ datasets }) => (
  <ul>
    {datasets.sort().map(dataset => {
      const datasetLabel = getFriendlyNameForPartitionId(dataset);
      return <li key={datasetLabel}>{datasetLabel}</li>;
    })}
  </ul>
);
@@ -0,0 +1,60 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiComboBox, EuiComboBoxOptionProps } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import React, { useCallback, useMemo } from 'react';

import { getFriendlyNameForPartitionId } from '../../../../../../common/log_analysis';

type DatasetOptionProps = EuiComboBoxOptionProps<string>;

export const DatasetsSelector: React.FunctionComponent<{
  availableDatasets: string[];
  isLoading?: boolean;
  onChangeDatasetSelection: (datasets: string[]) => void;
  selectedDatasets: string[];
}> = ({ availableDatasets, isLoading = false, onChangeDatasetSelection, selectedDatasets }) => {
  const options = useMemo<DatasetOptionProps[]>(
    () =>
      availableDatasets.map(dataset => ({
        value: dataset,
        label: getFriendlyNameForPartitionId(dataset),
      })),
    [availableDatasets]
  );

  const selectedOptions = useMemo(
    () => options.filter(({ value }) => value != null && selectedDatasets.includes(value)),
    [options, selectedDatasets]
  );

  const handleChange = useCallback(
    (newSelectedOptions: DatasetOptionProps[]) =>
      onChangeDatasetSelection(newSelectedOptions.map(({ value }) => value).filter(isDefined)),
    [onChangeDatasetSelection]
  );

  return (
    <EuiComboBox
      aria-label={datasetFilterPlaceholder}
      isLoading={isLoading}
      onChange={handleChange}
      options={options}
      placeholder={datasetFilterPlaceholder}
      selectedOptions={selectedOptions}
    />
  );
};

const datasetFilterPlaceholder = i18n.translate(
  'xpack.infra.logs.logEntryCategories.datasetFilterPlaceholder',
  {
    defaultMessage: 'Filter by datasets',
  }
);

const isDefined = <Value extends any>(value: Value): value is NonNullable<Value> => value != null;
@@ -0,0 +1,7 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './top_categories_section';
@@ -0,0 +1,50 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import React, { useMemo } from 'react';

import { LogEntryCategoryHistogram } from '../../../../../../common/http_api/log_analysis';
import { TimeRange } from '../../../../../../common/http_api/shared';
import { SingleMetricComparison } from './single_metric_comparison';
import { SingleMetricSparkline } from './single_metric_sparkline';

export const LogEntryCountSparkline: React.FunctionComponent<{
  currentCount: number;
  histograms: LogEntryCategoryHistogram[];
  timeRange: TimeRange;
}> = ({ currentCount, histograms, timeRange }) => {
  const metric = useMemo(
    () =>
      histograms
        .find(histogram => histogram.histogramId === 'history')
        ?.buckets?.map(({ startTime: timestamp, logEntryCount: value }) => ({
          timestamp,
          value,
        })) ?? [],
    [histograms]
  );
  const referenceCount = useMemo(
    () =>
      histograms.find(histogram => histogram.histogramId === 'reference')?.buckets?.[0]
        ?.logEntryCount ?? 0,
    [histograms]
  );

  const overallTimeRange = useMemo(
    () => ({
      endTime: timeRange.endTime,
      startTime: timeRange.startTime - (timeRange.endTime - timeRange.startTime),
    }),
    [timeRange.endTime, timeRange.startTime]
  );

  return (
    <>
      <SingleMetricSparkline metric={metric} timeRange={overallTimeRange} />
      <SingleMetricComparison previousValue={referenceCount} currentValue={currentCount} />
    </>
  );
};
@@ -0,0 +1,57 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiIcon, EuiTextColor } from '@elastic/eui';
import numeral from '@elastic/numeral';
import { i18n } from '@kbn/i18n';
import React from 'react';

import euiStyled from '../../../../../../../../common/eui_styled_components';

export const SingleMetricComparison: React.FunctionComponent<{
  currentValue: number;
  previousValue: number;
}> = ({ currentValue, previousValue }) => {
  const changeFactor = currentValue / previousValue - 1;

  if (changeFactor < 0) {
    return (
      <NoWrapSpan>
        <EuiIcon type="sortDown" color="danger" />
        <EuiTextColor color="danger">{formatPercentage(changeFactor)}</EuiTextColor>
      </NoWrapSpan>
    );
  } else if (changeFactor > 0 && Number.isFinite(changeFactor)) {
    return (
      <NoWrapSpan>
        <EuiIcon type="sortUp" color="success" />
        <EuiTextColor color="secondary">{formatPercentage(changeFactor)}</EuiTextColor>
      </NoWrapSpan>
    );
  } else if (changeFactor > 0 && !Number.isFinite(changeFactor)) {
    return (
      <NoWrapSpan>
        <EuiIcon type="sortUp" color="success" />
        <EuiTextColor color="secondary">{newCategoryTrendLabel}</EuiTextColor>
      </NoWrapSpan>
    );
  }

  return null;
};

const formatPercentage = (value: number) => numeral(value).format('+0,0 %');

const newCategoryTrendLabel = i18n.translate(
  'xpack.infra.logs.logEntryCategories.newCategoryTrendLabel',
  {
    defaultMessage: 'new',
  }
);

const NoWrapSpan = euiStyled.span`
  white-space: nowrap;
`;
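The changeFactor branches above map onto concrete values like this (the numbers are invented):

```ts
// changeFactor = currentValue / previousValue - 1:
//   75 / 100 - 1 = -0.25      -> red down arrow, '-25 %'
//  150 / 100 - 1 = +0.5       -> green up arrow, '+50 %'
//   42 /   0 - 1 = +Infinity  -> green up arrow, 'new' (a brand-new category)
//  100 / 100 - 1 = 0          -> renders null (no change to indicate)
```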
@@ -0,0 +1,65 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import React, { useMemo } from 'react';
import { Chart, Settings, AreaSeries } from '@elastic/charts';
import {
  EUI_CHARTS_THEME_LIGHT,
  EUI_SPARKLINE_THEME_PARTIAL,
  EUI_CHARTS_THEME_DARK,
} from '@elastic/eui/dist/eui_charts_theme';

import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
import { TimeRange } from '../../../../../../common/http_api/shared';

interface TimeSeriesPoint {
  timestamp: number;
  value: number;
}

const timestampAccessor = 'timestamp';
const valueAccessor = ['value'];
const sparklineSize = {
  height: 20,
  width: 100,
};

export const SingleMetricSparkline: React.FunctionComponent<{
  metric: TimeSeriesPoint[];
  timeRange: TimeRange;
}> = ({ metric, timeRange }) => {
  const [isDarkMode] = useKibanaUiSetting('theme:darkMode');

  const theme = useMemo(
    () => [
      // localThemeOverride,
      EUI_SPARKLINE_THEME_PARTIAL,
      isDarkMode ? EUI_CHARTS_THEME_DARK.theme : EUI_CHARTS_THEME_LIGHT.theme,
    ],
    [isDarkMode]
  );

  const xDomain = useMemo(
    () => ({
      max: timeRange.endTime,
      min: timeRange.startTime,
    }),
    [timeRange]
  );

  return (
    <Chart size={sparklineSize}>
      <Settings showLegend={false} theme={theme} tooltip="none" xDomain={xDomain} />
      <AreaSeries
        data={metric}
        id="metric"
        xAccessor={timestampAccessor}
        xScaleType="time"
        yAccessors={valueAccessor}
      />
    </Chart>
  );
};
@@ -0,0 +1,82 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiFlexGroup, EuiFlexItem, EuiLoadingSpinner, EuiSpacer, EuiTitle } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import React from 'react';

import { LogEntryCategory } from '../../../../../../common/http_api/log_analysis';
import { TimeRange } from '../../../../../../common/http_api/shared';
import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
import { RecreateJobButton } from '../../../../../components/logging/log_analysis_job_status';
import { AnalyzeInMlButton } from '../../../../../components/logging/log_analysis_results';
import { DatasetsSelector } from './datasets_selector';
import { TopCategoriesTable } from './top_categories_table';

export const TopCategoriesSection: React.FunctionComponent<{
  availableDatasets: string[];
  isLoadingDatasets?: boolean;
  isLoadingTopCategories?: boolean;
  jobId: string;
  onChangeDatasetSelection: (datasets: string[]) => void;
  onRequestRecreateMlJob: () => void;
  selectedDatasets: string[];
  timeRange: TimeRange;
  topCategories: LogEntryCategory[];
}> = ({
  availableDatasets,
  isLoadingDatasets = false,
  isLoadingTopCategories = false,
  jobId,
  onChangeDatasetSelection,
  onRequestRecreateMlJob,
  selectedDatasets,
  timeRange,
  topCategories,
}) => {
  return (
    <>
      <EuiFlexGroup alignItems="center" gutterSize="s">
        <EuiFlexItem>
          <EuiTitle size="m" aria-label={title}>
            <h1>{title}</h1>
          </EuiTitle>
        </EuiFlexItem>
        <EuiFlexItem grow={false}>
          <RecreateJobButton onClick={onRequestRecreateMlJob} size="s" />
        </EuiFlexItem>
        <EuiFlexItem grow={false}>
          <AnalyzeInMlButton jobId={jobId} timeRange={timeRange} />
        </EuiFlexItem>
      </EuiFlexGroup>
      <EuiSpacer size="m" />
      <DatasetsSelector
        availableDatasets={availableDatasets}
        isLoading={isLoadingDatasets}
        onChangeDatasetSelection={onChangeDatasetSelection}
        selectedDatasets={selectedDatasets}
      />
      <EuiSpacer />
      <LoadingOverlayWrapper
        isLoading={isLoadingTopCategories}
        loadingChildren={<LoadingOverlayContent />}
      >
        <TopCategoriesTable timeRange={timeRange} topCategories={topCategories} />
      </LoadingOverlayWrapper>
    </>
  );
};

const title = i18n.translate('xpack.infra.logs.logEntryCategories.topCategoriesSectionTitle', {
  defaultMessage: 'Log message categories',
});

const loadingAriaLabel = i18n.translate(
  'xpack.infra.logs.logEntryCategories.topCategoriesSectionLoadingAriaLabel',
  { defaultMessage: 'Loading message categories' }
);

const LoadingOverlayContent = () => <EuiLoadingSpinner size="xl" aria-label={loadingAriaLabel} />;
@@ -0,0 +1,106 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiBasicTable, EuiBasicTableColumn } from '@elastic/eui';
import numeral from '@elastic/numeral';
import { i18n } from '@kbn/i18n';
import React, { useMemo } from 'react';

import euiStyled from '../../../../../../../../common/eui_styled_components';
import {
  LogEntryCategory,
  LogEntryCategoryHistogram,
} from '../../../../../../common/http_api/log_analysis';
import { TimeRange } from '../../../../../../common/http_api/shared';
import { AnomalySeverityIndicator } from './anomaly_severity_indicator';
import { RegularExpressionRepresentation } from './category_expression';
import { DatasetsList } from './datasets_list';
import { LogEntryCountSparkline } from './log_entry_count_sparkline';

export const TopCategoriesTable = euiStyled(
  ({
    className,
    timeRange,
    topCategories,
  }: {
    className?: string;
    timeRange: TimeRange;
    topCategories: LogEntryCategory[];
  }) => {
    const columns = useMemo(() => createColumns(timeRange), [timeRange]);

    return (
      <EuiBasicTable
        columns={columns}
        items={topCategories}
        rowProps={{ className: `${className} euiTableRow--topAligned` }}
      />
    );
  }
)`
  &.euiTableRow--topAligned .euiTableRowCell {
    vertical-align: top;
  }
`;

const createColumns = (timeRange: TimeRange): Array<EuiBasicTableColumn<LogEntryCategory>> => [
  {
    align: 'right',
    field: 'logEntryCount',
    name: i18n.translate('xpack.infra.logs.logEntryCategories.countColumnTitle', {
      defaultMessage: 'Message count',
    }),
    render: (logEntryCount: number) => {
      return numeral(logEntryCount).format('0,0');
    },
    width: '120px',
  },
  {
    field: 'histograms',
    name: i18n.translate('xpack.infra.logs.logEntryCategories.trendColumnTitle', {
      defaultMessage: 'Trend',
    }),
    render: (histograms: LogEntryCategoryHistogram[], item) => {
      return (
        <LogEntryCountSparkline
          currentCount={item.logEntryCount}
          histograms={histograms}
          timeRange={timeRange}
        />
      );
    },
    width: '220px',
  },
  {
    field: 'regularExpression',
    name: i18n.translate('xpack.infra.logs.logEntryCategories.categoryColumnTitle', {
      defaultMessage: 'Category',
    }),
    truncateText: true,
    render: (regularExpression: string) => (
      <RegularExpressionRepresentation regularExpression={regularExpression} />
    ),
  },
  {
    field: 'datasets',
    name: i18n.translate('xpack.infra.logs.logEntryCategories.datasetColumnTitle', {
      defaultMessage: 'Datasets',
    }),
    render: (datasets: string[]) => <DatasetsList datasets={datasets} />,
    width: '200px',
  },
  {
    align: 'right',
    field: 'maximumAnomalyScore',
    name: i18n.translate('xpack.infra.logs.logEntryCategories.maximumAnomalyScoreColumnTitle', {
      defaultMessage: 'Maximum anomaly score',
    }),
    render: (maximumAnomalyScore: number) => (
      <AnomalySeverityIndicator anomalyScore={maximumAnomalyScore} />
    ),
    width: '160px',
  },
];
@@ -0,0 +1,46 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from 'ui/new_platform';

import {
  getLogEntryCategoryDatasetsRequestPayloadRT,
  getLogEntryCategoryDatasetsSuccessReponsePayloadRT,
  LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';

export const callGetLogEntryCategoryDatasetsAPI = async (
  sourceId: string,
  startTime: number,
  endTime: number
) => {
  const response = await npStart.core.http.fetch(
    LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH,
    {
      method: 'POST',
      body: JSON.stringify(
        getLogEntryCategoryDatasetsRequestPayloadRT.encode({
          data: {
            sourceId,
            timeRange: {
              startTime,
              endTime,
            },
          },
        })
      ),
    }
  );

  return pipe(
    getLogEntryCategoryDatasetsSuccessReponsePayloadRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
@@ -0,0 +1,67 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from 'ui/new_platform';

import {
  getLogEntryCategoriesRequestPayloadRT,
  getLogEntryCategoriesSuccessReponsePayloadRT,
  LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';

export const callGetTopLogEntryCategoriesAPI = async (
  sourceId: string,
  startTime: number,
  endTime: number,
  categoryCount: number,
  datasets?: string[]
) => {
  const intervalDuration = endTime - startTime;

  const response = await npStart.core.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, {
    method: 'POST',
    body: JSON.stringify(
      getLogEntryCategoriesRequestPayloadRT.encode({
        data: {
          sourceId,
          timeRange: {
            startTime,
            endTime,
          },
          categoryCount,
          datasets,
          histograms: [
            {
              id: 'history',
              timeRange: {
                startTime: startTime - intervalDuration,
                endTime,
              },
              bucketCount: 10,
            },
            {
              id: 'reference',
              timeRange: {
                startTime: startTime - intervalDuration,
                endTime: startTime,
              },
              bucketCount: 1,
            },
          ],
        },
      })
    ),
  });

  return pipe(
    getLogEntryCategoriesSuccessReponsePayloadRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
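The two histogram requests above cover adjacent windows of equal width; a sketch with invented timestamps:

```ts
// For a selected range [startTime, endTime]:
const startTime = 1_576_000_000_000;
const endTime = startTime + 3_600_000; // one hour later
const intervalDuration = endTime - startTime; // 3_600_000

// 'history' spans [startTime - intervalDuration, endTime] in 10 buckets,
// i.e. twice the selected range, feeding the trend sparkline.
// 'reference' spans [startTime - intervalDuration, startTime] as a single
// bucket, providing the baseline for the change-percentage comparison.
```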
@@ -0,0 +1,116 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { useMemo, useState } from 'react';

import {
  GetLogEntryCategoriesSuccessResponsePayload,
  GetLogEntryCategoryDatasetsSuccessResponsePayload,
} from '../../../../common/http_api/log_analysis';
import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise';
import { callGetTopLogEntryCategoriesAPI } from './service_calls/get_top_log_entry_categories';
import { callGetLogEntryCategoryDatasetsAPI } from './service_calls/get_log_entry_category_datasets';

type TopLogEntryCategories = GetLogEntryCategoriesSuccessResponsePayload['data']['categories'];
type LogEntryCategoryDatasets = GetLogEntryCategoryDatasetsSuccessResponsePayload['data']['datasets'];

export const useLogEntryCategoriesResults = ({
  categoriesCount,
  filteredDatasets,
  endTime,
  onGetLogEntryCategoryDatasetsError,
  onGetTopLogEntryCategoriesError,
  sourceId,
  startTime,
}: {
  categoriesCount: number;
  filteredDatasets: string[];
  endTime: number;
  onGetLogEntryCategoryDatasetsError?: (error: Error) => void;
  onGetTopLogEntryCategoriesError?: (error: Error) => void;
  sourceId: string;
  startTime: number;
}) => {
  const [topLogEntryCategories, setTopLogEntryCategories] = useState<TopLogEntryCategories>([]);
  const [logEntryCategoryDatasets, setLogEntryCategoryDatasets] = useState<
    LogEntryCategoryDatasets
  >([]);

  const [getTopLogEntryCategoriesRequest, getTopLogEntryCategories] = useTrackedPromise(
    {
      cancelPreviousOn: 'creation',
      createPromise: async () => {
        return await callGetTopLogEntryCategoriesAPI(
          sourceId,
          startTime,
          endTime,
          categoriesCount,
          filteredDatasets
        );
      },
      onResolve: ({ data: { categories } }) => {
        setTopLogEntryCategories(categories);
      },
      onReject: error => {
        if (
          error instanceof Error &&
          !(error instanceof CanceledPromiseError) &&
          onGetTopLogEntryCategoriesError
        ) {
          onGetTopLogEntryCategoriesError(error);
        }
      },
    },
    [categoriesCount, endTime, filteredDatasets, sourceId, startTime]
  );

  const [getLogEntryCategoryDatasetsRequest, getLogEntryCategoryDatasets] = useTrackedPromise(
    {
      cancelPreviousOn: 'creation',
      createPromise: async () => {
        return await callGetLogEntryCategoryDatasetsAPI(sourceId, startTime, endTime);
      },
      onResolve: ({ data: { datasets } }) => {
        setLogEntryCategoryDatasets(datasets);
      },
      onReject: error => {
        if (
          error instanceof Error &&
          !(error instanceof CanceledPromiseError) &&
          onGetLogEntryCategoryDatasetsError
        ) {
          onGetLogEntryCategoryDatasetsError(error);
        }
      },
    },
    [categoriesCount, endTime, sourceId, startTime]
  );

  const isLoadingTopLogEntryCategories = useMemo(
    () => getTopLogEntryCategoriesRequest.state === 'pending',
    [getTopLogEntryCategoriesRequest.state]
  );

  const isLoadingLogEntryCategoryDatasets = useMemo(
    () => getLogEntryCategoryDatasetsRequest.state === 'pending',
    [getLogEntryCategoryDatasetsRequest.state]
  );

  const isLoading = useMemo(
    () => isLoadingTopLogEntryCategories || isLoadingLogEntryCategoryDatasets,
    [isLoadingLogEntryCategoryDatasets, isLoadingTopLogEntryCategories]
  );

  return {
    getLogEntryCategoryDatasets,
    getTopLogEntryCategories,
    isLoading,
    isLoadingLogEntryCategoryDatasets,
    isLoadingTopLogEntryCategories,
    logEntryCategoryDatasets,
    topLogEntryCategories,
  };
};
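
For orientation, a minimal sketch of how a page component might consume this hook; the component name, time range, and rendering are hypothetical and not part of this change:

// Hypothetical consumer of useLogEntryCategoriesResults; only the hook contract above is assumed.
import React, { useEffect } from 'react';

export const CategoriesTabSketch: React.FC<{ sourceId: string }> = ({ sourceId }) => {
  const endTime = Date.now();
  const startTime = endTime - 24 * 60 * 60 * 1000; // look back one day

  const {
    getTopLogEntryCategories,
    isLoading,
    topLogEntryCategories,
  } = useLogEntryCategoriesResults({
    categoriesCount: 25,
    filteredDatasets: [],
    endTime,
    sourceId,
    startTime,
  });

  // the request callbacks are memoized on their inputs, so this re-fetches when they change
  useEffect(() => {
    getTopLogEntryCategories();
  }, [getTopLogEntryCategories]);

  return isLoading ? (
    <div>Loading categories…</div>
  ) : (
    <ul>
      {topLogEntryCategories.map(category => (
        <li key={category.categoryId}>{category.regularExpression}</li>
      ))}
    </ul>
  );
};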

@@ -0,0 +1,64 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { constant, identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';

import { useUrlState } from '../../../utils/use_url_state';

const autoRefreshRT = rt.union([
  rt.type({
    interval: rt.number,
    isPaused: rt.boolean,
  }),
  rt.undefined,
]);

export const stringTimeRangeRT = rt.type({
  startTime: rt.string,
  endTime: rt.string,
});
export type StringTimeRange = rt.TypeOf<typeof stringTimeRangeRT>;

const urlTimeRangeRT = rt.union([stringTimeRangeRT, rt.undefined]);

const TIME_RANGE_URL_STATE_KEY = 'timeRange';
const AUTOREFRESH_URL_STATE_KEY = 'autoRefresh';

export const useLogEntryCategoriesResultsUrlState = () => {
  const [timeRange, setTimeRange] = useUrlState({
    defaultState: {
      startTime: 'now-2w',
      endTime: 'now',
    },
    decodeUrlState: (value: unknown) =>
      pipe(urlTimeRangeRT.decode(value), fold(constant(undefined), identity)),
    encodeUrlState: urlTimeRangeRT.encode,
    urlStateKey: TIME_RANGE_URL_STATE_KEY,
    writeDefaultState: true,
  });

  const [autoRefresh, setAutoRefresh] = useUrlState({
    defaultState: {
      isPaused: false,
      interval: 60000,
    },
    decodeUrlState: (value: unknown) =>
      pipe(autoRefreshRT.decode(value), fold(constant(undefined), identity)),
    encodeUrlState: autoRefreshRT.encode,
    urlStateKey: AUTOREFRESH_URL_STATE_KEY,
    writeDefaultState: true,
  });

  return {
    timeRange,
    setTimeRange,
    autoRefresh,
    setAutoRefresh,
  };
};
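
Both useUrlState calls above share one decoding idiom: try the codec, fall back to undefined. A standalone sketch of that idiom, using only the libraries already imported in this file:

// Self-contained illustration of the decode-or-undefined pattern used above.
import { fold } from 'fp-ts/lib/Either';
import { constant, identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';

const exampleTimeRangeRT = rt.type({ startTime: rt.string, endTime: rt.string });

const decodeOrUndefined = (value: unknown) =>
  pipe(exampleTimeRangeRT.decode(value), fold(constant(undefined), identity));

decodeOrUndefined({ startTime: 'now-2w', endTime: 'now' }); // => the decoded time range
decodeOrUndefined({ startTime: 42 }); // => undefined, since validation failed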

@@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import React from 'react';
import { i18n } from '@kbn/i18n';
import { EuiCallOut, EuiSpacer } from '@elastic/eui';

export const FirstUseCallout = () => {
  return (
    <>
      <EuiCallOut
        color="success"
        title={i18n.translate('xpack.infra.logs.logsAnalysisResults.onboardingSuccessTitle', {
          defaultMessage: 'Success!',
        })}
      >
        <p>
          {i18n.translate('xpack.infra.logs.logsAnalysisResults.onboardingSuccessContent', {
            defaultMessage:
              'Please allow a few minutes for our machine learning robots to begin collecting data.',
          })}
        </p>
      </EuiCallOut>
      <EuiSpacer />
    </>
  );
};

@@ -4,7 +4,13 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import { bucketSpan, getJobId, partitionField } from '../../../../common/log_analysis';
+import {
+  bucketSpan,
+  getJobId,
+  LogEntryRateJobType,
+  logEntryRateJobTypes,
+  partitionField,
+} from '../../../../common/log_analysis';
 
 import {
   ModuleDescriptor,
@@ -16,22 +22,19 @@ import { callGetMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml
 import { callSetupMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_setup_module_api';
 import { callValidateIndicesAPI } from '../../../containers/logs/log_analysis/api/validate_indices';
 
-const jobTypes = ['log-entry-rate'];
 const moduleId = 'logs_ui_analysis';
 
-type JobType = typeof jobTypes[0];
-
 const getJobIds = (spaceId: string, sourceId: string) =>
-  jobTypes.reduce(
+  logEntryRateJobTypes.reduce(
     (accumulatedJobIds, jobType) => ({
       ...accumulatedJobIds,
       [jobType]: getJobId(spaceId, sourceId, jobType),
     }),
-    {} as Record<JobType, string>
+    {} as Record<LogEntryRateJobType, string>
   );
 
 const getJobSummary = async (spaceId: string, sourceId: string) => {
-  const response = await callJobsSummaryAPI(spaceId, sourceId, jobTypes);
+  const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryRateJobTypes);
   const jobIds = Object.values(getJobIds(spaceId, sourceId));
 
   return response.filter(jobSummary => jobIds.includes(jobSummary.id));
@@ -78,7 +81,7 @@ const setUpModule = async (
 };
 
 const cleanUpModule = async (spaceId: string, sourceId: string) => {
-  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, jobTypes);
+  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes);
 };
 
 const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceConfiguration) => {
@@ -94,9 +97,9 @@ const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceCon
   ]);
 };
 
-export const logEntryRateModule: ModuleDescriptor<JobType> = {
+export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = {
   moduleId,
-  jobTypes,
+  jobTypes: logEntryRateJobTypes,
   bucketSpan,
   getJobIds,
   getJobSummary,

@@ -11,6 +11,7 @@ import {
   EuiFlexItem,
   EuiPage,
   EuiPanel,
+  EuiSpacer,
   EuiSuperDatePicker,
   EuiText,
 } from '@elastic/eui';
@@ -26,7 +27,6 @@ import { LoadingOverlayWrapper } from '../../../components/loading_overlay_wrapp
 import { useInterval } from '../../../hooks/use_interval';
 import { useTrackPageview } from '../../../hooks/use_track_metric';
 import { useKibanaUiSetting } from '../../../utils/use_kibana_ui_setting';
-import { FirstUseCallout } from './first_use';
 import { AnomaliesResults } from './sections/anomalies';
 import { LogRateResults } from './sections/log_rate';
 import { useLogEntryRateModuleContext } from './use_log_entry_rate_module';
@@ -35,6 +35,7 @@ import {
   StringTimeRange,
   useLogAnalysisResultsUrlState,
 } from './use_log_entry_rate_results_url_state';
+import { FirstUseCallout } from '../../../components/logging/log_analysis_results';
 
 const JOB_STATUS_POLLING_INTERVAL = 30000;
 
@@ -196,7 +197,12 @@ export const LogEntryRateResultsContent: React.FunctionComponent = () => {
         </EuiFlexItem>
         <EuiFlexItem grow={false}>
           <EuiPanel paddingSize="l">
-            {isFirstUse && !hasResults ? <FirstUseCallout /> : null}
+            {isFirstUse && !hasResults ? (
+              <>
+                <FirstUseCallout />
+                <EuiSpacer />
+              </>
+            ) : null}
             <LogRateResults
               isLoading={isLoading}
               results={logEntryRate}
@@ -259,6 +265,8 @@ const getBucketDuration = (startTime: number, endTime: number) => {
+// This is needed due to the flex-basis: 100% !important; rule that
+// kicks in on small screens via media queries breaking when using direction="column"
 export const ResultsContentPage = euiStyled(EuiPage)`
   flex: 1 0 0%;
 
   .euiFlexGroup--responsive > .euiFlexItem {
     flex-basis: auto !important;
   }

@@ -22,8 +22,11 @@ import moment from 'moment';
 import React, { useCallback, useMemo } from 'react';
 
 import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
+import {
+  MLSeverityScoreCategories,
+  ML_SEVERITY_COLORS,
+} from '../../../../../../common/log_analysis';
 import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
-import { MLSeverityScoreCategories } from '../helpers/data_formatters';
 
 export const AnomaliesChart: React.FunctionComponent<{
   chartId: string;
@@ -109,19 +112,19 @@ interface SeverityConfig {
 const severityConfigs: Record<string, SeverityConfig> = {
   warning: {
     id: `anomalies-warning`,
-    style: { fill: 'rgb(125, 180, 226)', opacity: 0.7 },
+    style: { fill: ML_SEVERITY_COLORS.warning, opacity: 0.7 },
   },
   minor: {
     id: `anomalies-minor`,
-    style: { fill: 'rgb(255, 221, 0)', opacity: 0.7 },
+    style: { fill: ML_SEVERITY_COLORS.minor, opacity: 0.7 },
   },
   major: {
     id: `anomalies-major`,
-    style: { fill: 'rgb(229, 113, 0)', opacity: 0.7 },
+    style: { fill: ML_SEVERITY_COLORS.major, opacity: 0.7 },
   },
   critical: {
     id: `anomalies-critical`,
-    style: { fill: 'rgb(228, 72, 72)', opacity: 0.7 },
+    style: { fill: ML_SEVERITY_COLORS.critical, opacity: 0.7 },
   },
 };

@@ -12,7 +12,6 @@ import {
   EuiStat,
   EuiTitle,
   EuiLoadingSpinner,
-  EuiButton,
 } from '@elastic/eui';
 import numeral from '@elastic/numeral';
 import { i18n } from '@kbn/i18n';
@@ -21,16 +20,18 @@ import React, { useMemo } from 'react';
 import euiStyled from '../../../../../../../../common/eui_styled_components';
 import { LogEntryRateResults } from '../../use_log_entry_rate_results';
 import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { JobStatus, SetupStatus } from '../../../../../../common/log_analysis';
+import { formatAnomalyScore, JobStatus, SetupStatus } from '../../../../../../common/log_analysis';
 import {
-  formatAnomalyScore,
   getAnnotationsForAll,
   getLogEntryRateCombinedSeries,
   getTopAnomalyScoreAcrossAllPartitions,
 } from '../helpers/data_formatters';
 import { AnomaliesChart } from './chart';
 import { AnomaliesTable } from './table';
-import { LogAnalysisJobProblemIndicator } from '../../../../../components/logging/log_analysis_job_status';
+import {
+  LogAnalysisJobProblemIndicator,
+  RecreateJobButton,
+} from '../../../../../components/logging/log_analysis_job_status';
 import { AnalyzeInMlButton } from '../../../../../components/logging/log_analysis_results';
 import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
@@ -99,9 +100,7 @@ export const AnomaliesResults: React.FunctionComponent<{
         </EuiTitle>
       </EuiFlexItem>
       <EuiFlexItem grow={false}>
-        <EuiButton onClick={viewSetupForUpdate} size="s">
-          Recreate jobs
-        </EuiButton>
+        <RecreateJobButton onClick={viewSetupForUpdate} size="s" />
       </EuiFlexItem>
       <EuiFlexItem grow={false}>
         <AnalyzeInMlButton jobId={jobId} timeRange={timeRange} />

@@ -4,15 +4,19 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import React, { useMemo, useState, useCallback } from 'react';
 import { EuiBasicTable, EuiButtonIcon } from '@elastic/eui';
-import { i18n } from '@kbn/i18n';
 import { RIGHT_ALIGNMENT } from '@elastic/eui/lib/services';
+import { i18n } from '@kbn/i18n';
+import React, { useCallback, useMemo, useState } from 'react';
 
+import euiStyled from '../../../../../../../../common/eui_styled_components';
 import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
+import {
+  formatAnomalyScore,
+  getFriendlyNameForPartitionId,
+} from '../../../../../../common/log_analysis';
 import { LogEntryRateResults } from '../../use_log_entry_rate_results';
 import { AnomaliesTableExpandedRow } from './expanded_row';
-import { formatAnomalyScore, getFriendlyNameForPartitionId } from '../helpers/data_formatters';
-import euiStyled from '../../../../../../../../common/eui_styled_components';
 
 interface TableItem {
   id: string;

@@ -7,17 +7,14 @@
 import { RectAnnotationDatum } from '@elastic/charts';
 import { i18n } from '@kbn/i18n';
 
+import {
+  formatAnomalyScore,
+  getFriendlyNameForPartitionId,
+  getSeverityCategoryForScore,
+  MLSeverityScoreCategories,
+} from '../../../../../../common/log_analysis';
 import { LogEntryRateResults } from '../../use_log_entry_rate_results';
 
-const ML_SEVERITY_SCORES = {
-  warning: 3,
-  minor: 25,
-  major: 50,
-  critical: 75,
-};
-
-export type MLSeverityScoreCategories = keyof typeof ML_SEVERITY_SCORES;
-
 export const getLogEntryRatePartitionedSeries = (results: LogEntryRateResults) => {
   return results.histogramBuckets.reduce<Array<{ group: string; time: number; value: number }>>(
     (buckets, bucket) => {
@@ -182,26 +179,3 @@ export const getTopAnomalyScoreAcrossAllPartitions = (results: LogEntryRateResul
   );
   return Math.max(...allTopScores);
 };
-
-const getSeverityCategoryForScore = (score: number): MLSeverityScoreCategories | undefined => {
-  if (score >= ML_SEVERITY_SCORES.critical) {
-    return 'critical';
-  } else if (score >= ML_SEVERITY_SCORES.major) {
-    return 'major';
-  } else if (score >= ML_SEVERITY_SCORES.minor) {
-    return 'minor';
-  } else if (score >= ML_SEVERITY_SCORES.warning) {
-    return 'warning';
-  } else {
-    // Category is too low to include
-    return undefined;
-  }
-};
-
-export const formatAnomalyScore = (score: number) => {
-  return Math.round(score);
-};
-
-export const getFriendlyNameForPartitionId = (partitionId: string) => {
-  return partitionId !== '' ? partitionId : 'unknown';
-};

@@ -248,7 +248,7 @@ interface CancelablePromise<ResolvedValue> {
   promise: Promise<ResolvedValue>;
 }
 
-class CanceledPromiseError extends Error {
+export class CanceledPromiseError extends Error {
   public isCanceled = true;
 
   constructor(message?: string) {
@@ -257,6 +257,6 @@ class CanceledPromiseError extends Error {
   }
 }
 
-class SilentCanceledPromiseError extends CanceledPromiseError {}
+export class SilentCanceledPromiseError extends CanceledPromiseError {}
 
 const noOp = () => undefined;

@@ -12,6 +12,8 @@ import { createSourceStatusResolvers } from './graphql/source_status';
 import { createSourcesResolvers } from './graphql/sources';
 import { InfraBackendLibs } from './lib/infra_types';
 import {
+  initGetLogEntryCategoriesRoute,
+  initGetLogEntryCategoryDatasetsRoute,
   initGetLogEntryRateRoute,
   initValidateLogAnalysisIndicesRoute,
 } from './routes/log_analysis';
@@ -41,6 +43,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
   libs.framework.registerGraphQLEndpoint('/graphql', schema);
 
   initIpToHostName(libs);
+  initGetLogEntryCategoriesRoute(libs);
+  initGetLogEntryCategoryDatasetsRoute(libs);
   initGetLogEntryRateRoute(libs);
   initSnapshotRoute(libs);
   initNodeDetailsRoute(libs);

@@ -12,7 +12,7 @@ import { InfraFieldsDomain } from '../domains/fields_domain';
 import { InfraLogEntriesDomain } from '../domains/log_entries_domain';
 import { InfraMetricsDomain } from '../domains/metrics_domain';
 import { InfraBackendLibs, InfraDomainLibs } from '../infra_types';
-import { InfraLogAnalysis } from '../log_analysis';
+import { LogEntryCategoriesAnalysis, LogEntryRateAnalysis } from '../log_analysis';
 import { InfraSnapshot } from '../snapshot';
 import { InfraSourceStatus } from '../source_status';
 import { InfraSources } from '../sources';
@@ -29,7 +29,8 @@ export function compose(core: CoreSetup, config: InfraConfig, plugins: InfraServ
     sources,
   });
   const snapshot = new InfraSnapshot({ sources, framework });
-  const logAnalysis = new InfraLogAnalysis({ framework });
+  const logEntryCategoriesAnalysis = new LogEntryCategoriesAnalysis({ framework });
+  const logEntryRateAnalysis = new LogEntryRateAnalysis({ framework });
 
   // TODO: separate these out individually and do away with "domains" as a temporary group
   const domainLibs: InfraDomainLibs = {
@@ -45,7 +46,8 @@ export function compose(core: CoreSetup, config: InfraConfig, plugins: InfraServ
   const libs: InfraBackendLibs = {
     configuration: config, // NP_TODO: Do we ever use this anywhere?
     framework,
-    logAnalysis,
+    logEntryCategoriesAnalysis,
+    logEntryRateAnalysis,
     snapshot,
     sources,
     sourceStatus,

@@ -8,7 +8,7 @@ import { InfraSourceConfiguration } from '../../public/graphql/types';
 import { InfraFieldsDomain } from './domains/fields_domain';
 import { InfraLogEntriesDomain } from './domains/log_entries_domain';
 import { InfraMetricsDomain } from './domains/metrics_domain';
-import { InfraLogAnalysis } from './log_analysis/log_analysis';
+import { LogEntryCategoriesAnalysis, LogEntryRateAnalysis } from './log_analysis';
 import { InfraSnapshot } from './snapshot';
 import { InfraSources } from './sources';
 import { InfraSourceStatus } from './source_status';
@@ -31,7 +31,8 @@ export interface InfraDomainLibs {
 export interface InfraBackendLibs extends InfraDomainLibs {
   configuration: InfraConfig;
   framework: KibanaFramework;
-  logAnalysis: InfraLogAnalysis;
+  logEntryCategoriesAnalysis: LogEntryCategoriesAnalysis;
+  logEntryRateAnalysis: LogEntryRateAnalysis;
   snapshot: InfraSnapshot;
   sources: InfraSources;
   sourceStatus: InfraSourceStatus;

@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-export class NoLogRateResultsIndexError extends Error {
+export class NoLogAnalysisResultsIndexError extends Error {
   constructor(message?: string) {
     super(message);
     Object.setPrototypeOf(this, new.target.prototype);

@@ -5,4 +5,5 @@
  */
 
 export * from './errors';
-export * from './log_analysis';
+export * from './log_entry_categories_analysis';
+export * from './log_entry_rate_analysis';

@@ -0,0 +1,363 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { KibanaRequest, RequestHandlerContext } from '../../../../../../../src/core/server';
import { getJobId, logEntryCategoriesJobTypes } from '../../../common/log_analysis';
import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing';
import { decodeOrThrow } from '../../../common/runtime_types';
import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
import { NoLogAnalysisResultsIndexError } from './errors';
import {
  createLogEntryCategoriesQuery,
  logEntryCategoriesResponseRT,
  LogEntryCategoryHit,
} from './queries/log_entry_categories';
import {
  createLogEntryCategoryHistogramsQuery,
  logEntryCategoryHistogramsResponseRT,
} from './queries/log_entry_category_histograms';
import {
  CompositeDatasetKey,
  createLogEntryDatasetsQuery,
  LogEntryDatasetBucket,
  logEntryDatasetsResponseRT,
} from './queries/log_entry_data_sets';
import {
  createTopLogEntryCategoriesQuery,
  topLogEntryCategoriesResponseRT,
} from './queries/top_log_entry_categories';

const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;

export class LogEntryCategoriesAnalysis {
  constructor(
    private readonly libs: {
      framework: KibanaFramework;
    }
  ) {}

  public async getTopLogEntryCategories(
    requestContext: RequestHandlerContext,
    request: KibanaRequest,
    sourceId: string,
    startTime: number,
    endTime: number,
    categoryCount: number,
    datasets: string[],
    histograms: HistogramParameters[]
  ) {
    const finalizeTopLogEntryCategoriesSpan = startTracingSpan('get top categories');

    const logEntryCategoriesCountJobId = getJobId(
      this.libs.framework.getSpaceId(request),
      sourceId,
      logEntryCategoriesJobTypes[0]
    );

    const {
      topLogEntryCategories,
      timing: { spans: fetchTopLogEntryCategoriesAggSpans },
    } = await this.fetchTopLogEntryCategories(
      requestContext,
      logEntryCategoriesCountJobId,
      startTime,
      endTime,
      categoryCount,
      datasets
    );

    const categoryIds = topLogEntryCategories.map(({ categoryId }) => categoryId);

    const {
      logEntryCategoriesById,
      timing: { spans: fetchTopLogEntryCategoryPatternsSpans },
    } = await this.fetchLogEntryCategories(
      requestContext,
      logEntryCategoriesCountJobId,
      categoryIds
    );

    const {
      categoryHistogramsById,
      timing: { spans: fetchTopLogEntryCategoryHistogramsSpans },
    } = await this.fetchTopLogEntryCategoryHistograms(
      requestContext,
      logEntryCategoriesCountJobId,
      categoryIds,
      histograms
    );

    const topLogEntryCategoriesSpan = finalizeTopLogEntryCategoriesSpan();

    return {
      data: topLogEntryCategories.map(topCategory => ({
        ...topCategory,
        regularExpression: logEntryCategoriesById[topCategory.categoryId]?._source.regex ?? '',
        histograms: categoryHistogramsById[topCategory.categoryId] ?? [],
      })),
      timing: {
        spans: [
          topLogEntryCategoriesSpan,
          ...fetchTopLogEntryCategoriesAggSpans,
          ...fetchTopLogEntryCategoryPatternsSpans,
          ...fetchTopLogEntryCategoryHistogramsSpans,
        ],
      },
    };
  }

  public async getLogEntryCategoryDatasets(
    requestContext: RequestHandlerContext,
    request: KibanaRequest,
    sourceId: string,
    startTime: number,
    endTime: number
  ) {
    const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets');

    const logEntryCategoriesCountJobId = getJobId(
      this.libs.framework.getSpaceId(request),
      sourceId,
      logEntryCategoriesJobTypes[0]
    );

    let logEntryDatasetBuckets: LogEntryDatasetBucket[] = [];
    let afterLatestBatchKey: CompositeDatasetKey | undefined;
    let esSearchSpans: TracingSpan[] = [];

    while (true) {
      const finalizeEsSearchSpan = startTracingSpan('fetch category dataset batch from ES');

      const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)(
        await this.libs.framework.callWithRequest(
          requestContext,
          'search',
          createLogEntryDatasetsQuery(
            logEntryCategoriesCountJobId,
            startTime,
            endTime,
            COMPOSITE_AGGREGATION_BATCH_SIZE,
            afterLatestBatchKey
          )
        )
      );

      if (logEntryDatasetsResponse._shards.total === 0) {
        throw new NoLogAnalysisResultsIndexError(
          `Failed to find ml result index for job ${logEntryCategoriesCountJobId}.`
        );
      }

      const {
        after_key: afterKey,
        buckets: latestBatchBuckets,
      } = logEntryDatasetsResponse.aggregations.dataset_buckets;

      logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets];
      afterLatestBatchKey = afterKey;
      esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()];

      if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
        break;
      }
    }

    const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan();

    return {
      data: logEntryDatasetBuckets.map(logEntryDatasetBucket => logEntryDatasetBucket.key.dataset),
      timing: {
        spans: [logEntryDatasetsSpan, ...esSearchSpans],
      },
    };
  }

  private async fetchTopLogEntryCategories(
    requestContext: RequestHandlerContext,
    logEntryCategoriesCountJobId: string,
    startTime: number,
    endTime: number,
    categoryCount: number,
    datasets: string[]
  ) {
    const finalizeEsSearchSpan = startTracingSpan('Fetch top categories from ES');

    const topLogEntryCategoriesResponse = decodeOrThrow(topLogEntryCategoriesResponseRT)(
      await this.libs.framework.callWithRequest(
        requestContext,
        'search',
        createTopLogEntryCategoriesQuery(
          logEntryCategoriesCountJobId,
          startTime,
          endTime,
          categoryCount,
          datasets
        )
      )
    );

    const esSearchSpan = finalizeEsSearchSpan();

    if (topLogEntryCategoriesResponse._shards.total === 0) {
      throw new NoLogAnalysisResultsIndexError(
        `Failed to find ml result index for job ${logEntryCategoriesCountJobId}.`
      );
    }

    const topLogEntryCategories = topLogEntryCategoriesResponse.aggregations.terms_category_id.buckets.map(
      topCategoryBucket => ({
        categoryId: parseCategoryId(topCategoryBucket.key),
        logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0,
        datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets.map(
          datasetBucket => datasetBucket.key
        ),
        maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 0,
      })
    );

    return {
      topLogEntryCategories,
      timing: {
        spans: [esSearchSpan],
      },
    };
  }

  private async fetchLogEntryCategories(
    requestContext: RequestHandlerContext,
    logEntryCategoriesCountJobId: string,
    categoryIds: number[]
  ) {
    if (categoryIds.length === 0) {
      return {
        logEntryCategoriesById: {},
        timing: { spans: [] },
      };
    }

    const finalizeEsSearchSpan = startTracingSpan('Fetch category patterns from ES');

    const logEntryCategoriesResponse = decodeOrThrow(logEntryCategoriesResponseRT)(
      await this.libs.framework.callWithRequest(
        requestContext,
        'search',
        createLogEntryCategoriesQuery(logEntryCategoriesCountJobId, categoryIds)
      )
    );

    const esSearchSpan = finalizeEsSearchSpan();

    const logEntryCategoriesById = logEntryCategoriesResponse.hits.hits.reduce<
      Record<number, LogEntryCategoryHit>
    >(
      (accumulatedCategoriesById, categoryHit) => ({
        ...accumulatedCategoriesById,
        [categoryHit._source.category_id]: categoryHit,
      }),
      {}
    );

    return {
      logEntryCategoriesById,
      timing: {
        spans: [esSearchSpan],
      },
    };
  }

  private async fetchTopLogEntryCategoryHistograms(
    requestContext: RequestHandlerContext,
    logEntryCategoriesCountJobId: string,
    categoryIds: number[],
    histograms: HistogramParameters[]
  ) {
    if (categoryIds.length === 0 || histograms.length === 0) {
      return {
        categoryHistogramsById: {},
        timing: { spans: [] },
      };
    }

    const finalizeEsSearchSpan = startTracingSpan('Fetch category histograms from ES');

    const categoryHistogramsResponses = await Promise.all(
      histograms.map(({ bucketCount, endTime, id: histogramId, startTime }) =>
        this.libs.framework
          .callWithRequest(
            requestContext,
            'search',
            createLogEntryCategoryHistogramsQuery(
              logEntryCategoriesCountJobId,
              categoryIds,
              startTime,
              endTime,
              bucketCount
            )
          )
          .then(decodeOrThrow(logEntryCategoryHistogramsResponseRT))
          .then(response => ({
            histogramId,
            histogramBuckets: response.aggregations.filters_categories.buckets,
          }))
      )
    );

    const esSearchSpan = finalizeEsSearchSpan();

    const categoryHistogramsById = Object.values(categoryHistogramsResponses).reduce<
      Record<
        number,
        Array<{
          histogramId: string;
          buckets: Array<{
            bucketDuration: number;
            logEntryCount: number;
            startTime: number;
          }>;
        }>
      >
    >(
      (outerAccumulatedHistograms, { histogramId, histogramBuckets }) =>
        Object.entries(histogramBuckets).reduce(
          (innerAccumulatedHistograms, [categoryBucketKey, categoryBucket]) => {
            const categoryId = parseCategoryId(categoryBucketKey);
            return {
              ...innerAccumulatedHistograms,
              [categoryId]: [
                ...(innerAccumulatedHistograms[categoryId] ?? []),
                {
                  histogramId,
                  buckets: categoryBucket.histogram_timestamp.buckets.map(bucket => ({
                    bucketDuration: categoryBucket.histogram_timestamp.meta.bucketDuration,
                    logEntryCount: bucket.sum_actual.value,
                    startTime: bucket.key,
                  })),
                },
              ],
            };
          },
          outerAccumulatedHistograms
        ),
      {}
    );

    return {
      categoryHistogramsById,
      timing: {
        spans: [esSearchSpan],
      },
    };
  }
}

const parseCategoryId = (rawCategoryId: string) => parseInt(rawCategoryId, 10);

interface HistogramParameters {
  id: string;
  startTime: number;
  endTime: number;
  bucketCount: number;
}
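
getLogEntryCategoryDatasets above pages through a composite aggregation until a short batch signals the last page. A stripped-down sketch of just that pagination loop, with the framework call replaced by a generic search function (all names here are illustrative, not the class's actual API):

// Minimal composite-aggregation pagination sketch.
interface DatasetBucket {
  key: { dataset: string };
}

interface DatasetAggregationPage {
  after_key?: { dataset: string };
  buckets: DatasetBucket[];
}

const BATCH_SIZE = 1000;

async function fetchAllDatasetBuckets(
  search: (afterKey?: { dataset: string }) => Promise<DatasetAggregationPage>
): Promise<DatasetBucket[]> {
  let allBuckets: DatasetBucket[] = [];
  let afterKey: { dataset: string } | undefined;

  while (true) {
    const page = await search(afterKey);
    allBuckets = [...allBuckets, ...page.buckets];
    afterKey = page.after_key;

    // a batch smaller than the requested size means there is no further page
    if (page.buckets.length < BATCH_SIZE) {
      break;
    }
  }

  return allBuckets;
}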

@@ -10,7 +10,7 @@ import { identity } from 'fp-ts/lib/function';
 import { getJobId } from '../../../common/log_analysis';
 import { throwErrors, createPlainError } from '../../../common/runtime_types';
 import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
-import { NoLogRateResultsIndexError } from './errors';
+import { NoLogAnalysisResultsIndexError } from './errors';
 import {
   logRateModelPlotResponseRT,
   createLogEntryRateQuery,
@@ -21,7 +21,7 @@ import { RequestHandlerContext, KibanaRequest } from '../../../../../../../src/c
 
 const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
 
-export class InfraLogAnalysis {
+export class LogEntryRateAnalysis {
   constructor(
     private readonly libs: {
       framework: KibanaFramework;
@@ -36,11 +36,11 @@ export class InfraLogAnalysis {
 
   public async getLogEntryRateBuckets(
     requestContext: RequestHandlerContext,
+    request: KibanaRequest,
     sourceId: string,
     startTime: number,
     endTime: number,
-    bucketDuration: number,
-    request: KibanaRequest
+    bucketDuration: number
   ) {
     const logRateJobId = this.getJobIds(request, sourceId).logEntryRate;
     let mlModelPlotBuckets: LogRateModelPlotBucket[] = [];
@@ -61,7 +61,7 @@ export class InfraLogAnalysis {
     );
 
     if (mlModelPlotResponse._shards.total === 0) {
-      throw new NoLogRateResultsIndexError(
+      throw new NoLogAnalysisResultsIndexError(
        `Failed to find ml result index for job ${logRateJobId}.`
      );
    }

@@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-';

export const getMlResultIndex = (jobId: string) => `${ML_ANOMALY_INDEX_PREFIX}${jobId}`;

export const defaultRequestParameters = {
  allowNoIndices: true,
  ignoreUnavailable: true,
  trackScores: false,
  trackTotalHits: false,
};

export const createTimeRangeFilters = (startTime: number, endTime: number) => [
  {
    range: {
      timestamp: {
        gte: startTime,
        lte: endTime,
      },
    },
  },
];

export const createResultTypeFilters = (resultType: 'model_plot' | 'record') => [
  {
    term: {
      result_type: {
        value: resultType,
      },
    },
  },
];
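
To make the helpers above concrete, this is the shape of what they return when spliced into a query (the timestamps and job id are illustrative):

// Illustrative usage of the query helpers defined above.
import { createResultTypeFilters, createTimeRangeFilters, getMlResultIndex } from './common';

const filters = [
  ...createTimeRangeFilters(1577404800000, 1577491200000),
  ...createResultTypeFilters('model_plot'),
];
// filters now contains:
//   { range: { timestamp: { gte: 1577404800000, lte: 1577491200000 } } }
//   { term: { result_type: { value: 'model_plot' } } }

const index = getMlResultIndex('some-log-analysis-job-id'); // hypothetical job id
// => '.ml-anomalies-some-log-analysis-job-id'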

@@ -5,3 +5,4 @@
  */
 
 export * from './log_entry_rate';
+export * from './top_log_entry_categories';

@@ -0,0 +1,53 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import { defaultRequestParameters, getMlResultIndex } from './common';

export const createLogEntryCategoriesQuery = (
  logEntryCategoriesJobId: string,
  categoryIds: number[]
) => ({
  ...defaultRequestParameters,
  body: {
    query: {
      bool: {
        filter: [
          {
            terms: {
              category_id: categoryIds,
            },
          },
        ],
      },
    },
    _source: ['category_id', 'regex'],
  },
  index: getMlResultIndex(logEntryCategoriesJobId),
  size: categoryIds.length,
});

export const logEntryCategoryHitRT = rt.type({
  _source: rt.type({
    category_id: rt.number,
    regex: rt.string,
  }),
});

export type LogEntryCategoryHit = rt.TypeOf<typeof logEntryCategoryHitRT>;

export const logEntryCategoriesResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.type({
    hits: rt.type({
      hits: rt.array(logEntryCategoryHitRT),
    }),
  }),
]);

export type LogEntryCategoriesResponse = rt.TypeOf<typeof logEntryCategoriesResponseRT>;

@@ -0,0 +1,125 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import {
  createResultTypeFilters,
  createTimeRangeFilters,
  defaultRequestParameters,
  getMlResultIndex,
} from './common';

export const createLogEntryCategoryHistogramsQuery = (
  logEntryCategoriesJobId: string,
  categoryIds: number[],
  startTime: number,
  endTime: number,
  bucketCount: number
) => ({
  ...defaultRequestParameters,
  body: {
    query: {
      bool: {
        filter: [
          ...createTimeRangeFilters(startTime, endTime),
          ...createResultTypeFilters('model_plot'),
          ...createCategoryFilters(categoryIds),
        ],
      },
    },
    aggs: {
      filters_categories: {
        filters: createCategoryFiltersAggregation(categoryIds),
        aggs: {
          histogram_timestamp: createHistogramAggregation(startTime, endTime, bucketCount),
        },
      },
    },
  },
  index: getMlResultIndex(logEntryCategoriesJobId),
  size: 0,
});

const createCategoryFilters = (categoryIds: number[]) => [
  {
    terms: {
      by_field_value: categoryIds,
    },
  },
];

const createCategoryFiltersAggregation = (categoryIds: number[]) => ({
  filters: categoryIds.reduce<Record<string, { term: { by_field_value: number } }>>(
    (categoryFilters, categoryId) => ({
      ...categoryFilters,
      [`${categoryId}`]: {
        term: {
          by_field_value: categoryId,
        },
      },
    }),
    {}
  ),
});

const createHistogramAggregation = (startTime: number, endTime: number, bucketCount: number) => {
  const bucketDuration = Math.round((endTime - startTime) / bucketCount);

  return {
    histogram: {
      field: 'timestamp',
      interval: bucketDuration,
      offset: startTime,
    },
    meta: {
      bucketDuration,
    },
    aggs: {
      sum_actual: {
        sum: {
          field: 'actual',
        },
      },
    },
  };
};

export const logEntryCategoryFilterBucketRT = rt.type({
  doc_count: rt.number,
  histogram_timestamp: rt.type({
    meta: rt.type({
      bucketDuration: rt.number,
    }),
    buckets: rt.array(
      rt.type({
        key: rt.number,
        doc_count: rt.number,
        sum_actual: rt.type({
          value: rt.number,
        }),
      })
    ),
  }),
});

export type LogEntryCategoryFilterBucket = rt.TypeOf<typeof logEntryCategoryFilterBucketRT>;

export const logEntryCategoryHistogramsResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.type({
    aggregations: rt.type({
      filters_categories: rt.type({
        buckets: rt.record(rt.string, logEntryCategoryFilterBucketRT),
      }),
    }),
  }),
]);

export type LogEntryCategoryHistogramsResponse = rt.TypeOf<
  typeof logEntryCategoryHistogramsResponseRT
>;

@@ -0,0 +1,93 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import { defaultRequestParameters, getMlResultIndex } from './common';

export const createLogEntryDatasetsQuery = (
  logEntryAnalysisJobId: string,
  startTime: number,
  endTime: number,
  size: number,
  afterKey?: CompositeDatasetKey
) => ({
  ...defaultRequestParameters,
  body: {
    query: {
      bool: {
        filter: [
          {
            range: {
              timestamp: {
                gte: startTime,
                lt: endTime,
              },
            },
          },
          {
            term: {
              result_type: {
                value: 'model_plot',
              },
            },
          },
        ],
      },
    },
    aggs: {
      dataset_buckets: {
        composite: {
          after: afterKey,
          size,
          sources: [
            {
              dataset: {
                terms: {
                  field: 'partition_field_value',
                  order: 'asc',
                },
              },
            },
          ],
        },
      },
    },
  },
  index: getMlResultIndex(logEntryAnalysisJobId),
  size: 0,
});

const compositeDatasetKeyRT = rt.type({
  dataset: rt.string,
});

export type CompositeDatasetKey = rt.TypeOf<typeof compositeDatasetKeyRT>;

const logEntryDatasetBucketRT = rt.type({
  key: compositeDatasetKeyRT,
});

export type LogEntryDatasetBucket = rt.TypeOf<typeof logEntryDatasetBucketRT>;

export const logEntryDatasetsResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.type({
    aggregations: rt.type({
      dataset_buckets: rt.intersection([
        rt.type({
          buckets: rt.array(logEntryDatasetBucketRT),
        }),
        rt.partial({
          after_key: compositeDatasetKeyRT,
        }),
      ]),
    }),
  }),
]);

export type LogEntryDatasetsResponse = rt.TypeOf<typeof logEntryDatasetsResponseRT>;

@@ -6,7 +6,7 @@
 
 import * as rt from 'io-ts';
 
-const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-';
+import { defaultRequestParameters, getMlResultIndex } from './common';
 
 export const createLogEntryRateQuery = (
   logRateJobId: string,
@@ -16,7 +16,7 @@ export const createLogEntryRateQuery = (
   size: number,
   afterKey?: CompositeTimestampPartitionKey
 ) => ({
-  allowNoIndices: true,
+  ...defaultRequestParameters,
   body: {
     query: {
       bool: {
@@ -118,11 +118,8 @@ export const createLogEntryRateQuery = (
       },
     },
   },
-  ignoreUnavailable: true,
-  index: `${ML_ANOMALY_INDEX_PREFIX}${logRateJobId}`,
+  index: getMlResultIndex(logRateJobId),
   size: 0,
-  trackScores: false,
-  trackTotalHits: false,
 });
 
 const logRateMlRecordRT = rt.type({

@@ -0,0 +1,160 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import {
  createResultTypeFilters,
  createTimeRangeFilters,
  defaultRequestParameters,
  getMlResultIndex,
} from './common';

export const createTopLogEntryCategoriesQuery = (
  logEntryCategoriesJobId: string,
  startTime: number,
  endTime: number,
  size: number,
  datasets: string[],
  sortDirection: 'asc' | 'desc' = 'desc'
) => ({
  ...defaultRequestParameters,
  body: {
    query: {
      bool: {
        filter: [
          ...createTimeRangeFilters(startTime, endTime),
          ...createDatasetsFilters(datasets),
          {
            bool: {
              should: [
                {
                  bool: {
                    filter: [
                      ...createResultTypeFilters('model_plot'),
                      {
                        range: {
                          actual: {
                            gt: 0,
                          },
                        },
                      },
                    ],
                  },
                },
                {
                  bool: {
                    filter: createResultTypeFilters('record'),
                  },
                },
              ],
              minimum_should_match: 1,
            },
          },
        ],
      },
    },
    aggs: {
      terms_category_id: {
        terms: {
          field: 'by_field_value',
          size,
          order: {
            'filter_model_plot>sum_actual': sortDirection,
          },
        },
        aggs: {
          filter_model_plot: {
            filter: {
              term: {
                result_type: 'model_plot',
              },
            },
            aggs: {
              sum_actual: {
                sum: {
                  field: 'actual',
                },
              },
              terms_dataset: {
                terms: {
                  field: 'partition_field_value',
                  size: 1000,
                },
              },
            },
          },
          filter_record: {
            filter: {
              term: {
                result_type: 'record',
              },
            },
            aggs: {
              maximum_record_score: {
                max: {
                  field: 'record_score',
                },
              },
            },
          },
        },
      },
    },
  },
  index: getMlResultIndex(logEntryCategoriesJobId),
  size: 0,
});

const createDatasetsFilters = (datasets: string[]) =>
  datasets.length > 0
    ? [
        {
          terms: {
            partition_field_value: datasets,
          },
        },
      ]
    : [];

const metricAggregationRT = rt.type({
  value: rt.union([rt.number, rt.null]),
});

export const logEntryCategoryBucketRT = rt.type({
  key: rt.string,
  doc_count: rt.number,
  filter_record: rt.type({
    maximum_record_score: metricAggregationRT,
  }),
  filter_model_plot: rt.type({
    sum_actual: metricAggregationRT,
    terms_dataset: rt.type({
      buckets: rt.array(
        rt.type({
          key: rt.string,
          doc_count: rt.number,
        })
      ),
    }),
  }),
});

export type LogEntryCategoryBucket = rt.TypeOf<typeof logEntryCategoryBucketRT>;

export const topLogEntryCategoriesResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.type({
    aggregations: rt.type({
      terms_category_id: rt.type({
        buckets: rt.array(logEntryCategoryBucketRT),
      }),
    }),
  }),
]);

export type TopLogEntryCategoriesResponse = rt.TypeOf<typeof topLogEntryCategoriesResponseRT>;
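
One detail of the aggregation above deserves a note: the terms aggregation is ordered by a metric nested inside a sub-aggregation, addressed with the `>` path separator. A minimal standalone sketch of the same ordering pattern, with hypothetical field names:

// Hypothetical aggregation showing 'sub_agg>metric' ordering, as used above.
const exampleAggregation = {
  terms: {
    field: 'category',
    size: 10,
    order: {
      // '>' descends into the named sub-aggregation; the last segment names the metric
      'filter_docs>total_value': 'desc' as const,
    },
  },
  aggs: {
    filter_docs: {
      filter: { term: { kind: 'sample' } },
      aggs: {
        total_value: { sum: { field: 'value' } },
      },
    },
  },
};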

@@ -17,7 +17,7 @@ import { InfraElasticsearchSourceStatusAdapter } from './lib/adapters/source_sta
 import { InfraFieldsDomain } from './lib/domains/fields_domain';
 import { InfraLogEntriesDomain } from './lib/domains/log_entries_domain';
 import { InfraMetricsDomain } from './lib/domains/metrics_domain';
-import { InfraLogAnalysis } from './lib/log_analysis';
+import { LogEntryCategoriesAnalysis, LogEntryRateAnalysis } from './lib/log_analysis';
 import { InfraSnapshot } from './lib/snapshot';
 import { InfraSourceStatus } from './lib/source_status';
 import { InfraSources } from './lib/sources';
@@ -87,7 +87,8 @@ export class InfraServerPlugin {
       }
     );
     const snapshot = new InfraSnapshot({ sources, framework });
-    const logAnalysis = new InfraLogAnalysis({ framework });
+    const logEntryCategoriesAnalysis = new LogEntryCategoriesAnalysis({ framework });
+    const logEntryRateAnalysis = new LogEntryRateAnalysis({ framework });
 
     // TODO: separate these out individually and do away with "domains" as a temporary group
     const domainLibs: InfraDomainLibs = {
@@ -103,7 +104,8 @@ export class InfraServerPlugin {
     this.libs = {
       configuration: this.config,
       framework,
-      logAnalysis,
+      logEntryCategoriesAnalysis,
+      logEntryRateAnalysis,
       snapshot,
       sources,
       sourceStatus,

@@ -4,4 +4,6 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+export * from './log_entry_categories';
+export * from './log_entry_category_datasets';
 export * from './log_entry_rate';

@@ -0,0 +1,93 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import Boom from 'boom';

import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { schema } from '@kbn/config-schema';
import { InfraBackendLibs } from '../../../lib/infra_types';
import {
  LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH,
  getLogEntryCategoriesRequestPayloadRT,
  getLogEntryCategoriesSuccessReponsePayloadRT,
} from '../../../../common/http_api/log_analysis';
import { throwErrors } from '../../../../common/runtime_types';
import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis';

const anyObject = schema.object({}, { allowUnknowns: true });

export const initGetLogEntryCategoriesRoute = ({
  framework,
  logEntryCategoriesAnalysis,
}: InfraBackendLibs) => {
  framework.registerRoute(
    {
      method: 'post',
      path: LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH,
      validate: {
        // short-circuit forced @kbn/config-schema validation so we can do io-ts validation
        body: anyObject,
      },
    },
    async (requestContext, request, response) => {
      const {
        data: {
          categoryCount,
          histograms,
          sourceId,
          timeRange: { startTime, endTime },
          datasets,
        },
      } = pipe(
        getLogEntryCategoriesRequestPayloadRT.decode(request.body),
        fold(throwErrors(Boom.badRequest), identity)
      );

      try {
        const {
          data: topLogEntryCategories,
          timing,
        } = await logEntryCategoriesAnalysis.getTopLogEntryCategories(
          requestContext,
          request,
          sourceId,
          startTime,
          endTime,
          categoryCount,
          datasets ?? [],
          histograms.map(histogram => ({
            bucketCount: histogram.bucketCount,
            endTime: histogram.timeRange.endTime,
            id: histogram.id,
            startTime: histogram.timeRange.startTime,
          }))
        );

        return response.ok({
          body: getLogEntryCategoriesSuccessReponsePayloadRT.encode({
            data: {
              categories: topLogEntryCategories,
            },
            timing,
          }),
        });
      } catch (e) {
        const { statusCode = 500, message = 'Unknown error occurred' } = e;

        if (e instanceof NoLogAnalysisResultsIndexError) {
          return response.notFound({ body: { message } });
        }

        return response.customError({
          statusCode,
          body: { message },
        });
      }
    }
  );
};

@@ -0,0 +1,82 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { schema } from '@kbn/config-schema';
import Boom from 'boom';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';

import {
  getLogEntryCategoryDatasetsRequestPayloadRT,
  getLogEntryCategoryDatasetsSuccessReponsePayloadRT,
  LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH,
} from '../../../../common/http_api/log_analysis';
import { throwErrors } from '../../../../common/runtime_types';
import { InfraBackendLibs } from '../../../lib/infra_types';
import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis';

const anyObject = schema.object({}, { allowUnknowns: true });

export const initGetLogEntryCategoryDatasetsRoute = ({
  framework,
  logEntryCategoriesAnalysis,
}: InfraBackendLibs) => {
  framework.registerRoute(
    {
      method: 'post',
      path: LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH,
      validate: {
        // short-circuit forced @kbn/config-schema validation so we can do io-ts validation
        body: anyObject,
      },
    },
    async (requestContext, request, response) => {
      const {
        data: {
          sourceId,
          timeRange: { startTime, endTime },
        },
      } = pipe(
        getLogEntryCategoryDatasetsRequestPayloadRT.decode(request.body),
        fold(throwErrors(Boom.badRequest), identity)
      );

      try {
        const {
          data: logEntryCategoryDatasets,
          timing,
        } = await logEntryCategoriesAnalysis.getLogEntryCategoryDatasets(
          requestContext,
          request,
          sourceId,
          startTime,
          endTime
        );

        return response.ok({
          body: getLogEntryCategoryDatasetsSuccessReponsePayloadRT.encode({
            data: {
              datasets: logEntryCategoryDatasets,
            },
            timing,
          }),
        });
      } catch (e) {
        const { statusCode = 500, message = 'Unknown error occurred' } = e;

        if (e instanceof NoLogAnalysisResultsIndexError) {
          return response.notFound({ body: { message } });
        }

        return response.customError({
          statusCode,
          body: { message },
        });
      }
    }
  );
};

@@ -18,11 +18,11 @@ import {
   GetLogEntryRateSuccessResponsePayload,
 } from '../../../../common/http_api/log_analysis';
 import { throwErrors } from '../../../../common/runtime_types';
-import { NoLogRateResultsIndexError } from '../../../lib/log_analysis';
+import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis';
 
 const anyObject = schema.object({}, { allowUnknowns: true });
 
-export const initGetLogEntryRateRoute = ({ framework, logAnalysis }: InfraBackendLibs) => {
+export const initGetLogEntryRateRoute = ({ framework, logEntryRateAnalysis }: InfraBackendLibs) => {
   framework.registerRoute(
     {
       method: 'post',
@@ -39,13 +39,13 @@ export const initGetLogEntryRateRoute = ({ framework, logAnalysis }: InfraBacken
         fold(throwErrors(Boom.badRequest), identity)
       );
 
-      const logEntryRateBuckets = await logAnalysis.getLogEntryRateBuckets(
+      const logEntryRateBuckets = await logEntryRateAnalysis.getLogEntryRateBuckets(
         requestContext,
+        request,
         payload.data.sourceId,
         payload.data.timeRange.startTime,
         payload.data.timeRange.endTime,
-        payload.data.bucketDuration,
-        request
+        payload.data.bucketDuration
       );
 
       return response.ok({
@@ -59,7 +59,7 @@ export const initGetLogEntryRateRoute = ({ framework, logAnalysis }: InfraBacken
       });
     } catch (e) {
       const { statusCode = 500, message = 'Unknown error occurred' } = e;
-      if (e instanceof NoLogRateResultsIndexError) {
+      if (e instanceof NoLogAnalysisResultsIndexError) {
         return response.notFound({ body: { message } });
       }
       return response.customError({

@@ -0,0 +1,18 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

export const commonSearchSuccessResponseFieldsRT = rt.type({
  _shards: rt.type({
    total: rt.number,
    successful: rt.number,
    skipped: rt.number,
    failed: rt.number,
  }),
  timed_out: rt.boolean,
  took: rt.number,
});
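
A quick illustration of decoding with this codec; the sample response below is fabricated and trimmed to the fields the codec checks:

// Fabricated search response for the sketch; real responses carry more fields,
// which rt.type tolerates without failing validation.
import { isRight } from 'fp-ts/lib/Either';

const sampleResponse = {
  _shards: { total: 2, successful: 2, skipped: 0, failed: 0 },
  timed_out: false,
  took: 12,
};

const result = commonSearchSuccessResponseFieldsRT.decode(sampleResponse);
isRight(result); // => true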

@@ -6080,8 +6080,6 @@
     "xpack.infra.logs.highlights.goToPreviousHighlightButtonLabel": "前のハイライトにスキップ",
     "xpack.infra.logs.index.settingsTabTitle": "設定",
     "xpack.infra.logs.index.streamTabTitle": "ストリーム",
-    "xpack.infra.logs.logsAnalysisResults.onboardingSuccessContent": "機械学習ロボットがデータの収集を開始するまでしばらくお待ちください。",
-    "xpack.infra.logs.logsAnalysisResults.onboardingSuccessTitle": "成功!",
     "xpack.infra.logs.streamPage.documentTitle": "{previousTitle} | ストリーム",
     "xpack.infra.logsPage.toolbar.kqlSearchFieldAriaLabel": "ログエントリーを検索",
     "xpack.infra.metricDetailPage.awsMetricsLayout.cpuUtilSection.percentSeriesLabel": "パーセント",

@@ -6021,8 +6021,6 @@
     "xpack.infra.logs.highlights.goToPreviousHighlightButtonLabel": "跳转到上一高亮条目",
     "xpack.infra.logs.index.settingsTabTitle": "设置",
     "xpack.infra.logs.index.streamTabTitle": "流式传输",
-    "xpack.infra.logs.logsAnalysisResults.onboardingSuccessContent": "请注意,我们的 Machine Learning 机器人若干分钟后才会开始收集数据。",
-    "xpack.infra.logs.logsAnalysisResults.onboardingSuccessTitle": "成功!",
     "xpack.infra.logs.streamPage.documentTitle": "{previousTitle} | 流式传输",
     "xpack.infra.logsPage.toolbar.kqlSearchFieldAriaLabel": "搜索日志条目",
     "xpack.infra.metricDetailPage.awsMetricsLayout.cpuUtilSection.percentSeriesLabel": "百分比",