[infra] Shorten IDs for ML jobs (#168234)

Closes #47477

### Summary

ML job IDs have a limit of 64 characters. For the log ML jobs we add the
string `kibana-logs-ui` plus the space and log view IDs as a prefix to
the job names (`log-entry-rate` and `log-entry-categories-count`) which
can quickly eat up the 64 character limit (even our own Stack Monitoring
log view hits the limit). This prevents users from being able to create
ML jobs; it is also hard to rename a space or log view after the fact, and
the limit is not hinted at during space creation (because the two concepts
are unrelated in some sense).

In order to achieve a more stable length to the ID, this PR introduces a
new format for the prefix which creates a UUID v5 which uses the space
and log view ID as seed information (it then removes the dashes to still
be within the size limit for the categorization job).

Since there is no technical difference between the new and old format,
this PR makes an effort to continue to support the old format and allow
migration of old jobs as needed. The old jobs work and may contain
important data so the user should not feel forced to migrate.

The main addition is a new small API that checks if any ML jobs are
available and which format they use for the ID so that the app can
request data accordingly and the APIs have been modified to take the ID
format into account (except during creation which should always use the
new format).

The solution applied is not ideal. It simply passes the ID format along
with the space and log view ID to each point where the ID is re-created
(which is multiple). The ideal solution would be to store the job data
in the store and pass that around instead but that seemed like a
considerably larger effort. This PR does introduce some functional tests
around the ML job creation process, so such a future refactor should be
a bit safer than previously.

### How to test

* Start from `main`
* Start Elasticsearch
* Start Kibana
* Load the Sample web logs (Kibana home -> Try sample data -> Other
sample data sets)
* Visit the Anomalies page in the Logs UI
* Set up either of the two ML jobs (or both), and wait for some results to
show up
* Checkout the PR branch
* Visit the anomalies page and verify that it still works (requests go
to resolve the ID format, should return 'legacy' which should then load
the data for the legacy job)
* Recreate the ML job and verify that the new job works and results
still show up (new requests should go out with the new format being
used, which may be a mixed mode if you have two jobs and only migrate
one of them)

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
Milton Hultgren 2023-11-19 15:00:11 +01:00 committed by GitHub
parent 37bf74b0d8
commit 4963e6b11d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
71 changed files with 1434 additions and 156 deletions

View file

@ -10,3 +10,4 @@ export * from './log_alerts/v1';
export * from './log_analysis/results/v1';
export * from './log_analysis/validation/v1';
export * from './metrics_explorer_views/v1';
export * from './log_analysis/id_formats/v1/id_formats';

View file

@ -0,0 +1,39 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as rt from 'io-ts';

import { logEntryRateJobTypeRT, logEntryCategoriesJobTypeRT } from '../../../../log_analysis';

// Discriminates the two ML job ID formats: 'legacy' (human-readable
// `kibana-logs-ui-...` prefix) and 'hashed' (UUID-v5-based prefix).
export const idFormatRT = rt.union([rt.literal('legacy'), rt.literal('hashed')]);
export type IdFormat = rt.TypeOf<typeof idFormatRT>;

const jobTypeRT = rt.union([logEntryRateJobTypeRT, logEntryCategoriesJobTypeRT]);
export type JobType = rt.TypeOf<typeof jobTypeRT>;

// Maps each known job type to the ID format its existing job uses.
export const idFormatByJobTypeRT = rt.record(jobTypeRT, idFormatRT);
export type IdFormatByJobType = rt.TypeOf<typeof idFormatByJobTypeRT>;

export const LOG_ANALYSIS_GET_ID_FORMATS = '/api/infra/log_analysis/id_formats';

export const getLogAnalysisIdFormatsRequestPayloadRT = rt.type({
  data: rt.type({
    logViewId: rt.string,
    spaceId: rt.string,
  }),
});

export type GetLogAnalysisIdFormatsRequestPayload = rt.TypeOf<
  typeof getLogAnalysisIdFormatsRequestPayloadRT
>;

// Reuse idFormatByJobTypeRT rather than re-declaring the same record codec
// inline, so the request/response contracts cannot drift apart.
export const getLogAnalysisIdFormatsSuccessResponsePayloadRT = rt.type({
  data: idFormatByJobTypeRT,
});

export type GetLogAnalysisIdFormatsSuccessResponsePayload = rt.TypeOf<
  typeof getLogAnalysisIdFormatsSuccessResponsePayloadRT
>;

View file

@ -8,6 +8,7 @@
import * as rt from 'io-ts';
import { persistedLogViewReferenceRT } from '@kbn/logs-shared-plugin/common';
import { idFormatByJobTypeRT } from '../../id_formats/v1/id_formats';
import { timeRangeRT, routeTimingMetadataRT } from '../../../shared';
import {
logEntryAnomalyRT,
@ -54,6 +55,7 @@ export const getLogEntryAnomaliesRequestPayloadRT = rt.type({
rt.type({
// log view
logView: persistedLogViewReferenceRT,
idFormats: idFormatByJobTypeRT,
// the time range to fetch the log entry anomalies from
timeRange: timeRangeRT,
}),

View file

@ -14,6 +14,7 @@ import {
timeRangeRT,
routeTimingMetadataRT,
} from '../../../shared';
import { idFormatByJobTypeRT } from '../../id_formats/v1/id_formats';
export const LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH =
'/api/infra/log_analysis/results/log_entry_anomalies_datasets';
@ -26,6 +27,7 @@ export const getLogEntryAnomaliesDatasetsRequestPayloadRT = rt.type({
data: rt.type({
// log view
logView: persistedLogViewReferenceRT,
idFormats: idFormatByJobTypeRT,
// the time range to fetch the anomalies datasets from
timeRange: timeRangeRT,
}),

View file

@ -8,6 +8,7 @@
import * as rt from 'io-ts';
import { persistedLogViewReferenceRT } from '@kbn/logs-shared-plugin/common';
import { idFormatRT } from '../../id_formats/v1/id_formats';
import {
badRequestErrorRT,
forbiddenErrorRT,
@ -41,6 +42,7 @@ export const getLogEntryCategoriesRequestPayloadRT = rt.type({
categoryCount: rt.number,
// log view
logView: persistedLogViewReferenceRT,
idFormat: idFormatRT,
// the time range to fetch the categories from
timeRange: timeRangeRT,
// a list of histograms to create

View file

@ -8,6 +8,7 @@
import * as rt from 'io-ts';
import { persistedLogViewReferenceRT } from '@kbn/logs-shared-plugin/common';
import { idFormatRT } from '../../id_formats/v1/id_formats';
import {
badRequestErrorRT,
forbiddenErrorRT,
@ -25,6 +26,7 @@ export const getLogEntryCategoryDatasetsRequestPayloadRT = rt.type({
data: rt.type({
// log view
logView: persistedLogViewReferenceRT,
idFormat: idFormatRT,
// the time range to fetch the category datasets from
timeRange: timeRangeRT,
}),

View file

@ -7,6 +7,7 @@
import { logEntryContextRT, persistedLogViewReferenceRT } from '@kbn/logs-shared-plugin/common';
import * as rt from 'io-ts';
import { idFormatRT } from '../../id_formats/v1/id_formats';
import {
badRequestErrorRT,
forbiddenErrorRT,
@ -29,6 +30,7 @@ export const getLogEntryCategoryExamplesRequestPayloadRT = rt.type({
exampleCount: rt.number,
// log view
logView: persistedLogViewReferenceRT,
idFormat: idFormatRT,
// the time range to fetch the category examples from
timeRange: timeRangeRT,
}),

View file

@ -7,6 +7,7 @@
import * as rt from 'io-ts';
import { persistedLogViewReferenceRT } from '@kbn/logs-shared-plugin/common';
import { idFormatRT } from '../../id_formats/v1/id_formats';
import { logEntryExampleRT } from '../../../../log_analysis';
import {
badRequestErrorRT,
@ -31,6 +32,7 @@ export const getLogEntryExamplesRequestPayloadRT = rt.type({
exampleCount: rt.number,
// logView
logView: persistedLogViewReferenceRT,
idFormat: idFormatRT,
// the time range to fetch the log rate examples from
timeRange: timeRangeRT,
}),

View file

@ -6,6 +6,8 @@
*/
import * as rt from 'io-ts';
import { v5 } from 'uuid';
import { IdFormat, JobType } from '../http_api/latest';
export const bucketSpan = 900000;
@ -13,14 +15,32 @@ export const categoriesMessageField = 'message';
export const partitionField = 'event.dataset';
export const getJobIdPrefix = (spaceId: string, sourceId: string) =>
`kibana-logs-ui-${spaceId}-${sourceId}-`;
const ID_NAMESPACE = 'f91b78c0-fdd3-425d-a4ba-4c028fe57e0f';
export const getJobId = (spaceId: string, logViewId: string, jobType: string) =>
`${getJobIdPrefix(spaceId, logViewId)}${jobType}`;
/**
 * Builds the prefix prepended to every logs ML job ID.
 *
 * 'legacy' keeps the original human-readable form (which can overflow the
 * 64-character ML job ID limit); 'hashed' derives a stable UUID v5 from the
 * space and log view IDs instead.
 */
export const getJobIdPrefix = (spaceId: string, sourceId: string, idFormat: IdFormat) => {
  if (idFormat === 'legacy') {
    return `kibana-logs-ui-${spaceId}-${sourceId}-`;
  }
  // A UUID is 36 characters but based on the ML job names for logs, our limit is 32 characters
  // Thus we remove the 4 dashes
  const hashedId = v5(`${spaceId}-${sourceId}`, ID_NAMESPACE).replaceAll('-', '');
  return `logs-${hashedId}-`;
};
export const getDatafeedId = (spaceId: string, logViewId: string, jobType: string) =>
`datafeed-${getJobId(spaceId, logViewId, jobType)}`;
/**
 * Returns the full ML job ID: the format-dependent prefix followed by the
 * job type suffix.
 */
export const getJobId = (
  spaceId: string,
  logViewId: string,
  idFormat: IdFormat,
  jobType: JobType
) => {
  const prefix = getJobIdPrefix(spaceId, logViewId, idFormat);
  return `${prefix}${jobType}`;
};
/**
 * Returns the datafeed ID for a job: datafeeds mirror their job's ID with a
 * fixed "datafeed-" prefix.
 */
export const getDatafeedId = (
  spaceId: string,
  logViewId: string,
  idFormat: IdFormat,
  jobType: JobType
) => {
  const jobId = getJobId(spaceId, logViewId, idFormat, jobType);
  return `datafeed-${jobId}`;
};
export const datasetFilterRT = rt.union([
rt.strict({

View file

@ -8,9 +8,7 @@
import * as rt from 'io-ts';
import { sortRT } from './log_analysis_results';
export const logEntryCategoriesJobTypeRT = rt.keyof({
'log-entry-categories-count': null,
});
export const logEntryCategoriesJobTypeRT = rt.literal('log-entry-categories-count');
export type LogEntryCategoriesJobType = rt.TypeOf<typeof logEntryCategoriesJobTypeRT>;
@ -18,6 +16,8 @@ export const logEntryCategoriesJobTypes: LogEntryCategoriesJobType[] = [
'log-entry-categories-count',
];
export const logEntryCategoriesJobType: LogEntryCategoriesJobType = 'log-entry-categories-count';
export const logEntryCategoryDatasetRT = rt.type({
name: rt.string,
maximumAnomalyScore: rt.number,

View file

@ -7,10 +7,9 @@
import * as rt from 'io-ts';
export const logEntryRateJobTypeRT = rt.keyof({
'log-entry-rate': null,
});
export const logEntryRateJobTypeRT = rt.literal('log-entry-rate');
export type LogEntryRateJobType = rt.TypeOf<typeof logEntryRateJobTypeRT>;
export const logEntryRateJobTypes: LogEntryRateJobType[] = ['log-entry-rate'];
export const logEntryRateJobType: LogEntryRateJobType = 'log-entry-rate';
export const logEntryRateJobTypes: LogEntryRateJobType[] = [logEntryRateJobType];

View file

@ -7,6 +7,7 @@
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import React, { useCallback } from 'react';
import { logEntryCategoriesJobType, logEntryRateJobType } from '../../../../../common/log_analysis';
import { useLogAnalysisCapabilitiesContext } from '../../../../containers/logs/log_analysis';
import {
logEntryCategoriesModule,
@ -40,7 +41,7 @@ export const LogAnalysisModuleList: React.FC<{
<EuiFlexGroup>
<EuiFlexItem>
<LogAnalysisModuleListCard
jobId={logEntryRateJobIds['log-entry-rate']}
jobId={logEntryRateJobIds[logEntryRateJobType]}
hasSetupCapabilities={hasLogAnalysisSetupCapabilities}
moduleDescription={logEntryRateModule.moduleDescription}
moduleName={logEntryRateModule.moduleName}
@ -50,7 +51,7 @@ export const LogAnalysisModuleList: React.FC<{
</EuiFlexItem>
<EuiFlexItem>
<LogAnalysisModuleListCard
jobId={logEntryCategoriesJobIds['log-entry-categories-count']}
jobId={logEntryCategoriesJobIds[logEntryCategoriesJobType]}
hasSetupCapabilities={hasLogAnalysisSetupCapabilities}
moduleDescription={logEntryCategoriesModule.moduleDescription}
moduleName={logEntryCategoriesModule.moduleName}

View file

@ -34,7 +34,12 @@ export const LogAnalysisSetupFlyout: React.FC<{
}
return (
<EuiFlyout aria-labelledby={FLYOUT_HEADING_ID} maxWidth={800} onClose={closeFlyout}>
<EuiFlyout
aria-labelledby={FLYOUT_HEADING_ID}
maxWidth={800}
onClose={closeFlyout}
data-test-subj="infraLogAnalysisSetupFlyout"
>
<EuiFlyoutHeader hasBorder>
<EuiTitle>
<h2 id={FLYOUT_HEADING_ID}>

View file

@ -8,20 +8,22 @@
import * as rt from 'io-ts';
import type { HttpHandler } from '@kbn/core/public';
import { IdFormat, JobType } from '../../../../../common/http_api/latest';
import { getDatafeedId, getJobId } from '../../../../../common/log_analysis';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface DeleteJobsRequestArgs<JobType extends string> {
interface DeleteJobsRequestArgs<T extends JobType> {
spaceId: string;
logViewId: string;
jobTypes: JobType[];
idFormat: IdFormat;
jobTypes: T[];
}
export const callDeleteJobs = async <JobType extends string>(
requestArgs: DeleteJobsRequestArgs<JobType>,
export const callDeleteJobs = async <T extends JobType>(
requestArgs: DeleteJobsRequestArgs<T>,
fetch: HttpHandler
) => {
const { spaceId, logViewId, jobTypes } = requestArgs;
const { spaceId, logViewId, idFormat, jobTypes } = requestArgs;
// NOTE: Deleting the jobs via this API will delete the datafeeds at the same time
const deleteJobsResponse = await fetch('/internal/ml/jobs/delete_jobs', {
@ -29,7 +31,7 @@ export const callDeleteJobs = async <JobType extends string>(
version: '1',
body: JSON.stringify(
deleteJobsRequestPayloadRT.encode({
jobIds: jobTypes.map((jobType) => getJobId(spaceId, logViewId, jobType)),
jobIds: jobTypes.map((jobType) => getJobId(spaceId, logViewId, idFormat, jobType)),
})
),
});
@ -45,17 +47,18 @@ export const callGetJobDeletionTasks = async (fetch: HttpHandler) => {
return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse);
};
interface StopDatafeedsRequestArgs<JobType extends string> {
interface StopDatafeedsRequestArgs<T extends JobType> {
spaceId: string;
logViewId: string;
jobTypes: JobType[];
idFormat: IdFormat;
jobTypes: T[];
}
export const callStopDatafeeds = async <JobType extends string>(
requestArgs: StopDatafeedsRequestArgs<JobType>,
export const callStopDatafeeds = async <T extends JobType>(
requestArgs: StopDatafeedsRequestArgs<T>,
fetch: HttpHandler
) => {
const { spaceId, logViewId, jobTypes } = requestArgs;
const { spaceId, logViewId, idFormat, jobTypes } = requestArgs;
// Stop datafeed due to https://github.com/elastic/kibana/issues/44652
const stopDatafeedResponse = await fetch('/internal/ml/jobs/stop_datafeeds', {
@ -63,7 +66,9 @@ export const callStopDatafeeds = async <JobType extends string>(
version: '1',
body: JSON.stringify(
stopDatafeedsRequestPayloadRT.encode({
datafeedIds: jobTypes.map((jobType) => getDatafeedId(spaceId, logViewId, jobType)),
datafeedIds: jobTypes.map((jobType) =>
getDatafeedId(spaceId, logViewId, idFormat, jobType)
),
})
),
});

View file

@ -8,26 +8,28 @@
import * as rt from 'io-ts';
import type { HttpHandler } from '@kbn/core/public';
import { IdFormat, JobType } from '../../../../../common/http_api/latest';
import { getJobId, jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs<JobType extends string> {
interface RequestArgs<T extends JobType> {
spaceId: string;
logViewId: string;
jobTypes: JobType[];
idFormat: IdFormat;
jobTypes: T[];
}
export const callJobsSummaryAPI = async <JobType extends string>(
requestArgs: RequestArgs<JobType>,
export const callJobsSummaryAPI = async <T extends JobType>(
requestArgs: RequestArgs<T>,
fetch: HttpHandler
) => {
const { spaceId, logViewId, jobTypes } = requestArgs;
const { spaceId, logViewId, idFormat, jobTypes } = requestArgs;
const response = await fetch('/internal/ml/jobs/jobs_summary', {
method: 'POST',
version: '1',
body: JSON.stringify(
fetchJobStatusRequestPayloadRT.encode({
jobIds: jobTypes.map((jobType) => getJobId(spaceId, logViewId, jobType)),
jobIds: jobTypes.map((jobType) => getJobId(spaceId, logViewId, idFormat, jobType)),
})
),
});

View file

@ -46,7 +46,7 @@ export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: Http
start,
end,
indexPatternName: indexPattern,
prefix: getJobIdPrefix(spaceId, sourceId),
prefix: getJobIdPrefix(spaceId, sourceId, 'hashed'),
startDatafeed: true,
jobOverrides,
datafeedOverrides,

View file

@ -6,17 +6,19 @@
*/
import type { HttpHandler } from '@kbn/core/public';
import { IdFormat, JobType } from '../../../../common/http_api/latest';
import { getJobId } from '../../../../common/log_analysis';
import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup';
export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
export const cleanUpJobsAndDatafeeds = async <T extends JobType>(
spaceId: string,
logViewId: string,
jobTypes: JobType[],
idFormat: IdFormat,
jobTypes: T[],
fetch: HttpHandler
) => {
try {
await callStopDatafeeds({ spaceId, logViewId, jobTypes }, fetch);
await callStopDatafeeds({ spaceId, logViewId, idFormat, jobTypes }, fetch);
} catch (err) {
// Proceed only if datafeed has been deleted or didn't exist in the first place
if (err?.response?.status !== 404) {
@ -24,27 +26,32 @@ export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
}
}
return await deleteJobs(spaceId, logViewId, jobTypes, fetch);
return await deleteJobs(spaceId, logViewId, idFormat, jobTypes, fetch);
};
const deleteJobs = async <JobType extends string>(
const deleteJobs = async <T extends JobType>(
spaceId: string,
logViewId: string,
jobTypes: JobType[],
idFormat: IdFormat,
jobTypes: T[],
fetch: HttpHandler
) => {
const deleteJobsResponse = await callDeleteJobs({ spaceId, logViewId, jobTypes }, fetch);
await waitUntilJobsAreDeleted(spaceId, logViewId, jobTypes, fetch);
const deleteJobsResponse = await callDeleteJobs(
{ spaceId, logViewId, idFormat, jobTypes },
fetch
);
await waitUntilJobsAreDeleted(spaceId, logViewId, idFormat, jobTypes, fetch);
return deleteJobsResponse;
};
const waitUntilJobsAreDeleted = async <JobType extends string>(
const waitUntilJobsAreDeleted = async <T extends JobType>(
spaceId: string,
logViewId: string,
jobTypes: JobType[],
idFormat: IdFormat,
jobTypes: T[],
fetch: HttpHandler
) => {
const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, logViewId, jobType));
const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, logViewId, idFormat, jobType));
while (true) {
const { jobs } = await callGetJobDeletionTasks(fetch);
const needToWait = jobs

View file

@ -7,22 +7,27 @@
import { useCallback, useMemo } from 'react';
import { useUiTracker } from '@kbn/observability-shared-plugin/public';
import { useLogMlJobIdFormatsShimContext } from '../../../pages/logs/shared/use_log_ml_job_id_formats_shim';
import { IdFormat, JobType } from '../../../../common/http_api/latest';
import { DatasetFilter } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { useModuleStatus } from './log_analysis_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
export const useLogAnalysisModule = <JobType extends string>({
export const useLogAnalysisModule = <T extends JobType>({
sourceConfiguration,
idFormat,
moduleDescriptor,
}: {
sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>;
idFormat: IdFormat;
moduleDescriptor: ModuleDescriptor<T>;
}) => {
const { services } = useKibanaContextForPlugin();
const { spaceId, sourceId: logViewId, timestampField, runtimeMappings } = sourceConfiguration;
const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
const { migrateIdFormat } = useLogMlJobIdFormatsShimContext();
const trackMetric = useUiTracker({ app: 'infra_logs' });
@ -31,7 +36,12 @@ export const useLogAnalysisModule = <JobType extends string>({
cancelPreviousOn: 'resolution',
createPromise: async () => {
dispatchModuleStatus({ type: 'fetchingJobStatuses' });
return await moduleDescriptor.getJobSummary(spaceId, logViewId, services.http.fetch);
return await moduleDescriptor.getJobSummary(
spaceId,
logViewId,
idFormat,
services.http.fetch
);
},
onResolve: (jobResponse) => {
dispatchModuleStatus({
@ -39,13 +49,14 @@ export const useLogAnalysisModule = <JobType extends string>({
payload: jobResponse,
spaceId,
logViewId,
idFormat,
});
},
onReject: () => {
dispatchModuleStatus({ type: 'failedFetchingJobStatuses' });
},
},
[spaceId, logViewId]
[spaceId, logViewId, idFormat]
);
const [, setUpModule] = useTrackedPromise(
@ -74,6 +85,7 @@ export const useLogAnalysisModule = <JobType extends string>({
const jobSummaries = await moduleDescriptor.getJobSummary(
spaceId,
logViewId,
'hashed',
services.http.fetch
);
return { setupResult, jobSummaries };
@ -105,7 +117,9 @@ export const useLogAnalysisModule = <JobType extends string>({
jobSummaries,
spaceId,
logViewId,
idFormat: 'hashed',
});
migrateIdFormat(moduleDescriptor.jobTypes[0]);
},
onReject: (e: any) => {
dispatchModuleStatus({ type: 'failedSetup' });
@ -121,13 +135,18 @@ export const useLogAnalysisModule = <JobType extends string>({
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await moduleDescriptor.cleanUpModule(spaceId, logViewId, services.http.fetch);
return await moduleDescriptor.cleanUpModule(
spaceId,
logViewId,
idFormat,
services.http.fetch
);
},
onReject: (e) => {
throw new Error(`Failed to clean up previous ML job: ${e}`);
},
},
[spaceId, logViewId]
[spaceId, logViewId, idFormat]
);
const isCleaningUp = useMemo(
@ -159,8 +178,8 @@ export const useLogAnalysisModule = <JobType extends string>({
}, [dispatchModuleStatus]);
const jobIds = useMemo(
() => moduleDescriptor.getJobIds(spaceId, logViewId),
[moduleDescriptor, spaceId, logViewId]
() => moduleDescriptor.getJobIds(spaceId, logViewId, idFormat),
[moduleDescriptor, spaceId, logViewId, idFormat]
);
return {

View file

@ -7,14 +7,15 @@
import { useMemo } from 'react';
import equal from 'fast-deep-equal';
import { JobType } from '../../../../common/http_api/latest';
import { JobSummary } from './api/ml_get_jobs_summary_api';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
export const useLogAnalysisModuleConfiguration = <JobType extends string>({
export const useLogAnalysisModuleConfiguration = <T extends JobType>({
moduleDescriptor,
sourceConfiguration,
}: {
moduleDescriptor: ModuleDescriptor<JobType>;
moduleDescriptor: ModuleDescriptor<T>;
sourceConfiguration: ModuleSourceConfiguration;
}) => {
const getIsJobConfigurationOutdated = useMemo(
@ -28,8 +29,8 @@ export const useLogAnalysisModuleConfiguration = <JobType extends string>({
};
export const isJobConfigurationOutdated =
<JobType extends string>(
{ bucketSpan }: ModuleDescriptor<JobType>,
<T extends JobType>(
{ bucketSpan }: ModuleDescriptor<T>,
currentSourceConfiguration: ModuleSourceConfiguration
) =>
(jobSummary: JobSummary): boolean => {

View file

@ -6,6 +6,7 @@
*/
import { useCallback, useMemo, useState } from 'react';
import { IdFormat, JobType } from '../../../../common/http_api/latest';
import { getJobId } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
@ -13,12 +14,14 @@ import { JobSummary } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
export const useLogAnalysisModuleDefinition = <JobType extends string>({
export const useLogAnalysisModuleDefinition = <T extends JobType>({
sourceConfiguration: { spaceId, sourceId },
idFormat,
moduleDescriptor,
}: {
sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>;
idFormat: IdFormat;
moduleDescriptor: ModuleDescriptor<T>;
}) => {
const { services } = useKibanaContextForPlugin();
const [moduleDefinition, setModuleDefinition] = useState<
@ -31,12 +34,12 @@ export const useLogAnalysisModuleDefinition = <JobType extends string>({
? moduleDefinition.jobs.reduce<Record<string, JobDefinition>>(
(accumulatedJobDefinitions, jobDefinition) => ({
...accumulatedJobDefinitions,
[getJobId(spaceId, sourceId, jobDefinition.id)]: jobDefinition,
[getJobId(spaceId, sourceId, idFormat, jobDefinition.id as T)]: jobDefinition,
}),
{}
)
: {},
[moduleDefinition, sourceId, spaceId]
[moduleDefinition, sourceId, spaceId, idFormat]
);
const [fetchModuleDefinitionRequest, fetchModuleDefinition] = useTrackedPromise(

View file

@ -7,6 +7,7 @@
import { useReducer } from 'react';
import { IdFormat, JobType } from '../../../../common/http_api/latest';
import {
JobStatus,
getDatafeedId,
@ -18,8 +19,8 @@ import { FetchJobStatusResponsePayload, JobSummary } from './api/ml_get_jobs_sum
import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
import { MandatoryProperty } from '../../../../common/utility_types';
interface StatusReducerState<JobType extends string> {
jobStatus: Record<JobType, JobStatus>;
interface StatusReducerState<T extends JobType> {
jobStatus: Record<T, JobStatus>;
jobSummaries: JobSummary[];
lastSetupErrorMessages: string[];
setupStatus: SetupStatus;
@ -31,6 +32,7 @@ type StatusReducerAction =
type: 'finishedSetup';
logViewId: string;
spaceId: string;
idFormat: IdFormat;
jobSetupResults: SetupMlModuleResponsePayload['jobs'];
jobSummaries: FetchJobStatusResponsePayload;
datafeedSetupResults: SetupMlModuleResponsePayload['datafeeds'];
@ -41,22 +43,23 @@ type StatusReducerAction =
type: 'fetchedJobStatuses';
spaceId: string;
logViewId: string;
idFormat: IdFormat;
payload: FetchJobStatusResponsePayload;
}
| { type: 'failedFetchingJobStatuses' }
| { type: 'viewedResults' };
const createInitialState = <JobType extends string>({
const createInitialState = <T extends JobType>({
jobTypes,
}: {
jobTypes: JobType[];
}): StatusReducerState<JobType> => ({
jobTypes: T[];
}): StatusReducerState<T> => ({
jobStatus: jobTypes.reduce(
(accumulatedJobStatus, jobType) => ({
...accumulatedJobStatus,
[jobType]: 'unknown',
}),
{} as Record<JobType, JobStatus>
{} as Record<T, JobStatus>
),
jobSummaries: [],
lastSetupErrorMessages: [],
@ -64,11 +67,8 @@ const createInitialState = <JobType extends string>({
});
const createStatusReducer =
<JobType extends string>(jobTypes: JobType[]) =>
(
state: StatusReducerState<JobType>,
action: StatusReducerAction
): StatusReducerState<JobType> => {
<T extends JobType>(jobTypes: T[]) =>
(state: StatusReducerState<T>, action: StatusReducerAction): StatusReducerState<T> => {
switch (action.type) {
case 'startedSetup': {
return {
@ -78,25 +78,34 @@ const createStatusReducer =
...accumulatedJobStatus,
[jobType]: 'initializing',
}),
{} as Record<JobType, JobStatus>
{} as Record<T, JobStatus>
),
setupStatus: { type: 'pending' },
};
}
case 'finishedSetup': {
const { datafeedSetupResults, jobSetupResults, jobSummaries, spaceId, logViewId } = action;
const {
datafeedSetupResults,
jobSetupResults,
jobSummaries,
spaceId,
logViewId,
idFormat,
} = action;
const nextJobStatus = jobTypes.reduce(
(accumulatedJobStatus, jobType) => ({
...accumulatedJobStatus,
[jobType]:
hasSuccessfullyCreatedJob(getJobId(spaceId, logViewId, jobType))(jobSetupResults) &&
hasSuccessfullyStartedDatafeed(getDatafeedId(spaceId, logViewId, jobType))(
hasSuccessfullyCreatedJob(getJobId(spaceId, logViewId, idFormat, jobType))(
jobSetupResults
) &&
hasSuccessfullyStartedDatafeed(getDatafeedId(spaceId, logViewId, idFormat, jobType))(
datafeedSetupResults
)
? 'started'
: 'failed',
}),
{} as Record<JobType, JobStatus>
{} as Record<T, JobStatus>
);
const nextSetupStatus: SetupStatus = Object.values<JobStatus>(nextJobStatus).every(
(jobState) => jobState === 'started' || jobState === 'starting'
@ -129,7 +138,7 @@ const createStatusReducer =
...accumulatedJobStatus,
[jobType]: 'failed',
}),
{} as Record<JobType, JobStatus>
{} as Record<T, JobStatus>
),
setupStatus: { type: 'failed', reasons: action.reason ? [action.reason] : ['unknown'] },
};
@ -142,15 +151,15 @@ const createStatusReducer =
};
}
case 'fetchedJobStatuses': {
const { payload: jobSummaries, spaceId, logViewId } = action;
const { payload: jobSummaries, spaceId, logViewId, idFormat } = action;
const { setupStatus } = state;
const nextJobStatus = jobTypes.reduce(
(accumulatedJobStatus, jobType) => ({
...accumulatedJobStatus,
[jobType]: getJobStatus(getJobId(spaceId, logViewId, jobType))(jobSummaries),
[jobType]: getJobStatus(getJobId(spaceId, logViewId, idFormat, jobType))(jobSummaries),
}),
{} as Record<JobType, JobStatus>
{} as Record<T, JobStatus>
);
const nextSetupStatus = getSetupStatus(nextJobStatus)(setupStatus);
@ -170,7 +179,7 @@ const createStatusReducer =
...accumulatedJobStatus,
[jobType]: 'unknown',
}),
{} as Record<JobType, JobStatus>
{} as Record<T, JobStatus>
),
};
}
@ -243,7 +252,7 @@ const getJobStatus =
})[0] || 'missing';
const getSetupStatus =
<JobType extends string>(everyJobStatus: Record<JobType, JobStatus>) =>
<T extends JobType>(everyJobStatus: Record<T, JobStatus>) =>
(previousSetupStatus: SetupStatus): SetupStatus =>
Object.entries<JobStatus>(everyJobStatus).reduce<SetupStatus>((setupStatus, [, jobStatus]) => {
if (jobStatus === 'missing') {
@ -265,6 +274,6 @@ const hasError = <Value extends { error?: any }>(
value: Value
): value is MandatoryProperty<Value, 'error'> => value.error != null;
export const useModuleStatus = <JobType extends string>(jobTypes: JobType[]) => {
export const useModuleStatus = <T extends JobType>(jobTypes: T[]) => {
return useReducer(createStatusReducer(jobTypes), { jobTypes }, createInitialState);
};

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { IdFormat, JobType } from '../../../../common/http_api/latest';
import {
ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload,
@ -19,16 +20,17 @@ import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
export type { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api';
export interface ModuleDescriptor<JobType extends string> {
export interface ModuleDescriptor<T extends JobType> {
moduleId: string;
moduleName: string;
moduleDescription: string;
jobTypes: JobType[];
jobTypes: T[];
bucketSpan: number;
getJobIds: (spaceId: string, logViewId: string) => Record<JobType, string>;
getJobIds: (spaceId: string, logViewId: string, idFormat: IdFormat) => Record<T, string>;
getJobSummary: (
spaceId: string,
logViewId: string,
idFormat: IdFormat,
fetch: HttpHandler
) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: (fetch: HttpHandler) => Promise<GetMlModuleResponsePayload>;
@ -42,6 +44,7 @@ export interface ModuleDescriptor<JobType extends string> {
cleanUpModule: (
spaceId: string,
logViewId: string,
idFormat: IdFormat,
fetch: HttpHandler
) => Promise<DeleteJobsResponsePayload>;
validateSetupIndices: (

View file

@ -8,6 +8,7 @@
import { isEqual } from 'lodash';
import { useCallback, useEffect, useMemo, useState } from 'react';
import usePrevious from 'react-use/lib/usePrevious';
import { JobType } from '../../../../common/http_api/latest';
import {
combineDatasetFilters,
DatasetFilter,
@ -30,21 +31,21 @@ type SetupHandler = (
datasetFilter: DatasetFilter
) => void;
interface AnalysisSetupStateArguments<JobType extends string> {
interface AnalysisSetupStateArguments<T extends JobType> {
cleanUpAndSetUpModule: SetupHandler;
moduleDescriptor: ModuleDescriptor<JobType>;
moduleDescriptor: ModuleDescriptor<T>;
setUpModule: SetupHandler;
sourceConfiguration: ModuleSourceConfiguration;
}
const fourWeeksInMs = 86400000 * 7 * 4;
export const useAnalysisSetupState = <JobType extends string>({
export const useAnalysisSetupState = <T extends JobType>({
cleanUpAndSetUpModule,
moduleDescriptor: { validateSetupDatasets, validateSetupIndices },
setUpModule,
sourceConfiguration,
}: AnalysisSetupStateArguments<JobType>) => {
}: AnalysisSetupStateArguments<T>) => {
const { services } = useKibanaContextForPlugin();
const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs);
const [endTime, setEndTime] = useState<number | undefined>(undefined);

View file

@ -8,11 +8,13 @@
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { i18n } from '@kbn/i18n';
import type { HttpHandler } from '@kbn/core/public';
import { IdFormat } from '../../../../../../common/http_api/latest';
import {
bucketSpan,
categoriesMessageField,
DatasetFilter,
getJobId,
logEntryCategoriesJobType,
LogEntryCategoriesJobType,
logEntryCategoriesJobTypes,
partitionField,
@ -36,21 +38,26 @@ const moduleDescription = i18n.translate(
}
);
const getJobIds = (spaceId: string, logViewId: string) =>
const getJobIds = (spaceId: string, logViewId: string, idFormat: IdFormat) =>
logEntryCategoriesJobTypes.reduce(
(accumulatedJobIds, jobType) => ({
...accumulatedJobIds,
[jobType]: getJobId(spaceId, logViewId, jobType),
[jobType]: getJobId(spaceId, logViewId, idFormat, jobType),
}),
{} as Record<LogEntryCategoriesJobType, string>
);
const getJobSummary = async (spaceId: string, logViewId: string, fetch: HttpHandler) => {
const getJobSummary = async (
spaceId: string,
logViewId: string,
idFormat: IdFormat,
fetch: HttpHandler
) => {
const response = await callJobsSummaryAPI(
{ spaceId, logViewId, jobTypes: logEntryCategoriesJobTypes },
{ spaceId, logViewId, idFormat, jobTypes: logEntryCategoriesJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, logViewId));
const jobIds = Object.values(getJobIds(spaceId, logViewId, idFormat));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
@ -69,7 +76,7 @@ const setUpModule = async (
const indexNamePattern = indices.join(',');
const jobOverrides = [
{
job_id: 'log-entry-categories-count' as const,
job_id: logEntryCategoriesJobType,
analysis_config: {
bucket_span: `${bucketSpan}ms`,
},
@ -88,7 +95,7 @@ const setUpModule = async (
];
const datafeedOverrides = [
{
job_id: 'log-entry-categories-count' as const,
job_id: logEntryCategoriesJobType,
runtime_mappings: runtimeMappings,
},
];
@ -130,8 +137,19 @@ const setUpModule = async (
);
};
const cleanUpModule = async (spaceId: string, logViewId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, logViewId, logEntryCategoriesJobTypes, fetch);
const cleanUpModule = async (
spaceId: string,
logViewId: string,
idFormat: IdFormat,
fetch: HttpHandler
) => {
return await cleanUpJobsAndDatafeeds(
spaceId,
logViewId,
idFormat,
logEntryCategoriesJobTypes,
fetch
);
};
const validateSetupIndices = async (

View file

@ -8,6 +8,7 @@
import createContainer from 'constate';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { useMemo } from 'react';
import { IdFormat } from '../../../../../../common/http_api/latest';
import { useLogAnalysisModule } from '../../log_analysis_module';
import { useLogAnalysisModuleConfiguration } from '../../log_analysis_module_configuration';
import { useLogAnalysisModuleDefinition } from '../../log_analysis_module_definition';
@ -19,12 +20,14 @@ export const useLogEntryCategoriesModule = ({
indexPattern,
logViewId,
spaceId,
idFormat,
timestampField,
runtimeMappings,
}: {
indexPattern: string;
logViewId: string;
spaceId: string;
idFormat: IdFormat;
timestampField: string;
runtimeMappings: estypes.MappingRuntimeFields;
}) => {
@ -41,6 +44,7 @@ export const useLogEntryCategoriesModule = ({
const logAnalysisModule = useLogAnalysisModule({
moduleDescriptor: logEntryCategoriesModule,
idFormat,
sourceConfiguration,
});
@ -51,6 +55,7 @@ export const useLogEntryCategoriesModule = ({
const { fetchModuleDefinition, getIsJobDefinitionOutdated } = useLogAnalysisModuleDefinition({
sourceConfiguration,
idFormat,
moduleDescriptor: logEntryCategoriesModule,
});

View file

@ -8,10 +8,12 @@
import { i18n } from '@kbn/i18n';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { HttpHandler } from '@kbn/core/public';
import { IdFormat } from '../../../../../../common/http_api/latest';
import {
bucketSpan,
DatasetFilter,
getJobId,
logEntryRateJobType,
LogEntryRateJobType,
logEntryRateJobTypes,
partitionField,
@ -35,21 +37,26 @@ const moduleDescription = i18n.translate(
}
);
const getJobIds = (spaceId: string, logViewId: string) =>
const getJobIds = (spaceId: string, logViewId: string, idFormat: IdFormat) =>
logEntryRateJobTypes.reduce(
(accumulatedJobIds, jobType) => ({
...accumulatedJobIds,
[jobType]: getJobId(spaceId, logViewId, jobType),
[jobType]: getJobId(spaceId, logViewId, idFormat, jobType),
}),
{} as Record<LogEntryRateJobType, string>
);
const getJobSummary = async (spaceId: string, logViewId: string, fetch: HttpHandler) => {
const getJobSummary = async (
spaceId: string,
logViewId: string,
idFormat: IdFormat,
fetch: HttpHandler
) => {
const response = await callJobsSummaryAPI(
{ spaceId, logViewId, jobTypes: logEntryRateJobTypes },
{ spaceId, logViewId, idFormat, jobTypes: logEntryRateJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, logViewId));
const jobIds = Object.values(getJobIds(spaceId, logViewId, idFormat));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
@ -68,7 +75,8 @@ const setUpModule = async (
const indexNamePattern = indices.join(',');
const jobOverrides = [
{
job_id: 'log-entry-rate' as const,
description: `Logs UI (${spaceId}/${sourceId}): Detects anomalies in the log entry ingestion rate`,
job_id: logEntryRateJobType,
analysis_config: {
bucket_span: `${bucketSpan}ms`,
},
@ -86,7 +94,7 @@ const setUpModule = async (
];
const datafeedOverrides = [
{
job_id: 'log-entry-rate' as const,
job_id: logEntryRateJobType,
runtime_mappings: runtimeMappings,
},
];
@ -122,8 +130,13 @@ const setUpModule = async (
);
};
const cleanUpModule = async (spaceId: string, logViewId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, logViewId, logEntryRateJobTypes, fetch);
const cleanUpModule = async (
spaceId: string,
logViewId: string,
idFormat: IdFormat,
fetch: HttpHandler
) => {
return await cleanUpJobsAndDatafeeds(spaceId, logViewId, idFormat, logEntryRateJobTypes, fetch);
};
const validateSetupIndices = async (

View file

@ -8,6 +8,7 @@
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import createContainer from 'constate';
import { useMemo } from 'react';
import { IdFormat } from '../../../../../../common/http_api/latest';
import { ModuleSourceConfiguration } from '../../log_analysis_module_types';
import { useLogAnalysisModule } from '../../log_analysis_module';
import { useLogAnalysisModuleConfiguration } from '../../log_analysis_module_configuration';
@ -18,12 +19,14 @@ export const useLogEntryRateModule = ({
indexPattern,
logViewId,
spaceId,
idFormat,
timestampField,
runtimeMappings,
}: {
indexPattern: string;
logViewId: string;
spaceId: string;
idFormat: IdFormat;
timestampField: string;
runtimeMappings: estypes.MappingRuntimeFields;
}) => {
@ -40,6 +43,7 @@ export const useLogEntryRateModule = ({
const logAnalysisModule = useLogAnalysisModule({
moduleDescriptor: logEntryRateModule,
idFormat,
sourceConfiguration,
});
@ -50,6 +54,7 @@ export const useLogEntryRateModule = ({
const { fetchModuleDefinition, getIsJobDefinitionOutdated } = useLogAnalysisModuleDefinition({
sourceConfiguration,
idFormat,
moduleDescriptor: logEntryRateModule,
});

View file

@ -8,7 +8,7 @@
import * as rt from 'io-ts';
import type { HttpHandler } from '@kbn/core/public';
import { jobCustomSettingsRT } from '../../../../common/log_analysis';
import { jobCustomSettingsRT } from '../../../../common/infra_ml';
import { decodeOrThrow } from '../../../../common/runtime_types';
export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => {

View file

@ -6,7 +6,7 @@
*/
import { useCallback, useMemo, useState } from 'react';
import { getJobId } from '../../../common/log_analysis';
import { getJobId } from '../../../common/infra_ml';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { JobSummary } from './api/ml_get_jobs_summary_api';

View file

@ -11,6 +11,7 @@ import { useLogsBreadcrumbs } from '../../../hooks/use_logs_breadcrumbs';
import { LogEntryCategoriesPageContent } from './page_content';
import { LogEntryCategoriesPageProviders } from './page_providers';
import { logCategoriesTitle } from '../../../translations';
import { LogMlJobIdFormatsShimProvider } from '../shared/use_log_ml_job_id_formats_shim';
export const LogEntryCategoriesPage = () => {
useLogsBreadcrumbs([
@ -21,9 +22,11 @@ export const LogEntryCategoriesPage = () => {
return (
<EuiErrorBoundary>
<LogEntryCategoriesPageProviders>
<LogEntryCategoriesPageContent />
</LogEntryCategoriesPageProviders>
<LogMlJobIdFormatsShimProvider>
<LogEntryCategoriesPageProviders>
<LogEntryCategoriesPageContent />
</LogEntryCategoriesPageProviders>
</LogMlJobIdFormatsShimProvider>
</EuiErrorBoundary>
);
};

View file

@ -9,7 +9,7 @@ import { i18n } from '@kbn/i18n';
import React, { useCallback, useEffect } from 'react';
import type { LazyObservabilityPageTemplateProps } from '@kbn/observability-shared-plugin/public';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { isJobStatusWithResults } from '../../../../common/log_analysis';
import { isJobStatusWithResults, logEntryCategoriesJobType } from '../../../../common/log_analysis';
import { LoadingPage } from '../../../components/loading_page';
import {
LogAnalysisSetupStatusUnknownPrompt,
@ -26,6 +26,7 @@ import { useLogEntryCategoriesModuleContext } from '../../../containers/logs/log
import { LogsPageTemplate } from '../shared/page_template';
import { LogEntryCategoriesResultsContent } from './page_results_content';
import { LogEntryCategoriesSetupContent } from './page_setup_content';
import { useLogMlJobIdFormatsShimContext } from '../shared/use_log_ml_job_id_formats_shim';
const logCategoriesTitle = i18n.translate('xpack.infra.logs.logCategoriesTitle', {
defaultMessage: 'Categories',
@ -52,6 +53,8 @@ export const LogEntryCategoriesPageContent = () => {
}
}, [fetchJobStatus, hasLogAnalysisReadCapabilities]);
const { idFormats } = useLogMlJobIdFormatsShimContext();
if (!hasLogAnalysisCapabilites) {
return (
<SubscriptionSplashPage
@ -81,12 +84,13 @@ export const LogEntryCategoriesPageContent = () => {
<LogAnalysisSetupStatusUnknownPrompt retry={fetchJobStatus} />
</CategoriesPageTemplate>
);
} else if (isJobStatusWithResults(jobStatus['log-entry-categories-count'])) {
} else if (isJobStatusWithResults(jobStatus[logEntryCategoriesJobType])) {
return (
<>
<LogEntryCategoriesResultsContent
onOpenSetup={showCategoriesModuleSetup}
pageTitle={logCategoriesTitle}
idFormat={idFormats![logEntryCategoriesJobType]}
/>
<LogAnalysisSetupFlyout allowedModules={allowedSetupModules} />
</>

View file

@ -7,12 +7,14 @@
import React from 'react';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { logEntryCategoriesJobType } from '../../../../common/log_analysis';
import { InlineLogViewSplashPage } from '../../../components/logging/inline_log_view_splash_page';
import { LogAnalysisSetupFlyoutStateProvider } from '../../../components/logging/log_analysis_setup/setup_flyout';
import { SourceLoadingPage } from '../../../components/source_loading_page';
import { LogEntryCategoriesModuleProvider } from '../../../containers/logs/log_analysis/modules/log_entry_categories';
import { useActiveKibanaSpace } from '../../../hooks/use_kibana_space';
import { ConnectedLogViewErrorPage } from '../shared/page_log_view_error';
import { useLogMlJobIdFormatsShimContext } from '../shared/use_log_ml_job_id_formats_shim';
export const LogEntryCategoriesPageProviders: React.FunctionComponent = ({ children }) => {
const {
@ -25,6 +27,8 @@ export const LogEntryCategoriesPageProviders: React.FunctionComponent = ({ child
revertToDefaultLogView,
} = useLogViewContext();
const { space } = useActiveKibanaSpace();
const { idFormats, isLoadingLogAnalysisIdFormats, hasFailedLoadingLogAnalysisIdFormats } =
useLogMlJobIdFormatsShimContext();
// This is a rather crude way of guarding the dependent providers against
// arguments that are only made available asynchronously. Ideally, we'd use
@ -33,9 +37,9 @@ export const LogEntryCategoriesPageProviders: React.FunctionComponent = ({ child
return null;
} else if (!isPersistedLogView) {
return <InlineLogViewSplashPage revertToDefaultLogView={revertToDefaultLogView} />;
} else if (hasFailedLoading) {
} else if (hasFailedLoading || hasFailedLoadingLogAnalysisIdFormats) {
return <ConnectedLogViewErrorPage />;
} else if (isLoading || isUninitialized) {
} else if (isLoading || isUninitialized || isLoadingLogAnalysisIdFormats || !idFormats) {
return <SourceLoadingPage />;
} else if (resolvedLogView != null) {
if (logViewReference.type === 'log-view-inline') {
@ -46,6 +50,7 @@ export const LogEntryCategoriesPageProviders: React.FunctionComponent = ({ child
indexPattern={resolvedLogView.indices}
logViewId={logViewReference.logViewId}
spaceId={space.id}
idFormat={idFormats[logEntryCategoriesJobType]}
timestampField={resolvedLogView.timestampField}
runtimeMappings={resolvedLogView.runtimeMappings}
>

View file

@ -16,6 +16,7 @@ import { useKibana } from '@kbn/kibana-react-plugin/public';
import { MLJobsAwaitingNodeWarning, ML_PAGES, useMlHref } from '@kbn/ml-plugin/public';
import { useTrackPageview } from '@kbn/observability-shared-plugin/public';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { logEntryCategoriesJobType } from '../../../../common/log_analysis';
import { TimeRange } from '../../../../common/time/time_range';
import { CategoryJobNoticesSection } from '../../../components/logging/log_analysis_job_status';
import { AnalyzeInMlButton } from '../../../components/logging/log_analysis_results';
@ -33,17 +34,19 @@ import {
StringTimeRange,
useLogEntryCategoriesResultsUrlState,
} from './use_log_entry_categories_results_url_state';
import { IdFormat } from '../../../../common/http_api/latest';
const JOB_STATUS_POLLING_INTERVAL = 30000;
interface LogEntryCategoriesResultsContentProps {
onOpenSetup: () => void;
pageTitle: string;
idFormat: IdFormat;
}
export const LogEntryCategoriesResultsContent: React.FunctionComponent<
LogEntryCategoriesResultsContentProps
> = ({ onOpenSetup, pageTitle }) => {
> = ({ onOpenSetup, pageTitle, idFormat }) => {
useTrackPageview({ app: 'infra_logs', path: 'log_entry_categories_results' });
useTrackPageview({ app: 'infra_logs', path: 'log_entry_categories_results', delay: 15000 });
@ -110,6 +113,7 @@ export const LogEntryCategoriesResultsContent: React.FunctionComponent<
filteredDatasets: categoryQueryDatasets,
onGetTopLogEntryCategoriesError: showLoadDataErrorNotification,
logViewReference: { type: 'log-view-reference', logViewId },
idFormat,
startTime: categoryQueryTimeRange.timeRange.startTime,
});
@ -195,7 +199,7 @@ export const LogEntryCategoriesResultsContent: React.FunctionComponent<
const analyzeInMlLink = useMlHref(ml, http.basePath.get(), {
page: ML_PAGES.ANOMALY_EXPLORER,
pageState: {
jobIds: [jobIds['log-entry-categories-count']],
jobIds: [jobIds[logEntryCategoriesJobType]],
timeRange: {
from: moment(categoryQueryTimeRange.timeRange.startTime).format('YYYY-MM-DDTHH:mm:ss.SSSZ'),
to: moment(categoryQueryTimeRange.timeRange.endTime).format('YYYY-MM-DDTHH:mm:ss.SSSZ'),
@ -264,7 +268,7 @@ export const LogEntryCategoriesResultsContent: React.FunctionComponent<
<EuiFlexItem grow={false}>
<TopCategoriesSection
isLoadingTopCategories={isLoadingTopLogEntryCategories}
jobId={jobIds['log-entry-categories-count']}
jobId={jobIds[logEntryCategoriesJobType]}
logViewReference={{ type: 'log-view-reference', logViewId }}
timeRange={categoryQueryTimeRange.timeRange}
topCategories={topLogEntryCategories}

View file

@ -7,10 +7,12 @@
import React, { useEffect } from 'react';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { logEntryCategoriesJobType } from '../../../../../../common/log_analysis';
import { useLogEntryCategoryExamples } from '../../use_log_entry_category_examples';
import { LogEntryExampleMessages } from '../../../../../components/logging/log_entry_examples/log_entry_examples';
import { TimeRange } from '../../../../../../common/time/time_range';
import { CategoryExampleMessage } from './category_example_message';
import { useLogMlJobIdFormatsShimContext } from '../../../shared/use_log_ml_job_id_formats_shim';
const exampleCount = 5;
@ -19,6 +21,8 @@ export const CategoryDetailsRow: React.FunctionComponent<{
timeRange: TimeRange;
logViewReference: PersistedLogViewReference;
}> = ({ categoryId, timeRange, logViewReference }) => {
const { idFormats } = useLogMlJobIdFormatsShimContext();
const {
getLogEntryCategoryExamples,
hasFailedLoadingLogEntryCategoryExamples,
@ -29,6 +33,7 @@ export const CategoryDetailsRow: React.FunctionComponent<{
endTime: timeRange.endTime,
exampleCount,
logViewReference,
idFormat: idFormats?.[logEntryCategoriesJobType],
startTime: timeRange.startTime,
});

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../../common/http_api/latest';
import {
getLogEntryCategoryDatasetsRequestPayloadRT,
@ -17,6 +18,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
logViewReference: PersistedLogViewReference;
idFormat: IdFormat;
startTime: number;
endTime: number;
}
@ -25,7 +27,7 @@ export const callGetLogEntryCategoryDatasetsAPI = async (
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const { logViewReference, startTime, endTime } = requestArgs;
const { logViewReference, idFormat, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, {
method: 'POST',
@ -33,6 +35,7 @@ export const callGetLogEntryCategoryDatasetsAPI = async (
getLogEntryCategoryDatasetsRequestPayloadRT.encode({
data: {
logView: logViewReference,
idFormat,
timeRange: {
startTime,
endTime,

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../../common/http_api/latest';
import {
getLogEntryCategoryExamplesRequestPayloadRT,
@ -17,6 +18,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
logViewReference: PersistedLogViewReference;
idFormat: IdFormat;
startTime: number;
endTime: number;
categoryId: number;
@ -27,7 +29,7 @@ export const callGetLogEntryCategoryExamplesAPI = async (
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const { logViewReference, startTime, endTime, categoryId, exampleCount } = requestArgs;
const { logViewReference, idFormat, startTime, endTime, categoryId, exampleCount } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, {
method: 'POST',
@ -37,6 +39,7 @@ export const callGetLogEntryCategoryExamplesAPI = async (
categoryId,
exampleCount,
logView: logViewReference,
idFormat,
timeRange: {
startTime,
endTime,

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../../common/http_api/latest';
import {
getLogEntryCategoriesRequestPayloadRT,
@ -18,6 +19,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
logViewReference: PersistedLogViewReference;
idFormat: IdFormat;
startTime: number;
endTime: number;
categoryCount: number;
@ -29,7 +31,8 @@ export const callGetTopLogEntryCategoriesAPI = async (
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const { logViewReference, startTime, endTime, categoryCount, datasets, sort } = requestArgs;
const { logViewReference, idFormat, startTime, endTime, categoryCount, datasets, sort } =
requestArgs;
const intervalDuration = endTime - startTime;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, {
@ -38,6 +41,7 @@ export const callGetTopLogEntryCategoriesAPI = async (
getLogEntryCategoriesRequestPayloadRT.encode({
data: {
logView: logViewReference,
idFormat,
timeRange: {
startTime,
endTime,

View file

@ -8,6 +8,7 @@
import { useMemo, useState } from 'react';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../common/http_api/latest';
import {
GetLogEntryCategoriesSuccessResponsePayload,
GetLogEntryCategoryDatasetsSuccessResponsePayload,
@ -32,6 +33,7 @@ export const useLogEntryCategoriesResults = ({
onGetLogEntryCategoryDatasetsError,
onGetTopLogEntryCategoriesError,
logViewReference,
idFormat,
startTime,
}: {
categoriesCount: number;
@ -40,6 +42,7 @@ export const useLogEntryCategoriesResults = ({
onGetLogEntryCategoryDatasetsError?: (error: Error) => void;
onGetTopLogEntryCategoriesError?: (error: Error) => void;
logViewReference: PersistedLogViewReference;
idFormat: IdFormat;
startTime: number;
}) => {
const [sortOptions, setSortOptions] = useState<SortOptions>({
@ -58,6 +61,7 @@ export const useLogEntryCategoriesResults = ({
return await callGetTopLogEntryCategoriesAPI(
{
logViewReference,
idFormat,
startTime,
endTime,
categoryCount: categoriesCount,
@ -80,7 +84,15 @@ export const useLogEntryCategoriesResults = ({
}
},
},
[categoriesCount, endTime, filteredDatasets, logViewReference.logViewId, startTime, sortOptions]
[
categoriesCount,
endTime,
filteredDatasets,
logViewReference.logViewId,
startTime,
sortOptions,
idFormat,
]
);
const [getLogEntryCategoryDatasetsRequest, getLogEntryCategoryDatasets] = useTrackedPromise(
@ -88,7 +100,7 @@ export const useLogEntryCategoriesResults = ({
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetLogEntryCategoryDatasetsAPI(
{ logViewReference, startTime, endTime },
{ logViewReference, idFormat, startTime, endTime },
services.http.fetch
);
},
@ -105,7 +117,7 @@ export const useLogEntryCategoriesResults = ({
}
},
},
[categoriesCount, endTime, logViewReference.logViewId, startTime]
[categoriesCount, endTime, logViewReference.logViewId, idFormat, startTime]
);
const isLoadingTopLogEntryCategories = useMemo(

View file

@ -8,6 +8,7 @@
import { useMemo, useState } from 'react';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../common/http_api/latest';
import { LogEntryCategoryExample } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
@ -18,12 +19,14 @@ export const useLogEntryCategoryExamples = ({
endTime,
exampleCount,
logViewReference,
idFormat,
startTime,
}: {
categoryId: number;
endTime: number;
exampleCount: number;
logViewReference: PersistedLogViewReference;
idFormat?: IdFormat;
startTime: number;
}) => {
const { services } = useKibanaContextForPlugin();
@ -36,9 +39,14 @@ export const useLogEntryCategoryExamples = ({
{
cancelPreviousOn: 'creation',
createPromise: async () => {
if (!idFormat) {
throw new Error('idFormat is undefined');
}
return await callGetLogEntryCategoryExamplesAPI(
{
logViewReference,
idFormat,
startTime,
endTime,
categoryId,
@ -51,7 +59,7 @@ export const useLogEntryCategoryExamples = ({
setLogEntryCategoryExamples(examples);
},
},
[categoryId, endTime, exampleCount, logViewReference, startTime]
[categoryId, endTime, exampleCount, logViewReference, startTime, idFormat]
);
const isLoadingLogEntryCategoryExamples = useMemo(

View file

@ -11,6 +11,7 @@ import { LogEntryRatePageContent } from './page_content';
import { LogEntryRatePageProviders } from './page_providers';
import { useLogsBreadcrumbs } from '../../../hooks/use_logs_breadcrumbs';
import { anomaliesTitle } from '../../../translations';
import { LogMlJobIdFormatsShimProvider } from '../shared/use_log_ml_job_id_formats_shim';
export const LogEntryRatePage = () => {
useLogsBreadcrumbs([
@ -20,9 +21,11 @@ export const LogEntryRatePage = () => {
]);
return (
<EuiErrorBoundary>
<LogEntryRatePageProviders>
<LogEntryRatePageContent />
</LogEntryRatePageProviders>
<LogMlJobIdFormatsShimProvider>
<LogEntryRatePageProviders>
<LogEntryRatePageContent />
</LogEntryRatePageProviders>
</LogMlJobIdFormatsShimProvider>
</EuiErrorBoundary>
);
};

View file

@ -10,7 +10,11 @@ import React, { memo, useCallback, useEffect } from 'react';
import useInterval from 'react-use/lib/useInterval';
import type { LazyObservabilityPageTemplateProps } from '@kbn/observability-shared-plugin/public';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { isJobStatusWithResults } from '../../../../common/log_analysis';
import {
isJobStatusWithResults,
logEntryCategoriesJobType,
logEntryRateJobType,
} from '../../../../common/log_analysis';
import { LoadingPage } from '../../../components/loading_page';
import {
LogAnalysisSetupStatusUnknownPrompt,
@ -28,6 +32,7 @@ import { useLogEntryRateModuleContext } from '../../../containers/logs/log_analy
import { LogsPageTemplate } from '../shared/page_template';
import { LogEntryRateResultsContent } from './page_results_content';
import { LogEntryRateSetupContent } from './page_setup_content';
import { useLogMlJobIdFormatsShimContext } from '../shared/use_log_ml_job_id_formats_shim';
const JOB_STATUS_POLLING_INTERVAL = 30000;
@ -89,6 +94,8 @@ export const LogEntryRatePageContent = memo(() => {
}
}, JOB_STATUS_POLLING_INTERVAL);
const { idFormats } = useLogMlJobIdFormatsShimContext();
if (!hasLogAnalysisCapabilites) {
return (
<SubscriptionSplashPage
@ -125,12 +132,12 @@ export const LogEntryRatePageContent = memo(() => {
</AnomaliesPageTemplate>
);
} else if (
isJobStatusWithResults(logEntryCategoriesJobStatus['log-entry-categories-count']) ||
isJobStatusWithResults(logEntryRateJobStatus['log-entry-rate'])
isJobStatusWithResults(logEntryCategoriesJobStatus[logEntryCategoriesJobType]) ||
isJobStatusWithResults(logEntryRateJobStatus[logEntryRateJobType])
) {
return (
<>
<LogEntryRateResultsContent pageTitle={anomaliesTitle} />
<LogEntryRateResultsContent idFormats={idFormats} pageTitle={anomaliesTitle} />
<LogAnalysisSetupFlyout />
</>
);

View file

@ -7,6 +7,7 @@
import React from 'react';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { logEntryCategoriesJobType, logEntryRateJobType } from '../../../../common/log_analysis';
import { InlineLogViewSplashPage } from '../../../components/logging/inline_log_view_splash_page';
import { LogAnalysisSetupFlyoutStateProvider } from '../../../components/logging/log_analysis_setup/setup_flyout';
import { SourceLoadingPage } from '../../../components/source_loading_page';
@ -15,6 +16,7 @@ import { LogEntryRateModuleProvider } from '../../../containers/logs/log_analysi
import { LogEntryFlyoutProvider } from '../../../containers/logs/log_flyout';
import { useActiveKibanaSpace } from '../../../hooks/use_kibana_space';
import { ConnectedLogViewErrorPage } from '../shared/page_log_view_error';
import { useLogMlJobIdFormatsShimContext } from '../shared/use_log_ml_job_id_formats_shim';
export const LogEntryRatePageProviders: React.FunctionComponent = ({ children }) => {
const {
@ -29,6 +31,9 @@ export const LogEntryRatePageProviders: React.FunctionComponent = ({ children })
const { space } = useActiveKibanaSpace();
const { idFormats, isLoadingLogAnalysisIdFormats, hasFailedLoadingLogAnalysisIdFormats } =
useLogMlJobIdFormatsShimContext();
// This is a rather crude way of guarding the dependent providers against
// arguments that are only made available asynchronously. Ideally, we'd use
// React concurrent mode and Suspense in order to handle that more gracefully.
@ -36,9 +41,9 @@ export const LogEntryRatePageProviders: React.FunctionComponent = ({ children })
return null;
} else if (!isPersistedLogView) {
return <InlineLogViewSplashPage revertToDefaultLogView={revertToDefaultLogView} />;
} else if (isLoading || isUninitialized) {
} else if (isLoading || isUninitialized || isLoadingLogAnalysisIdFormats || !idFormats) {
return <SourceLoadingPage />;
} else if (hasFailedLoading) {
} else if (hasFailedLoading || hasFailedLoadingLogAnalysisIdFormats) {
return <ConnectedLogViewErrorPage />;
} else if (resolvedLogView != null) {
if (logViewReference.type === 'log-view-inline') {
@ -50,6 +55,7 @@ export const LogEntryRatePageProviders: React.FunctionComponent = ({ children })
indexPattern={resolvedLogView.indices}
logViewId={logViewReference.logViewId}
spaceId={space.id}
idFormat={idFormats[logEntryRateJobType]}
timestampField={resolvedLogView.timestampField}
runtimeMappings={resolvedLogView.runtimeMappings}
>
@ -57,6 +63,7 @@ export const LogEntryRatePageProviders: React.FunctionComponent = ({ children })
indexPattern={resolvedLogView.indices}
logViewId={logViewReference.logViewId}
spaceId={space.id}
idFormat={idFormats[logEntryCategoriesJobType]}
timestampField={resolvedLogView.timestampField}
runtimeMappings={resolvedLogView.runtimeMappings}
>

View file

@ -15,7 +15,12 @@ import { useKibana } from '@kbn/kibana-react-plugin/public';
import { MLJobsAwaitingNodeWarning } from '@kbn/ml-plugin/public';
import { useTrackPageview } from '@kbn/observability-shared-plugin/public';
import { useLogViewContext, LogEntryFlyout } from '@kbn/logs-shared-plugin/public';
import { isJobStatusWithResults } from '../../../../common/log_analysis';
import { IdFormatByJobType } from '../../../../common/http_api/latest';
import {
isJobStatusWithResults,
logEntryCategoriesJobType,
logEntryRateJobType,
} from '../../../../common/log_analysis';
import { TimeKey } from '../../../../common/time';
import {
CategoryJobNoticesSection,
@ -45,7 +50,8 @@ export const PAGINATION_DEFAULTS = {
export const LogEntryRateResultsContent: React.FunctionComponent<{
pageTitle: string;
}> = ({ pageTitle }) => {
idFormats: IdFormatByJobType | null;
}> = ({ pageTitle, idFormats }) => {
useTrackPageview({ app: 'infra_logs', path: 'log_entry_rate_results' });
useTrackPageview({ app: 'infra_logs', path: 'log_entry_rate_results', delay: 15000 });
@ -82,11 +88,11 @@ export const LogEntryRateResultsContent: React.FunctionComponent<{
const jobIds = useMemo(() => {
return [
...(isJobStatusWithResults(logEntryRateJobStatus['log-entry-rate'])
? [logEntryRateJobIds['log-entry-rate']]
...(isJobStatusWithResults(logEntryRateJobStatus[logEntryRateJobType])
? [logEntryRateJobIds[logEntryRateJobType]]
: []),
...(isJobStatusWithResults(logEntryCategoriesJobStatus['log-entry-categories-count'])
? [logEntryCategoriesJobIds['log-entry-categories-count']]
...(isJobStatusWithResults(logEntryCategoriesJobStatus[logEntryCategoriesJobType])
? [logEntryCategoriesJobIds[logEntryCategoriesJobType]]
: []),
];
}, [
@ -146,6 +152,7 @@ export const LogEntryRateResultsContent: React.FunctionComponent<{
isLoadingDatasets,
} = useLogEntryAnomaliesResults({
logViewReference,
idFormats,
startTime: timeRange.value.startTime,
endTime: timeRange.value.endTime,
defaultSortOptions: SORT_DEFAULTS,
@ -199,6 +206,7 @@ export const LogEntryRateResultsContent: React.FunctionComponent<{
return (
<LogsPageTemplate
data-test-subj="logEntryRateResultsPage"
hasData={logViewStatus?.index !== 'missing'}
pageHeader={{
pageTitle,

View file

@ -12,11 +12,16 @@ import React from 'react';
import useMount from 'react-use/lib/useMount';
import { euiStyled } from '@kbn/kibana-react-plugin/common';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { isCategoryAnomaly, LogEntryAnomaly } from '../../../../../../common/log_analysis';
import {
isCategoryAnomaly,
LogEntryAnomaly,
logEntryRateJobType,
} from '../../../../../../common/log_analysis';
import { TimeRange } from '../../../../../../common/time/time_range';
import { LogEntryExampleMessages } from '../../../../../components/logging/log_entry_examples/log_entry_examples';
import { useLogEntryExamples } from '../../use_log_entry_examples';
import { LogEntryExampleMessage, LogEntryExampleMessageHeaders } from './log_entry_example';
import { useLogMlJobIdFormatsShimContext } from '../../../shared/use_log_ml_job_id_formats_shim';
const EXAMPLE_COUNT = 5;
@ -29,6 +34,7 @@ export const AnomaliesTableExpandedRow: React.FunctionComponent<{
timeRange: TimeRange;
}> = ({ anomaly, timeRange }) => {
const { logViewReference } = useLogViewContext();
const { idFormats } = useLogMlJobIdFormatsShimContext();
if (logViewReference.type === 'log-view-inline') {
throw new Error('Logs ML features only support persisted Log Views');
@ -44,6 +50,7 @@ export const AnomaliesTableExpandedRow: React.FunctionComponent<{
endTime: anomaly.startTime + anomaly.duration,
exampleCount: EXAMPLE_COUNT,
logViewReference,
idFormat: idFormats?.[logEntryRateJobType],
startTime: anomaly.startTime,
categoryId: isCategoryAnomaly(anomaly) ? anomaly.categoryId : undefined,
});

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormatByJobType } from '../../../../../common/http_api/latest';
import {
getLogEntryAnomaliesRequestPayloadRT,
getLogEntryAnomaliesSuccessReponsePayloadRT,
@ -17,6 +18,7 @@ import { AnomaliesSort, Pagination } from '../../../../../common/log_analysis';
interface RequestArgs {
logViewReference: PersistedLogViewReference;
idFormats: IdFormatByJobType;
startTime: number;
endTime: number;
sort: AnomaliesSort;
@ -25,13 +27,15 @@ interface RequestArgs {
}
export const callGetLogEntryAnomaliesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { logViewReference, startTime, endTime, sort, pagination, datasets } = requestArgs;
const { logViewReference, idFormats, startTime, endTime, sort, pagination, datasets } =
requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryAnomaliesRequestPayloadRT.encode({
data: {
logView: logViewReference,
idFormats,
timeRange: {
startTime,
endTime,

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormatByJobType } from '../../../../../common/http_api/latest';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
getLogEntryAnomaliesDatasetsRequestPayloadRT,
@ -16,6 +17,7 @@ import {
interface RequestArgs {
logViewReference: PersistedLogViewReference;
idFormats: IdFormatByJobType;
startTime: number;
endTime: number;
}
@ -24,13 +26,14 @@ export const callGetLogEntryAnomaliesDatasetsAPI = async (
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const { logViewReference, startTime, endTime } = requestArgs;
const { logViewReference, idFormats, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({
data: {
logView: logViewReference,
idFormats,
timeRange: {
startTime,
endTime,

View file

@ -7,6 +7,7 @@
import type { HttpHandler } from '@kbn/core/public';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../../common/http_api/latest';
import {
getLogEntryExamplesRequestPayloadRT,
@ -17,6 +18,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
logViewReference: PersistedLogViewReference;
idFormat: IdFormat;
startTime: number;
endTime: number;
dataset: string;
@ -25,7 +27,8 @@ interface RequestArgs {
}
export const callGetLogEntryExamplesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { logViewReference, startTime, endTime, dataset, exampleCount, categoryId } = requestArgs;
const { logViewReference, idFormat, startTime, endTime, dataset, exampleCount, categoryId } =
requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
method: 'POST',
body: JSON.stringify(
@ -34,6 +37,7 @@ export const callGetLogEntryExamplesAPI = async (requestArgs: RequestArgs, fetch
dataset,
exampleCount,
logView: logViewReference,
idFormat,
timeRange: {
startTime,
endTime,

View file

@ -8,6 +8,7 @@
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import useMount from 'react-use/lib/useMount';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormatByJobType } from '../../../../common/http_api/latest';
import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise';
import { callGetLogEntryAnomaliesAPI } from './service_calls/get_log_entry_anomalies';
import { callGetLogEntryAnomaliesDatasetsAPI } from './service_calls/get_log_entry_anomalies_datasets';
@ -139,6 +140,7 @@ export const useLogEntryAnomaliesResults = ({
endTime,
startTime,
logViewReference,
idFormats,
defaultSortOptions,
defaultPaginationOptions,
onGetLogEntryAnomaliesDatasetsError,
@ -147,6 +149,7 @@ export const useLogEntryAnomaliesResults = ({
endTime: number;
startTime: number;
logViewReference: PersistedLogViewReference;
idFormats: IdFormatByJobType | null;
defaultSortOptions: AnomaliesSort;
defaultPaginationOptions: Pick<Pagination, 'pageSize'>;
onGetLogEntryAnomaliesDatasetsError?: (error: Error) => void;
@ -175,6 +178,10 @@ export const useLogEntryAnomaliesResults = ({
{
cancelPreviousOn: 'creation',
createPromise: async () => {
if (!idFormats) {
throw new Error('idFormats is undefined');
}
const {
timeRange: { start: queryStartTime, end: queryEndTime },
sortOptions,
@ -185,6 +192,7 @@ export const useLogEntryAnomaliesResults = ({
return await callGetLogEntryAnomaliesAPI(
{
logViewReference,
idFormats,
startTime: queryStartTime,
endTime: queryEndTime,
sort: sortOptions,
@ -218,6 +226,7 @@ export const useLogEntryAnomaliesResults = ({
},
[
logViewReference,
idFormats,
dispatch,
reducerState.timeRange,
reducerState.sortOptions,
@ -294,8 +303,12 @@ export const useLogEntryAnomaliesResults = ({
{
cancelPreviousOn: 'creation',
createPromise: async () => {
if (!idFormats) {
throw new Error('idFormats is undefined');
}
return await callGetLogEntryAnomaliesDatasetsAPI(
{ logViewReference, startTime, endTime },
{ logViewReference, idFormats, startTime, endTime },
services.http.fetch
);
},
@ -312,7 +325,7 @@ export const useLogEntryAnomaliesResults = ({
}
},
},
[endTime, logViewReference, startTime]
[endTime, logViewReference, idFormats, startTime]
);
const isLoadingDatasets = useMemo(

View file

@ -8,6 +8,7 @@
import { useMemo, useState } from 'react';
import { PersistedLogViewReference } from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../../common/http_api/latest';
import { LogEntryExample } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
@ -18,6 +19,7 @@ export const useLogEntryExamples = ({
endTime,
exampleCount,
logViewReference,
idFormat,
startTime,
categoryId,
}: {
@ -25,6 +27,7 @@ export const useLogEntryExamples = ({
endTime: number;
exampleCount: number;
logViewReference: PersistedLogViewReference;
idFormat?: IdFormat;
startTime: number;
categoryId?: string;
}) => {
@ -35,9 +38,14 @@ export const useLogEntryExamples = ({
{
cancelPreviousOn: 'creation',
createPromise: async () => {
if (!idFormat) {
throw new Error('idFormat is undefined');
}
return await callGetLogEntryExamplesAPI(
{
logViewReference,
idFormat,
startTime,
endTime,
dataset,
@ -51,7 +59,7 @@ export const useLogEntryExamples = ({
setLogEntryExamples(examples);
},
},
[dataset, endTime, exampleCount, logViewReference, startTime]
[dataset, endTime, exampleCount, logViewReference, startTime, idFormat]
);
const isLoadingLogEntryExamples = useMemo(

View file

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { HttpHandler } from '@kbn/core/public';
import { decodeOrThrow } from '../../../../common/runtime_types';
import {
getLogAnalysisIdFormatsRequestPayloadRT,
getLogAnalysisIdFormatsSuccessResponsePayloadRT,
LOG_ANALYSIS_GET_ID_FORMATS,
} from '../../../../common/http_api/latest';
interface RequestArgs {
spaceId: string;
logViewId: string;
}
/**
 * Fetches which ML job ID format ('legacy' or 'hashed') is in use for each
 * log analysis job type of the given space / log view combination.
 *
 * Encodes the request payload with the io-ts codec, POSTs it to the internal
 * id-formats endpoint, and decodes (or throws on) the response.
 */
export const callGetLogAnalysisIdFormats = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
  const { logViewId, spaceId } = requestArgs;

  const requestPayload = getLogAnalysisIdFormatsRequestPayloadRT.encode({
    data: {
      logViewId,
      spaceId,
    },
  });

  const response = await fetch(LOG_ANALYSIS_GET_ID_FORMATS, {
    method: 'POST',
    version: '1',
    body: JSON.stringify(requestPayload),
  });

  return decodeOrThrow(getLogAnalysisIdFormatsSuccessResponsePayloadRT)(response);
};

View file

@ -0,0 +1,81 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import createContainer from 'constate';
import { useState, useEffect, useCallback } from 'react';
import { useLogViewContext } from '@kbn/logs-shared-plugin/public';
import { IdFormatByJobType, JobType } from '../../../../common/http_api/latest';
import { useActiveKibanaSpace } from '../../../hooks/use_kibana_space';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { callGetLogAnalysisIdFormats } from './call_get_log_analysis_id_formats';
/**
 * Shim hook that resolves and caches which ML job ID format ('legacy' or
 * 'hashed') is in use for each log analysis job type in the current space and
 * log view, and lets consumers mark a job type as migrated to the new format.
 */
const useLogMlJobIdFormatsShim = () => {
  const [idFormats, setIdFormats] = useState<IdFormatByJobType | null>(null);

  const { logViewReference } = useLogViewContext();
  const { space } = useActiveKibanaSpace();
  const { services } = useKibanaContextForPlugin();

  const [getLogAnalysisIdFormatsRequest, getLogAnalysisIdFormats] = useTrackedPromise(
    {
      cancelPreviousOn: 'creation',
      createPromise: async () => {
        // The space may not have resolved yet; report "no data" until it has.
        if (!space) {
          return { data: null };
        }

        if (logViewReference.type === 'log-view-inline') {
          throw new Error('Logs ML features only support persisted Log Views');
        }

        return await callGetLogAnalysisIdFormats(
          {
            logViewId: logViewReference.logViewId,
            spaceId: space.id,
          },
          services.http.fetch
        );
      },
      onResolve: ({ data }) => {
        setIdFormats(data);
      },
    },
    [logViewReference, space]
  );

  // Kick off the lookup on mount and whenever the log view or space changes.
  useEffect(() => {
    getLogAnalysisIdFormats();
  }, [getLogAnalysisIdFormats]);

  // Once a job has been recreated with the new ID format, flip its entry to
  // 'hashed' so the UI immediately addresses the new job.
  const migrateIdFormat = useCallback((jobType: JobType) => {
    setIdFormats((previousValue) =>
      previousValue ? { ...previousValue, [jobType]: 'hashed' } : null
    );
  }, []);

  return {
    idFormats,
    migrateIdFormat,
    isLoadingLogAnalysisIdFormats: getLogAnalysisIdFormatsRequest.state === 'pending',
    hasFailedLoadingLogAnalysisIdFormats: getLogAnalysisIdFormatsRequest.state === 'rejected',
  };
};

export const [LogMlJobIdFormatsShimProvider, useLogMlJobIdFormatsShimContext] =
  createContainer(useLogMlJobIdFormatsShim);

View file

@ -21,6 +21,7 @@ import {
initGetLogEntryExamplesRoute,
initValidateLogAnalysisDatasetsRoute,
initValidateLogAnalysisIndicesRoute,
initGetLogAnalysisIdFormatsRoute,
} from './routes/log_analysis';
import { initMetadataRoute } from './routes/metadata';
import { initMetricsAPIRoute } from './routes/metrics_api';
@ -45,6 +46,7 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
initSnapshotRoute(libs);
initNodeDetailsRoute(libs);
initMetricsSourceConfigurationRoutes(libs);
initGetLogAnalysisIdFormatsRoute(libs);
initValidateLogAnalysisDatasetsRoute(libs);
initValidateLogAnalysisIndicesRoute(libs);
initGetLogEntryExamplesRoute(libs);

View file

@ -7,13 +7,16 @@
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { PersistedLogViewReference, ResolvedLogView } from '@kbn/logs-shared-plugin/common';
import { IdFormat, IdFormatByJobType } from '../../../common/http_api/latest';
import {
AnomaliesSort,
getJobId,
isCategoryAnomaly,
jobCustomSettingsRT,
LogEntryAnomalyDatasets,
logEntryCategoriesJobType,
logEntryCategoriesJobTypes,
logEntryRateJobType,
logEntryRateJobTypes,
Pagination,
} from '../../../common/log_analysis';
@ -55,10 +58,21 @@ interface MappedAnomalyHit {
async function getCompatibleAnomaliesJobIds(
spaceId: string,
logViewId: string,
idFormats: IdFormatByJobType,
mlAnomalyDetectors: MlAnomalyDetectors
) {
const logRateJobId = getJobId(spaceId, logViewId, logEntryRateJobTypes[0]);
const logCategoriesJobId = getJobId(spaceId, logViewId, logEntryCategoriesJobTypes[0]);
const logRateJobId = getJobId(
spaceId,
logViewId,
idFormats[logEntryRateJobType],
logEntryRateJobType
);
const logCategoriesJobId = getJobId(
spaceId,
logViewId,
idFormats[logEntryCategoriesJobType],
logEntryCategoriesJobType
);
const jobIds: string[] = [];
let jobSpans: TracingSpan[] = [];
@ -100,6 +114,7 @@ export async function getLogEntryAnomalies(
infra: Promise<Required<InfraRequestHandlerContext>>;
},
logView: PersistedLogViewReference,
idFormats: IdFormatByJobType,
startTime: number,
endTime: number,
sort: AnomaliesSort,
@ -115,6 +130,7 @@ export async function getLogEntryAnomalies(
} = await getCompatibleAnomaliesJobIds(
infraContext.spaceId,
logView.logViewId,
idFormats,
infraContext.mlAnomalyDetectors
);
@ -156,7 +172,8 @@ export async function getLogEntryAnomalies(
const logEntryCategoriesCountJobId = getJobId(
infraContext.spaceId,
logView.logViewId,
logEntryCategoriesJobTypes[0]
idFormats[logEntryCategoriesJobType],
logEntryCategoriesJobType
);
const { logEntryCategoriesById } = await fetchLogEntryCategories(
@ -332,6 +349,7 @@ export async function getLogEntryExamples(
infra: Promise<Required<InfraRequestHandlerContext>>;
},
logView: PersistedLogViewReference,
idFormat: IdFormat,
startTime: number,
endTime: number,
dataset: string,
@ -346,6 +364,7 @@ export async function getLogEntryExamples(
const jobId = getJobId(
infraContext.spaceId,
logView.logViewId,
idFormat,
categoryId != null ? logEntryCategoriesJobTypes[0] : logEntryRateJobTypes[0]
);
@ -371,6 +390,7 @@ export async function getLogEntryExamples(
} = await fetchLogEntryExamples(
context,
logView,
idFormat,
indices,
runtimeMappings,
timestampField,
@ -398,6 +418,7 @@ export async function fetchLogEntryExamples(
infra: Promise<Required<InfraRequestHandlerContext>>;
},
logView: PersistedLogViewReference,
idFormat: IdFormat,
indices: string,
runtimeMappings: estypes.MappingRuntimeFields,
timestampField: string,
@ -421,6 +442,7 @@ export async function fetchLogEntryExamples(
const logEntryCategoriesCountJobId = getJobId(
infraContext.spaceId,
logView.logViewId,
idFormat,
logEntryCategoriesJobTypes[0]
);
@ -484,6 +506,7 @@ export async function getLogEntryAnomaliesDatasets(
};
},
logView: PersistedLogViewReference,
idFormats: IdFormatByJobType,
startTime: number,
endTime: number
) {
@ -493,6 +516,7 @@ export async function getLogEntryAnomaliesDatasets(
} = await getCompatibleAnomaliesJobIds(
context.infra.spaceId,
logView.logViewId,
idFormats,
context.infra.mlAnomalyDetectors
);

View file

@ -12,6 +12,7 @@ import {
PersistedLogViewReference,
ResolvedLogView,
} from '@kbn/logs-shared-plugin/common';
import { IdFormat } from '../../../common/http_api/latest';
import {
CategoriesSort,
compareDatasetsByMaximumAnomalyScore,
@ -51,6 +52,7 @@ export async function getTopLogEntryCategories(
};
},
logView: PersistedLogViewReference,
idFormat: IdFormat,
startTime: number,
endTime: number,
categoryCount: number,
@ -63,6 +65,7 @@ export async function getTopLogEntryCategories(
const logEntryCategoriesCountJobId = getJobId(
context.infra.spaceId,
logView.logViewId,
idFormat,
logEntryCategoriesJobTypes[0]
);
@ -123,12 +126,14 @@ export async function getLogEntryCategoryDatasets(
};
},
logView: PersistedLogViewReference,
idFormat: IdFormat,
startTime: number,
endTime: number
) {
const logEntryCategoriesCountJobId = getJobId(
context.infra.spaceId,
logView.logViewId,
idFormat,
logEntryCategoriesJobTypes[0]
);
@ -147,6 +152,7 @@ export async function getLogEntryCategoryExamples(
};
},
logView: PersistedLogViewReference,
idFormat: IdFormat,
startTime: number,
endTime: number,
categoryId: number,
@ -158,6 +164,7 @@ export async function getLogEntryCategoryExamples(
const logEntryCategoriesCountJobId = getJobId(
context.infra.spaceId,
logView.logViewId,
idFormat,
logEntryCategoriesJobTypes[0]
);

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { IdFormat } from '../../../common/http_api/latest';
import { decodeOrThrow } from '../../../common/runtime_types';
import {
logRateModelPlotResponseRT,
@ -12,7 +13,7 @@ import {
LogRateModelPlotBucket,
CompositeTimestampPartitionKey,
} from './queries';
import { getJobId } from '../../../common/log_analysis';
import { getJobId, logEntryRateJobType } from '../../../common/log_analysis';
import type { MlSystem } from '../../types';
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
@ -25,12 +26,13 @@ export async function getLogEntryRateBuckets(
};
},
logViewId: string,
idFormat: IdFormat,
startTime: number,
endTime: number,
bucketDuration: number,
datasets?: string[]
) {
const logRateJobId = getJobId(context.infra.spaceId, logViewId, 'log-entry-rate');
const logRateJobId = getJobId(context.infra.spaceId, logViewId, idFormat, logEntryRateJobType);
let mlModelPlotBuckets: LogRateModelPlotBucket[] = [];
let afterLatestBatchKey: CompositeTimestampPartitionKey | undefined;

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MlAnomalyDetectors } from '@kbn/ml-plugin/server';
import { IdFormat, IdFormatByJobType, JobType } from '../../../common/http_api/latest';
import {
getJobId,
logEntryCategoriesJobType,
logEntryRateJobType,
} from '../../../common/log_analysis';
/**
 * Determines, for each log analysis job type, whether a pre-existing ML job
 * uses the legacy or the hashed ID format.
 *
 * @param spaceId - ID of the Kibana space the jobs belong to.
 * @param logViewId - ID of the log view the jobs were created for.
 * @param mlAnomalyDetectors - ML client used to look up existing jobs.
 * @returns a map from job type to the ID format that should be used for it.
 */
export async function resolveIdFormats(
  spaceId: string,
  logViewId: string,
  mlAnomalyDetectors: MlAnomalyDetectors
): Promise<IdFormatByJobType> {
  // The two job types resolve independently, so look them up in parallel
  // instead of serially awaiting each one.
  const [entryRateFormat, entryCategoriesCountFormat] = await Promise.all([
    resolveIdFormat(spaceId, logViewId, logEntryRateJobType, mlAnomalyDetectors),
    resolveIdFormat(spaceId, logViewId, logEntryCategoriesJobType, mlAnomalyDetectors),
  ]);

  return {
    [logEntryRateJobType]: entryRateFormat,
    [logEntryCategoriesJobType]: entryCategoriesCountFormat,
  };
}
/**
 * Checks which ID format an existing ML job for the given job type uses.
 *
 * The hashed format takes priority: if a job with the hashed ID exists it
 * wins; otherwise a legacy-ID job is looked up. When neither exists we default
 * to 'hashed' so newly created jobs always use the new format.
 *
 * @param spaceId - ID of the Kibana space the job belongs to.
 * @param logViewId - ID of the log view the job was created for.
 * @param jobType - which log analysis job to look for.
 * @param mlAnomalyDetectors - ML client used to look up existing jobs.
 */
async function resolveIdFormat(
  spaceId: string,
  logViewId: string,
  jobType: JobType,
  mlAnomalyDetectors: MlAnomalyDetectors
): Promise<IdFormat> {
  // Formats in priority order; the first format with an existing job wins.
  const candidateFormats: IdFormat[] = ['hashed', 'legacy'];

  for (const format of candidateFormats) {
    try {
      const jobId = getJobId(spaceId, logViewId, format, jobType);
      const jobs = await mlAnomalyDetectors.jobs(jobId);
      if (jobs.count > 0) {
        return format;
      }
    } catch (e) {
      // A 404 just means no job exists in this format; anything else is a
      // real failure that must propagate.
      if (e.statusCode !== 404) {
        throw e;
      }
    }
  }

  // No job found in either format: use the new format for future jobs.
  return 'hashed';
}

View file

@ -0,0 +1,75 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import Boom from '@hapi/boom';
import { createValidationFunction } from '@kbn/logs-shared-plugin/common/runtime_types';
import {
LOG_ANALYSIS_GET_ID_FORMATS,
getLogAnalysisIdFormatsRequestPayloadRT,
getLogAnalysisIdFormatsSuccessResponsePayloadRT,
} from '../../../common/http_api/latest';
import { InfraBackendLibs } from '../../lib/infra_types';
import { isMlPrivilegesError } from '../../lib/log_analysis';
import { resolveIdFormats } from '../../lib/log_analysis/resolve_id_formats';
import { assertHasInfraMlPlugins } from '../../utils/request_context';
/**
 * Registers the internal route that reports which ML job ID format ('legacy'
 * or 'hashed') is in use for each log analysis job type of a given space and
 * log view, so the client can address pre-existing jobs correctly.
 */
export const initGetLogAnalysisIdFormatsRoute = ({ framework }: InfraBackendLibs) => {
  framework
    .registerVersionedRoute({
      access: 'internal',
      method: 'post',
      path: LOG_ANALYSIS_GET_ID_FORMATS,
    })
    .addVersion(
      {
        version: '1',
        validate: {
          request: {
            body: createValidationFunction(getLogAnalysisIdFormatsRequestPayloadRT),
          },
        },
      },
      framework.router.handleLegacyErrors(async (requestContext, request, response) => {
        const {
          data: { logViewId, spaceId },
        } = request.body;

        try {
          const infraMlContext = await assertHasInfraMlPlugins(requestContext);
          const mlAnomalyDetectors = (await infraMlContext.infra).mlAnomalyDetectors;

          // BUG FIX: resolveIdFormats expects (spaceId, logViewId, ...);
          // the arguments were previously passed swapped, which made the
          // lookup compute job IDs from the wrong seeds and always fall back
          // to the default 'hashed' format for legacy jobs.
          const idFormatByJobType = await resolveIdFormats(spaceId, logViewId, mlAnomalyDetectors);

          return response.ok({
            body: getLogAnalysisIdFormatsSuccessResponsePayloadRT.encode({
              data: idFormatByJobType,
            }),
          });
        } catch (error) {
          // Boom errors already carry their HTTP semantics; rethrow for the
          // legacy error handler wrapper.
          if (Boom.isBoom(error)) {
            throw error;
          }

          if (isMlPrivilegesError(error)) {
            return response.customError({
              statusCode: 403,
              body: {
                message: error.message,
              },
            });
          }

          return response.customError({
            statusCode: error.statusCode ?? 500,
            body: {
              message: error.message ?? 'An unexpected error occurred',
            },
          });
        }
      })
    );
};

View file

@ -7,3 +7,4 @@
export * from './results';
export * from './validation';
export { initGetLogAnalysisIdFormatsRoute } from './id_formats';

View file

@ -40,6 +40,7 @@ export const initGetLogEntryAnomaliesRoute = ({ framework }: InfraBackendLibs) =
const {
data: {
logView,
idFormats,
timeRange: { startTime, endTime },
sort: sortParam,
pagination: paginationParam,
@ -60,6 +61,7 @@ export const initGetLogEntryAnomaliesRoute = ({ framework }: InfraBackendLibs) =
} = await getLogEntryAnomalies(
infraMlContext,
logView,
idFormats,
startTime,
endTime,
sort,

View file

@ -39,6 +39,7 @@ export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBacken
const {
data: {
logView,
idFormats,
timeRange: { startTime, endTime },
},
} = request.body;
@ -49,6 +50,7 @@ export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBacken
const { datasets, timing } = await getLogEntryAnomaliesDatasets(
{ infra: await infraMlContext.infra },
logView,
idFormats,
startTime,
endTime
);

View file

@ -41,6 +41,7 @@ export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs)
categoryCount,
histograms,
logView,
idFormat,
timeRange: { startTime, endTime },
datasets,
sort,
@ -53,6 +54,7 @@ export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs)
const { data: topLogEntryCategories, timing } = await getTopLogEntryCategories(
{ infra: await infraMlContext.infra },
logView,
idFormat,
startTime,
endTime,
categoryCount,

View file

@ -39,6 +39,7 @@ export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackend
const {
data: {
logView,
idFormat,
timeRange: { startTime, endTime },
},
} = request.body;
@ -49,6 +50,7 @@ export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackend
const { data: logEntryCategoryDatasets, timing } = await getLogEntryCategoryDatasets(
{ infra: await infraMlContext.infra },
logView,
idFormat,
startTime,
endTime
);

View file

@ -44,6 +44,7 @@ export const initGetLogEntryCategoryExamplesRoute = ({
categoryId,
exampleCount,
logView,
idFormat,
timeRange: { startTime, endTime },
},
} = request.body;
@ -59,6 +60,7 @@ export const initGetLogEntryCategoryExamplesRoute = ({
const { data: logEntryCategoryExamples, timing } = await getLogEntryCategoryExamples(
{ infra: await infraMlContext.infra, core: await infraMlContext.core },
logView,
idFormat,
startTime,
endTime,
categoryId,

View file

@ -44,6 +44,7 @@ export const initGetLogEntryExamplesRoute = ({
dataset,
exampleCount,
logView,
idFormat,
timeRange: { startTime, endTime },
categoryId,
},
@ -60,6 +61,7 @@ export const initGetLogEntryExamplesRoute = ({
const { data: logEntryExamples, timing } = await getLogEntryExamples(
infraMlContext,
logView,
idFormat,
startTime,
endTime,
dataset,

View file

@ -29,6 +29,7 @@ export default ({ loadTestFile }: FtrProviderContext) => {
loadTestFile(require.resolve('./logs/log_stream_date_nano'));
loadTestFile(require.resolve('./logs/link_to'));
loadTestFile(require.resolve('./logs/log_stream'));
loadTestFile(require.resolve('./logs/ml_job_id_formats/tests'));
});
});
};

View file

@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { IdFormat } from '@kbn/infra-plugin/common/http_api/latest';
const rateHashedPattern = /logs-[0-9a-fA-F]{32,}-log-entry-rate/;
const rateLegacyPattern = /kibana-logs-ui-.*-.*-log-entry-rate/;
const categoriesCountHashedPattern = /logs-[0-9a-fA-F]{32,}-log-entry-categories-count/;
const categoriesCountLegacyPattern = /kibana-logs-ui-.*-.*-log-entry-categories-count/;

/**
 * Asserts that the given request URL addresses the ML jobs using the expected
 * ID format per job type ('hashed', 'legacy', or undefined when absent).
 */
export function assertIdFormats(
  url: string,
  expected: {
    'log-entry-rate': IdFormat | undefined;
    'log-entry-categories-count': IdFormat | undefined;
  }
) {
  expect(extractIdFormats(url)).to.eql(expected);
}

// Classifies the job IDs embedded in a URL by matching them against the known
// hashed and legacy ID patterns for each job type. Hashed wins when both match.
function extractIdFormats(url: string) {
  const classify = (hashedPattern: RegExp, legacyPattern: RegExp): IdFormat | undefined => {
    if (hashedPattern.test(url)) {
      return 'hashed';
    }
    if (legacyPattern.test(url)) {
      return 'legacy';
    }
    return undefined;
  };

  return {
    'log-entry-rate': classify(rateHashedPattern, rateLegacyPattern),
    'log-entry-categories-count': classify(
      categoriesCountHashedPattern,
      categoriesCountLegacyPattern
    ),
  };
}

View file

@ -0,0 +1,129 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { TypeOf } from '@kbn/config-schema';
import { setupModuleBodySchema } from '@kbn/ml-plugin/server/routes/schemas/modules';
/** Description of an ML job fixture used by the ID-format functional tests. */
export interface MlJob {
  // Full job ID as it will exist in ML (ID prefix + job name).
  jobId: string;
  // ML module the job is created from.
  module: 'logs_ui_analysis' | 'logs_ui_categories';
  // Payload passed to the ML setup-module API.
  config: TypeOf<typeof setupModuleBodySchema>;
}

// Base setup-module payload for the log-entry-rate job; the ID prefix is
// filled in per fixture below.
const rateConfig = {
  prefix: '',
  start: Date.now(),
  indexPatternName: 'filebeat-*',
  startDatafeed: true,
  useDedicatedIndex: true,
  jobOverrides: [
    {
      job_id: 'log-entry-rate',
      analysis_config: {
        bucket_span: '900000ms',
      },
      data_description: {
        time_field: '@timestamp',
      },
      custom_settings: {
        logs_source_config: {
          indexPattern: 'filebeat-*',
          timestampField: '@timestamp',
          bucketSpan: 900000,
        },
      },
    },
  ],
  datafeedOverrides: [
    {
      job_id: 'log-entry-rate',
      runtime_mappings: {},
    },
  ],
};

// Base setup-module payload for the log-entry-categories-count job; unlike the
// rate job it carries a dataset filter and a query restricting to documents
// that have a 'message' field.
const categoriesCountConfig = {
  prefix: '',
  start: Date.now(),
  indexPatternName: 'filebeat-*',
  startDatafeed: true,
  useDedicatedIndex: true,
  jobOverrides: [
    {
      job_id: 'log-entry-categories-count',
      analysis_config: {
        bucket_span: '900000ms',
      },
      data_description: {
        time_field: '@timestamp',
      },
      custom_settings: {
        logs_source_config: {
          indexPattern: 'filebeat-*',
          timestampField: '@timestamp',
          bucketSpan: 900000,
          datasetFilter: {
            type: 'includeAll',
          },
        },
      },
    },
  ],
  datafeedOverrides: [
    {
      job_id: 'log-entry-categories-count',
      runtime_mappings: {},
    },
  ],
  query: {
    bool: {
      filter: [
        {
          exists: {
            field: 'message',
          },
        },
      ],
    },
  },
};

// NOTE(review): '11558ee526445db2b42eb3d6b4af58d0' is presumably the hashed
// (UUID v5, dashes stripped) prefix derived from the default space and default
// log view — confirm against the getJobId/idFormat implementation if either
// default changes.
export const hashedRateJob: MlJob = {
  jobId: 'logs-11558ee526445db2b42eb3d6b4af58d0-log-entry-rate',
  module: 'logs_ui_analysis',
  config: {
    ...rateConfig,
    prefix: 'logs-11558ee526445db2b42eb3d6b4af58d0-',
  },
};

export const hashedCategoriesCountJob: MlJob = {
  jobId: 'logs-11558ee526445db2b42eb3d6b4af58d0-log-entry-categories-count',
  module: 'logs_ui_categories',
  config: {
    ...categoriesCountConfig,
    prefix: 'logs-11558ee526445db2b42eb3d6b4af58d0-',
  },
};

// Legacy-format fixtures: 'kibana-logs-ui-<space>-<logView>-' prefix for the
// default space and default log view.
export const legacyRateJob: MlJob = {
  jobId: 'kibana-logs-ui-default-default-log-entry-rate',
  module: 'logs_ui_analysis',
  config: {
    ...rateConfig,
    prefix: 'kibana-logs-ui-default-default-',
  },
};

export const legacyCategoriesCountJob: MlJob = {
  jobId: 'kibana-logs-ui-default-default-log-entry-categories-count',
  module: 'logs_ui_categories',
  config: {
    ...categoriesCountConfig,
    prefix: 'kibana-logs-ui-default-default-',
  },
};

View file

@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MlApi } from '../../../../services/ml/api';
import { MlJob } from './ml_job_configs';
/**
 * Wraps the ML test API with helpers that create and delete the anomaly
 * detection job fixtures, waiting for each job to (dis)appear before moving
 * on to the next one.
 */
export function createMlJobHelper(ml: MlApi) {
  // Jobs are processed strictly in order: each setup (or teardown) fully
  // completes before the next job is touched.
  const createMlJobs = async (jobs: MlJob[]) => {
    for (const job of jobs) {
      await ml.setupModule(job.module, job.config, 'default');
      await ml.waitForAnomalyDetectionJobToExist(job.jobId);
    }
  };

  const deleteMlJobs = async (jobs: MlJob[]) => {
    for (const job of jobs) {
      await ml.deleteAnomalyDetectionJobES(job.jobId);
      await ml.waitForAnomalyDetectionJobNotToExist(job.jobId);
    }
  };

  return { createMlJobs, deleteMlJobs };
}

export type MlJobHelper = ReturnType<typeof createMlJobHelper>;

View file

@ -0,0 +1,101 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { Browser } from '../../../../../../../test/functional/services/common/browser';
import { CommonPageObject } from '../../../../../../../test/functional/page_objects/common_page';
// PerformanceResourceTiming entries carry `responseStatus` in current
// browsers, but the DOM typings in use here don't include it yet, hence this
// local augmentation.
type PerformanceResourceTimingWithHttpStatus = PerformanceResourceTiming & {
  responseStatus: number;
};

/** A single captured fetch request, as recorded inside the browser. */
export interface RequestLogEntry {
  url: string;
  timestamp: number;
  status: number;
}

declare global {
  interface Window {
    // Tracker state is stashed on `window` so it survives between separate
    // `browser.execute` round-trips.
    ftrLogsUiAnomalies?: {
      requests: RequestLogEntry[];
      observer: PerformanceObserver;
    };
  }
}

/**
 * Records fetch requests made by the page under test using the browser's
 * PerformanceObserver API, so tests can assert which ML endpoints were hit.
 *
 * Note: the callbacks passed to `browser.execute` are serialized and run in
 * the browser context — they must not capture variables from this module.
 */
export function createRequestTracker(browser: Browser, common: CommonPageObject) {
  // Installs the PerformanceObserver in the page and initializes the request log.
  async function install() {
    await browser.execute(() => {
      function handleTimings(entryList: PerformanceObserverEntryList) {
        const entries = entryList.getEntriesByType(
          'resource'
        ) as PerformanceResourceTimingWithHttpStatus[];
        // Only fetch-initiated resources are of interest (the app's API calls).
        entries
          .filter((entry) => entry.initiatorType === 'fetch')
          .forEach((entry) => {
            if (window.ftrLogsUiAnomalies) {
              window.ftrLogsUiAnomalies.requests.push({
                url: entry.name,
                timestamp: entry.startTime,
                status: entry.responseStatus,
              });
            } else {
              throw new Error('Request tracker not installed');
            }
          });
      }

      const observer = new PerformanceObserver(handleTimings);
      // `buffered: true` also delivers resource entries recorded before install.
      observer.observe({ type: 'resource', buffered: true });
      window.ftrLogsUiAnomalies = {
        observer,
        requests: [],
      };
    });
  }

  // Returns recorded requests whose URL matches `pattern`, sorted by start
  // time; optionally waits first to let in-flight requests be observed.
  async function getRequests(pattern: RegExp, timeToWait: number = 0) {
    if (timeToWait > 0) {
      await common.sleep(timeToWait);
    }

    // Passing RegExp to the browser doesn't seem to serialize well
    // so we pass a string, but .toString returns it like /pattern/ which
    // when we compile it in the browser gets escaped to /\/pattern\//
    // thus we remove the surrounding slashes
    const patternString = pattern.toString();
    const trimmedPattern = patternString.substring(1, patternString.length - 1);

    return await browser.execute((browserPattern: string) => {
      const regExp = new RegExp(browserPattern);
      if (window.ftrLogsUiAnomalies) {
        const entries = window.ftrLogsUiAnomalies.requests.filter((entry) =>
          regExp.test(entry.url)
        );
        entries.sort((a, b) => a.timestamp - b.timestamp);
        return entries;
      } else {
        throw new Error('Request tracker not installed');
      }
    }, trimmedPattern);
  }

  // Disconnects the observer and removes the tracker state from the page.
  async function uninstall() {
    await browser.execute(() => {
      if (window.ftrLogsUiAnomalies) {
        window.ftrLogsUiAnomalies.observer.disconnect();
        delete window.ftrLogsUiAnomalies;
      } else {
        throw new Error('Request tracker not installed');
      }
    });
  }

  return { install, getRequests, uninstall };
}

View file

@ -0,0 +1,323 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../../../ftr_provider_context';
import { assertIdFormats } from './assert_id_formats';
import { createMlJobHelper, MlJobHelper } from './ml_job_helper';
import { createRequestTracker } from './request_tracker';
import {
hashedRateJob,
hashedCategoriesCountJob,
legacyRateJob,
legacyCategoriesCountJob,
} from './ml_job_configs';
// Matches ML "overall buckets" result requests for either log job type; the
// job name suffix distinguishes the rate job from the categories-count job.
const anomalyDetectorsPattern =
  /anomaly_detectors\/.*-log-entry-(rate|categories-count)\/results\/overall_buckets/;
/**
 * Verifies that the logs UI requests anomaly detection results with the
 * correct ML job ID format (hashed vs. legacy) for every combination of
 * pre-existing jobs, and that newly created/recreated jobs use the hashed
 * format.
 */
export default ({ getService, getPageObjects }: FtrProviderContext) => {
  const retry = getService('retry');
  const esArchiver = getService('esArchiver');
  const browser = getService('browser');
  const pageObjects = getPageObjects(['common']);
  const logsUi = getService('logsUi');
  const ml = getService('ml');
  const requestTracker = createRequestTracker(browser, pageObjects.common);
  let mlJobHelper: MlJobHelper;

  /**
   * Navigates to the anomalies page, starts tracking requests, and waits
   * until the results screen has rendered.
   */
  async function navigateToResultsAndTrack() {
    await logsUi.logEntryRatePage.navigateTo();
    await requestTracker.install();
    await retry.try(async () => {
      expect(await logsUi.logEntryRatePage.getResultsScreen()).to.be.ok();
    });
  }

  /**
   * Waits for tracked overall_buckets requests, asserts the job ID formats
   * referenced by one of them, and uninstalls the tracker. `useLastRequest`
   * selects the most recent request instead of the first one — useful right
   * after job creation, when earlier requests reflect the previous job set.
   */
  async function assertTrackedIdFormats(
    expectedFormats: Parameters<typeof assertIdFormats>[1],
    useLastRequest: boolean = false
  ) {
    const requests = await requestTracker.getRequests(anomalyDetectorsPattern, 2000);
    expect(requests).not.to.be.empty();
    const request = useLastRequest ? requests[requests.length - 1] : requests[0];
    assertIdFormats(request.url, expectedFormats);
    await requestTracker.uninstall();
  }

  describe('ML job ID formats', function () {
    this.tags('includeFirefox');
    this.beforeAll(async () => {
      // Access to ml.api has to happen inside a test or test hook
      mlJobHelper = createMlJobHelper(ml.api);
      await esArchiver.load('x-pack/test/functional/es_archives/infra/simple_logs');
    });
    this.afterAll(async () => {
      await esArchiver.unload('x-pack/test/functional/es_archives/infra/simple_logs');
    });

    describe('hashed format', () => {
      // The hashed format always takes priority. If, for some reason, the same job exists
      // in both formats, only the hashed format job will be used.
      it('loads rate job in the hashed ID format', async () => {
        await mlJobHelper.createMlJobs([hashedRateJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': 'hashed',
          'log-entry-categories-count': undefined,
        });
        await mlJobHelper.deleteMlJobs([hashedRateJob]);
      });

      it('loads category count job in the hashed ID format', async () => {
        await mlJobHelper.createMlJobs([hashedCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': undefined,
          'log-entry-categories-count': 'hashed',
        });
        await mlJobHelper.deleteMlJobs([hashedCategoriesCountJob]);
      });

      it('loads rate and category count job in the hashed ID format', async () => {
        await mlJobHelper.createMlJobs([hashedRateJob, hashedCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': 'hashed',
          'log-entry-categories-count': 'hashed',
        });
        await mlJobHelper.deleteMlJobs([hashedRateJob, hashedCategoriesCountJob]);
      });
    });

    describe('legacy format', () => {
      it('loads rate job in the legacy ID format', async () => {
        await mlJobHelper.createMlJobs([legacyRateJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': 'legacy',
          'log-entry-categories-count': undefined,
        });
        await mlJobHelper.deleteMlJobs([legacyRateJob]);
      });

      it('loads category count job in the legacy ID format', async () => {
        await mlJobHelper.createMlJobs([legacyCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': undefined,
          'log-entry-categories-count': 'legacy',
        });
        await mlJobHelper.deleteMlJobs([legacyCategoriesCountJob]);
      });

      it('loads rate and category count job in the legacy ID format', async () => {
        await mlJobHelper.createMlJobs([legacyRateJob, legacyCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': 'legacy',
          'log-entry-categories-count': 'legacy',
        });
        await mlJobHelper.deleteMlJobs([legacyRateJob, legacyCategoriesCountJob]);
      });
    });

    describe('mixed formats', () => {
      it('loads rate job in the hashed format and category count job in the legacy format', async () => {
        await mlJobHelper.createMlJobs([hashedRateJob, legacyCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': 'hashed',
          'log-entry-categories-count': 'legacy',
        });
        await mlJobHelper.deleteMlJobs([hashedRateJob, legacyCategoriesCountJob]);
      });

      it('loads rate job in the legacy format and category count job in the hashed format', async () => {
        await mlJobHelper.createMlJobs([legacyRateJob, hashedCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await assertTrackedIdFormats({
          'log-entry-rate': 'legacy',
          'log-entry-categories-count': 'hashed',
        });
        await mlJobHelper.deleteMlJobs([legacyRateJob, hashedCategoriesCountJob]);
      });
    });

    describe('creation and recreation', () => {
      it('create first ML job', async () => {
        // No jobs exist yet, so the setup screen (not the results screen) shows.
        await logsUi.logEntryRatePage.navigateTo();
        await requestTracker.install();
        await retry.try(async () => {
          expect(await logsUi.logEntryRatePage.getSetupScreen()).to.be.ok();
        });
        await logsUi.logEntryRatePage.startJobSetup();
        await retry.try(async () => {
          expect(await logsUi.logEntryRatePage.getSetupFlyout()).to.be.ok();
        });
        await logsUi.logEntryRatePage.startRateJobCreation();
        await retry.waitFor(
          'Create ML job button is enabled',
          async () => await logsUi.logEntryRatePage.canCreateJob()
        );
        await logsUi.logEntryRatePage.createJob();
        await retry.waitFor(
          'ML job created',
          async () => await logsUi.logEntryRatePage.jobCreationDone()
        );
        // New jobs must always be created in the hashed ID format.
        await assertTrackedIdFormats(
          { 'log-entry-rate': 'hashed', 'log-entry-categories-count': undefined },
          true
        );
        await mlJobHelper.deleteMlJobs([hashedRateJob]);
      });

      it('create second ML job', async () => {
        await mlJobHelper.createMlJobs([legacyRateJob]);
        await navigateToResultsAndTrack();
        await logsUi.logEntryRatePage.manageMlJobs();
        await retry.try(async () => {
          expect(await logsUi.logEntryRatePage.getSetupFlyout()).to.be.ok();
        });
        await logsUi.logEntryRatePage.startCategoriesCountJobCreation();
        await retry.waitFor(
          'Create ML job button is enabled',
          async () => await logsUi.logEntryRatePage.canCreateJob()
        );
        await logsUi.logEntryRatePage.createJob();
        await retry.waitFor(
          'ML job created',
          async () => await logsUi.logEntryRatePage.jobCreationDone()
        );
        // The pre-existing legacy job stays untouched; only the new job is hashed.
        await assertTrackedIdFormats(
          { 'log-entry-rate': 'legacy', 'log-entry-categories-count': 'hashed' },
          true
        );
        await mlJobHelper.deleteMlJobs([legacyRateJob, hashedCategoriesCountJob]);
      });

      it('migrate legacy job', async () => {
        await mlJobHelper.createMlJobs([legacyRateJob, hashedCategoriesCountJob]);
        await navigateToResultsAndTrack();
        await logsUi.logEntryRatePage.manageMlJobs();
        await retry.try(async () => {
          expect(await logsUi.logEntryRatePage.getSetupFlyout()).to.be.ok();
        });
        await logsUi.logEntryRatePage.startRateJobCreation();
        await retry.waitFor(
          'Recreate ML job button is enabled',
          async () => await logsUi.logEntryRatePage.canRecreateJob()
        );
        await logsUi.logEntryRatePage.recreateJob();
        await retry.waitFor(
          'ML job recreated',
          async () => await logsUi.logEntryRatePage.jobCreationDone()
        );
        // Recreating the legacy rate job migrates it to the hashed format.
        await assertTrackedIdFormats(
          { 'log-entry-rate': 'hashed', 'log-entry-categories-count': 'hashed' },
          true
        );
        await mlJobHelper.deleteMlJobs([hashedRateJob, hashedCategoriesCountJob]);
      });
    });
  });
};

View file

@ -21,8 +21,60 @@ export function LogEntryRatePageProvider({ getPageObjects, getService }: FtrProv
return await testSubjects.find('logEntryRateSetupPage');
},
// Returns the element for the 'logEntryRateResultsPage' test subject.
async getResultsScreen(): Promise<WebElementWrapper> {
  return testSubjects.find('logEntryRateResultsPage');
},
// Returns the element for the 'noDataPage' test subject.
async getNoDataScreen() {
  return testSubjects.find('noDataPage');
},
// Clicks the setup-screen button that opens the ML setup flyout.
async startJobSetup() {
  return testSubjects.click('infraLogEntryRateSetupContentMlSetupButton');
},
// Clicks the "manage ML jobs" button on the results screen.
async manageMlJobs() {
  return testSubjects.click('infraManageJobsButtonManageMlJobsButton');
},
// Returns the element for the ML setup flyout.
async getSetupFlyout(): Promise<WebElementWrapper> {
  return testSubjects.find('infraLogAnalysisSetupFlyout');
},
// The flyout renders one create-job button per job type; the first one
// belongs to the rate job.
async startRateJobCreation() {
  const [rateJobButton] = await testSubjects.findAll('infraCreateJobButtonButton');
  await rateJobButton.click();
},
// The second create-job button in the flyout belongs to the categories-count job.
async startCategoriesCountJobCreation() {
  const [, categoriesCountJobButton] = await testSubjects.findAll('infraCreateJobButtonButton');
  await categoriesCountJobButton.click();
},
// True when the create-ML-job button is enabled (its `disabled` attribute is
// absent or not the string "true").
async canCreateJob() {
  const createJobButton = await testSubjects.find('infraCreateMLJobsButtonCreateMlJobButton');
  return (await createJobButton.getAttribute('disabled')) !== 'true';
},
// Submits ML job creation from the setup flyout.
async createJob() {
  return testSubjects.click('infraCreateMLJobsButtonCreateMlJobButton');
},
// True when the recreate-ML-jobs button is enabled (its `disabled` attribute
// is absent or not the string "true").
async canRecreateJob() {
  const recreateJobButton = await testSubjects.find(
    'infraRecreateMLJobsButtonRecreateMlJobsButton'
  );
  return (await recreateJobButton.getAttribute('disabled')) !== 'true';
},
// Submits ML job recreation from the setup flyout.
async recreateJob() {
  return testSubjects.click('infraRecreateMLJobsButtonRecreateMlJobsButton');
},
// The "view results" button only exists once job creation has finished.
async jobCreationDone() {
  return testSubjects.exists('infraProcessStepViewResultsButton');
},
};
}