Mirror of https://github.com/elastic/kibana.git
Backports the following commits to 7.x:
- [Logs UI] Add ML job results APIs (#42356)

parent 78abaf6d06
commit b03c1e1415

40 changed files with 1246 additions and 801 deletions
@@ -4,7 +4,5 @@
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './search_results_api';
export * from './search_summary_api';
export * from './log_analysis';
export * from './metadata_api';
export * from './timed_api';
@@ -0,0 +1,7 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './results';
@@ -0,0 +1,7 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './log_entry_rate';
@@ -0,0 +1,73 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import {
  badRequestErrorRT,
  conflictErrorRT,
  forbiddenErrorRT,
  metricStatisticsRT,
  timeRangeRT,
} from '../../shared';

export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH =
  '/api/infra/log_analysis/results/log_entry_rate';

/**
 * request
 */

export const getLogEntryRateRequestPayloadRT = rt.type({
  data: rt.type({
    bucketDuration: rt.number,
    sourceId: rt.string,
    timeRange: timeRangeRT,
  }),
});

export type GetLogEntryRateRequestPayload = rt.TypeOf<typeof getLogEntryRateRequestPayloadRT>;

/**
 * response
 */

export const logEntryRateAnomaly = rt.type({
  actualLogEntryRate: rt.number,
  anomalyScore: rt.number,
  duration: rt.number,
  startTime: rt.number,
  typicalLogEntryRate: rt.number,
});

export const logEntryRateHistogramBucket = rt.type({
  anomalies: rt.array(logEntryRateAnomaly),
  duration: rt.number,
  logEntryRateStats: metricStatisticsRT,
  modelLowerBoundStats: metricStatisticsRT,
  modelUpperBoundStats: metricStatisticsRT,
  startTime: rt.number,
});

export const getLogEntryRateSuccessReponsePayloadRT = rt.type({
  data: rt.type({
    bucketDuration: rt.number,
    histogramBuckets: rt.array(logEntryRateHistogramBucket),
  }),
});

export type GetLogEntryRateSuccessResponsePayload = rt.TypeOf<
  typeof getLogEntryRateSuccessReponsePayloadRT
>;

export const getLogEntryRateResponsePayloadRT = rt.union([
  getLogEntryRateSuccessReponsePayloadRT,
  badRequestErrorRT,
  conflictErrorRT,
  forbiddenErrorRT,
]);

export type GetLogEntryRateReponsePayload = rt.TypeOf<typeof getLogEntryRateResponsePayloadRT>;
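Taken together, these codecs define both directions of the wire format. A minimal sketch of validating a request payload with them (the concrete values are illustrative, not part of the commit):

// Hypothetical round trip through the request codec. io-ts v1 returns an
// fp-ts Either from decode() instead of throwing on invalid input.
import { getLogEntryRateRequestPayloadRT } from './log_entry_rate';

const payload = {
  data: {
    bucketDuration: 15 * 60 * 1000, // 15-minute buckets, in milliseconds
    sourceId: 'default',
    timeRange: { startTime: 1564315100000, endTime: 1565040700000 },
  },
};

const result = getLogEntryRateRequestPayloadRT.decode(payload);
console.log(result.isRight()); // true — the shape matches the codec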
@@ -1,37 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { LogEntryFieldsMapping, LogEntryTime } from '../log_entry';
import { SearchResult } from '../log_search_result';
import { TimedApiResponse } from './timed_api';

interface CommonSearchResultsPostPayload {
  indices: string[];
  fields: LogEntryFieldsMapping;
  query: string;
}

export interface AdjacentSearchResultsApiPostPayload extends CommonSearchResultsPostPayload {
  target: LogEntryTime;
  before: number;
  after: number;
}

export interface AdjacentSearchResultsApiPostResponse extends TimedApiResponse {
  results: {
    before: SearchResult[];
    after: SearchResult[];
  };
}

export interface ContainedSearchResultsApiPostPayload extends CommonSearchResultsPostPayload {
  start: LogEntryTime;
  end: LogEntryTime;
}

export interface ContainedSearchResultsApiPostResponse extends TimedApiResponse {
  results: SearchResult[];
}
@@ -1,26 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { LogEntryFieldsMapping } from '../log_entry';
import { SearchSummaryBucket } from '../log_search_summary';
import { SummaryBucketSize } from '../log_summary';
import { TimedApiResponse } from './timed_api';

export interface SearchSummaryApiPostPayload {
  bucketSize: {
    unit: SummaryBucketSize;
    value: number;
  };
  fields: LogEntryFieldsMapping;
  indices: string[];
  start: number;
  end: number;
  query: string;
}

export interface SearchSummaryApiPostResponse extends TimedApiResponse {
  buckets: SearchSummaryBucket[];
}
23  x-pack/legacy/plugins/infra/common/http_api/shared/errors.ts  Normal file
@@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

const createErrorRuntimeType = <Attributes extends rt.Mixed = rt.UndefinedType>(
  statusCode: number,
  errorCode: string,
  attributes?: Attributes
) =>
  rt.type({
    statusCode: rt.literal(statusCode),
    error: rt.literal(errorCode),
    message: rt.string,
    ...(!!attributes ? { attributes } : {}),
  });

export const badRequestErrorRT = createErrorRuntimeType(400, 'Bad Request');
export const forbiddenErrorRT = createErrorRuntimeType(403, 'Forbidden');
export const conflictErrorRT = createErrorRuntimeType(409, 'Conflict');
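Because each error codec pins statusCode and error with rt.literal, the response union defined earlier can discriminate failures from successes purely by shape. An illustrative check (the error body is hypothetical, in the style hapi/Boom serializes):

import { badRequestErrorRT } from './errors';

// A Boom-style error body as hapi would serialize it:
const errorBody = { statusCode: 400, error: 'Bad Request', message: 'Invalid payload' };

console.log(badRequestErrorRT.decode(errorBody).isRight()); // true
console.log(badRequestErrorRT.decode({ statusCode: 403 }).isRight()); // false — wrong literal, missing fields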
@@ -0,0 +1,9 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './errors';
export * from './metric_statistics';
export * from './time_range';
@@ -0,0 +1,15 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

export const metricStatisticsRT = rt.type({
  avg: rt.union([rt.number, rt.null]),
  count: rt.number,
  max: rt.union([rt.number, rt.null]),
  min: rt.union([rt.number, rt.null]),
  sum: rt.number,
});
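The nullable unions mirror the Elasticsearch stats aggregation, which reports avg, min, and max as null over an empty bucket while count and sum stay numeric. Illustrative conforming values:

// Stats over an empty date_histogram bucket:
const emptyBucketStats = { avg: null, count: 0, max: null, min: null, sum: 0 };
// Stats over a populated bucket:
const populatedBucketStats = { avg: 42.5, count: 4, max: 90, min: 10, sum: 170 };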
@@ -4,10 +4,9 @@
 * you may not use this file except in compliance with the Elastic License.
 */

export interface ApiResponseTimings {
  [timing: string]: number;
}
import * as rt from 'io-ts';

export interface TimedApiResponse {
  timings: ApiResponseTimings;
}
export const timeRangeRT = rt.type({
  startTime: rt.number,
  endTime: rt.number,
});
8  x-pack/legacy/plugins/infra/common/log_analysis/index.ts  Normal file
@@ -0,0 +1,8 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './log_analysis';
export * from './job_parameters';
@@ -0,0 +1,10 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { JobType } from './log_analysis';

export const getJobId = (spaceId: string, sourceId: string, jobType: JobType) =>
  `kibana-logs-ui-${spaceId}-${sourceId}-${jobType}`;
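The job ID convention encodes the space and log source into the ML job name. For the default space and default source (the combination used by the esArchiver fixture further down):

getJobId('default', 'default', 'log-entry-rate');
// => 'kibana-logs-ui-default-default-log-entry-rate'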
@@ -0,0 +1,21 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

export const jobTypeRT = rt.keyof({
  'log-entry-rate': null,
});

export type JobType = rt.TypeOf<typeof jobTypeRT>;

export const jobStatusRT = rt.keyof({
  created: null,
  missing: null,
  running: null,
});

export type JobStatus = rt.TypeOf<typeof jobStatusRT>;
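rt.keyof produces an enum-like codec whose is() method doubles as a TypeScript type guard, so untrusted strings can be narrowed to JobType; for example (the second string is a made-up non-member):

jobTypeRT.is('log-entry-rate'); // true — narrows a string to JobType
jobTypeRT.is('some-other-job'); // false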
@@ -0,0 +1,7 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './log_analysis_results';
@@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import createContainer from 'constate-latest/dist/ts/src';
import { useMemo } from 'react';

import { useLogEntryRate } from './log_entry_rate';

export const useLogAnalysisResults = ({ sourceId }: { sourceId: string }) => {
  const { isLoading: isLoadingLogEntryRate, logEntryRate } = useLogEntryRate({ sourceId });

  const isLoading = useMemo(() => isLoadingLogEntryRate, [isLoadingLogEntryRate]);

  return {
    isLoading,
    logEntryRate,
  };
};

export const LogAnalysisResults = createContainer(useLogAnalysisResults);
@@ -0,0 +1,65 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { useMemo, useState } from 'react';
import { kfetch } from 'ui/kfetch';

import {
  getLogEntryRateRequestPayloadRT,
  getLogEntryRateSuccessReponsePayloadRT,
  GetLogEntryRateSuccessResponsePayload,
  LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
} from '../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';

type LogEntryRateResults = GetLogEntryRateSuccessResponsePayload['data'];

export const useLogEntryRate = ({ sourceId }: { sourceId: string }) => {
  const [logEntryRate, setLogEntryRate] = useState<LogEntryRateResults | null>(null);

  const [getLogEntryRateRequest, getLogEntryRate] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        return await kfetch({
          method: 'POST',
          pathname: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
          body: JSON.stringify(
            getLogEntryRateRequestPayloadRT.encode({
              data: {
                sourceId, // TODO: get from hook arguments
                timeRange: {
                  startTime: Date.now(), // TODO: get from hook arguments
                  endTime: Date.now() + 1000 * 60 * 60, // TODO: get from hook arguments
                },
                bucketDuration: 15 * 60 * 1000, // TODO: get from hook arguments
              },
            })
          ),
        });
      },
      onResolve: response => {
        const { data } = getLogEntryRateSuccessReponsePayloadRT
          .decode(response)
          .getOrElseL(throwErrors(createPlainError));

        setLogEntryRate(data);
      },
    },
    [sourceId]
  );

  const isLoading = useMemo(() => getLogEntryRateRequest.state === 'pending', [
    getLogEntryRateRequest.state,
  ]);

  return {
    getLogEntryRate,
    isLoading,
    logEntryRate,
  };
};
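A sketch of how a component might consume these results through the LogAnalysisResults container above. The component name and import path are hypothetical, and it assumes the constate container exposes a Context property, as the other infra state containers of this era do:

import React, { useContext } from 'react';
import { LogAnalysisResults } from '../../containers/logs/log_analysis'; // hypothetical path

const LogRateSummary: React.FunctionComponent = () => {
  // Requires a <LogAnalysisResults.Provider sourceId="default"> higher in the tree.
  const { isLoading, logEntryRate } = useContext(LogAnalysisResults.Context);

  return (
    <div>
      {isLoading || logEntryRate === null
        ? 'Loading log rate results…'
        : `${logEntryRate.histogramBuckets.length} histogram buckets loaded`}
    </div>
  );
};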
@@ -13,7 +13,7 @@ import { createSnapshotResolvers } from './graphql/snapshot';
import { createSourceStatusResolvers } from './graphql/source_status';
import { createSourcesResolvers } from './graphql/sources';
import { InfraBackendLibs } from './lib/infra_types';
import { initLegacyLoggingRoutes } from './logging_legacy';
import { initLogAnalysisGetLogEntryRateRoute } from './routes/log_analysis';
import { initMetricExplorerRoute } from './routes/metrics_explorer';
import { initMetadataRoute } from './routes/metadata';
@@ -31,8 +31,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => {

  libs.framework.registerGraphQLEndpoint('/api/infra/graphql', schema);

  initLegacyLoggingRoutes(libs.framework);
  initIpToHostName(libs);
  initLogAnalysisGetLogEntryRateRoute(libs);
  initMetricExplorerRoute(libs);
  initMetadataRoute(libs);
};
@@ -47,6 +47,11 @@ export interface InfraBackendFrameworkAdapter {
    method: 'indices.getAlias' | 'indices.get',
    options?: object
  ): Promise<InfraDatabaseGetIndicesResponse>;
  callWithRequest(
    req: InfraFrameworkRequest,
    method: 'ml.getBuckets',
    options?: object
  ): Promise<InfraDatabaseGetIndicesResponse>;
  callWithRequest(
    req: InfraFrameworkRequest,
    method: string,
@@ -54,6 +59,7 @@ export interface InfraBackendFrameworkAdapter {
  ): Promise<InfraDatabaseSearchResponse>;
  getIndexPatternsService(req: InfraFrameworkRequest<any>): Legacy.IndexPatternsService;
  getSavedObjectsService(): Legacy.SavedObjectsService;
  getSpaceId(request: InfraFrameworkRequest<any>): string;
  makeTSVBRequest(
    req: InfraFrameworkRequest,
    model: InfraMetricModel,
@@ -112,10 +112,19 @@ export class InfraKibanaBackendFrameworkAdapter implements InfraBackendFramework
      }
    }

    const frozenIndicesParams = ['search', 'msearch'].includes(endpoint)
      ? {
          ignore_throttled: !includeFrozen,
        }
      : {};

    const fields = await callWithRequest(
      internalRequest,
      endpoint,
      { ...params, ignore_throttled: !includeFrozen },
      {
        ...params,
        ...frozenIndicesParams,
      },
      ...rest
    );
    return fields;
@@ -137,6 +146,10 @@ export class InfraKibanaBackendFrameworkAdapter implements InfraBackendFramework
    });
  }

  public getSpaceId(request: InfraFrameworkRequest): string {
    return this.server.plugins.spaces.getSpaceId(request[internalInfraFrameworkRequest]);
  }

  public getSavedObjectsService() {
    return this.server.savedObjects;
  }
@@ -16,6 +16,7 @@ import { InfraFieldsDomain } from '../domains/fields_domain';
import { InfraLogEntriesDomain } from '../domains/log_entries_domain';
import { InfraMetricsDomain } from '../domains/metrics_domain';
import { InfraBackendLibs, InfraDomainLibs } from '../infra_types';
import { InfraLogAnalysis } from '../log_analysis';
import { InfraSnapshot } from '../snapshot';
import { InfraSourceStatus } from '../source_status';
import { InfraSources } from '../sources';
@@ -31,6 +32,7 @@ export function compose(server: Server): InfraBackendLibs {
    sources,
  });
  const snapshot = new InfraSnapshot({ sources, framework });
  const logAnalysis = new InfraLogAnalysis({ framework });

  const domainLibs: InfraDomainLibs = {
    fields: new InfraFieldsDomain(new FrameworkFieldsAdapter(framework), {
@@ -45,6 +47,7 @@ export function compose(server: Server): InfraBackendLibs {
  const libs: InfraBackendLibs = {
    configuration,
    framework,
    logAnalysis,
    snapshot,
    sources,
    sourceStatus,
@@ -10,9 +10,10 @@ import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from './adapters/
import { InfraFieldsDomain } from './domains/fields_domain';
import { InfraLogEntriesDomain } from './domains/log_entries_domain';
import { InfraMetricsDomain } from './domains/metrics_domain';
import { InfraLogAnalysis } from './log_analysis/log_analysis';
import { InfraSnapshot } from './snapshot';
import { InfraSourceStatus } from './source_status';
import { InfraSources } from './sources';
import { InfraSourceStatus } from './source_status';

export interface InfraDomainLibs {
  fields: InfraFieldsDomain;
@@ -23,6 +24,7 @@ export interface InfraDomainLibs {
export interface InfraBackendLibs extends InfraDomainLibs {
  configuration: InfraConfigurationAdapter;
  framework: InfraBackendFrameworkAdapter;
  logAnalysis: InfraLogAnalysis;
  snapshot: InfraSnapshot;
  sources: InfraSources;
  sourceStatus: InfraSourceStatus;
@@ -0,0 +1,12 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export class NoLogRateResultsIndexError extends Error {
  constructor(message?: string) {
    super(message);
    Object.setPrototypeOf(this, new.target.prototype);
  }
}
@@ -0,0 +1,8 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './errors';
export * from './log_analysis';
@@ -0,0 +1,201 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

import { getJobId } from '../../../common/log_analysis';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../adapters/framework';
import { NoLogRateResultsIndexError } from './errors';

const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-';

export class InfraLogAnalysis {
  constructor(
    private readonly libs: {
      framework: InfraBackendFrameworkAdapter;
    }
  ) {}

  public getJobIds(request: InfraFrameworkRequest, sourceId: string) {
    return {
      logEntryRate: getJobId(this.libs.framework.getSpaceId(request), sourceId, 'log-entry-rate'),
    };
  }

  public async getLogEntryRateBuckets(
    request: InfraFrameworkRequest,
    sourceId: string,
    startTime: number,
    endTime: number,
    bucketDuration: number
  ) {
    const logRateJobId = this.getJobIds(request, sourceId).logEntryRate;

    const mlModelPlotResponse = await this.libs.framework.callWithRequest(request, 'search', {
      allowNoIndices: true,
      body: {
        query: {
          bool: {
            filter: [
              {
                range: {
                  timestamp: {
                    gte: startTime,
                    lt: endTime,
                  },
                },
              },
              {
                terms: {
                  result_type: ['model_plot', 'record'],
                },
              },
              {
                term: {
                  detector_index: {
                    value: 0,
                  },
                },
              },
            ],
          },
        },
        aggs: {
          timestamp_buckets: {
            date_histogram: {
              field: 'timestamp',
              fixed_interval: `${bucketDuration}ms`,
            },
            aggs: {
              filter_model_plot: {
                filter: {
                  term: {
                    result_type: 'model_plot',
                  },
                },
                aggs: {
                  stats_model_lower: {
                    stats: {
                      field: 'model_lower',
                    },
                  },
                  stats_model_upper: {
                    stats: {
                      field: 'model_upper',
                    },
                  },
                  stats_actual: {
                    stats: {
                      field: 'actual',
                    },
                  },
                },
              },
              filter_records: {
                filter: {
                  term: {
                    result_type: 'record',
                  },
                },
                aggs: {
                  top_hits_record: {
                    top_hits: {
                      _source: Object.keys(logRateMlRecordRT.props),
                      size: 100,
                      sort: [
                        {
                          timestamp: 'asc',
                        },
                      ],
                    },
                  },
                },
              },
            },
          },
        },
      },
      ignoreUnavailable: true,
      index: `${ML_ANOMALY_INDEX_PREFIX}${logRateJobId}`,
      size: 0,
      trackScores: false,
      trackTotalHits: false,
    });

    if (mlModelPlotResponse._shards.total === 0) {
      throw new NoLogRateResultsIndexError(
        `Failed to find ml result index for job ${logRateJobId}.`
      );
    }

    const mlModelPlotBuckets = logRateModelPlotResponseRT
      .decode(mlModelPlotResponse)
      .map(response => response.aggregations.timestamp_buckets.buckets)
      .getOrElseL(throwErrors(createPlainError));

    return mlModelPlotBuckets.map(bucket => ({
      anomalies: bucket.filter_records.top_hits_record.hits.hits.map(({ _source: record }) => ({
        actualLogEntryRate: record.actual[0],
        anomalyScore: record.record_score,
        duration: record.bucket_span * 1000,
        startTime: record.timestamp,
        typicalLogEntryRate: record.typical[0],
      })),
      duration: bucketDuration,
      logEntryRateStats: bucket.filter_model_plot.stats_actual,
      modelLowerBoundStats: bucket.filter_model_plot.stats_model_lower,
      modelUpperBoundStats: bucket.filter_model_plot.stats_model_upper,
      startTime: bucket.key,
    }));
  }
}

const logRateMlRecordRT = rt.type({
  actual: rt.array(rt.number),
  bucket_span: rt.number,
  record_score: rt.number,
  timestamp: rt.number,
  typical: rt.array(rt.number),
});

const logRateStatsAggregationRT = rt.type({
  avg: rt.union([rt.number, rt.null]),
  count: rt.number,
  max: rt.union([rt.number, rt.null]),
  min: rt.union([rt.number, rt.null]),
  sum: rt.number,
});

const logRateModelPlotResponseRT = rt.type({
  aggregations: rt.type({
    timestamp_buckets: rt.type({
      buckets: rt.array(
        rt.type({
          key: rt.number,
          filter_records: rt.type({
            doc_count: rt.number,
            top_hits_record: rt.type({
              hits: rt.type({
                hits: rt.array(
                  rt.type({
                    _source: logRateMlRecordRT,
                  })
                ),
              }),
            }),
          }),
          filter_model_plot: rt.type({
            doc_count: rt.number,
            stats_actual: logRateStatsAggregationRT,
            stats_model_lower: logRateStatsAggregationRT,
            stats_model_upper: logRateStatsAggregationRT,
          }),
        })
      ),
    }),
  }),
});
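The results index queried here is simply the anomaly-index prefix concatenated with the job ID, which is exactly what the aliases in the test archive below point at; for the default space and source:

// ML_ANOMALY_INDEX_PREFIX + getJobId(spaceId, sourceId, jobType):
const index = '.ml-anomalies-' + getJobId('default', 'default', 'log-entry-rate');
// => '.ml-anomalies-kibana-logs-ui-default-default-log-entry-rate'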
@@ -1,189 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { boomify } from 'boom';
import { SearchParams } from 'elasticsearch';
import * as Joi from 'joi';

import {
  AdjacentSearchResultsApiPostPayload,
  AdjacentSearchResultsApiPostResponse,
} from '../../common/http_api';
import { LogEntryFieldsMapping, LogEntryTime } from '../../common/log_entry';
import { SearchResult } from '../../common/log_search_result';
import {
  InfraBackendFrameworkAdapter,
  InfraDatabaseSearchResponse,
  InfraWrappableRequest,
} from '../lib/adapters/framework';
import { convertHitToSearchResult } from './converters';
import { isHighlightedHit, SortedHit } from './elasticsearch';
import { fetchLatestTime } from './latest_log_entries';
import { indicesSchema, logEntryFieldsMappingSchema, logEntryTimeSchema } from './schemas';

const INITIAL_HORIZON_OFFSET = 1000 * 60 * 60 * 24;
const MAX_HORIZON = 9999999999999;

export const initAdjacentSearchResultsRoutes = (framework: InfraBackendFrameworkAdapter) => {
  const callWithRequest = framework.callWithRequest;

  framework.registerRoute<
    InfraWrappableRequest<AdjacentSearchResultsApiPostPayload>,
    Promise<AdjacentSearchResultsApiPostResponse>
  >({
    options: {
      validate: {
        payload: Joi.object().keys({
          after: Joi.number()
            .min(0)
            .default(0),
          before: Joi.number()
            .min(0)
            .default(0),
          fields: logEntryFieldsMappingSchema.required(),
          indices: indicesSchema.required(),
          query: Joi.string().required(),
          target: logEntryTimeSchema.required(),
        }),
      },
    },
    handler: async (request, h) => {
      const timings = {
        esRequestSent: Date.now(),
        esResponseProcessed: 0,
      };

      try {
        const search = <Hit>(params: SearchParams) =>
          callWithRequest<Hit, any>(request, 'search', params);

        const latestTime = await fetchLatestTime(
          search,
          request.payload.indices,
          request.payload.fields.time
        );
        const searchResultsAfterTarget = await fetchSearchResults(
          search,
          request.payload.indices,
          request.payload.fields,
          {
            tiebreaker: request.payload.target.tiebreaker - 1,
            time: request.payload.target.time,
          },
          request.payload.after,
          'asc',
          request.payload.query,
          request.payload.target.time + INITIAL_HORIZON_OFFSET,
          latestTime
        );
        const searchResultsBeforeTarget = (await fetchSearchResults(
          search,
          request.payload.indices,
          request.payload.fields,
          request.payload.target,
          request.payload.before,
          'desc',
          request.payload.query,
          request.payload.target.time - INITIAL_HORIZON_OFFSET
        )).reverse();

        timings.esResponseProcessed = Date.now();

        return {
          results: {
            after: searchResultsAfterTarget,
            before: searchResultsBeforeTarget,
          },
          timings,
        };
      } catch (requestError) {
        throw boomify(requestError);
      }
    },
    method: 'POST',
    path: '/api/logging/adjacent-search-results',
  });
};

export async function fetchSearchResults(
  search: <Hit>(params: SearchParams) => Promise<InfraDatabaseSearchResponse<Hit>>,
  indices: string[],
  fields: LogEntryFieldsMapping,
  target: LogEntryTime,
  size: number,
  direction: 'asc' | 'desc',
  query: string,
  horizon: number,
  maxHorizon: number = MAX_HORIZON
): Promise<SearchResult[]> {
  if (size <= 0) {
    return [];
  }

  const request = {
    allowNoIndices: true,
    body: {
      _source: false,
      highlight: {
        boundary_scanner: 'word',
        fields: {
          [fields.message]: {},
        },
        fragment_size: 1,
        number_of_fragments: 100,
        post_tags: [''],
        pre_tags: [''],
      },
      query: {
        bool: {
          filter: [
            {
              query_string: {
                default_field: fields.message,
                default_operator: 'AND',
                query,
              },
            },
            {
              range: {
                [fields.time]: {
                  [direction === 'asc' ? 'gte' : 'lte']: target.time,
                  [direction === 'asc' ? 'lte' : 'gte']: horizon,
                },
              },
            },
          ],
        },
      },
      search_after: [target.time, target.tiebreaker],
      size,
      sort: [{ [fields.time]: direction }, { [fields.tiebreaker]: direction }],
    },
    ignoreUnavailable: true,
    index: indices,
  };
  const response = await search<SortedHit>(request);

  const hits = response.hits.hits as SortedHit[];
  const nextHorizon = horizon + (horizon - target.time);

  if (hits.length >= size || nextHorizon < 0 || nextHorizon > maxHorizon) {
    const filteredHits = hits.filter(isHighlightedHit);
    return filteredHits.map(convertHitToSearchResult(fields));
  } else {
    return fetchSearchResults(
      search,
      indices,
      fields,
      target,
      size,
      direction,
      query,
      nextHorizon,
      maxHorizon
    );
  }
}
@@ -1,135 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as Boom from 'boom';
import { SearchParams } from 'elasticsearch';
import * as Joi from 'joi';

import {
  ContainedSearchResultsApiPostPayload,
  ContainedSearchResultsApiPostResponse,
} from '../../common/http_api';
import { isLessOrEqual, LogEntryFieldsMapping, LogEntryTime } from '../../common/log_entry';
import { SearchResult } from '../../common/log_search_result';
import {
  InfraBackendFrameworkAdapter,
  InfraDatabaseSearchResponse,
  InfraWrappableRequest,
} from '../lib/adapters/framework';
import { convertHitToSearchResult } from './converters';
import { isHighlightedHit, SortedHit } from './elasticsearch';
import { indicesSchema, logEntryFieldsMappingSchema, logEntryTimeSchema } from './schemas';

export const initContainedSearchResultsRoutes = (framework: InfraBackendFrameworkAdapter) => {
  const callWithRequest = framework.callWithRequest;

  framework.registerRoute<
    InfraWrappableRequest<ContainedSearchResultsApiPostPayload>,
    Promise<ContainedSearchResultsApiPostResponse>
  >({
    options: {
      validate: {
        payload: Joi.object().keys({
          end: logEntryTimeSchema.required(),
          fields: logEntryFieldsMappingSchema.required(),
          indices: indicesSchema.required(),
          query: Joi.string().required(),
          start: logEntryTimeSchema.required(),
        }),
      },
    },
    handler: async request => {
      const timings = {
        esRequestSent: Date.now(),
        esResponseProcessed: 0,
      };

      try {
        const search = <Hit>(params: SearchParams) =>
          callWithRequest<Hit>(request, 'search', params);

        const searchResults = await fetchSearchResultsBetween(
          search,
          request.payload.indices,
          request.payload.fields,
          request.payload.start,
          request.payload.end,
          request.payload.query
        );

        timings.esResponseProcessed = Date.now();

        return {
          results: searchResults,
          timings,
        };
      } catch (requestError) {
        throw Boom.boomify(requestError);
      }
    },
    method: 'POST',
    path: '/api/logging/contained-search-results',
  });
};

export async function fetchSearchResultsBetween(
  search: <Hit>(params: SearchParams) => Promise<InfraDatabaseSearchResponse<Hit, any>>,
  indices: string[],
  fields: LogEntryFieldsMapping,
  start: LogEntryTime,
  end: LogEntryTime,
  query: string
): Promise<SearchResult[]> {
  const request = {
    allowNoIndices: true,
    body: {
      _source: false,
      highlight: {
        boundary_scanner: 'word',
        fields: {
          [fields.message]: {},
        },
        fragment_size: 1,
        number_of_fragments: 100,
        post_tags: [''],
        pre_tags: [''],
      },
      query: {
        bool: {
          filter: [
            {
              query_string: {
                default_field: fields.message,
                default_operator: 'AND',
                query,
              },
            },
            {
              range: {
                [fields.time]: {
                  gte: start.time,
                  lte: end.time,
                },
              },
            },
          ],
        },
      },
      search_after: [start.time, start.tiebreaker - 1],
      size: 10000,
      sort: [{ [fields.time]: 'asc' }, { [fields.tiebreaker]: 'asc' }],
    },
    ignoreUnavailable: true,
    index: indices,
  };
  const response = await search<SortedHit>(request);

  const hits = response.hits.hits as SortedHit[];
  const filteredHits = hits
    .filter(hit => isLessOrEqual({ time: hit.sort[0], tiebreaker: hit.sort[1] }, end))
    .filter(isHighlightedHit);
  return filteredHits.map(convertHitToSearchResult(fields));
}
@@ -1,70 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import invert from 'lodash/fp/invert';
import mapKeys from 'lodash/fp/mapKeys';

import { LogEntryFieldsMapping } from '../../common/log_entry';
import { SearchResult } from '../../common/log_search_result';
import { SearchSummaryBucket } from '../../common/log_search_summary';
import {
  DateHistogramResponse,
  HighlightedHit,
  Hit,
  HitsBucket,
  isBucketWithAggregation,
} from './elasticsearch';

export const convertHitToSearchResult = (fields: LogEntryFieldsMapping) => {
  const invertedFields = invert(fields);
  return (hit: HighlightedHit): SearchResult => {
    const matches = mapKeys(key => invertedFields[key], hit.highlight || {});
    return {
      fields: {
        tiebreaker: hit.sort[1], // use the sort property to get the normalized values
        time: hit.sort[0],
      },
      gid: getHitGid(hit),
      matches,
    };
  };
};

export const convertDateHistogramToSearchSummaryBuckets = (
  fields: LogEntryFieldsMapping,
  end: number
) => (buckets: DateHistogramResponse['buckets']): SearchSummaryBucket[] =>
  buckets.reduceRight(
    ({ previousStart, aggregatedBuckets }, bucket) => {
      const representative =
        isBucketWithAggregation<HitsBucket, 'top_entries'>(bucket, 'top_entries') &&
        bucket.top_entries.hits.hits.length > 0
          ? convertHitToSearchResult(fields)(bucket.top_entries.hits.hits[0])
          : null;
      return {
        aggregatedBuckets: [
          ...(representative
            ? [
                {
                  count: bucket.doc_count,
                  end: previousStart,
                  representative,
                  start: bucket.key,
                },
              ]
            : []),
          ...aggregatedBuckets,
        ],
        previousStart: bucket.key,
      };
    },
    { previousStart: end, aggregatedBuckets: [] } as {
      previousStart: number;
      aggregatedBuckets: SearchSummaryBucket[];
    }
  ).aggregatedBuckets;

const getHitGid = (hit: Hit): string => `${hit._index}:${hit._type}:${hit._id}`;
@@ -1,79 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { MSearchParams, MSearchResponse, SearchParams, SearchResponse } from 'elasticsearch';

export interface ESCluster {
  callWithRequest(
    request: any,
    endpoint: 'msearch',
    clientOptions: MSearchParams,
    options?: object
  ): Promise<MSearchResponse<object>>;
  callWithRequest(
    request: any,
    endpoint: 'search',
    clientOptions: SearchParams,
    options?: object
  ): Promise<SearchResponse<object>>;
  callWithRequest(
    request: any,
    endpoint: string,
    clientOptions?: object,
    options?: object
  ): Promise<never>;
}

export type Hit = SearchResponse<object>['hits']['hits'][0];

export interface SortedHit extends Hit {
  sort: any[];
  _source: {
    [field: string]: any;
  };
}

export interface HighlightedHit extends SortedHit {
  highlight?: {
    [field: string]: string[];
  };
}

export const isHighlightedHit = (hit: Hit): hit is HighlightedHit => !!hit.highlight;

export interface DateHistogramBucket {
  key: number;
  key_as_string: string;
  doc_count: number;
}

export interface HitsBucket {
  hits: {
    total: number;
    max_score: number | null;
    hits: SortedHit[];
  };
}

export interface DateHistogramResponse {
  buckets: DateHistogramBucket[];
}

export type WithSubAggregation<
  SubAggregationType,
  SubAggregationName extends string,
  BucketType
> = BucketType & { [subAggregationName in SubAggregationName]: SubAggregationType };

export const isBucketWithAggregation = <
  SubAggregationType extends object,
  SubAggregationName extends string = any,
  BucketType extends object = {}
>(
  bucket: BucketType,
  aggregationName: SubAggregationName
): bucket is WithSubAggregation<SubAggregationType, SubAggregationName, BucketType> =>
  aggregationName in bucket;
@@ -1,16 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { InfraBackendFrameworkAdapter } from '../lib/adapters/framework';
import { initAdjacentSearchResultsRoutes } from './adjacent_search_results';
import { initContainedSearchResultsRoutes } from './contained_search_results';
import { initSearchSummaryRoutes } from './search_summary';

export const initLegacyLoggingRoutes = (framework: InfraBackendFrameworkAdapter) => {
  initAdjacentSearchResultsRoutes(framework);
  initContainedSearchResultsRoutes(framework);
  initSearchSummaryRoutes(framework);
};
@@ -1,42 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { SearchParams } from 'elasticsearch';

import { InfraDatabaseSearchResponse } from '../lib/adapters/framework';

export async function fetchLatestTime(
  search: <Hit, Aggregations>(
    params: SearchParams
  ) => Promise<InfraDatabaseSearchResponse<Hit, Aggregations>>,
  indices: string[],
  timeField: string
): Promise<number> {
  const response = await search<any, { max_time?: { value: number } }>({
    allowNoIndices: true,
    body: {
      aggregations: {
        max_time: {
          max: {
            field: timeField,
          },
        },
      },
      query: {
        match_all: {},
      },
      size: 0,
    },
    ignoreUnavailable: true,
    index: indices,
  });

  if (response.aggregations && response.aggregations.max_time) {
    return response.aggregations.max_time.value;
  } else {
    return 0;
  }
}
@@ -1,34 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as Joi from 'joi';

export const timestampSchema = Joi.number()
  .integer()
  .min(0);

export const logEntryFieldsMappingSchema = Joi.object().keys({
  message: Joi.string().required(),
  tiebreaker: Joi.string().required(),
  time: Joi.string().required(),
});

export const logEntryTimeSchema = Joi.object().keys({
  tiebreaker: Joi.number().integer(),
  time: timestampSchema,
});

export const indicesSchema = Joi.array().items(Joi.string());

export const summaryBucketSizeSchema = Joi.object().keys({
  unit: Joi.string()
    .valid(['y', 'M', 'w', 'd', 'h', 'm', 's'])
    .required(),
  value: Joi.number()
    .integer()
    .min(0)
    .required(),
});
@@ -1,156 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as Boom from 'boom';
import { SearchParams } from 'elasticsearch';
import * as Joi from 'joi';

import { SearchSummaryApiPostPayload, SearchSummaryApiPostResponse } from '../../common/http_api';
import { LogEntryFieldsMapping } from '../../common/log_entry';
import { SearchSummaryBucket } from '../../common/log_search_summary';
import { SummaryBucketSize } from '../../common/log_summary';
import {
  InfraBackendFrameworkAdapter,
  InfraDatabaseSearchResponse,
  InfraWrappableRequest,
} from '../lib/adapters/framework';
import { convertDateHistogramToSearchSummaryBuckets } from './converters';
import { DateHistogramResponse } from './elasticsearch';
import {
  indicesSchema,
  logEntryFieldsMappingSchema,
  summaryBucketSizeSchema,
  timestampSchema,
} from './schemas';

export const initSearchSummaryRoutes = (framework: InfraBackendFrameworkAdapter) => {
  const callWithRequest = framework.callWithRequest;

  framework.registerRoute<
    InfraWrappableRequest<SearchSummaryApiPostPayload, {}, {}>,
    Promise<SearchSummaryApiPostResponse>
  >({
    options: {
      validate: {
        payload: Joi.object().keys({
          bucketSize: summaryBucketSizeSchema.required(),
          end: timestampSchema.required(),
          fields: logEntryFieldsMappingSchema.required(),
          indices: indicesSchema.required(),
          query: Joi.string().required(),
          start: timestampSchema.required(),
        }),
      },
    },
    handler: async request => {
      const timings = {
        esRequestSent: Date.now(),
        esResponseProcessed: 0,
      };

      try {
        const search = <Hit, Aggregations>(params: SearchParams) =>
          callWithRequest<Hit, Aggregations>(request, 'search', params);
        const summaryBuckets = await fetchSummaryBuckets(
          search,
          request.payload.indices,
          request.payload.fields,
          request.payload.start,
          request.payload.end,
          request.payload.bucketSize,
          request.payload.query
        );

        timings.esResponseProcessed = Date.now();

        return {
          buckets: summaryBuckets,
          timings,
        };
      } catch (requestError) {
        throw Boom.boomify(requestError);
      }
    },
    method: 'POST',
    path: '/api/logging/search-summary',
  });
};

async function fetchSummaryBuckets(
  search: <Hit, Aggregations>(
    params: SearchParams
  ) => Promise<InfraDatabaseSearchResponse<Hit, Aggregations>>,
  indices: string[],
  fields: LogEntryFieldsMapping,
  start: number,
  end: number,
  bucketSize: {
    unit: SummaryBucketSize;
    value: number;
  },
  query: string
): Promise<SearchSummaryBucket[]> {
  const response = await search<any, { count_by_date?: DateHistogramResponse }>({
    allowNoIndices: true,
    body: {
      aggregations: {
        count_by_date: {
          aggregations: {
            top_entries: {
              top_hits: {
                _source: [fields.message],
                size: 1,
                sort: [{ [fields.time]: 'desc' }, { [fields.tiebreaker]: 'desc' }],
              },
            },
          },
          date_histogram: {
            extended_bounds: {
              max: end,
              min: start,
            },
            field: fields.time,
            interval: `${bucketSize.value}${bucketSize.unit}`,
            min_doc_count: 0,
          },
        },
      },
      query: {
        bool: {
          filter: [
            {
              query_string: {
                default_field: fields.message,
                default_operator: 'AND',
                query,
              },
            },
            {
              range: {
                [fields.time]: {
                  format: 'epoch_millis',
                  gte: start,
                  lt: end,
                },
              },
            },
          ],
        },
      },
      size: 0,
    },
    ignoreUnavailable: true,
    index: indices,
  });

  if (response.aggregations && response.aggregations.count_by_date) {
    return convertDateHistogramToSearchSummaryBuckets(fields, end)(
      response.aggregations.count_by_date.buckets
    );
  } else {
    return [];
  }
}
@@ -0,0 +1,7 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './results';
@@ -0,0 +1,7 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './log_entry_rate';
@@ -0,0 +1,56 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import Boom from 'boom';

import { InfraBackendLibs } from '../../../lib/infra_types';
import {
  LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
  getLogEntryRateRequestPayloadRT,
  getLogEntryRateSuccessReponsePayloadRT,
} from '../../../../common/http_api/log_analysis';
import { throwErrors } from '../../../../common/runtime_types';
import { NoLogRateResultsIndexError } from '../../../lib/log_analysis';

export const initLogAnalysisGetLogEntryRateRoute = ({
  framework,
  logAnalysis,
}: InfraBackendLibs) => {
  framework.registerRoute({
    method: 'POST',
    path: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
    handler: async (req, res) => {
      const payload = getLogEntryRateRequestPayloadRT
        .decode(req.payload)
        .getOrElseL(throwErrors(Boom.badRequest));

      const logEntryRateBuckets = await logAnalysis
        .getLogEntryRateBuckets(
          req,
          payload.data.sourceId,
          payload.data.timeRange.startTime,
          payload.data.timeRange.endTime,
          payload.data.bucketDuration
        )
        .catch(err => {
          if (err instanceof NoLogRateResultsIndexError) {
            throw Boom.boomify(err, { statusCode: 404 });
          }

          throw Boom.boomify(err, { statusCode: ('statusCode' in err && err.statusCode) || 500 });
        });

      return res.response(
        getLogEntryRateSuccessReponsePayloadRT.encode({
          data: {
            bucketDuration: payload.data.bucketDuration,
            histogramBuckets: logEntryRateBuckets,
          },
        })
      );
    },
  });
};
@@ -7,6 +7,7 @@
export default function ({ loadTestFile }) {
  describe('InfraOps Endpoints', () => {
    loadTestFile(require.resolve('./metadata'));
    loadTestFile(require.resolve('./log_analysis'));
    loadTestFile(require.resolve('./log_entries'));
    loadTestFile(require.resolve('./log_entry_highlights'));
    loadTestFile(require.resolve('./log_summary'));
116  x-pack/test/api_integration/apis/infra/log_analysis.ts  Normal file
@@ -0,0 +1,116 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import expect from '@kbn/expect';

import {
  LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
  getLogEntryRateRequestPayloadRT,
  getLogEntryRateSuccessReponsePayloadRT,
} from '../../../../legacy/plugins/infra/common/http_api/log_analysis';
import {
  createPlainError,
  throwErrors,
} from '../../../../legacy/plugins/infra/common/runtime_types';
import { FtrProviderContext } from '../../ftr_provider_context';

const TIME_BEFORE_START = 1564315100000;
const TIME_AFTER_END = 1565040700000;
const COMMON_HEADERS = {
  'kbn-xsrf': 'some-xsrf-token',
};

// eslint-disable-next-line import/no-default-export
export default ({ getService }: FtrProviderContext) => {
  const esArchiver = getService('esArchiver');
  const supertest = getService('supertest');

  describe('log analysis apis', () => {
    before(() => esArchiver.load('infra/8.0.0/ml_anomalies_log_rate'));
    after(() => esArchiver.unload('infra/8.0.0/ml_anomalies_log_rate'));

    describe('log rate results', () => {
      describe('with the default source', () => {
        before(() => esArchiver.load('empty_kibana'));
        after(() => esArchiver.unload('empty_kibana'));

        it('should return buckets when the results index exists with matching documents', async () => {
          const { body } = await supertest
            .post(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH)
            .set(COMMON_HEADERS)
            .send(
              getLogEntryRateRequestPayloadRT.encode({
                data: {
                  sourceId: 'default',
                  timeRange: {
                    startTime: TIME_BEFORE_START,
                    endTime: TIME_AFTER_END,
                  },
                  bucketDuration: 15 * 60 * 1000,
                },
              })
            )
            .expect(200);

          const logEntryRateBuckets = getLogEntryRateSuccessReponsePayloadRT
            .decode(body)
            .getOrElseL(throwErrors(createPlainError));

          expect(logEntryRateBuckets.data.bucketDuration).to.be(15 * 60 * 1000);
          expect(logEntryRateBuckets.data.histogramBuckets).to.not.be.empty();
          expect(
            logEntryRateBuckets.data.histogramBuckets.some(bucket => bucket.anomalies.length > 0)
          ).to.be(true);
        });

        it('should return no buckets when the results index exists without matching documents', async () => {
          const { body } = await supertest
            .post(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH)
            .set(COMMON_HEADERS)
            .send(
              getLogEntryRateRequestPayloadRT.encode({
                data: {
                  sourceId: 'default',
                  timeRange: {
                    startTime: TIME_BEFORE_START - 10 * 15 * 60 * 1000,
                    endTime: TIME_BEFORE_START,
                  },
                  bucketDuration: 15 * 60 * 1000,
                },
              })
            )
            .expect(200);

          const logEntryRateBuckets = getLogEntryRateSuccessReponsePayloadRT
            .decode(body)
            .getOrElseL(throwErrors(createPlainError));

          expect(logEntryRateBuckets.data.bucketDuration).to.be(15 * 60 * 1000);
          expect(logEntryRateBuckets.data.histogramBuckets).to.be.empty();
        });

        it('should return a NotFound error when the results index does not exist', async () => {
          await supertest
            .post(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH)
            .set(COMMON_HEADERS)
            .send(
              getLogEntryRateRequestPayloadRT.encode({
                data: {
                  sourceId: 'does-not-exist',
                  timeRange: {
                    startTime: TIME_BEFORE_START,
                    endTime: TIME_AFTER_END,
                  },
                  bucketDuration: 15 * 60 * 1000,
                },
              })
            )
            .expect(404);
        });
      });
    });
  });
};
Binary file not shown.
@ -0,0 +1,532 @@
{
  "type": "index",
  "value": {
    "aliases": {
      ".ml-anomalies-.write-felix-logs-combined-1": {},
      ".ml-anomalies-.write-felix-logs-combined-4": {},
      ".ml-anomalies-.write-felix-logs-rate-1": {},
      ".ml-anomalies-.write-kibana-logs-ui-default-default-log-entry-rate": {},
      ".ml-anomalies-felix-logs-combined-1": {
        "filter": { "term": { "job_id": { "boost": 1, "value": "felix-logs-combined-1" } } }
      },
      ".ml-anomalies-felix-logs-combined-4": {
        "filter": { "term": { "job_id": { "boost": 1, "value": "felix-logs-combined-4" } } }
      },
      ".ml-anomalies-felix-logs-rate-1": {
        "filter": { "term": { "job_id": { "boost": 1, "value": "felix-logs-rate-1" } } }
      },
      ".ml-anomalies-kibana-logs-ui-default-default-log-entry-rate": {
        "filter": { "term": { "job_id": { "boost": 1, "value": "kibana-logs-ui-default-default-log-entry-rate" } } }
      }
    },
    "index": ".ml-anomalies-shared",
    "mappings": {
      "_meta": { "version": "8.0.0" },
      "dynamic_templates": [
        { "strings_as_keywords": { "mapping": { "type": "keyword" }, "match": "*" } }
      ],
      "properties": {
        "actual": { "type": "double" },
        "all_field_values": { "analyzer": "whitespace", "type": "text" },
        "anomaly_score": { "type": "double" },
        "average_bucket_processing_time_ms": { "type": "double" },
        "bucket_allocation_failures_count": { "type": "long" },
        "bucket_count": { "type": "long" },
        "bucket_influencers": {
          "properties": {
            "anomaly_score": { "type": "double" },
            "bucket_span": { "type": "long" },
            "influencer_field_name": { "type": "keyword" },
            "initial_anomaly_score": { "type": "double" },
            "is_interim": { "type": "boolean" },
            "job_id": { "type": "keyword" },
            "probability": { "type": "double" },
            "raw_anomaly_score": { "type": "double" },
            "result_type": { "type": "keyword" },
            "timestamp": { "type": "date" }
          },
          "type": "nested"
        },
        "bucket_span": { "type": "long" },
        "by_field_name": { "type": "keyword" },
        "by_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
        "category_id": { "type": "long" },
        "causes": {
          "properties": {
            "actual": { "type": "double" },
            "by_field_name": { "type": "keyword" },
            "by_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
            "correlated_by_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
            "field_name": { "type": "keyword" },
            "function": { "type": "keyword" },
            "function_description": { "type": "keyword" },
            "over_field_name": { "type": "keyword" },
            "over_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
            "partition_field_name": { "type": "keyword" },
            "partition_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
            "probability": { "type": "double" },
            "typical": { "type": "double" }
          },
          "type": "nested"
        },
        "description": { "type": "text" },
        "detector_index": { "type": "integer" },
        "earliest_record_timestamp": { "type": "date" },
        "empty_bucket_count": { "type": "long" },
        "event_count": { "type": "long" },
        "examples": { "type": "text" },
        "exponential_average_bucket_processing_time_ms": { "type": "double" },
        "field_name": { "type": "keyword" },
        "forecast_create_timestamp": { "type": "date" },
        "forecast_end_timestamp": { "type": "date" },
        "forecast_expiry_timestamp": { "type": "date" },
        "forecast_id": { "type": "keyword" },
        "forecast_lower": { "type": "double" },
        "forecast_memory_bytes": { "type": "long" },
        "forecast_messages": { "type": "keyword" },
        "forecast_prediction": { "type": "double" },
        "forecast_progress": { "type": "double" },
        "forecast_start_timestamp": { "type": "date" },
        "forecast_status": { "type": "keyword" },
        "forecast_upper": { "type": "double" },
        "function": { "type": "keyword" },
        "function_description": { "type": "keyword" },
        "influencer_field_name": { "type": "keyword" },
        "influencer_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
        "influencer_score": { "type": "double" },
        "influencers": {
          "properties": {
            "influencer_field_name": { "type": "keyword" },
            "influencer_field_values": { "copy_to": ["all_field_values"], "type": "keyword" }
          },
          "type": "nested"
        },
        "initial_anomaly_score": { "type": "double" },
        "initial_influencer_score": { "type": "double" },
        "initial_record_score": { "type": "double" },
        "input_bytes": { "type": "long" },
        "input_field_count": { "type": "long" },
        "input_record_count": { "type": "long" },
        "invalid_date_count": { "type": "long" },
        "is_interim": { "type": "boolean" },
        "job_id": { "copy_to": ["all_field_values"], "type": "keyword" },
        "last_data_time": { "type": "date" },
        "latest_empty_bucket_timestamp": { "type": "date" },
        "latest_record_time_stamp": { "type": "date" },
        "latest_record_timestamp": { "type": "date" },
        "latest_result_time_stamp": { "type": "date" },
        "latest_sparse_bucket_timestamp": { "type": "date" },
        "log_time": { "type": "date" },
        "max_matching_length": { "type": "long" },
        "maximum_bucket_processing_time_ms": { "type": "double" },
        "memory_status": { "type": "keyword" },
        "min_version": { "type": "keyword" },
        "minimum_bucket_processing_time_ms": { "type": "double" },
        "missing_field_count": { "type": "long" },
        "mlcategory": { "type": "keyword" },
        "model_bytes": { "type": "long" },
        "model_bytes_exceeded": { "type": "keyword" },
        "model_bytes_memory_limit": { "type": "keyword" },
        "model_feature": { "type": "keyword" },
        "model_lower": { "type": "double" },
        "model_median": { "type": "double" },
        "model_size_stats": {
          "properties": {
            "bucket_allocation_failures_count": { "type": "long" },
            "job_id": { "type": "keyword" },
            "log_time": { "type": "date" },
            "memory_status": { "type": "keyword" },
            "model_bytes": { "type": "long" },
            "model_bytes_exceeded": { "type": "keyword" },
            "model_bytes_memory_limit": { "type": "keyword" },
            "result_type": { "type": "keyword" },
            "timestamp": { "type": "date" },
            "total_by_field_count": { "type": "long" },
            "total_over_field_count": { "type": "long" },
            "total_partition_field_count": { "type": "long" }
          }
        },
        "model_upper": { "type": "double" },
        "multi_bucket_impact": { "type": "double" },
        "out_of_order_timestamp_count": { "type": "long" },
        "over_field_name": { "type": "keyword" },
        "over_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
        "partition_field_name": { "type": "keyword" },
        "partition_field_value": { "copy_to": ["all_field_values"], "type": "keyword" },
        "probability": { "type": "double" },
        "processed_field_count": { "type": "long" },
        "processed_record_count": { "type": "long" },
        "processing_time_ms": { "type": "long" },
        "quantiles": { "enabled": false, "type": "object" },
        "raw_anomaly_score": { "type": "double" },
        "record_score": { "type": "double" },
        "regex": { "type": "keyword" },
        "result_type": { "type": "keyword" },
        "retain": { "type": "boolean" },
        "scheduled_events": { "type": "keyword" },
        "search_count": { "type": "long" },
        "snapshot_doc_count": { "type": "integer" },
        "snapshot_id": { "type": "keyword" },
        "sparse_bucket_count": { "type": "long" },
        "terms": { "type": "text" },
        "timestamp": { "type": "date" },
        "total_by_field_count": { "type": "long" },
        "total_over_field_count": { "type": "long" },
        "total_partition_field_count": { "type": "long" },
        "total_search_time_ms": { "type": "double" },
        "typical": { "type": "double" }
      }
    },
    "settings": {
      "index": {
        "auto_expand_replicas": "0-1",
        "number_of_replicas": "1",
        "number_of_shards": "1",
        "query": { "default_field": "all_field_values" },
        "translog": { "durability": "async" },
        "unassigned": { "node_left": { "delayed_timeout": "1m" } }
      }
    }
  }
}
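The aliases in this archive matter for the results API: each ML job gets a filtered alias over `.ml-anomalies-shared`, so a query against `.ml-anomalies-<job_id>` is automatically scoped by the `job_id` term filter. Below is a hedged sketch of the kind of aggregation a results route could run against this data; the field names (`result_type`, `timestamp`, `actual`, `model_lower`, `model_upper`) come from the mapping above, but the route handler's actual query is not shown in this hunk, so treat the shape as illustrative only:

// Hedged sketch: bucket ML results into fixed intervals and collect the
// statistics the success payload reports (log entry rate plus model bounds).
const buildLogEntryRateSearch = (startTime: number, endTime: number, bucketDuration: number) => ({
  index: '.ml-anomalies-kibana-logs-ui-default-default-log-entry-rate',
  size: 0,
  body: {
    query: {
      bool: {
        filter: [
          // model_plot rows carry the bounds; record rows carry the anomalies
          { terms: { result_type: ['model_plot', 'record'] } },
          { range: { timestamp: { gte: startTime, lt: endTime, format: 'epoch_millis' } } },
        ],
      },
    },
    aggs: {
      timestamp_buckets: {
        date_histogram: { field: 'timestamp', fixed_interval: `${bucketDuration}ms` },
        aggs: {
          log_entry_rate_stats: { stats: { field: 'actual' } },
          model_lower_bound_stats: { stats: { field: 'model_lower' } },
          model_upper_bound_stats: { stats: { field: 'model_upper' } },
        },
      },
    },
  },
});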
@ -45,7 +45,7 @@ export default function({ getService }: FtrProviderContext) {
      const { body } = await supertest
        .post(`${getURLAPIBaseURL()}${SAVED_OBJECT_WITH_SECRET_TYPE}`)
        .set('kbn-xsrf', 'xxx')
        .send({ attributes: savedObjectOriginalAttributes }, {})
        .send({ attributes: savedObjectOriginalAttributes })
        .expect(200);

      savedObject = body;

@ -170,7 +170,7 @@ export default function({ getService }: FtrProviderContext) {
      const { body: response } = await supertest
        .put(`${getURLAPIBaseURL()}${SAVED_OBJECT_WITH_SECRET_TYPE}/${savedObject.id}`)
        .set('kbn-xsrf', 'xxx')
        .send({ attributes: updatedAttributes }, {})
        .send({ attributes: updatedAttributes })
        .expect(200);

      expect(response.attributes).to.eql({

@ -202,7 +202,7 @@ export default function({ getService }: FtrProviderContext) {
      const { body: response } = await supertest
        .put(`${getURLAPIBaseURL()}${SAVED_OBJECT_WITH_SECRET_TYPE}/${savedObject.id}`)
        .set('kbn-xsrf', 'xxx')
        .send({ attributes: updatedAttributes }, {})
        .send({ attributes: updatedAttributes })
        .expect(200);

      expect(response.attributes).to.eql({

@ -225,7 +225,7 @@ export default function({ getService }: FtrProviderContext) {
      const { body: response } = await supertest
        .put(`${getURLAPIBaseURL()}${SAVED_OBJECT_WITH_SECRET_TYPE}/${savedObject.id}`)
        .set('kbn-xsrf', 'xxx')
        .send({ attributes: updatedAttributes }, {})
        .send({ attributes: updatedAttributes })
        .expect(200);

      expect(response.attributes).to.eql({
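All four hunks above make the same one-line fix: in each pair of `.send(...)` lines, the first is the old call and the second its replacement. SuperTest's `.send(data)` accepts a single body argument, so the stray `{}` second argument had no effect at runtime and is simply dropped:

// before: the extra argument is ignored and is not part of .send()'s signature
.send({ attributes: updatedAttributes }, {})
// after:
.send({ attributes: updatedAttributes })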