Mirror of https://github.com/elastic/kibana.git
[Logs UI][Metrics UI] Remove deprecated config fields from APIs and SavedObjects (#116821) (#117573)
* [Logs UI][Metrics UI] Remove deprecated config fields from APIs
* Fix typecheck
* Fix typecheck
* Fix typecheck
* Fix jest
* Fix functional test
* Remove extraneous timeField args
* Typecheck fix
* Consolidate log file changes to ResolvedLogSourceConfiguration
* Fix merge
* Revert additional logs files
* Revert inventory models
* Revert log_analysis api
* Fix timefield reference in process list
* Restore logs page files, fix typecheck on mock
* Fix functional test
* Restore inventory models index
* Fix typecheck on getFilteredMetrics
* Look CI if you don't tell me all the type errors at once I can't fix them all
* Maybe this is the last typecheck fix who knows
* Restore reading timestamp field from data view

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
parent d798b739d3
commit 863063f3e3
84 changed files with 155 additions and 411 deletions
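In essence, the change replaces per-source field configuration (sourceConfiguration.fields.timestamp, options.fields, timefield request parameters) with the shared constants added to common/constants.ts. A minimal TypeScript sketch of the resulting call pattern (the buildRangeFilter helper and its import path are illustrative, not part of the diff):

import { TIMESTAMP_FIELD } from '../common/constants';

// Illustrative helper: range filters now reference the shared constant directly
// instead of threading a configurable timestamp field through the request types.
const buildRangeFilter = (from: number, to: number) => ({
  range: {
    [TIMESTAMP_FIELD]: { gte: from, lte: to, format: 'epoch_millis' },
  },
});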
@@ -8,7 +8,6 @@
 export const DEFAULT_SOURCE_ID = 'default';
 export const METRICS_INDEX_PATTERN = 'metrics-*,metricbeat-*';
 export const LOGS_INDEX_PATTERN = 'logs-*,filebeat-*,kibana_sample_data_logs*';
-export const TIMESTAMP_FIELD = '@timestamp';
 export const METRICS_APP = 'metrics';
 export const LOGS_APP = 'logs';
 
@@ -16,3 +15,9 @@ export const METRICS_FEATURE_ID = 'infrastructure';
 export const LOGS_FEATURE_ID = 'logs';
 
 export type InfraFeatureId = typeof METRICS_FEATURE_ID | typeof LOGS_FEATURE_ID;
+
+export const TIMESTAMP_FIELD = '@timestamp';
+export const TIEBREAKER_FIELD = '_doc';
+export const HOST_FIELD = 'host.name';
+export const CONTAINER_FIELD = 'container.id';
+export const POD_FIELD = 'kubernetes.pod.uid';

@@ -14,7 +14,6 @@ const AggValueRT = rt.type({
 
 export const ProcessListAPIRequestRT = rt.type({
   hostTerm: rt.record(rt.string, rt.string),
-  timefield: rt.string,
   indexPattern: rt.string,
   to: rt.number,
   sortBy: rt.type({
@@ -102,7 +101,6 @@ export type ProcessListAPIResponse = rt.TypeOf<typeof ProcessListAPIResponseRT>;
 
 export const ProcessListAPIChartRequestRT = rt.type({
   hostTerm: rt.record(rt.string, rt.string),
-  timefield: rt.string,
   indexPattern: rt.string,
   to: rt.number,
   command: rt.string,

@@ -10,7 +10,6 @@ import { MetricsUIAggregationRT } from '../inventory_models/types';
 import { afterKeyObjectRT } from './metrics_explorer';
 
 export const MetricsAPITimerangeRT = rt.type({
-  field: rt.string,
   from: rt.number,
   to: rt.number,
   interval: rt.string,

@@ -41,7 +41,6 @@ export const metricsExplorerMetricRT = rt.intersection([
 ]);
 
 export const timeRangeRT = rt.type({
-  field: rt.string,
   from: rt.number,
   to: rt.number,
   interval: rt.string,
@@ -6,6 +6,7 @@
  */
 
 import { i18n } from '@kbn/i18n';
+import { POD_FIELD, HOST_FIELD, CONTAINER_FIELD } from '../constants';
 import { host } from './host';
 import { pod } from './pod';
 import { awsEC2 } from './aws_ec2';
@@ -30,31 +31,23 @@ export const findInventoryModel = (type: InventoryItemType) => {
   return model;
 };
 
-interface InventoryFields {
-  host: string;
-  pod: string;
-  container: string;
-  timestamp: string;
-  tiebreaker: string;
-}
-
 const LEGACY_TYPES = ['host', 'pod', 'container'];
 
-const getFieldByType = (type: InventoryItemType, fields: InventoryFields) => {
+export const getFieldByType = (type: InventoryItemType) => {
   switch (type) {
     case 'pod':
-      return fields.pod;
+      return POD_FIELD;
     case 'host':
-      return fields.host;
+      return HOST_FIELD;
     case 'container':
-      return fields.container;
+      return CONTAINER_FIELD;
   }
 };
 
-export const findInventoryFields = (type: InventoryItemType, fields?: InventoryFields) => {
+export const findInventoryFields = (type: InventoryItemType) => {
   const inventoryModel = findInventoryModel(type);
-  if (fields && LEGACY_TYPES.includes(type)) {
-    const id = getFieldByType(type, fields) || inventoryModel.fields.id;
+  if (LEGACY_TYPES.includes(type)) {
    const id = getFieldByType(type) || inventoryModel.fields.id;
     return {
       ...inventoryModel.fields,
       id,
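Callers that previously passed a fields object now resolve inventory fields from the node type alone. A short sketch mirroring the call sites updated later in this diff (the import paths and example values are assumptions for illustration):

import { findInventoryFields, getFieldByType } from '../common/inventory_models';
import { InventoryItemType } from '../common/inventory_models/types';

// Hypothetical example values; real callers receive these from the inventory UI.
const nodeType: InventoryItemType = 'host';
const node = { id: 'example-01' };

// Build a KQL node filter without any per-source field configuration.
const nodeFilter = `${findInventoryFields(nodeType).id}: "${node.id}"`; // host.name: "example-01"

// Or resolve just the identifying field for a legacy type ('host' | 'pod' | 'container').
const hostField = getFieldByType('host'); // 'host.name'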
@@ -16,11 +16,6 @@ export const logSourceConfigurationOriginRT = rt.keyof({
 export type LogSourceConfigurationOrigin = rt.TypeOf<typeof logSourceConfigurationOriginRT>;
 
 const logSourceFieldsConfigurationRT = rt.strict({
-  container: rt.string,
-  host: rt.string,
-  pod: rt.string,
-  timestamp: rt.string,
-  tiebreaker: rt.string,
   message: rt.array(rt.string),
 });
 

@@ -8,6 +8,7 @@
 import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 import { DataView, DataViewsContract } from '../../../../../src/plugins/data_views/common';
 import { ObjectEntries } from '../utility_types';
+import { TIMESTAMP_FIELD, TIEBREAKER_FIELD } from '../constants';
 import { ResolveLogSourceConfigurationError } from './errors';
 import {
   LogSourceColumnConfiguration,
@@ -61,8 +62,8 @@ const resolveLegacyReference = async (
 
   return {
     indices: sourceConfiguration.logIndices.indexName,
-    timestampField: sourceConfiguration.fields.timestamp,
-    tiebreakerField: sourceConfiguration.fields.tiebreaker,
+    timestampField: TIMESTAMP_FIELD,
+    tiebreakerField: TIEBREAKER_FIELD,
     messageField: sourceConfiguration.fields.message,
     fields,
     runtimeMappings: {},
@@ -91,8 +92,8 @@ const resolveKibanaIndexPatternReference = async (
 
   return {
     indices: indexPattern.title,
-    timestampField: indexPattern.timeFieldName ?? '@timestamp',
-    tiebreakerField: '_doc',
+    timestampField: indexPattern.timeFieldName ?? TIMESTAMP_FIELD,
+    tiebreakerField: TIEBREAKER_FIELD,
     messageField: ['message'],
     fields: indexPattern.fields,
     runtimeMappings: resolveRuntimeMappings(indexPattern),
@@ -6,7 +6,6 @@
  */
 
 import * as rt from 'io-ts';
-import { omit } from 'lodash';
 import {
   SourceConfigurationRT,
   SourceStatusRuntimeType,
@@ -22,7 +21,6 @@ export const metricsSourceConfigurationPropertiesRT = rt.strict({
   metricAlias: SourceConfigurationRT.props.metricAlias,
   inventoryDefaultView: SourceConfigurationRT.props.inventoryDefaultView,
   metricsExplorerDefaultView: SourceConfigurationRT.props.metricsExplorerDefaultView,
-  fields: rt.strict(omit(SourceConfigurationRT.props.fields.props, 'message')),
   anomalyThreshold: rt.number,
 });
 
@@ -32,9 +30,6 @@ export type MetricsSourceConfigurationProperties = rt.TypeOf<
 
 export const partialMetricsSourceConfigurationPropertiesRT = rt.partial({
   ...metricsSourceConfigurationPropertiesRT.type.props,
-  fields: rt.partial({
-    ...metricsSourceConfigurationPropertiesRT.type.props.fields.type.props,
-  }),
 });
 
 export type PartialMetricsSourceConfigurationProperties = rt.TypeOf<

@@ -50,12 +50,7 @@ export const sourceConfigurationConfigFilePropertiesRT = rt.type({
   sources: rt.type({
     default: rt.partial({
       fields: rt.partial({
-        timestamp: rt.string,
         message: rt.array(rt.string),
-        tiebreaker: rt.string,
-        host: rt.string,
-        container: rt.string,
-        pod: rt.string,
       }),
     }),
   }),
@@ -113,11 +108,6 @@ export type InfraSourceConfigurationColumn = rt.TypeOf<typeof SourceConfiguratio
  */
 
 const SourceConfigurationFieldsRT = rt.type({
-  container: rt.string,
-  host: rt.string,
-  pod: rt.string,
-  tiebreaker: rt.string,
-  timestamp: rt.string,
   message: rt.array(rt.string),
 });
 
@@ -54,14 +54,6 @@ describe('ExpressionChart', () => {
       metricAlias: 'metricbeat-*',
       inventoryDefaultView: 'host',
       metricsExplorerDefaultView: 'host',
-      // @ts-ignore
-      fields: {
-        timestamp: '@timestamp',
-        container: 'container.id',
-        host: 'host.name',
-        pod: 'kubernetes.pod.uid',
-        tiebreaker: '_doc',
-      },
       anomalyThreshold: 20,
     },
   };

@@ -73,11 +73,6 @@ export const createBasicSourceConfiguration = (sourceId: string): LogSourceConfi
   },
   logColumns: [],
   fields: {
-    container: 'CONTAINER_FIELD',
-    host: 'HOST_FIELD',
-    pod: 'POD_FIELD',
-    tiebreaker: 'TIEBREAKER_FIELD',
-    timestamp: 'TIMESTAMP_FIELD',
     message: ['MESSAGE_FIELD'],
   },
   name: sourceId,
@@ -19,7 +19,7 @@ export const useInfraMLModule = <JobType extends string>({
   moduleDescriptor: ModuleDescriptor<JobType>;
 }) => {
   const { services } = useKibanaContextForPlugin();
-  const { spaceId, sourceId, timestampField } = sourceConfiguration;
+  const { spaceId, sourceId } = sourceConfiguration;
   const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
 
   const [, fetchJobStatus] = useTrackedPromise(
@@ -64,7 +64,6 @@ export const useInfraMLModule = <JobType extends string>({
           indices: selectedIndices,
           sourceId,
           spaceId,
-          timestampField,
         },
         partitionField,
       },
@@ -91,7 +90,7 @@ export const useInfraMLModule = <JobType extends string>({
         dispatchModuleStatus({ type: 'failedSetup' });
       },
     },
-    [moduleDescriptor.setUpModule, spaceId, sourceId, timestampField]
+    [moduleDescriptor.setUpModule, spaceId, sourceId]
   );
 
   const [cleanUpModuleRequest, cleanUpModule] = useTrackedPromise(

@@ -45,8 +45,7 @@ export const isJobConfigurationOutdated =
     isSubset(
       new Set(jobConfiguration.indexPattern.split(',')),
       new Set(currentSourceConfiguration.indices)
-    ) &&
-    jobConfiguration.timestampField === currentSourceConfiguration.timestampField
+    )
   );
 };
 

@@ -49,12 +49,10 @@ export interface ModuleDescriptor<JobType extends string> {
   ) => Promise<DeleteJobsResponsePayload>;
   validateSetupIndices?: (
     indices: string[],
-    timestampField: string,
     fetch: HttpHandler
   ) => Promise<ValidationIndicesResponsePayload>;
   validateSetupDatasets?: (
     indices: string[],
-    timestampField: string,
     startTime: number,
     endTime: number,
     fetch: HttpHandler
@@ -65,7 +63,6 @@ export interface ModuleSourceConfiguration {
   indices: string[];
   sourceId: string;
   spaceId: string;
-  timestampField: string;
 }
 
 interface ManyCategoriesWarningReason {
@@ -17,21 +17,18 @@ export const useMetricHostsModule = ({
   indexPattern,
   sourceId,
   spaceId,
-  timestampField,
 }: {
   indexPattern: string;
   sourceId: string;
   spaceId: string;
-  timestampField: string;
 }) => {
   const sourceConfiguration: ModuleSourceConfiguration = useMemo(
     () => ({
       indices: indexPattern.split(','),
       sourceId,
       spaceId,
-      timestampField,
     }),
-    [indexPattern, sourceId, spaceId, timestampField]
+    [indexPattern, sourceId, spaceId]
   );
 
   const infraMLModule = useInfraMLModule({

@@ -18,6 +18,7 @@ import {
   MetricsHostsJobType,
   bucketSpan,
 } from '../../../../../common/infra_ml';
+import { TIMESTAMP_FIELD } from '../../../../../common/constants';
 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
 import MemoryJob from '../../../../../../ml/server/models/data_recognizer/modules/metrics_ui_hosts/ml/hosts_memory_usage.json';
 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
@@ -68,7 +69,7 @@ const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler)
     start,
     end,
     filter,
-    moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField },
+    moduleSourceConfiguration: { spaceId, sourceId, indices },
     partitionField,
   } = setUpModuleArgs;
 
@@ -93,13 +94,13 @@ const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler)
       return {
         job_id: id,
         data_description: {
-          time_field: timestampField,
+          time_field: TIMESTAMP_FIELD,
         },
         analysis_config,
         custom_settings: {
           metrics_source_config: {
             indexPattern: indexNamePattern,
-            timestampField,
+            timestampField: TIMESTAMP_FIELD,
             bucketSpan,
           },
         },
@@ -17,21 +17,18 @@ export const useMetricK8sModule = ({
   indexPattern,
   sourceId,
   spaceId,
-  timestampField,
 }: {
   indexPattern: string;
   sourceId: string;
   spaceId: string;
-  timestampField: string;
 }) => {
   const sourceConfiguration: ModuleSourceConfiguration = useMemo(
     () => ({
       indices: indexPattern.split(','),
       sourceId,
       spaceId,
-      timestampField,
     }),
-    [indexPattern, sourceId, spaceId, timestampField]
+    [indexPattern, sourceId, spaceId]
   );
 
   const infraMLModule = useInfraMLModule({

@@ -18,6 +18,7 @@ import {
   MetricK8sJobType,
   bucketSpan,
 } from '../../../../../common/infra_ml';
+import { TIMESTAMP_FIELD } from '../../../../../common/constants';
 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
 import MemoryJob from '../../../../../../ml/server/models/data_recognizer/modules/metrics_ui_k8s/ml/k8s_memory_usage.json';
 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
@@ -69,7 +70,7 @@ const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler)
     start,
     end,
     filter,
-    moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField },
+    moduleSourceConfiguration: { spaceId, sourceId, indices },
     partitionField,
   } = setUpModuleArgs;
 
@@ -93,13 +94,13 @@ const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler)
       return {
         job_id: id,
         data_description: {
-          time_field: timestampField,
+          time_field: TIMESTAMP_FIELD,
         },
         analysis_config,
         custom_settings: {
           metrics_source_config: {
             indexPattern: indexNamePattern,
-            timestampField,
+            timestampField: TIMESTAMP_FIELD,
             bucketSpan,
           },
         },
@@ -14,7 +14,6 @@ import {
   SnapshotNodeMetric,
   SnapshotNodePath,
 } from '../../common/http_api/snapshot_api';
-import { MetricsSourceConfigurationProperties } from '../../common/metrics_sources';
 import { WaffleSortOption } from '../pages/metrics/inventory_view/hooks/use_waffle_options';
 
 export interface InfraWaffleMapNode {
@@ -124,7 +123,6 @@ export enum InfraWaffleMapRuleOperator {
 }
 
 export interface InfraWaffleMapOptions {
-  fields?: Omit<MetricsSourceConfigurationProperties['fields'], 'message'> | null;
   formatter: InfraFormatterType;
   formatTemplate: string;
   metric: SnapshotMetricInput;

@@ -151,7 +151,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('default');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'HOST_FIELD: HOST_NAME')"`
+        `"(language:kuery,query:'host.name: HOST_NAME')"`
       );
       expect(searchParams.get('logPosition')).toEqual(null);
     });
@@ -172,7 +172,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('default');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'(HOST_FIELD: HOST_NAME) and (FILTER_FIELD:FILTER_VALUE)')"`
+        `"(language:kuery,query:'(host.name: HOST_NAME) and (FILTER_FIELD:FILTER_VALUE)')"`
       );
       expect(searchParams.get('logPosition')).toMatchInlineSnapshot(
         `"(end:'2019-02-20T14:58:09.404Z',position:(tiebreaker:0,time:1550671089404),start:'2019-02-20T12:58:09.404Z',streamLive:!f)"`
@@ -193,7 +193,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('OTHER_SOURCE');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'HOST_FIELD: HOST_NAME')"`
+        `"(language:kuery,query:'host.name: HOST_NAME')"`
      );
       expect(searchParams.get('logPosition')).toEqual(null);
     });
@@ -229,7 +229,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('default');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'CONTAINER_FIELD: CONTAINER_ID')"`
+        `"(language:kuery,query:'container.id: CONTAINER_ID')"`
       );
       expect(searchParams.get('logPosition')).toEqual(null);
     });
@@ -250,7 +250,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('default');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'(CONTAINER_FIELD: CONTAINER_ID) and (FILTER_FIELD:FILTER_VALUE)')"`
+        `"(language:kuery,query:'(container.id: CONTAINER_ID) and (FILTER_FIELD:FILTER_VALUE)')"`
       );
       expect(searchParams.get('logPosition')).toMatchInlineSnapshot(
         `"(end:'2019-02-20T14:58:09.404Z',position:(tiebreaker:0,time:1550671089404),start:'2019-02-20T12:58:09.404Z',streamLive:!f)"`
@@ -287,7 +287,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('default');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'POD_FIELD: POD_UID')"`
+        `"(language:kuery,query:'kubernetes.pod.uid: POD_UID')"`
       );
       expect(searchParams.get('logPosition')).toEqual(null);
     });
@@ -306,7 +306,7 @@ describe('LinkToLogsPage component', () => {
       const searchParams = new URLSearchParams(history.location.search);
       expect(searchParams.get('sourceId')).toEqual('default');
       expect(searchParams.get('logFilter')).toMatchInlineSnapshot(
-        `"(language:kuery,query:'(POD_FIELD: POD_UID) and (FILTER_FIELD:FILTER_VALUE)')"`
+        `"(language:kuery,query:'(kubernetes.pod.uid: POD_UID) and (FILTER_FIELD:FILTER_VALUE)')"`
       );
       expect(searchParams.get('logPosition')).toMatchInlineSnapshot(
         `"(end:'2019-02-20T14:58:09.404Z',position:(tiebreaker:0,time:1550671089404),start:'2019-02-20T12:58:09.404Z',streamLive:!f)"`
@@ -34,12 +34,11 @@ export const RedirectToNodeLogs = ({
   location,
 }: RedirectToNodeLogsType) => {
   const { services } = useKibanaContextForPlugin();
-  const { isLoading, loadSource, sourceConfiguration } = useLogSource({
+  const { isLoading, loadSource } = useLogSource({
     fetch: services.http.fetch,
     sourceId,
     indexPatternsService: services.data.indexPatterns,
   });
-  const fields = sourceConfiguration?.configuration.fields;
 
   useMount(() => {
     loadSource();
@@ -57,11 +56,9 @@ export const RedirectToNodeLogs = ({
         })}
       />
     );
-  } else if (fields == null) {
-    return null;
   }
 
-  const nodeFilter = `${findInventoryFields(nodeType, fields).id}: ${nodeId}`;
+  const nodeFilter = `${findInventoryFields(nodeType).id}: ${nodeId}`;
   const userFilter = getFilterFromLocation(location);
   const filter = userFilter ? `(${nodeFilter}) and (${userFilter})` : nodeFilter;
 

@@ -18,7 +18,6 @@ import { PageContent } from '../../../../components/page';
 import { useWaffleTimeContext } from '../hooks/use_waffle_time';
 import { useWaffleFiltersContext } from '../hooks/use_waffle_filters';
 import { DEFAULT_LEGEND, useWaffleOptionsContext } from '../hooks/use_waffle_options';
-import { useSourceContext } from '../../../../containers/metrics_source';
 import { InfraFormatterType } from '../../../../lib/lib';
 import { euiStyled } from '../../../../../../../../src/plugins/kibana_react/common';
 import { Toolbar } from './toolbars/toolbar';
@@ -41,7 +40,6 @@ interface Props {
 export const Layout = React.memo(
   ({ shouldLoadDefault, currentView, reload, interval, nodes, loading }: Props) => {
     const [showLoading, setShowLoading] = useState(true);
-    const { source } = useSourceContext();
     const {
       metric,
       groupBy,
@@ -65,7 +63,6 @@ export const Layout = React.memo(
       legend: createLegend(legendPalette, legendSteps, legendReverseColors),
       metric,
       sort,
-      fields: source?.configuration?.fields,
       groupBy,
     };
 
@@ -67,13 +67,11 @@ export const AnomalyDetectionFlyout = () => {
       indexPattern={source?.configuration.metricAlias ?? ''}
       sourceId={'default'}
       spaceId={space.id}
-      timestampField={source?.configuration.fields.timestamp ?? ''}
     >
       <MetricK8sModuleProvider
         indexPattern={source?.configuration.metricAlias ?? ''}
         sourceId={'default'}
         spaceId={space.id}
-        timestampField={source?.configuration.fields.timestamp ?? ''}
       >
         <EuiFlyout onClose={closeFlyout} data-test-subj="loadMLFlyout">
           {screenName === 'home' && (

@@ -25,15 +25,13 @@ const TabComponent = (props: TabProps) => {
   const endTimestamp = props.currentTime;
   const startTimestamp = endTimestamp - 60 * 60 * 1000; // 60 minutes
   const { nodeType } = useWaffleOptionsContext();
-  const { options, node } = props;
+  const { node } = props;
 
   const throttledTextQuery = useThrottle(textQuery, textQueryThrottleInterval);
 
   const filter = useMemo(() => {
     const query = [
-      ...(options.fields != null
-        ? [`${findInventoryFields(nodeType, options.fields).id}: "${node.id}"`]
-        : []),
+      `${findInventoryFields(nodeType).id}: "${node.id}"`,
       ...(throttledTextQuery !== '' ? [throttledTextQuery] : []),
     ].join(' and ');
 
@@ -41,7 +39,7 @@ const TabComponent = (props: TabProps) => {
       language: 'kuery',
       query,
     };
-  }, [options.fields, nodeType, node.id, throttledTextQuery]);
+  }, [nodeType, node.id, throttledTextQuery]);
 
   const onQueryChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
     setTextQuery(e.target.value);
@@ -71,14 +71,12 @@ const TabComponent = (props: TabProps) => {
   ]);
   const { sourceId, createDerivedIndexPattern } = useSourceContext();
   const { nodeType, accountId, region, customMetrics } = useWaffleOptionsContext();
-  const { currentTime, options, node } = props;
+  const { currentTime, node } = props;
   const derivedIndexPattern = useMemo(
     () => createDerivedIndexPattern('metrics'),
     [createDerivedIndexPattern]
   );
-  let filter = options.fields
-    ? `${findInventoryFields(nodeType, options.fields).id}: "${node.id}"`
-    : '';
+  let filter = `${findInventoryFields(nodeType).id}: "${node.id}"`;
 
   if (filter) {
     filter = convertKueryToElasticSearchQuery(filter, derivedIndexPattern);

@@ -17,6 +17,7 @@ import {
   EuiIconTip,
   Query,
 } from '@elastic/eui';
+import { getFieldByType } from '../../../../../../../../common/inventory_models';
 import {
   useProcessList,
   SortBy,
@@ -28,7 +29,7 @@ import { SummaryTable } from './summary_table';
 import { ProcessesTable } from './processes_table';
 import { parseSearchString } from './parse_search_string';
 
-const TabComponent = ({ currentTime, node, nodeType, options }: TabProps) => {
+const TabComponent = ({ currentTime, node, nodeType }: TabProps) => {
   const [searchBarState, setSearchBarState] = useState<Query>(Query.MATCH_ALL);
   const [searchFilter, setSearchFilter] = useState<string>('');
   const [sortBy, setSortBy] = useState<SortBy>({
@@ -36,22 +37,17 @@ const TabComponent = ({ currentTime, node, nodeType, options }: TabProps) => {
     isAscending: false,
   });
 
-  const timefield = options.fields!.timestamp;
-
   const hostTerm = useMemo(() => {
-    const field =
-      options.fields && Reflect.has(options.fields, nodeType)
-        ? Reflect.get(options.fields, nodeType)
-        : nodeType;
+    const field = getFieldByType(nodeType) ?? nodeType;
     return { [field]: node.name };
-  }, [options, node, nodeType]);
+  }, [node, nodeType]);
 
   const {
     loading,
     error,
     response,
     makeRequest: reload,
-  } = useProcessList(hostTerm, timefield, currentTime, sortBy, parseSearchString(searchFilter));
+  } = useProcessList(hostTerm, currentTime, sortBy, parseSearchString(searchFilter));
 
   const debouncedSearchOnChange = useMemo(
     () => debounce<(queryText: string) => void>((queryText) => setSearchFilter(queryText), 500),
@@ -73,7 +69,7 @@ const TabComponent = ({ currentTime, node, nodeType, options }: TabProps) => {
 
   return (
     <TabContent>
-      <ProcessListContextProvider hostTerm={hostTerm} to={currentTime} timefield={timefield}>
+      <ProcessListContextProvider hostTerm={hostTerm} to={currentTime}>
         <SummaryTable
           isLoading={loading}
           processSummary={(!error ? response?.summary : null) ?? { total: 0 }}
@@ -124,11 +124,7 @@ export class Node extends React.PureComponent<Props, State> {
 
         {isAlertFlyoutVisible && (
           <AlertFlyout
-            filter={
-              options.fields
-                ? `${findInventoryFields(nodeType, options.fields).id}: "${node.id}"`
-                : ''
-            }
+            filter={`${findInventoryFields(nodeType).id}: "${node.id}"`}
             options={options}
             nodeType={nodeType}
             setVisible={this.setAlertFlyoutVisible}

@@ -64,16 +64,14 @@ export const NodeContextMenu: React.FC<Props & { theme?: EuiTheme }> = withTheme
       return { label: <EuiCode>host.ip</EuiCode>, value: node.ip };
       }
     } else {
-      if (options.fields) {
-        const { id } = findInventoryFields(nodeType, options.fields);
-        return {
-          label: <EuiCode>{id}</EuiCode>,
-          value: node.id,
-        };
-      }
+      const { id } = findInventoryFields(nodeType);
+      return {
+        label: <EuiCode>{id}</EuiCode>,
+        value: node.id,
+      };
     }
     return { label: '', value: '' };
-  }, [nodeType, node.ip, node.id, options.fields]);
+  }, [nodeType, node.ip, node.id]);
 
   const nodeLogsMenuItemLinkProps = useLinkProps(
     getNodeLogsUrl({
@@ -184,11 +182,7 @@ export const NodeContextMenu: React.FC<Props & { theme?: EuiTheme }> = withTheme
 
       {flyoutVisible && (
         <AlertFlyout
-          filter={
-            options.fields
-              ? `${findInventoryFields(nodeType, options.fields).id}: "${node.id}"`
-              : ''
-          }
+          filter={`${findInventoryFields(nodeType).id}: "${node.id}"`}
           options={options}
           nodeType={nodeType}
           setVisible={setFlyoutVisible}
@@ -22,7 +22,6 @@ export interface SortBy {
 
 export function useProcessList(
   hostTerm: Record<string, string>,
-  timefield: string,
   to: number,
   sortBy: SortBy,
   searchFilter: object
@@ -51,7 +50,6 @@ export function useProcessList(
     'POST',
     JSON.stringify({
       hostTerm,
-      timefield,
       indexPattern,
       to,
       sortBy: parsedSortBy,
@@ -75,15 +73,11 @@ export function useProcessList(
   };
 }
 
-function useProcessListParams(props: {
-  hostTerm: Record<string, string>;
-  timefield: string;
-  to: number;
-}) {
-  const { hostTerm, timefield, to } = props;
+function useProcessListParams(props: { hostTerm: Record<string, string>; to: number }) {
+  const { hostTerm, to } = props;
   const { createDerivedIndexPattern } = useSourceContext();
   const indexPattern = createDerivedIndexPattern('metrics').title;
-  return { hostTerm, indexPattern, timefield, to };
+  return { hostTerm, indexPattern, to };
 }
 const ProcessListContext = createContainter(useProcessListParams);
 export const [ProcessListContextProvider, useProcessListContext] = ProcessListContext;

@@ -25,14 +25,13 @@ export function useProcessListRowChart(command: string) {
       fold(throwErrors(createPlainError), identity)
     );
   };
-  const { hostTerm, timefield, indexPattern, to } = useProcessListContext();
+  const { hostTerm, indexPattern, to } = useProcessListContext();
 
   const { error, loading, response, makeRequest } = useHTTPRequest<ProcessListAPIChartResponse>(
     '/api/metrics/process_list/chart',
     'POST',
     JSON.stringify({
       hostTerm,
-      timefield,
       indexPattern,
       to,
       command,

@@ -10,13 +10,6 @@ import { InfraWaffleMapOptions, InfraFormatterType } from '../../../../lib/lib';
 import { SnapshotMetricType } from '../../../../../common/inventory_models/types';
 
 const options: InfraWaffleMapOptions = {
-  fields: {
-    container: 'container.id',
-    pod: 'kubernetes.pod.uid',
-    host: 'host.name',
-    timestamp: '@timestanp',
-    tiebreaker: '@timestamp',
-  },
   formatter: InfraFormatterType.percent,
   formatTemplate: '{{value}}',
   metric: { type: 'cpu' },
@@ -5,9 +5,9 @@
  * 2.0.
  */
 
-import { get } from 'lodash';
 import { InfraWaffleMapNode, InfraWaffleMapOptions } from '../../../../lib/lib';
 import { InventoryItemType } from '../../../../../common/inventory_models/types';
+import { getFieldByType } from '../../../../../common/inventory_models';
 import { LinkDescriptor } from '../../../../hooks/use_link_props';
 
 export const createUptimeLink = (
@@ -24,7 +24,7 @@ export const createUptimeLink = (
       },
     };
   }
-  const field = get(options, ['fields', nodeType], '');
+  const field = getFieldByType(nodeType);
   return {
     app: 'uptime',
     hash: '/',

@@ -8,6 +8,7 @@
 import { InfraMetadataFeature } from '../../../../../common/http_api/metadata_api';
 import { InventoryMetric } from '../../../../../common/inventory_models/types';
 import { metrics } from '../../../../../common/inventory_models/metrics';
+import { TIMESTAMP_FIELD } from '../../../../../common/constants';
 
 export const getFilteredMetrics = (
   requiredMetrics: InventoryMetric[],
@@ -20,7 +21,7 @@ export const getFilteredMetrics = (
     const metricModelCreator = metrics.tsvb[metric];
     // We just need to get a dummy version of the model so we can filter
     // using the `requires` attribute.
-    const metricModel = metricModelCreator('@timestamp', 'test', '>=1m');
+    const metricModel = metricModelCreator(TIMESTAMP_FIELD, 'test', '>=1m');
     return metricMetadata.some((m) => m && metricModel.requires.includes(m));
   });
 };
@@ -27,6 +27,7 @@ import {
 import { createTSVBLink } from './helpers/create_tsvb_link';
 import { getNodeDetailUrl } from '../../../link_to/redirect_to_node_detail';
 import { InventoryItemType } from '../../../../../common/inventory_models/types';
+import { HOST_FIELD, POD_FIELD, CONTAINER_FIELD } from '../../../../../common/constants';
 import { useLinkProps } from '../../../../hooks/use_link_props';
 
 export interface Props {
@@ -44,13 +45,13 @@ const fieldToNodeType = (
   groupBy: string | string[]
 ): InventoryItemType | undefined => {
   const fields = Array.isArray(groupBy) ? groupBy : [groupBy];
-  if (fields.includes(source.fields.host)) {
+  if (fields.includes(HOST_FIELD)) {
     return 'host';
   }
-  if (fields.includes(source.fields.pod)) {
+  if (fields.includes(POD_FIELD)) {
     return 'pod';
   }
-  if (fields.includes(source.fields.container)) {
+  if (fields.includes(CONTAINER_FIELD)) {
     return 'container';
   }
 };

@@ -79,7 +79,7 @@ describe('createTSVBLink()', () => {
       app: 'visualize',
       hash: '/create',
       search: {
-        _a: "(filters:!(),linked:!f,query:(language:kuery,query:''),uiState:(),vis:(aggs:!(),params:(axis_formatter:number,axis_min:0,axis_position:left,axis_scale:normal,default_index_pattern:'my-beats-*',filter:(language:kuery,query:'host.name : \"example-01\"'),id:test-id,index_pattern:'my-beats-*',interval:auto,series:!((axis_position:right,chart_type:line,color:#6092C0,fill:0,formatter:percent,id:test-id,label:'avg(system.cpu.user.pct)',line_width:2,metrics:!((field:system.cpu.user.pct,id:test-id,type:avg)),point_size:0,separate_axis:0,split_mode:everything,stacked:none,value_template:{{value}})),show_grid:1,show_legend:1,time_field:time,type:timeseries),title:example-01,type:metrics))",
+        _a: "(filters:!(),linked:!f,query:(language:kuery,query:''),uiState:(),vis:(aggs:!(),params:(axis_formatter:number,axis_min:0,axis_position:left,axis_scale:normal,default_index_pattern:'my-beats-*',filter:(language:kuery,query:'host.name : \"example-01\"'),id:test-id,index_pattern:'my-beats-*',interval:auto,series:!((axis_position:right,chart_type:line,color:#6092C0,fill:0,formatter:percent,id:test-id,label:'avg(system.cpu.user.pct)',line_width:2,metrics:!((field:system.cpu.user.pct,id:test-id,type:avg)),point_size:0,separate_axis:0,split_mode:everything,stacked:none,value_template:{{value}})),show_grid:1,show_legend:1,time_field:'@timestamp',type:timeseries),title:example-01,type:metrics))",
         _g: '(refreshInterval:(pause:!t,value:0),time:(from:now-1h,to:now))',
         type: 'metrics',
       },
@@ -97,7 +97,7 @@ describe('createTSVBLink()', () => {
       app: 'visualize',
       hash: '/create',
       search: {
-        _a: "(filters:!(),linked:!f,query:(language:kuery,query:''),uiState:(),vis:(aggs:!(),params:(axis_formatter:number,axis_min:0,axis_position:left,axis_scale:normal,default_index_pattern:'my-beats-*',filter:(language:kuery,query:'system.network.name:lo* and host.name : \"example-01\"'),id:test-id,index_pattern:'my-beats-*',interval:auto,series:!((axis_position:right,chart_type:line,color:#6092C0,fill:0,formatter:percent,id:test-id,label:'avg(system.cpu.user.pct)',line_width:2,metrics:!((field:system.cpu.user.pct,id:test-id,type:avg)),point_size:0,separate_axis:0,split_mode:everything,stacked:none,value_template:{{value}})),show_grid:1,show_legend:1,time_field:time,type:timeseries),title:example-01,type:metrics))",
+        _a: "(filters:!(),linked:!f,query:(language:kuery,query:''),uiState:(),vis:(aggs:!(),params:(axis_formatter:number,axis_min:0,axis_position:left,axis_scale:normal,default_index_pattern:'my-beats-*',filter:(language:kuery,query:'system.network.name:lo* and host.name : \"example-01\"'),id:test-id,index_pattern:'my-beats-*',interval:auto,series:!((axis_position:right,chart_type:line,color:#6092C0,fill:0,formatter:percent,id:test-id,label:'avg(system.cpu.user.pct)',line_width:2,metrics:!((field:system.cpu.user.pct,id:test-id,type:avg)),point_size:0,separate_axis:0,split_mode:everything,stacked:none,value_template:{{value}})),show_grid:1,show_legend:1,time_field:'@timestamp',type:timeseries),title:example-01,type:metrics))",
         _g: '(refreshInterval:(pause:!t,value:0),time:(from:now-1h,to:now))',
         type: 'metrics',
       },
@@ -161,7 +161,7 @@ describe('createTSVBLink()', () => {
       app: 'visualize',
       hash: '/create',
       search: {
-        _a: "(filters:!(),linked:!f,query:(language:kuery,query:''),uiState:(),vis:(aggs:!(),params:(axis_formatter:number,axis_min:0,axis_position:left,axis_scale:normal,default_index_pattern:'metric*',filter:(language:kuery,query:'host.name : \"example-01\"'),id:test-id,index_pattern:'metric*',interval:auto,series:!((axis_position:right,chart_type:line,color:#6092C0,fill:0,formatter:percent,id:test-id,label:'avg(system.cpu.user.pct)',line_width:2,metrics:!((field:system.cpu.user.pct,id:test-id,type:avg)),point_size:0,separate_axis:0,split_mode:everything,stacked:none,value_template:{{value}})),show_grid:1,show_legend:1,time_field:time,type:timeseries),title:example-01,type:metrics))",
+        _a: "(filters:!(),linked:!f,query:(language:kuery,query:''),uiState:(),vis:(aggs:!(),params:(axis_formatter:number,axis_min:0,axis_position:left,axis_scale:normal,default_index_pattern:'metric*',filter:(language:kuery,query:'host.name : \"example-01\"'),id:test-id,index_pattern:'metric*',interval:auto,series:!((axis_position:right,chart_type:line,color:#6092C0,fill:0,formatter:percent,id:test-id,label:'avg(system.cpu.user.pct)',line_width:2,metrics:!((field:system.cpu.user.pct,id:test-id,type:avg)),point_size:0,separate_axis:0,split_mode:everything,stacked:none,value_template:{{value}})),show_grid:1,show_legend:1,time_field:'@timestamp',type:timeseries),title:example-01,type:metrics))",
         _g: '(refreshInterval:(pause:!t,value:0),time:(from:now-1h,to:now))',
         type: 'metrics',
       },
@@ -8,6 +8,7 @@
 import { encode } from 'rison-node';
 import uuid from 'uuid';
 import { set } from '@elastic/safer-lodash-set';
+import { TIMESTAMP_FIELD } from '../../../../../../common/constants';
 import { MetricsSourceConfigurationProperties } from '../../../../../../common/metrics_sources';
 import { colorTransformer, Color } from '../../../../../../common/color_palette';
 import { MetricsExplorerSeries } from '../../../../../../common/http_api/metrics_explorer';
@@ -169,7 +170,7 @@ export const createTSVBLink = (
     series: options.metrics.map(mapMetricToSeries(chartOptions)),
     show_grid: 1,
     show_legend: 1,
-    time_field: (source && source.fields.timestamp) || '@timestamp',
+    time_field: TIMESTAMP_FIELD,
     type: 'timeseries',
     filter: createFilterFromOptions(options, series),
   },

@@ -84,7 +84,6 @@ export function useMetricsExplorerData(
           void 0,
         timerange: {
           ...timerange,
-          field: source.fields.timestamp,
           from: from.valueOf(),
           to: to.valueOf(),
         },
@@ -8,7 +8,7 @@
 import { encode } from 'rison-node';
 import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 import { FetchData, FetchDataParams, LogsFetchDataResponse } from '../../../observability/public';
-import { DEFAULT_SOURCE_ID } from '../../common/constants';
+import { DEFAULT_SOURCE_ID, TIMESTAMP_FIELD } from '../../common/constants';
 import { callFetchLogSourceConfigurationAPI } from '../containers/logs/log_source/api/fetch_log_source_configuration';
 import { callFetchLogSourceStatusAPI } from '../containers/logs/log_source/api/fetch_log_source_status';
 import { InfraClientCoreSetup, InfraClientStartDeps } from '../types';
@@ -30,7 +30,6 @@ interface StatsAggregation {
 
 interface LogParams {
   index: string;
-  timestampField: string;
 }
 
 type StatsAndSeries = Pick<LogsFetchDataResponse, 'stats' | 'series'>;
@@ -63,7 +62,6 @@ export function getLogsOverviewDataFetcher(
     const { stats, series } = await fetchLogsOverview(
       {
         index: resolvedLogSourceConfiguration.indices,
-        timestampField: resolvedLogSourceConfiguration.timestampField,
       },
       params,
       data
@@ -117,7 +115,7 @@ async function fetchLogsOverview(
 function buildLogOverviewQuery(logParams: LogParams, params: FetchDataParams) {
   return {
     range: {
-      [logParams.timestampField]: {
+      [TIMESTAMP_FIELD]: {
         gt: new Date(params.absoluteTime.start).toISOString(),
         lte: new Date(params.absoluteTime.end).toISOString(),
         format: 'strict_date_optional_time',
@@ -137,7 +135,7 @@ function buildLogOverviewAggregations(logParams: LogParams, params: FetchDataPar
     aggs: {
       series: {
         date_histogram: {
-          field: logParams.timestampField,
+          field: TIMESTAMP_FIELD,
           fixed_interval: params.intervalString,
         },
       },

@@ -150,7 +150,6 @@ describe('Logs UI Observability Homepage Functions', () => {
           type: 'index_pattern',
           indexPatternId: 'test-index-pattern',
         },
-        fields: { timestamp: '@timestamp', tiebreaker: '_doc' },
       },
     },
   } as GetLogSourceConfigurationSuccessResponsePayload);
@@ -13,6 +13,13 @@ import {
   DeprecationsDetails,
   GetDeprecationsContext,
 } from 'src/core/server';
+import {
+  TIMESTAMP_FIELD,
+  TIEBREAKER_FIELD,
+  CONTAINER_FIELD,
+  HOST_FIELD,
+  POD_FIELD,
+} from '../common/constants';
 import { InfraSources } from './lib/sources';
 
 const deprecatedFieldMessage = (fieldName: string, defaultValue: string, configNames: string[]) =>
@@ -28,11 +35,11 @@ const deprecatedFieldMessage = (fieldName: string, defaultValue: string, configN
 });
 
 const DEFAULT_VALUES = {
-  timestamp: '@timestamp',
-  tiebreaker: '_doc',
-  container: 'container.id',
-  host: 'host.name',
-  pod: 'kubernetes.pod.uid',
+  timestamp: TIMESTAMP_FIELD,
+  tiebreaker: TIEBREAKER_FIELD,
+  container: CONTAINER_FIELD,
+  host: HOST_FIELD,
+  pod: POD_FIELD,
 };
 
 const FIELD_DEPRECATION_FACTORIES: Record<string, (configNames: string[]) => DeprecationsDetails> =
@@ -24,6 +24,7 @@ import {
 import { SortedSearchHit } from '../framework';
 import { KibanaFramework } from '../framework/kibana_framework_adapter';
 import { ResolvedLogSourceConfiguration } from '../../../../common/log_sources';
+import { TIMESTAMP_FIELD, TIEBREAKER_FIELD } from '../../../../common/constants';
 
 const TIMESTAMP_FORMAT = 'epoch_millis';
 
@@ -64,8 +65,8 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
       : {};
 
     const sort = {
-      [resolvedLogSourceConfiguration.timestampField]: sortDirection,
-      [resolvedLogSourceConfiguration.tiebreakerField]: sortDirection,
+      [TIMESTAMP_FIELD]: sortDirection,
+      [TIEBREAKER_FIELD]: sortDirection,
     };
 
     const esQuery = {
@@ -83,7 +84,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
             ...createFilterClauses(query, highlightQuery),
             {
               range: {
-                [resolvedLogSourceConfiguration.timestampField]: {
+                [TIMESTAMP_FIELD]: {
                   gte: startTimestamp,
                   lte: endTimestamp,
                   format: TIMESTAMP_FORMAT,
@@ -146,7 +147,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
       aggregations: {
         count_by_date: {
           date_range: {
-            field: resolvedLogSourceConfiguration.timestampField,
+            field: TIMESTAMP_FIELD,
             format: TIMESTAMP_FORMAT,
             ranges: bucketIntervalStarts.map((bucketIntervalStart) => ({
               from: bucketIntervalStart.getTime(),
@@ -157,10 +158,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
             top_hits_by_key: {
               top_hits: {
                 size: 1,
-                sort: [
-                  { [resolvedLogSourceConfiguration.timestampField]: 'asc' },
-                  { [resolvedLogSourceConfiguration.tiebreakerField]: 'asc' },
-                ],
+                sort: [{ [TIMESTAMP_FIELD]: 'asc' }, { [TIEBREAKER_FIELD]: 'asc' }],
                 _source: false,
               },
             },
@@ -173,7 +171,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
             ...createQueryFilterClauses(filterQuery),
             {
               range: {
-                [resolvedLogSourceConfiguration.timestampField]: {
+                [TIMESTAMP_FIELD]: {
                   gte: startTimestamp,
                   lte: endTimestamp,
                   format: TIMESTAMP_FORMAT,
@@ -8,6 +8,7 @@
 import { i18n } from '@kbn/i18n';
 import { flatten, get } from 'lodash';
 import { KibanaRequest } from 'src/core/server';
+import { TIMESTAMP_FIELD } from '../../../../common/constants';
 import { NodeDetailsMetricData } from '../../../../common/http_api/node_details_api';
 import { KibanaFramework } from '../framework/kibana_framework_adapter';
 import { InfraMetricsAdapter, InfraMetricsRequestOptions } from './adapter_types';
@@ -36,7 +37,7 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
     rawRequest: KibanaRequest
   ): Promise<NodeDetailsMetricData[]> {
     const indexPattern = `${options.sourceConfiguration.metricAlias}`;
-    const fields = findInventoryFields(options.nodeType, options.sourceConfiguration.fields);
+    const fields = findInventoryFields(options.nodeType);
     const nodeField = fields.id;
 
     const search = <Aggregation>(searchOptions: object) =>
@@ -122,11 +123,7 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
       max: options.timerange.to,
     };
 
-    const model = createTSVBModel(
-      options.sourceConfiguration.fields.timestamp,
-      indexPattern,
-      options.timerange.interval
-    );
+    const model = createTSVBModel(TIMESTAMP_FIELD, indexPattern, options.timerange.interval);
 
     const client = <Hit = {}, Aggregation = undefined>(
       opts: CallWithRequestParams
@@ -137,7 +134,6 @@ export class KibanaMetricsAdapter implements InfraMetricsAdapter {
       client,
       {
         indexPattern: `${options.sourceConfiguration.metricAlias}`,
-        timestampField: options.sourceConfiguration.fields.timestamp,
        timerange: options.timerange,
       },
       model.requires

@@ -17,7 +17,6 @@ export const libsMock = {
           type: 'index_pattern',
           indexPatternId: 'some-id',
         },
-        fields: { timestamp: '@timestamp' },
       },
     });
   },
@@ -74,7 +74,6 @@ export const evaluateAlert = <Params extends EvaluatedAlertParams = EvaluatedAle
         esClient,
         criterion,
         config.metricAlias,
-        config.fields.timestamp,
         groupBy,
         filterQuery,
         timeframe,
@@ -147,7 +146,6 @@ const getMetric: (
   esClient: ElasticsearchClient,
   params: MetricExpressionParams,
   index: string,
-  timefield: string,
   groupBy: string | undefined | string[],
   filterQuery: string | undefined,
   timeframe?: { start?: number; end: number },
@@ -156,7 +154,6 @@ const getMetric: (
   esClient,
   params,
   index,
-  timefield,
   groupBy,
   filterQuery,
   timeframe,
@@ -172,7 +169,6 @@ const getMetric: (
 
   const searchBody = getElasticsearchMetricQuery(
     params,
-    timefield,
     calculatedTimerange,
     hasGroupBy ? groupBy : undefined,
     filterQuery
  );

@@ -17,7 +17,6 @@ describe("The Metric Threshold Alert's getElasticsearchMetricQuery", () => {
     timeSize: 1,
   } as MetricExpressionParams;
 
-  const timefield = '@timestamp';
   const groupBy = 'host.doggoname';
   const timeframe = {
     start: moment().subtract(5, 'minutes').valueOf(),
@@ -25,7 +24,7 @@ describe("The Metric Threshold Alert's getElasticsearchMetricQuery", () => {
   };
 
   describe('when passed no filterQuery', () => {
-    const searchBody = getElasticsearchMetricQuery(expressionParams, timefield, timeframe, groupBy);
+    const searchBody = getElasticsearchMetricQuery(expressionParams, timeframe, groupBy);
     test('includes a range filter', () => {
       expect(
         searchBody.query.bool.filter.find((filter) => filter.hasOwnProperty('range'))
@@ -47,7 +46,6 @@ describe("The Metric Threshold Alert's getElasticsearchMetricQuery", () => {
 
     const searchBody = getElasticsearchMetricQuery(
       expressionParams,
-      timefield,
       timeframe,
       groupBy,
       filterQuery
    );
@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { TIMESTAMP_FIELD } from '../../../../../common/constants';
 import { networkTraffic } from '../../../../../common/inventory_models/shared/metrics/snapshot/network_traffic';
 import { MetricExpressionParams, Aggregators } from '../types';
 import { createPercentileAggregation } from './create_percentile_aggregation';
@@ -21,7 +22,6 @@ const getParsedFilterQuery: (filterQuery: string | undefined) => Record<string,
 
 export const getElasticsearchMetricQuery = (
   { metric, aggType, timeUnit, timeSize }: MetricExpressionParams,
-  timefield: string,
   timeframe: { start: number; end: number },
   groupBy?: string | string[],
   filterQuery?: string
@@ -56,9 +56,9 @@ export const getElasticsearchMetricQuery = (
     ? {
         aggregatedIntervals: {
           date_histogram: {
-            field: timefield,
+            field: TIMESTAMP_FIELD,
             fixed_interval: interval,
-            offset: calculateDateHistogramOffset({ from, to, interval, field: timefield }),
+            offset: calculateDateHistogramOffset({ from, to, interval }),
             extended_bounds: {
               min: from,
               max: to,
@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { TIMESTAMP_FIELD } from '../../../common/constants';
 import { ProcessListAPIRequest, ProcessListAPIQueryAggregation } from '../../../common/http_api';
 import { ESSearchClient } from '../metrics/types';
 import { CMDLINE_FIELD } from './common';
@@ -13,7 +14,7 @@ const TOP_N = 10;
 
 export const getProcessList = async (
   search: ESSearchClient,
-  { hostTerm, timefield, indexPattern, to, sortBy, searchFilter }: ProcessListAPIRequest
+  { hostTerm, indexPattern, to, sortBy, searchFilter }: ProcessListAPIRequest
 ) => {
   const body = {
     size: 0,
@@ -22,7 +23,7 @@ export const getProcessList = async (
         filter: [
           {
             range: {
-              [timefield]: {
+              [TIMESTAMP_FIELD]: {
                 gte: to - 60 * 1000, // 1 minute
                 lte: to,
               },
@@ -47,7 +48,7 @@ export const getProcessList = async (
             size: 1,
             sort: [
              {
-                [timefield]: {
+                [TIMESTAMP_FIELD]: {
                   order: 'desc',
                 },
               },
@@ -93,7 +94,7 @@ export const getProcessList = async (
             size: 1,
             sort: [
              {
-                [timefield]: {
+                [TIMESTAMP_FIELD]: {
                   order: 'desc',
                 },
               },

@@ -6,6 +6,7 @@
  */
 
 import { first } from 'lodash';
+import { TIMESTAMP_FIELD } from '../../../common/constants';
 import {
   ProcessListAPIChartRequest,
   ProcessListAPIChartQueryAggregation,
@@ -17,7 +18,7 @@ import { CMDLINE_FIELD } from './common';
 
 export const getProcessListChart = async (
   search: ESSearchClient,
-  { hostTerm, timefield, indexPattern, to, command }: ProcessListAPIChartRequest
+  { hostTerm, indexPattern, to, command }: ProcessListAPIChartRequest
 ) => {
   const body = {
     size: 0,
@@ -26,7 +27,7 @@ export const getProcessListChart = async (
         filter: [
          {
            range: {
-              [timefield]: {
+              [TIMESTAMP_FIELD]: {
                 gte: to - 60 * 1000, // 1 minute
                 lte: to,
               },
@@ -60,7 +61,7 @@ export const getProcessListChart = async (
       aggs: {
         timeseries: {
           date_histogram: {
-            field: timefield,
+            field: TIMESTAMP_FIELD,
             fixed_interval: '1m',
             extended_bounds: {
               min: to - 60 * 15 * 1000, // 15 minutes,
@@ -6,6 +6,7 @@
  */
 
 import * as rt from 'io-ts';
+import { TIEBREAKER_FIELD } from '../../../../common/constants';
 import { ANOMALY_THRESHOLD } from '../../../../common/infra_ml';
 import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
 import {
@@ -20,9 +21,6 @@ import {
 import { InfluencerFilter } from '../common';
 import { Sort, Pagination } from '../../../../common/http_api/infra_ml';
 
-// TODO: Reassess validity of this against ML docs
-const TIEBREAKER_FIELD = '_doc';
-
 const sortToMlFieldMap = {
   dataset: 'partition_field_value',
   anomalyScore: 'record_score',

@@ -6,6 +6,7 @@
  */
 
 import * as rt from 'io-ts';
+import { TIEBREAKER_FIELD } from '../../../../common/constants';
 import { ANOMALY_THRESHOLD } from '../../../../common/infra_ml';
 import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
 import {
@@ -20,9 +21,6 @@ import {
 import { InfluencerFilter } from '../common';
 import { Sort, Pagination } from '../../../../common/http_api/infra_ml';
 
-// TODO: Reassess validity of this against ML docs
-const TIEBREAKER_FIELD = '_doc';
-
 const sortToMlFieldMap = {
   dataset: 'partition_field_value',
   anomalyScore: 'record_score',
@@ -7,6 +7,7 @@
 
 import { set } from '@elastic/safer-lodash-set';
 import { ThrowReporter } from 'io-ts/lib/ThrowReporter';
+import { TIMESTAMP_FIELD } from '../../../common/constants';
 import { MetricsAPIRequest, MetricsAPIResponse, afterKeyObjectRT } from '../../../common/http_api';
 import {
   ESSearchClient,
@@ -36,7 +37,7 @@ export const query = async (
   const filter: Array<Record<string, any>> = [
     {
       range: {
-        [options.timerange.field]: {
+        [TIMESTAMP_FIELD]: {
           gte: options.timerange.from,
           lte: options.timerange.to,
           format: 'epoch_millis',

@@ -21,7 +21,6 @@ export const calculatedInterval = async (search: ESSearchClient, options: Metric
     search,
     {
       indexPattern: options.indexPattern,
-      timestampField: options.timerange.field,
       timerange: { from: options.timerange.from, to: options.timerange.to },
     },
     options.modules

@@ -13,7 +13,6 @@ const keys = ['example-0'];
 
 const options: MetricsAPIRequest = {
   timerange: {
-    field: '@timestamp',
     from: moment('2020-01-01T00:00:00Z').valueOf(),
     to: moment('2020-01-01T00:00:00Z').add(5, 'minute').valueOf(),
     interval: '1m',

@@ -11,7 +11,6 @@ import { MetricsAPIRequest } from '../../../../common/http_api';
 
 const options: MetricsAPIRequest = {
   timerange: {
-    field: '@timestamp',
     from: moment('2020-01-01T00:00:00Z').valueOf(),
     to: moment('2020-01-01T01:00:00Z').valueOf(),
     interval: '>=1m',
@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { TIMESTAMP_FIELD } from '../../../../common/constants';
 import { MetricsAPIRequest } from '../../../../common/http_api/metrics_api';
 import { calculateDateHistogramOffset } from './calculate_date_histogram_offset';
 import { createMetricsAggregations } from './create_metrics_aggregations';
@@ -15,7 +16,7 @@ export const createAggregations = (options: MetricsAPIRequest) => {
   const histogramAggregation = {
     histogram: {
       date_histogram: {
-        field: options.timerange.field,
+        field: TIMESTAMP_FIELD,
         fixed_interval: intervalString,
         offset: options.alignDataToEnd ? calculateDateHistogramOffset(options.timerange) : '0s',
         extended_bounds: {

@@ -11,7 +11,6 @@ import { createMetricsAggregations } from './create_metrics_aggregations';
 
 const options: MetricsAPIRequest = {
   timerange: {
-    field: '@timestamp',
     from: moment('2020-01-01T00:00:00Z').valueOf(),
     to: moment('2020-01-01T01:00:00Z').valueOf(),
     interval: '>=1m',
@ -5,11 +5,7 @@
* 2.0.
*/

import {
METRICS_INDEX_PATTERN,
LOGS_INDEX_PATTERN,
TIMESTAMP_FIELD,
} from '../../../common/constants';
import { METRICS_INDEX_PATTERN, LOGS_INDEX_PATTERN } from '../../../common/constants';
import { InfraSourceConfiguration } from '../../../common/source_configuration/source_configuration';

export const defaultSourceConfiguration: InfraSourceConfiguration = {

@ -21,12 +17,7 @@ export const defaultSourceConfiguration: InfraSourceConfiguration = {
indexName: LOGS_INDEX_PATTERN,
},
fields: {
container: 'container.id',
host: 'host.name',
message: ['message', '@message'],
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',
timestamp: TIMESTAMP_FIELD,
},
inventoryDefaultView: '0',
metricsExplorerDefaultView: '0',

@ -101,12 +101,7 @@ const sourceConfigurationWithIndexPatternReference: InfraSourceConfiguration = {
name: 'NAME',
description: 'DESCRIPTION',
fields: {
container: 'CONTAINER_FIELD',
host: 'HOST_FIELD',
message: ['MESSAGE_FIELD'],
pod: 'POD_FIELD',
tiebreaker: 'TIEBREAKER_FIELD',
timestamp: 'TIMESTAMP_FIELD',
},
logColumns: [],
logIndices: {
@ -24,13 +24,6 @@ describe('the InfraSources lib', () => {
attributes: {
metricAlias: 'METRIC_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'log_index_pattern_0' },
fields: {
container: 'CONTAINER',
host: 'HOST',
pod: 'POD',
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
},
references: [
{

@ -50,13 +43,6 @@ describe('the InfraSources lib', () => {
configuration: {
metricAlias: 'METRIC_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_INDEX_PATTERN' },
fields: {
container: 'CONTAINER',
host: 'HOST',
pod: 'POD',
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
},
});
});

@ -67,12 +53,6 @@ describe('the InfraSources lib', () => {
default: {
metricAlias: 'METRIC_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_ALIAS' },
fields: {
host: 'HOST',
pod: 'POD',
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
},
}),
});

@ -82,11 +62,7 @@ describe('the InfraSources lib', () => {
version: 'foo',
type: infraSourceConfigurationSavedObjectName,
updated_at: '2000-01-01T00:00:00.000Z',
attributes: {
fields: {
container: 'CONTAINER',
},
},
attributes: {},
references: [],
});

@ -99,13 +75,6 @@ describe('the InfraSources lib', () => {
configuration: {
metricAlias: 'METRIC_ALIAS',
logIndices: { type: 'index_pattern', indexPatternId: 'LOG_ALIAS' },
fields: {
container: 'CONTAINER',
host: 'HOST',
pod: 'POD',
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
},
});
});

@ -133,13 +102,6 @@ describe('the InfraSources lib', () => {
configuration: {
metricAlias: expect.any(String),
logIndices: expect.any(Object),
fields: {
container: expect.any(String),
host: expect.any(String),
pod: expect.any(String),
tiebreaker: expect.any(String),
timestamp: expect.any(String),
},
},
});
});

@ -53,12 +53,7 @@ export const config: PluginConfigDescriptor = {
schema.object({
fields: schema.maybe(
schema.object({
timestamp: schema.maybe(schema.string()),
message: schema.maybe(schema.arrayOf(schema.string())),
tiebreaker: schema.maybe(schema.string()),
host: schema.maybe(schema.string()),
container: schema.maybe(schema.string()),
pod: schema.maybe(schema.string()),
})
),
})
@ -5,6 +5,7 @@
* 2.0.
*/

import { TIMESTAMP_FIELD } from '../../../../common/constants';
import { InventoryCloudAccount } from '../../../../common/http_api/inventory_meta_api';
import {
InfraMetadataAggregationResponse,

@ -49,7 +50,7 @@ export const getCloudMetadata = async (
must: [
{
range: {
[sourceConfiguration.fields.timestamp]: {
[TIMESTAMP_FIELD]: {
gte: currentTime - 86400000, // 24 hours ago
lte: currentTime,
format: 'epoch_millis',

@ -13,6 +13,7 @@ import {
import { KibanaFramework } from '../../../lib/adapters/framework/kibana_framework_adapter';
import { InfraSourceConfiguration } from '../../../lib/sources';
import { CLOUD_METRICS_MODULES } from '../../../lib/constants';
import { TIMESTAMP_FIELD } from '../../../../common/constants';

export interface InfraCloudMetricsAdapterResponse {
buckets: InfraMetadataAggregationBucket[];

@ -36,7 +37,7 @@ export const getCloudMetricsMetadata = async (
{ match: { 'cloud.instance.id': instanceId } },
{
range: {
[sourceConfiguration.fields.timestamp]: {
[TIMESTAMP_FIELD]: {
gte: timeRange.from,
lte: timeRange.to,
format: 'epoch_millis',

@ -15,6 +15,7 @@ import { KibanaFramework } from '../../../lib/adapters/framework/kibana_framewor
import { InfraSourceConfiguration } from '../../../lib/sources';
import { findInventoryFields } from '../../../../common/inventory_models';
import { InventoryItemType } from '../../../../common/inventory_models/types';
import { TIMESTAMP_FIELD } from '../../../../common/constants';

export interface InfraMetricsAdapterResponse {
id: string;

@ -30,7 +31,7 @@ export const getMetricMetadata = async (
nodeType: InventoryItemType,
timeRange: { from: number; to: number }
): Promise<InfraMetricsAdapterResponse> => {
const fields = findInventoryFields(nodeType, sourceConfiguration.fields);
const fields = findInventoryFields(nodeType);
const metricQuery = {
allow_no_indices: true,
ignore_unavailable: true,

@ -45,7 +46,7 @@ export const getMetricMetadata = async (
},
{
range: {
[sourceConfiguration.fields.timestamp]: {
[TIMESTAMP_FIELD]: {
gte: timeRange.from,
lte: timeRange.to,
format: 'epoch_millis',
@ -15,6 +15,7 @@ import { getPodNodeName } from './get_pod_node_name';
import { CLOUD_METRICS_MODULES } from '../../../lib/constants';
import { findInventoryFields } from '../../../../common/inventory_models';
import { InventoryItemType } from '../../../../common/inventory_models/types';
import { TIMESTAMP_FIELD } from '../../../../common/constants';

export const getNodeInfo = async (
framework: KibanaFramework,

@ -50,8 +51,7 @@ export const getNodeInfo = async (
}
return {};
}
const fields = findInventoryFields(nodeType, sourceConfiguration.fields);
const timestampField = sourceConfiguration.fields.timestamp;
const fields = findInventoryFields(nodeType);
const params = {
allow_no_indices: true,
ignore_unavailable: true,

@ -60,14 +60,14 @@ export const getNodeInfo = async (
body: {
size: 1,
_source: ['host.*', 'cloud.*', 'agent.*'],
sort: [{ [timestampField]: 'desc' }],
sort: [{ [TIMESTAMP_FIELD]: 'desc' }],
query: {
bool: {
filter: [
{ match: { [fields.id]: nodeId } },
{
range: {
[timestampField]: {
[TIMESTAMP_FIELD]: {
gte: timeRange.from,
lte: timeRange.to,
format: 'epoch_millis',

@ -10,6 +10,7 @@ import { KibanaFramework } from '../../../lib/adapters/framework/kibana_framewor
import { InfraSourceConfiguration } from '../../../lib/sources';
import { findInventoryFields } from '../../../../common/inventory_models';
import type { InfraPluginRequestHandlerContext } from '../../../types';
import { TIMESTAMP_FIELD } from '../../../../common/constants';

export const getPodNodeName = async (
framework: KibanaFramework,

@ -19,8 +20,7 @@ export const getPodNodeName = async (
nodeType: 'host' | 'pod' | 'container',
timeRange: { from: number; to: number }
): Promise<string | undefined> => {
const fields = findInventoryFields(nodeType, sourceConfiguration.fields);
const timestampField = sourceConfiguration.fields.timestamp;
const fields = findInventoryFields(nodeType);
const params = {
allow_no_indices: true,
ignore_unavailable: true,

@ -29,7 +29,7 @@ export const getPodNodeName = async (
body: {
size: 1,
_source: ['kubernetes.node.name'],
sort: [{ [timestampField]: 'desc' }],
sort: [{ [TIMESTAMP_FIELD]: 'desc' }],
query: {
bool: {
filter: [

@ -37,7 +37,7 @@ export const getPodNodeName = async (
{ exists: { field: `kubernetes.node.name` } },
{
range: {
[timestampField]: {
[TIMESTAMP_FIELD]: {
gte: timeRange.from,
lte: timeRange.to,
format: 'epoch_millis',
@ -10,7 +10,6 @@ import { convertRequestToMetricsAPIOptions } from './convert_request_to_metrics_

const BASE_REQUEST: MetricsExplorerRequestBody = {
timerange: {
field: '@timestamp',
from: new Date('2020-01-01T00:00:00Z').getTime(),
to: new Date('2020-01-01T01:00:00Z').getTime(),
interval: '1m',

@ -22,7 +21,6 @@ const BASE_REQUEST: MetricsExplorerRequestBody = {

const BASE_METRICS_UI_OPTIONS: MetricsAPIRequest = {
timerange: {
field: '@timestamp',
from: new Date('2020-01-01T00:00:00Z').getTime(),
to: new Date('2020-01-01T01:00:00Z').getTime(),
interval: '1m',

@ -44,7 +44,6 @@ export const findIntervalForMetrics = async (
client,
{
indexPattern: options.indexPattern,
timestampField: options.timerange.field,
timerange: options.timerange,
},
modules.filter(Boolean) as string[]

@ -5,6 +5,7 @@
* 2.0.
*/

import { TIMESTAMP_FIELD } from '../../../../common/constants';
import { ESSearchClient } from '../../../lib/metrics/types';

interface EventDatasetHit {

@ -19,7 +20,7 @@ export const getDatasetForField = async (
client: ESSearchClient,
field: string,
indexPattern: string,
timerange: { field: string; to: number; from: number }
timerange: { to: number; from: number }
) => {
const params = {
allow_no_indices: true,

@ -33,7 +34,7 @@ export const getDatasetForField = async (
{ exists: { field } },
{
range: {
[timerange.field]: {
[TIMESTAMP_FIELD]: {
gte: timerange.from,
lte: timerange.to,
format: 'epoch_millis',

@ -45,7 +46,7 @@ export const getDatasetForField = async (
},
size: 1,
_source: ['event.dataset'],
sort: [{ [timerange.field]: { order: 'desc' } }],
sort: [{ [TIMESTAMP_FIELD]: { order: 'desc' } }],
},
};

@ -6,6 +6,7 @@
*/

import { isArray } from 'lodash';
import { TIMESTAMP_FIELD } from '../../../../common/constants';
import { MetricsAPIRequest } from '../../../../common/http_api';
import { ESSearchClient } from '../../../lib/metrics/types';

@ -26,7 +27,7 @@ export const queryTotalGroupings = async (
let filters: Array<Record<string, any>> = [
{
range: {
[options.timerange.field]: {
[TIMESTAMP_FIELD]: {
gte: options.timerange.from,
lte: options.timerange.to,
format: 'epoch_millis',
@ -7,6 +7,7 @@

import { MetricsSourceConfiguration } from '../../../../common/metrics_sources';
import { TopNodesRequest } from '../../../../common/http_api/overview_api';
import { TIMESTAMP_FIELD } from '../../../../common/constants';

export const createTopNodesQuery = (
options: TopNodesRequest,

@ -22,7 +23,7 @@ export const createTopNodesQuery = (
filter: [
{
range: {
[source.configuration.fields.timestamp]: {
[TIMESTAMP_FIELD]: {
gte: options.timerange.from,
lte: options.timerange.to,
},

@ -49,7 +50,7 @@ export const createTopNodesQuery = (
{ field: 'host.name' },
{ field: 'cloud.provider' },
],
sort: { '@timestamp': 'desc' },
sort: { [TIMESTAMP_FIELD]: 'desc' },
size: 1,
},
},

@ -42,10 +42,7 @@ export const applyMetadataToLastPath = (
if (firstMetaDoc && lastPath) {
// We will need the inventory fields so we can use the field paths to get
// the values from the metadata document
const inventoryFields = findInventoryFields(
snapshotRequest.nodeType,
source.configuration.fields
);
const inventoryFields = findInventoryFields(snapshotRequest.nodeType);
// Set the label as the name and fallback to the id OR path.value
lastPath.label = (firstMetaDoc[inventoryFields.name] ?? lastPath.value) as string;
// If the inventory fields contain an ip address, we need to try and set that

@ -25,7 +25,6 @@ const createInterval = async (client: ESSearchClient, options: InfraSnapshotRequ
client,
{
indexPattern: options.sourceConfiguration.metricAlias,
timestampField: options.sourceConfiguration.fields.timestamp,
timerange: { from: timerange.from, to: timerange.to },
},
modules,

@ -81,7 +80,6 @@ const aggregationsToModules = async (
async (field) =>
await getDatasetForField(client, field as string, options.sourceConfiguration.metricAlias, {
...options.timerange,
field: options.sourceConfiguration.fields.timestamp,
})
)
);

@ -16,7 +16,6 @@ import { LogQueryFields } from '../../../services/log_queries/get_log_query_fiel

export interface SourceOverrides {
indexPattern: string;
timestamp: string;
}

const transformAndQueryData = async ({
@ -47,12 +47,7 @@ const source: InfraSource = {
indexPatternId: 'kibana_index_pattern',
},
fields: {
container: 'container.id',
host: 'host.name',
message: ['message', '@message'],
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',
timestamp: '@timestamp',
},
inventoryDefaultView: '0',
metricsExplorerDefaultView: '0',

@ -80,7 +75,7 @@ const snapshotRequest: SnapshotRequest = {

const metricsApiRequest = {
indexPattern: 'metrics-*,metricbeat-*',
timerange: { field: '@timestamp', from: 1605705900000, to: 1605706200000, interval: '60s' },
timerange: { from: 1605705900000, to: 1605706200000, interval: '60s' },
metrics: [
{
id: 'cpu',

@ -5,6 +5,7 @@
* 2.0.
*/

import { TIMESTAMP_FIELD } from '../../../../common/constants';
import { findInventoryFields, findInventoryModel } from '../../../../common/inventory_models';
import { MetricsAPIRequest, SnapshotRequest } from '../../../../common/http_api';
import { ESSearchClient } from '../../../lib/metrics/types';

@ -37,7 +38,6 @@ export const transformRequestToMetricsAPIRequest = async ({
const metricsApiRequest: MetricsAPIRequest = {
indexPattern: sourceOverrides?.indexPattern ?? source.configuration.metricAlias,
timerange: {
field: sourceOverrides?.timestamp ?? source.configuration.fields.timestamp,
from: timeRangeWithIntervalApplied.from,
to: timeRangeWithIntervalApplied.to,
interval: timeRangeWithIntervalApplied.interval,

@ -69,10 +69,7 @@ export const transformRequestToMetricsAPIRequest = async ({
inventoryModel.nodeFilter?.forEach((f) => filters.push(f));
}

const inventoryFields = findInventoryFields(
snapshotRequest.nodeType,
source.configuration.fields
);
const inventoryFields = findInventoryFields(snapshotRequest.nodeType);
if (snapshotRequest.groupBy) {
const groupBy = snapshotRequest.groupBy.map((g) => g.field).filter(Boolean) as string[];
metricsApiRequest.groupBy = [...groupBy, inventoryFields.id];

@ -86,7 +83,7 @@ export const transformRequestToMetricsAPIRequest = async ({
size: 1,
metrics: [{ field: inventoryFields.name }],
sort: {
[source.configuration.fields.timestamp]: 'desc',
[TIMESTAMP_FIELD]: 'desc',
},
},
},
@ -289,12 +289,7 @@ const createSourceConfigurationMock = (): InfraSource => ({
},
],
fields: {
pod: 'POD_FIELD',
host: 'HOST_FIELD',
container: 'CONTAINER_FIELD',
message: ['MESSAGE_FIELD'],
timestamp: 'TIMESTAMP_FIELD',
tiebreaker: 'TIEBREAKER_FIELD',
},
anomalyThreshold: 20,
},

@ -244,12 +244,7 @@ const createSourceConfigurationMock = (): InfraSource => ({
metricsExplorerDefaultView: 'DEFAULT_VIEW',
logColumns: [],
fields: {
pod: 'POD_FIELD',
host: 'HOST_FIELD',
container: 'CONTAINER_FIELD',
message: ['MESSAGE_FIELD'],
timestamp: 'TIMESTAMP_FIELD',
tiebreaker: 'TIEBREAKER_FIELD',
},
anomalyThreshold: 20,
},

@ -12,7 +12,6 @@ import { KibanaFramework } from '../../lib/adapters/framework/kibana_framework_a

export interface LogQueryFields {
indexPattern: string;
timestamp: string;
}

export const createGetLogQueryFields = (sources: InfraSources, framework: KibanaFramework) => {

@ -29,7 +28,6 @@ export const createGetLogQueryFields = (sources: InfraSources, framework: Kibana

return {
indexPattern: resolvedLogSourceConfiguration.indices,
timestamp: resolvedLogSourceConfiguration.timestampField,
};
};
};

@ -5,6 +5,7 @@
* 2.0.
*/

import { TIMESTAMP_FIELD } from '../../common/constants';
import { findInventoryModel } from '../../common/inventory_models';
// import { KibanaFramework } from '../lib/adapters/framework/kibana_framework_adapter';
import { InventoryItemType } from '../../common/inventory_models/types';

@ -12,7 +13,6 @@ import { ESSearchClient } from '../lib/metrics/types';

interface Options {
indexPattern: string;
timestampField: string;
timerange: {
from: number;
to: number;

@ -44,7 +44,7 @@ export const calculateMetricInterval = async (
filter: [
{
range: {
[options.timestampField]: {
[TIMESTAMP_FIELD]: {
gte: from,
lte: options.timerange.to,
format: 'epoch_millis',
@ -42,13 +42,6 @@ export default function ({ getService }: FtrProviderContext) {
return resp.then((data) => {
expect(data).to.have.property('source');
expect(data?.source.configuration.metricAlias).to.equal('metrics-*,metricbeat-*');
expect(data?.source.configuration.fields).to.eql({
container: 'container.id',
host: 'host.name',
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',
timestamp: '@timestamp',
});
expect(data?.source).to.have.property('status');
expect(data?.source.status?.metricIndicesExist).to.equal(true);
});

@ -40,8 +40,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_name',
indexName: 'logs-*,filebeat-*,kibana_sample_data_logs*',
});
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.logColumns[0]).to.have.key('timestampColumn');
expect(configuration.logColumns[1]).to.have.key('fieldColumn');
expect(configuration.logColumns[2]).to.have.key('messageColumn');

@ -58,10 +56,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_pattern',
indexPatternId: 'kip-id',
},
fields: {
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
logColumns: [
{
messageColumn: {

@ -83,8 +77,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_pattern',
indexPatternId: 'kip-id',
});
expect(configuration.fields.timestamp).to.be('TIMESTAMP');
expect(configuration.fields.tiebreaker).to.be('TIEBREAKER');
expect(configuration.logColumns).to.have.length(1);
expect(configuration.logColumns[0]).to.have.key('messageColumn');

@ -111,8 +103,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_name',
indexName: 'logs-*,filebeat-*,kibana_sample_data_logs*',
});
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.logColumns).to.have.length(3);
expect(configuration.logColumns[0]).to.have.key('timestampColumn');
expect(configuration.logColumns[1]).to.have.key('fieldColumn');

@ -142,10 +132,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_pattern',
indexPatternId: 'kip-id',
},
fields: {
tiebreaker: 'TIEBREAKER',
timestamp: 'TIMESTAMP',
},
logColumns: [
{
messageColumn: {

@ -166,8 +152,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_pattern',
indexPatternId: 'kip-id',
});
expect(configuration.fields.timestamp).to.be('TIMESTAMP');
expect(configuration.fields.tiebreaker).to.be('TIEBREAKER');
expect(configuration.logColumns).to.have.length(1);
expect(configuration.logColumns[0]).to.have.key('messageColumn');
});

@ -189,8 +173,6 @@ export default function ({ getService }: FtrProviderContext) {
type: 'index_name',
indexName: 'logs-*,filebeat-*,kibana_sample_data_logs*',
});
expect(configuration.fields.timestamp).to.be('@timestamp');
expect(configuration.fields.tiebreaker).to.be('_doc');
expect(configuration.logColumns).to.have.length(3);
expect(configuration.logColumns[0]).to.have.key('timestampColumn');
expect(configuration.logColumns[1]).to.have.key('fieldColumn');
@ -54,11 +54,6 @@ export default function ({ getService }: FtrProviderContext) {
metricsExplorerDefaultView: 'default',
anomalyThreshold: 70,
fields: {
container: 'container.id',
host: 'host.name',
pod: 'kubernetes.od.uid',
tiebreaker: '_doc',
timestamp: '@timestamp',
message: ['message'],
},
logColumns: [

@ -37,11 +37,7 @@ export default function ({ getService }: FtrProviderContext) {
start: moment().subtract(25, 'minutes').valueOf(),
end: moment().valueOf(),
};
const searchBody = getElasticsearchMetricQuery(
getSearchParams(aggType),
'@timestamp',
timeframe
);
const searchBody = getElasticsearchMetricQuery(getSearchParams(aggType), timeframe);
const result = await client.search({
index,
// @ts-expect-error @elastic/elasticsearch AggregationsBucketsPath is not valid

@ -61,7 +57,6 @@ export default function ({ getService }: FtrProviderContext) {
};
const searchBody = getElasticsearchMetricQuery(
getSearchParams('avg'),
'@timestamp',
timeframe,
undefined,
'{"bool":{"should":[{"match_phrase":{"agent.hostname":"foo"}}],"minimum_should_match":1}}'

@ -85,7 +80,6 @@ export default function ({ getService }: FtrProviderContext) {
};
const searchBody = getElasticsearchMetricQuery(
getSearchParams(aggType),
'@timestamp',
timeframe,
'agent.id'
);

@ -106,7 +100,6 @@ export default function ({ getService }: FtrProviderContext) {
};
const searchBody = getElasticsearchMetricQuery(
getSearchParams('avg'),
'@timestamp',
timeframe,
'agent.id',
'{"bool":{"should":[{"match_phrase":{"agent.hostname":"foo"}}],"minimum_should_match":1}}'
@ -66,11 +66,6 @@ export default function ({ getService }: FtrProviderContext) {
expect(configuration?.name).to.be('UPDATED_NAME');
expect(configuration?.description).to.be('UPDATED_DESCRIPTION');
expect(configuration?.metricAlias).to.be('metricbeat-**');
expect(configuration?.fields.host).to.be('host.name');
expect(configuration?.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration?.fields.tiebreaker).to.be('_doc');
expect(configuration?.fields.timestamp).to.be('@timestamp');
expect(configuration?.fields.container).to.be('container.id');
expect(configuration?.anomalyThreshold).to.be(50);
expect(status?.metricIndicesExist).to.be(true);
});

@ -104,40 +99,6 @@ export default function ({ getService }: FtrProviderContext) {
expect(status?.metricIndicesExist).to.be(true);
});

it('applies a single nested field update to an existing source', async () => {
const creationResponse = await patchRequest({
name: 'NAME',
fields: {
host: 'HOST',
},
});

const initialVersion = creationResponse?.source.version;
const createdAt = creationResponse?.source.updatedAt;

expect(initialVersion).to.be.a('string');
expect(createdAt).to.be.greaterThan(0);

const updateResponse = await patchRequest({
fields: {
container: 'UPDATED_CONTAINER',
},
});

const version = updateResponse?.source.version;
const updatedAt = updateResponse?.source.updatedAt;
const configuration = updateResponse?.source.configuration;

expect(version).to.be.a('string');
expect(version).to.not.be(initialVersion);
expect(updatedAt).to.be.greaterThan(createdAt || 0);
expect(configuration?.fields.container).to.be('UPDATED_CONTAINER');
expect(configuration?.fields.host).to.be('HOST');
expect(configuration?.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration?.fields.tiebreaker).to.be('_doc');
expect(configuration?.fields.timestamp).to.be('@timestamp');
});

it('validates anomalyThreshold is between range 1-100', async () => {
// create config with bad request
await supertest